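/*
 * Example: Delaunay triangulation of Harris corners.
 *
 * Reads frames from an input video, detects Harris corners on each frame, computes the
 * Delaunay triangulation of the detected points using OpenCV's planar subdivision
 * (CvSubdiv2D) functions, and displays the resulting edges over the image in a canvas.
 *
 * Requires QVision to be compiled with OpenCV compatibility enabled.
 */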
#include <stdio.h>
#include <stdlib.h>
#include <iostream>
#include <math.h>
#include <limits>

#include <QDebug>
#include <QThread>

#include <QVApplication>
#include <QVMPlayerReaderBlock>
#include <QVDefaultGUI>
#include <QVImageCanvas>
#include <QVPolylineF>

#include <QVHarrisPointDetector>
#include <qvio.h>
#include <qvip.h>

#ifndef DOXYGEN_IGNORE_THIS

#ifdef OPENCV
#include <opencv/cv.h>

// Computes the Delaunay triangulation of a set of 2D points using OpenCV's planar
// subdivision (CvSubdiv2D) functions. The edges of the triangulation are returned as a
// multi-valued hash mapping each edge origin to its destination point.
QHash<QPointF, QPointF> openCVDelaunayTriangulation(const QList<QPointF> &selectedPoints)
{
    QHash<QPointF, QPointF> correspondences;

    // Nothing to triangulate; this also avoids initializing the subdivision below with a
    // degenerate bounding rectangle.
    if (selectedPoints.isEmpty())
        return correspondences;

    // Create the storage and the empty subdivision structure.
    CvMemStorage* storage = cvCreateMemStorage(0);

    CvSubdiv2D* subdiv = cvCreateSubdiv2D(CV_SEQ_KIND_SUBDIV2D,
                                          sizeof(*subdiv),
                                          sizeof(CvSubdiv2DPoint),
                                          sizeof(CvQuadEdge2D),
                                          storage);

    // Compute the bounding box of the input points. The maxima are initialized to the most
    // negative representable value (numeric_limits::min() would be the smallest positive
    // double, which is not a valid lower bound).
    double minX = std::numeric_limits<double>::max(),
           minY = std::numeric_limits<double>::max(),
           maxX = -std::numeric_limits<double>::max(),
           maxY = -std::numeric_limits<double>::max();

    foreach(QPointF point, selectedPoints)
    {
        minX = MIN(minX, point.x());
        minY = MIN(minY, point.y());
        maxX = MAX(maxX, point.x());
        maxY = MAX(maxY, point.y());
    }

    // Initialize the subdivision over an integer rectangle containing all the points
    // (CvRect takes x, y, width and height), then insert the points themselves.
    cvInitSubdivDelaunay2D(subdiv, cvRect((int)floor(minX), (int)floor(minY),
                                          (int)ceil(maxX) - (int)floor(minX) + 1,
                                          (int)ceil(maxY) - (int)floor(minY) + 1));

    foreach(QPointF point, selectedPoints)
        cvSubdivDelaunay2DInsert(subdiv, cvPoint2D32f(point.x(), point.y()));

    // Traverse the edges of the subdivision. Edges involving the virtual points that OpenCV
    // adds outside the bounding rectangle are discarded, so only edges joining two of the
    // input points are kept.
    CvSeqReader reader;
    cvStartReadSeq((CvSeq *)subdiv->edges, &reader);

    for(int i = 0; i < subdiv->edges->total; i++)
    {
        CvQuadEdge2D *edge = (CvQuadEdge2D *) reader.ptr;
        if(CV_IS_SET_ELEM(edge))
        {
            CvSubdiv2DPoint *org = cvSubdiv2DEdgeOrg((CvSubdiv2DEdge) edge);
            CvSubdiv2DPoint *dst = cvSubdiv2DEdgeDst((CvSubdiv2DEdge) edge);
            if (org && dst
                && ((org->flags & CV_SUBDIV2D_VIRTUAL_POINT_FLAG) == 0)
                && ((dst->flags & CV_SUBDIV2D_VIRTUAL_POINT_FLAG) == 0)
                && (org->pt.x >= minX) && (org->pt.x <= maxX) && (org->pt.y >= minY) && (org->pt.y <= maxY)
                && (dst->pt.x >= minX) && (dst->pt.x <= maxX) && (dst->pt.y >= minY) && (dst->pt.y <= maxY))
                correspondences.insertMulti(QPointF(org->pt.x, org->pt.y), QPointF(dst->pt.x, dst->pt.y));
        }
        CV_NEXT_SEQ_ELEM(subdiv->edges->elem_size, reader);
    }

    // Release the OpenCV storage holding the subdivision.
    cvReleaseMemStorage(&storage);

    return correspondences;
}

// Processing block that reads a list of feature locations, computes their Delaunay
// triangulation, and publishes the resulting edges as a list of two-point polylines.
class DelaunayBlock: public QVProcessingBlock
{
private:
    // Converts the hash of edge correspondences into a list of two-point polylines,
    // suitable for display on a QVImageCanvas.
    QList<QVPolylineF> map2polyline(const QHash<QPointF, QPointF> &map)
    {
        QList<QVPolylineF> polylines;

        QHashIterator<QPointF, QPointF> iterator(map);
        while (iterator.hasNext())
        {
            iterator.next();
            QVPolylineF polyline;
            polyline.append(iterator.key());
            polyline.append(iterator.value());
            polylines.append(polyline);
        }

        return polylines;
    }

public:
    DelaunayBlock(const QString &name): QVProcessingBlock(name)
    {
        addProperty< QList<QPointF> >("Feature locations", inputFlag|outputFlag);
        addProperty< QList<QVPolylineF> >("Delaunay edges", outputFlag);
    }

    void iterate()
    {
        // Read the input feature locations.
        const QList<QPointF> selectedPoints = getPropertyValue< QList<QPointF> >("Feature locations");
        timeFlag("Read data");

        // Triangulate them with OpenCV.
        const QHash<QPointF, QPointF> correspondences = openCVDelaunayTriangulation(selectedPoints);
        timeFlag("Generated edges");

        // Convert the edges to polylines and publish them on the output property.
        const QList<QVPolylineF> delaunayEdges = map2polyline(correspondences);
        timeFlag("From correspondences to polylines");

        setPropertyValue< QList<QVPolylineF> >("Delaunay edges", delaunayEdges);
    }
};

#include <QVYUV4MPEG2WriterBlock>
#include <QVNumericPlot>

int main(int argc, char *argv[])
{
    QVApplication app(argc, argv,
        "Example program for QVision library. Computes the Delaunay triangulation of the "
        "Harris corners detected on the frames of an input video."
        );

    // Video input block.
    QVMPlayerReaderBlock camera("Video");

    // Harris corner detector, fed with the camera frames.
    QVHarrisPointDetector pointDetector("Harris corners detector");
    camera.linkProperty(&pointDetector, "Input image");

    // Delaunay triangulation block, fed with the detected corners.
    DelaunayBlock delaunayBlock("Delaunay");
    pointDetector.linkProperty("Feature locations", delaunayBlock, QVProcessingBlock::SynchronousLink);

    // Canvas displaying the input image, the detected corners and the Delaunay edges.
    QVImageCanvas imageCanvas("Delaunay regions");
    pointDetector.linkProperty("Input image", imageCanvas);
    delaunayBlock.linkProperty("Feature locations", imageCanvas);
    delaunayBlock.linkProperty("Delaunay edges", imageCanvas);
    imageCanvas.setColor("Delaunay edges", Qt::red);

    QVDefaultGUI interface;

    return app.exec();
}
#else
int main(int argc, char *argv[])
{
    std::cout << "ERROR: OpenCV compatibility was not activated in QVision compilation." << std::endl;
    return -1;
}
#endif // OPENCV
#endif // DOXYGEN_IGNORE_THIS