Example #1
        public void TestChessboardCalibration()
        {
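            // Detect the 9x6 inner chessboard corners, refine them to sub-pixel
            // accuracy, calibrate the camera against the known 3D corner layout,
            // and finally undistort the image with the recovered intrinsics.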
            Size patternSize = new Size(9, 6);

            Image <Gray, Byte> chessboardImage = EmguAssert.LoadImage <Gray, byte>("left01.jpg");

            Util.VectorOfPointF corners = new Util.VectorOfPointF();
            bool patternWasFound        = CvInvoke.FindChessboardCorners(chessboardImage, patternSize, corners);

            chessboardImage.FindCornerSubPix(
                new PointF[][] { corners.ToArray() },
                new Size(10, 10),
                new Size(-1, -1),
                new MCvTermCriteria(0.05));

            MCvPoint3D32f[]           objectPts = CalcChessboardCorners(patternSize, 1.0f);
            IntrinsicCameraParameters intrinsic = new IntrinsicCameraParameters(8);

            ExtrinsicCameraParameters[] extrinsic;
            double error = CameraCalibration.CalibrateCamera(new MCvPoint3D32f[][] { objectPts }, new PointF[][] { corners.ToArray() },
                                                             chessboardImage.Size, intrinsic, CvEnum.CalibType.Default, new MCvTermCriteria(30, 1.0e-10), out extrinsic);

            CvInvoke.DrawChessboardCorners(chessboardImage, patternSize, corners, patternWasFound);
            //CameraCalibration.DrawChessboardCorners(chessboardImage, patternSize, corners);
            Image <Gray, Byte> undistorted = intrinsic.Undistort(chessboardImage);
            //UI.ImageViewer.Show(undistorted, String.Format("Reprojection error: {0}", error));
        }
Example #2
        public void TestBOWKmeansTrainer()
        {
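            // Bag-of-words pipeline: extract SURF descriptors from the image,
            // cluster them into a 100-word vocabulary with k-means, then encode
            // the same image as a histogram of visual-word occurrences.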
            Image <Gray, byte> box    = EmguAssert.LoadImage <Gray, byte>("box.png");
            SURF             detector = new SURF(500);
            VectorOfKeyPoint kpts     = new VectorOfKeyPoint();
            Mat descriptors           = new Mat();

            detector.DetectAndCompute(box, null, kpts, descriptors, false);

            BOWKMeansTrainer trainer = new BOWKMeansTrainer(100, new MCvTermCriteria(), 3, CvEnum.KMeansInitType.PPCenters);

            trainer.Add(descriptors);
            Mat vocabulary = new Mat();

            trainer.Cluster(vocabulary);

            BFMatcher matcher = new BFMatcher(DistanceType.L2);

            BOWImgDescriptorExtractor extractor = new BOWImgDescriptorExtractor(detector, matcher);

            extractor.SetVocabulary(vocabulary);

            Mat descriptors2 = new Mat();

            extractor.Compute(box, kpts, descriptors2);
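
            // The BoW descriptor is a histogram over the vocabulary, so its
            // width should equal the cluster count (100). This check is an
            // illustrative addition, not part of the original test.
            EmguAssert.IsTrue(descriptors2.Cols == 100);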
        }
Example #3
        public void TestBOWKmeansTrainer2()
        {
            Image <Gray, byte> box    = EmguAssert.LoadImage <Gray, byte>("box.png");
            Brisk            detector = new Brisk(30, 3, 1.0f);
            VectorOfKeyPoint kpts     = new VectorOfKeyPoint();
            Mat descriptors           = new Mat();

            detector.DetectAndCompute(box, null, kpts, descriptors, false);
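            // BRISK produces binary (CV_8U) descriptors, but OpenCV's k-means
            // clustering only accepts CV_32F data, hence the conversion below.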
            Mat descriptorsF = new Mat();

            descriptors.ConvertTo(descriptorsF, CvEnum.DepthType.Cv32F);
            //Matrix<float> descriptorsF = descriptors.Convert<float>();
            BOWKMeansTrainer trainer = new BOWKMeansTrainer(100, new MCvTermCriteria(), 3, CvEnum.KMeansInitType.PPCenters);

            trainer.Add(descriptorsF);
            Mat vocabulary = new Mat();

            trainer.Cluster(vocabulary);

            BFMatcher matcher = new BFMatcher(DistanceType.L2);

            BOWImgDescriptorExtractor extractor = new BOWImgDescriptorExtractor(detector, matcher);
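            // Convert the float vocabulary back to bytes so it matches the
            // CV_8U descriptors that BRISK will compute inside the extractor.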
            Mat vocabularyByte = new Mat();

            vocabulary.ConvertTo(vocabularyByte, CvEnum.DepthType.Cv8U);
            extractor.SetVocabulary(vocabularyByte);

            Mat descriptors2 = new Mat();

            extractor.Compute(box, kpts, descriptors2);
        }
Example #4
        public void TestChessboardCalibration()
        {
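            // Same chessboard calibration as above, but written against the
            // Mat-based CvInvoke API: detect and refine corners, seed the camera
            // matrix with InitCameraMatrix2D, calibrate, recover the board pose
            // via RANSAC PnP, and undistort the result.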
            Size patternSize = new Size(9, 6);

            Image <Gray, Byte> chessboardImage = EmguAssert.LoadImage <Gray, byte>("left01.jpg");

            Util.VectorOfPointF corners = new Util.VectorOfPointF();
            bool patternWasFound        = CvInvoke.FindChessboardCorners(chessboardImage, patternSize, corners);

            chessboardImage.FindCornerSubPix(
                new PointF[][] { corners.ToArray() },
                new Size(10, 10),
                new Size(-1, -1),
                new MCvTermCriteria(0.05));

            MCvPoint3D32f[] objectPts = CalcChessboardCorners(patternSize, 1.0f);

            using (VectorOfVectorOfPoint3D32F ptsVec = new VectorOfVectorOfPoint3D32F(new MCvPoint3D32f[][] { objectPts }))
                using (VectorOfVectorOfPointF imgPtsVec = new VectorOfVectorOfPointF(corners))
                    using (Mat cameraMatrix = new Mat())
                        using (Mat distortionCoeff = new Mat())
                            using (VectorOfMat rotations = new VectorOfMat())
                                using (VectorOfMat translations = new VectorOfMat())
                                {
                                    Mat             calMat  = CvInvoke.InitCameraMatrix2D(ptsVec, imgPtsVec, chessboardImage.Size, 0);
                                    Matrix <double> calMatF = new Matrix <double>(calMat.Rows, calMat.Cols, calMat.NumberOfChannels);
                                    calMat.CopyTo(calMatF);
                                    double error = CvInvoke.CalibrateCamera(ptsVec, imgPtsVec, chessboardImage.Size, cameraMatrix,
                                                                            distortionCoeff,
                                                                            rotations, translations, CalibType.Default, new MCvTermCriteria(30, 1.0e-10));
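                                    // With the camera calibrated, estimate the board pose for this
                                    // single view using RANSAC-based PnP, which is robust to
                                    // outlier correspondences.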
                                    using (Mat rotation = new Mat())
                                        using (Mat translation = new Mat())
                                            using (VectorOfPoint3D32F vpObject = new VectorOfPoint3D32F(objectPts))
                                            {
                                                CvInvoke.SolvePnPRansac(
                                                    vpObject,
                                                    corners,
                                                    cameraMatrix,
                                                    distortionCoeff,
                                                    rotation,
                                                    translation,
                                                    true);
                                            }

                                    CvInvoke.DrawChessboardCorners(chessboardImage, patternSize, corners, patternWasFound);
                                    using (Mat undistorted = new Mat())
                                    {
                                        CvInvoke.Undistort(chessboardImage, undistorted, cameraMatrix, distortionCoeff);
                                        String title = String.Format("Reprojection error: {0}", error);
                                        //CvInvoke.NamedWindow(title);
                                        //CvInvoke.Imshow(title, undistorted);
                                        //CvInvoke.WaitKey();
                                        //UI.ImageViewer.Show(undistorted, String.Format("Reprojection error: {0}", error));
                                    }
                                }
        }
Example #5
        public void TestCalibration()
        {
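            // Minimal smoke test: detect the chessboard corners and copy them
            // out of the native vector into a managed array.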
            Size patternSize          = new Size(9, 6);
            Image <Gray, Byte> left01 = EmguAssert.LoadImage <Gray, byte>("left01.jpg");

            using (Util.VectorOfPointF vec = new Util.VectorOfPointF())
            {
                CvInvoke.FindChessboardCorners(left01, patternSize, vec);
                PointF[] corners = vec.ToArray();
            }
        }
Example #6
        public void TestCirclesGrid()
        {
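            // Detect a 4x3 symmetric circle grid using a blob detector;
            // DrawChessboardCorners renders circle-grid centers as well.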
            Size patternSize = new Size(4, 3);
            Image <Gray, Byte> circlesGridImage = EmguAssert.LoadImage <Gray, byte>("circlesGrid.bmp");

            using (SimpleBlobDetector detector = new SimpleBlobDetector())
                using (Util.VectorOfPointF centers = new Util.VectorOfPointF())
                {
                    bool found = CvInvoke.FindCirclesGrid(circlesGridImage, patternSize, centers, CvEnum.CalibCgType.SymmetricGrid | CvEnum.CalibCgType.Clustering, detector);
                    CvInvoke.DrawChessboardCorners(circlesGridImage, patternSize, centers, found);
                    //UI.ImageViewer.Show(circlesGridImage);
                }
        }
Example #7
        public void TestGridAdaptedFeatureDetectorRepeatedRun()
        {
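            // Repeatability check: running the grid-adapted detector twice on
            // the same image must yield the same number of keypoints.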
            Image <Gray, byte> box          = EmguAssert.LoadImage <Gray, byte>("box.png");
            SURFDetector       surfdetector = new SURFDetector(400);

            GridAdaptedFeatureDetector detector = new GridAdaptedFeatureDetector(surfdetector, 1000, 2, 2);
            VectorOfKeyPoint           kpts1    = new VectorOfKeyPoint();

            detector.DetectRaw(box, kpts1);
            VectorOfKeyPoint kpts2 = new VectorOfKeyPoint();

            detector.DetectRaw(box, kpts2);
            EmguAssert.IsTrue(kpts1.Size == kpts2.Size);
        }
Example #8
        public void TestClahe()
        {
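            // GPU CLAHE (contrast-limited adaptive histogram equalization) with
            // a clip limit of 40 and 8x8 tiles; skipped when no CUDA device is present.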
            if (CudaInvoke.HasCuda)
            {
                Image <Gray, Byte>     image      = EmguAssert.LoadImage <Gray, Byte>("pedestrian.png");
                CudaImage <Gray, Byte> cudaImage  = new CudaImage <Gray, byte>(image);
                CudaImage <Gray, Byte> cudaResult = new CudaImage <Gray, byte>(cudaImage.Size);

                using (CudaClahe clahe = new CudaClahe(40.0, new Size(8, 8)))
                {
                    Image <Gray, Byte> result = new Image <Gray, byte>(cudaResult.Size);
                    clahe.Apply(cudaImage, cudaResult, null);
                    cudaResult.Download(result);
                    //Emgu.CV.UI.ImageViewer.Show(image.ConcateHorizontal(result));
                }
            }
        }
Example #9
        public void TestChessboardCalibration()
        {
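            // Variant of the chessboard calibration test that exercises both the
            // legacy CameraCalibration helper and the newer CvInvoke.CalibrateCamera
            // overload on the same detected corners.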
            Size patternSize = new Size(9, 6);

            Image <Gray, Byte> chessboardImage = EmguAssert.LoadImage <Gray, byte>("left01.jpg");

            Util.VectorOfPointF corners = new Util.VectorOfPointF();
            bool patternWasFound        = CvInvoke.FindChessboardCorners(chessboardImage, patternSize, corners);

            chessboardImage.FindCornerSubPix(
                new PointF[][] { corners.ToArray() },
                new Size(10, 10),
                new Size(-1, -1),
                new MCvTermCriteria(0.05));

            MCvPoint3D32f[]           objectPts = CalcChessboardCorners(patternSize, 1.0f);
            IntrinsicCameraParameters intrinsic = new IntrinsicCameraParameters(8);

            ExtrinsicCameraParameters[] extrinsic;

            using (VectorOfVectorOfPoint3D32F ptsVec = new VectorOfVectorOfPoint3D32F(new MCvPoint3D32f[][] { objectPts }))
                using (VectorOfVectorOfPointF imgPtsVec = new VectorOfVectorOfPointF(corners))
                {
                    Mat             calMat  = CvInvoke.InitCameraMatrix2D(ptsVec, imgPtsVec, chessboardImage.Size, 0);
                    Matrix <double> calMatF = new Matrix <double>(calMat.Rows, calMat.Cols, calMat.NumberOfChannels);
                    calMat.CopyTo(calMatF);
                }

            double error = CameraCalibration.CalibrateCamera(new MCvPoint3D32f[][] { objectPts }, new PointF[][] { corners.ToArray() },
                                                             chessboardImage.Size, intrinsic, CvEnum.CalibType.Default, new MCvTermCriteria(30, 1.0e-10), out extrinsic);

            CvInvoke.DrawChessboardCorners(chessboardImage, patternSize, corners, patternWasFound);
            //CameraCalibration.DrawChessboardCorners(chessboardImage, patternSize, corners);
            Image <Gray, Byte> undistorted = intrinsic.Undistort(chessboardImage);

            //UI.ImageViewer.Show(undistorted, String.Format("Reprojection error: {0}", error));

            Mat[] rotationVectors, translationVectors;
            CvInvoke.CalibrateCamera(new MCvPoint3D32f[][] { objectPts }, new PointF[][] { corners.ToArray() },
                                     chessboardImage.Size, intrinsic.IntrinsicMatrix, intrinsic.DistortionCoeffs, CalibType.Default,
                                     new MCvTermCriteria(30, 1.0e-10),
                                     out rotationVectors, out translationVectors);
        }
Example #10
        public void TestDetectorColor()
        {
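            // Opponent-color descriptors are computed on each of the three
            // opponent color channels, so their length is 3x the base
            // descriptor size (verified for SIFT below).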
            Image <Bgr, byte>  box  = EmguAssert.LoadImage <Bgr, byte>("box.png");
            Image <Gray, byte> gray = box.Convert <Gray, Byte>();

            SURFDetector surf = new SURFDetector(400);
            OpponentColorDescriptorExtractor opponentSurf = new OpponentColorDescriptorExtractor(surf);
            SIFTDetector sift = new SIFTDetector();
            OpponentColorDescriptorExtractor opponentSift = new OpponentColorDescriptorExtractor(sift);

            //using (Util.VectorOfKeyPoint kpts = surf.DetectKeyPointsRaw(gray, null))
            using (Util.VectorOfKeyPoint kpts = new VectorOfKeyPoint())
            {
                sift.DetectRaw(gray, kpts);
                for (int i = 1; i < 2; i++)
                {
                    using (Mat surfDescriptors = new Mat())
                    {
                        opponentSurf.Compute(box, kpts, surfDescriptors);
                        //EmguAssert.IsTrue(surfDescriptors.Width == (surf.SURFParams.Extended == 0 ? 64 : 128) * 3);
                    }

                    //TODO: Find out why the following test fails
                    using (Mat siftDescriptors = new Mat())
                    {
                        sift.Compute(gray, kpts, siftDescriptors);
                        EmguAssert.IsTrue(siftDescriptors.Cols == sift.GetDescriptorSize());
                    }

                    int siftDescriptorSize = sift.GetDescriptorSize();
                    using (Mat siftDescriptors = new Mat())
                    {
                        opponentSift.Compute(box, kpts, siftDescriptors);
                        EmguAssert.IsTrue(siftDescriptors.Cols == siftDescriptorSize * 3);
                    }
                }
            }
        }
Example #11
        public void TestSURFDetector2()
        {
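            // Compare the cost of SURF detect-and-describe in one call against
            // keypoint detection alone, then draw the detected keypoints.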
            //Trace.WriteLine("Size of MCvSURFParams: " + Marshal.SizeOf(typeof(MCvSURFParams)));
            Image <Gray, byte> box = EmguAssert.LoadImage <Gray, byte>("box.png");
            SURF detector          = new SURF(400);

            Stopwatch        watch = Stopwatch.StartNew();
            VectorOfKeyPoint vp1   = new VectorOfKeyPoint();
            Mat descriptors1       = new Mat();

            detector.DetectAndCompute(box, null, vp1, descriptors1, false);
            watch.Stop();
            EmguAssert.WriteLine(String.Format("Time used: {0} milliseconds.", watch.ElapsedMilliseconds));

            watch.Reset();
            watch.Start();
            MKeyPoint[] keypoints = detector.Detect(box, null);
            //ImageFeature<float>[] features2 = detector.Compute(box, keypoints);
            watch.Stop();
            EmguAssert.WriteLine(String.Format("Time used: {0} milliseconds.", watch.ElapsedMilliseconds));

            watch.Reset();
            watch.Start();
            //MCvSURFParams p = detector.SURFParams;

            //SURFFeature[] features3 = box.ExtractSURF(ref p);
            //watch.Stop();
            //EmguAssert.WriteLine(String.Format("Time used: {0} milliseconds.", watch.ElapsedMilliseconds));

            // EmguAssert.IsTrue(features1.Length == features2.Length);
            //EmguAssert.IsTrue(features2.Length == features3.Length);

            PointF[] pts =
#if NETFX_CORE
                Extensions.
#else
                Array.
#endif
                ConvertAll <MKeyPoint, PointF>(keypoints, delegate(MKeyPoint mkp)
            {
                return mkp.Point;
            });
            //SURFFeature[] features = box.ExtractSURF(pts, null, ref detector);
            //int count = features.Length;

            /*
             * for (int i = 0; i < features1.Length; i++)
             * {
             * Assert.AreEqual(features1[i].KeyPoint.Point, features2[i].KeyPoint.Point);
             * float[] d1 = features1[i].Descriptor;
             * float[] d2 = features2[i].Descriptor;
             *
             * for (int j = 0; j < d1.Length; j++)
             *    Assert.AreEqual(d1[j], d2[j]);
             * }*/

            foreach (MKeyPoint kp in keypoints)
            {
                box.Draw(new CircleF(kp.Point, kp.Size), new Gray(255), 1);
            }
        }
Example #12
        public static bool TestFeature2DTracker(Feature2D keyPointDetector, Feature2D descriptorGenerator)
        {
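            // End-to-end feature matching: extract keypoints/descriptors from a
            // model image and an observed image, knn-match them, prune matches by
            // uniqueness, scale, and orientation, and fit a RANSAC homography to
            // locate the model in the scene.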
            //for (int k = 0; k < 1; k++)
            {
                Feature2D feature2D = null;
                if (keyPointDetector == descriptorGenerator)
                {
                    feature2D = keyPointDetector as Feature2D;
                }

                Mat modelImage = EmguAssert.LoadMat("box.png");
                //Image<Gray, Byte> modelImage = new Image<Gray, byte>("stop.jpg");
                //modelImage = modelImage.Resize(400, 400, true);

                //modelImage._EqualizeHist();

                #region extract features from the object image
                Stopwatch        stopwatch      = Stopwatch.StartNew();
                VectorOfKeyPoint modelKeypoints = new VectorOfKeyPoint();
                Mat modelDescriptors            = new Mat();
                if (feature2D != null)
                {
                    feature2D.DetectAndCompute(modelImage, null, modelKeypoints, modelDescriptors, false);
                }
                else
                {
                    keyPointDetector.DetectRaw(modelImage, modelKeypoints);
                    descriptorGenerator.Compute(modelImage, modelKeypoints, modelDescriptors);
                }
                stopwatch.Stop();
                EmguAssert.WriteLine(String.Format("Time to extract feature from model: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                #endregion

                //Image<Gray, Byte> observedImage = new Image<Gray, byte>("traffic.jpg");
                Image <Gray, Byte> observedImage = EmguAssert.LoadImage <Gray, byte>("box_in_scene.png");
                //Image<Gray, Byte> observedImage = modelImage.Rotate(45, new Gray(0.0));
                //image = image.Resize(400, 400, true);

                //observedImage._EqualizeHist();
                #region extract features from the observed image
                stopwatch.Reset();
                stopwatch.Start();
                VectorOfKeyPoint observedKeypoints = new VectorOfKeyPoint();
                using (Mat observedDescriptors = new Mat())
                {
                    if (feature2D != null)
                    {
                        feature2D.DetectAndCompute(observedImage, null, observedKeypoints, observedDescriptors, false);
                    }
                    else
                    {
                        keyPointDetector.DetectRaw(observedImage, observedKeypoints);
                        descriptorGenerator.Compute(observedImage, observedKeypoints, observedDescriptors);
                    }

                    stopwatch.Stop();
                    EmguAssert.WriteLine(String.Format("Time to extract feature from image: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                    #endregion

                    //Merge the object image and the observed image into one big image for display
                    Image <Gray, Byte> res = modelImage.ToImage <Gray, Byte>().ConcateVertical(observedImage);

                    Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
                    PointF[]  pts  = new PointF[] {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };

                    Mat homography = null;

                    stopwatch.Reset();
                    stopwatch.Start();

                    int          k  = 2;
                    DistanceType dt = modelDescriptors.Depth == CvEnum.DepthType.Cv8U ? DistanceType.Hamming : DistanceType.L2;
                    //using (Matrix<int> indices = new Matrix<int>(observedDescriptors.Rows, k))
                    //using (Matrix<float> dist = new Matrix<float>(observedDescriptors.Rows, k))
                    using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                        using (BFMatcher matcher = new BFMatcher(dt))
                        {
                            //ParamDef[] parameterDefs = matcher.GetParams();
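                            // k = 2 nearest neighbours feed the ratio test below:
                            // VoteForUniqueness keeps a match only when the best distance
                            // is at most 0.8 of the second-best match's distance.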
                            matcher.Add(modelDescriptors);
                            matcher.KnnMatch(observedDescriptors, matches, k, null);

                            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                            mask.SetTo(new MCvScalar(255));
                            //mask.SetValue(255);
                            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                            if (nonZeroCount >= 4)
                            {
                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeypoints, observedKeypoints, matches, mask, 1.5, 20);
                                if (nonZeroCount >= 4)
                                {
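                                    // A homography needs at least 4 correspondences; the final
                                    // argument (2) sets the RANSAC reprojection threshold in pixels.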
                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeypoints, observedKeypoints, matches, mask, 2);
                                }
                            }
                        }
                    stopwatch.Stop();
                    EmguAssert.WriteLine(String.Format("Time for feature matching: {0} milli-sec", stopwatch.ElapsedMilliseconds));

                    bool success = false;
                    if (homography != null)
                    {
                        PointF[] points = pts.Clone() as PointF[];
                        points = CvInvoke.PerspectiveTransform(points, homography);
                        //homography.ProjectPoints(points);

                        for (int i = 0; i < points.Length; i++)
                        {
                            points[i].Y += modelImage.Height;
                        }

                        res.DrawPolyline(
#if NETFX_CORE
                            Extensions.
#else
                            Array.
#endif
                            ConvertAll <PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);

                        success = true;
                    }
                    //Emgu.CV.UI.ImageViewer.Show(res);
                    return success;
                }



                /*
                 * stopwatch.Reset(); stopwatch.Start();
                 * //set the initial region to be the whole image
                 * using (Image<Gray, Single> priorMask = new Image<Gray, float>(observedImage.Size))
                 * {
                 * priorMask.SetValue(1.0);
                 * homography = tracker.CamShiftTrack(
                 *    observedFeatures,
                 *    (RectangleF)observedImage.ROI,
                 *    priorMask);
                 * }
                 * Trace.WriteLine(String.Format("Time for feature tracking: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                 *
                 * if (homography != null) //set the initial tracking window to be the whole image
                 * {
                 * PointF[] points = pts.Clone() as PointF[];
                 * homography.ProjectPoints(points);
                 *
                 * for (int i = 0; i < points.Length; i++)
                 *    points[i].Y += modelImage.Height;
                 * res.DrawPolyline(Array.ConvertAll<PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);
                 * return true;
                 * }
                 * else
                 * {
                 * return false;
                 * }*/
            }
        }