Example #1
        public void UpdateModel<T>(T model)
        {
            this.model = model as BriefModel;
            _brief     = new BriefDescriptorExtractor();
        }
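Note: "model as BriefModel" silently yields null when model is not a BriefModel, deferring the failure to the first use of this.model. A minimal defensive sketch (the guard clause is an assumption, not part of the original project):

        public void UpdateModel<T>(T model)
        {
            // Fail fast instead of storing null when the wrong model type is passed in.
            this.model = model as BriefModel
                ?? throw new ArgumentException($"Expected a BriefModel, got {typeof(T).Name}.");
            _brief = new BriefDescriptorExtractor();
        }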
Example #2
        private void Btn_findCorner_Click(object sender, EventArgs e)
        {
            var bitmap = this.picSrc.GetFirstRegionRect();
            var image  = new Image <Bgr, byte>(bitmap);

            //Mat mat_threshold = new Mat();
            //int myThreshold = 200;
            //CvInvoke.Threshold(image, mat_threshold, myThreshold, 255, Emgu.CV.CvEnum.ThresholdType.BinaryInv);
            //new CommonUse().SaveMat(mat_threshold, "binarized image before corner detection");
            //LineSegment2D
            BriefDescriptorExtractor extractor = new BriefDescriptorExtractor();
            FastDetector             detector  = new FastDetector((int)this.numericUpDown1.Value);
            var points = detector.Detect(image);


            //CvInvoke.DrawChessboardCorners(image, new Size(1, 1), points, true);
            for (int i = 0; i < points.Length; i++)
            {
                //if (points[i].Angle < 60)
                //{
                //    continue;
                //}
                var tmpPoint = new Point((int)points[i].Point.X, (int)points[i].Point.Y);
                CvInvoke.Circle(image, tmpPoint, 1, new MCvScalar(0, 0, 255));
            }

            this.picTarget.LoadImage(image.ToBitmap());
        }
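Emgu.CV also ships a one-call helper for rendering keypoints. A hedged alternative to the manual circle loop above, reusing the sample's image and points (the "annotated" Mat is illustrative):

            // Wrap the MKeyPoint[] in a vector and let Features2DToolbox draw them.
            using (VectorOfKeyPoint vkp = new VectorOfKeyPoint(points))
            using (Mat annotated = new Mat())
            {
                Features2DToolbox.DrawKeypoints(image, vkp, annotated,
                    new Bgr(0, 0, 255), Features2DToolbox.KeypointDrawType.Default);
                this.picTarget.LoadImage(annotated.ToImage<Bgr, byte>().ToBitmap());
            }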
Example #3
        private static void BForceMatcherSample()
        {
            var src1 = new Mat("data/match1.png");
            var src2 = new Mat("data/match2.png");

            var gray1 = new Mat();
            var gray2 = new Mat();

            Cv2.CvtColor(src1, gray1, ColorConversion.BgrToGray);
            Cv2.CvtColor(src2, gray2, ColorConversion.BgrToGray);

            var fast = new FastFeatureDetector(10);
            var descriptorExtractor = new BriefDescriptorExtractor(32);

            var descriptors1 = new Mat();
            var descriptors2 = new Mat();

            KeyPoint[] keypoints1 = fast.Run(gray1, null);
            descriptorExtractor.Compute(gray1, ref keypoints1, descriptors1);

            KeyPoint[] keypoints2 = fast.Run(gray2, null);
            descriptorExtractor.Compute(gray2, ref keypoints2, descriptors2);

            // Match descriptor vectors
            var bfMatcher = new BFMatcher(NormType.L2, false);

            DMatch[][] bfMatches = bfMatcher.KnnMatch(descriptors1, descriptors2, 3, null, false);

            var view = new Mat();

            Cv2.DrawMatches(src1, keypoints1, src2, keypoints2, bfMatches, view);
            Window.ShowImages(view);
        }
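BRIEF produces binary descriptors, so Hamming distance is the conventional metric; the NormType.L2 matcher above runs, but it treats the bit strings as real-valued vectors. A hedged helper that combines a Hamming matcher with Lowe's ratio test (the class name and the 0.75 threshold are assumptions):

        using System.Linq;
        using OpenCvSharp;

        static class BriefMatching
        {
            // Match binary descriptors with Hamming distance and keep only matches
            // that are clearly better than their runner-up (Lowe's ratio test).
            public static DMatch[] MatchWithRatioTest(Mat descriptors1, Mat descriptors2, float ratio = 0.75f)
            {
                using (var matcher = new BFMatcher(NormType.Hamming, crossCheck: false))
                {
                    DMatch[][] knn = matcher.KnnMatch(descriptors1, descriptors2, 2);
                    return knn.Where(m => m.Length >= 2 && m[0].Distance < ratio * m[1].Distance)
                              .Select(m => m[0])
                              .ToArray();
                }
            }
        }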
Example #4
        public void TestGFTTDetector()
        {
            GFTTDetector             keyPointDetector    = new GFTTDetector(1000, 0.01, 1, 3, false, 0.04);
            BriefDescriptorExtractor descriptorGenerator = new BriefDescriptorExtractor(32);

            TestFeature2DTracker(keyPointDetector, descriptorGenerator);
        }
Example #5
        public void TestMSER()
        {
            MSERDetector             keyPointDetector    = new MSERDetector();
            BriefDescriptorExtractor descriptorGenerator = new BriefDescriptorExtractor(32);

            //ParamDef[] parameters = keyPointDetector.GetParams();
            TestFeature2DTracker(keyPointDetector, descriptorGenerator);
        }
Example #6
        public void TestStar()
        {
            StarDetector keyPointDetector = new StarDetector();

            BriefDescriptorExtractor descriptorGenerator = new BriefDescriptorExtractor(32);

            TestFeature2DTracker(keyPointDetector, descriptorGenerator);
        }
Example #7
        public void TestFAST()
        {
            FastDetector fast = new FastDetector(10, true);
            //GridAdaptedFeatureDetector fastGrid = new GridAdaptedFeatureDetector(fast, 2000, 4, 4);
            BriefDescriptorExtractor brief = new BriefDescriptorExtractor(32);

            //ParamDef[] parameters = fastGrid.GetParams();
            EmguAssert.IsTrue(TestFeature2DTracker(fast, brief), "Unable to find homography matrix");
        }
Example #8
        public static int FindMatch2(Mat modelImage, TemplateContainer.ImageData template, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
        {
            Stopwatch watch;

            homography = null;
            BriefDescriptorExtractor descriptor = new BriefDescriptorExtractor();

            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();

            using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
            //using (UMat uObservedImage = template.image.Mat.GetUMat(AccessType.Read))
            {
                //extract features from the object image
                Mat modelDescriptors = new Mat();
                featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

                watch = Stopwatch.StartNew();

                // extract features from the observed image

                //featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                observedKeyPoints   = template.keyPointsSurf;
                observedDescriptors = template.descriptorSurf;
                // Bruteforce, slower but more accurate
                // You can use KDTree for faster matching with slight loss in accuracy
                using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
                    using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
                        using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                        {
                            matcher.Add(modelDescriptors);

                            matcher.KnnMatch(observedDescriptors, matches, k, null);
                            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                            mask.SetTo(new MCvScalar(255));
                            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

                            nonZeroCount = CvInvoke.CountNonZero(mask);
                            if (nonZeroCount >= 10)
                            {
                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                                                                                           matches, mask, 1.8, 18);
                                if (nonZeroCount >= 12)
                                {
                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                                                                                                          observedKeyPoints, matches, mask, 2);
                                }
                            }
                        }
                watch.Stop();
            }
            matchTime = watch.ElapsedMilliseconds;
            return(nonZeroCount);
        }
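FindMatch2 reads several members that are never declared in the snippet (featureDetector, observedDescriptors, k, uniquenessThreshold, nonZeroCount). A plausible set of surrounding fields, assumed for illustration rather than taken from the original project:

        // Assumed class-level state; the values are common defaults, not the project's actual settings.
        private static Feature2D featureDetector = new SURF(300); // pairs with template.keyPointsSurf/descriptorSurf
        private static Mat observedDescriptors;
        private static int k = 2;
        private static double uniquenessThreshold = 0.8;
        private static int nonZeroCount;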
Example #9
 public FastDrawer()
 {
     homography          = null;
     CPU                 = new FastDetector(45);
     matches             = new VectorOfVectorOfDMatch();
     descriptor          = new BriefDescriptorExtractor();
     k                   = 2;
     uniquenessThreshold = 0.85;
     modelDescriptors    = new Mat();
     observedDescriptors = new Mat();
     matcher             = new BFMatcher(DistanceType.L2);
     result              = new Mat();
 }
Example #10
 public void Clear()
 {
     k = 2;
     uniquenessThreshold = 0.8;
     hessianThresh       = 300;
     CPU                 = new SURF(hessianThresh);
     matcher             = new BFMatcher(DistanceType.L2);
     descriptor          = new BriefDescriptorExtractor();
     homography          = null;
     matches             = new VectorOfVectorOfDMatch();
     modelDescriptors    = new Mat();
     observedDescriptors = new Mat();
     result              = new Mat();
 }
Example #11
        static void Main(string[] args)
        {
            var img1 = new Mat(@"..\..\Images\left.png", ImreadModes.GrayScale);

            Cv2.ImShow("Left", img1);
            Cv2.WaitKey(1); // do events

            var img2 = new Mat(@"..\..\Images\right.png", ImreadModes.GrayScale);

            Cv2.ImShow("Right", img2);
            Cv2.WaitKey(1); // do events


            // detecting keypoints
            // FastFeatureDetector, StarDetector, SIFT, SURF, ORB, BRISK, MSER, GFTTDetector, DenseFeatureDetector, SimpleBlobDetector
            // SURF = Speeded Up Robust Features
            var detector   = SURF.Create(hessianThreshold: 400); //A good default value could be from 300 to 500, depending from the image contrast.
            var keypoints1 = detector.Detect(img1);
            var keypoints2 = detector.Detect(img2);

            // computing descriptors, BRIEF, FREAK
            // BRIEF = Binary Robust Independent Elementary Features
            var extractor    = BriefDescriptorExtractor.Create();
            var descriptors1 = new Mat();
            var descriptors2 = new Mat();

            extractor.Compute(img1, ref keypoints1, descriptors1);
            extractor.Compute(img2, ref keypoints2, descriptors2);

            // matching descriptors
            var matcher = new BFMatcher();
            var matches = matcher.Match(descriptors1, descriptors2);

            // drawing the results
            var imgMatches = new Mat();

            Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, matches, imgMatches);
            Cv2.ImShow("Matches", imgMatches);
            Cv2.WaitKey(1); // do events


            Cv2.WaitKey(0);

            Cv2.DestroyAllWindows();
            img1.Dispose();
            img2.Dispose();
        }
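The default BFMatcher() above uses L2 distance; for the binary BRIEF descriptors a hedged alternative is a Hamming matcher with cross-checking, which keeps only mutual nearest neighbours (a drop-in for the two matcher lines, names unchanged):

            // Cross-checking is a cheap substitute for the ratio test when using Match().
            var matcher = new BFMatcher(NormType.Hamming, crossCheck: true);
            var matches = matcher.Match(descriptors1, descriptors2);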
Example #12
        public void TestBruteForceHammingDistance()
        {
            if (CudaInvoke.HasCuda)
            {
                Image <Gray, byte>       box   = new Image <Gray, byte>("box.png");
                FastDetector             fast  = new FastDetector(100, true);
                BriefDescriptorExtractor brief = new BriefDescriptorExtractor(32);

                #region extract features from the object image
                Stopwatch        stopwatch      = Stopwatch.StartNew();
                VectorOfKeyPoint modelKeypoints = new VectorOfKeyPoint();
                fast.DetectRaw(box, modelKeypoints);
                Mat modelDescriptors = new Mat();
                brief.Compute(box, modelKeypoints, modelDescriptors);
                stopwatch.Stop();
                Trace.WriteLine(String.Format("Time to extract feature from model: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                #endregion

                Image <Gray, Byte> observedImage = new Image <Gray, byte>("box_in_scene.png");

                #region extract features from the observed image
                stopwatch.Reset(); stopwatch.Start();
                VectorOfKeyPoint observedKeypoints = new VectorOfKeyPoint();
                fast.DetectRaw(observedImage, observedKeypoints);
                Mat observedDescriptors = new Mat();
                brief.Compute(observedImage, observedKeypoints, observedDescriptors);
                stopwatch.Stop();
                Trace.WriteLine(String.Format("Time to extract feature from image: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                #endregion

                Mat homography = null;
                using (GpuMat <Byte> gpuModelDescriptors = new GpuMat <byte>(modelDescriptors)) //initialization of GPU code might take longer
                {
                    stopwatch.Reset(); stopwatch.Start();
                    CudaBFMatcher hammingMatcher = new CudaBFMatcher(DistanceType.Hamming);

                    //BFMatcher hammingMatcher = new BFMatcher(BFMatcher.DistanceType.Hamming, modelDescriptors);
                    int            k        = 2;
                    Matrix <int>   trainIdx = new Matrix <int>(observedKeypoints.Size, k);
                    Matrix <float> distance = new Matrix <float>(trainIdx.Size);

                    using (GpuMat <Byte> gpuObservedDescriptors = new GpuMat <byte>(observedDescriptors))
                        //using (GpuMat<int> gpuTrainIdx = new GpuMat<int>(trainIdx.Rows, trainIdx.Cols, 1, true))
                        //using (GpuMat<float> gpuDistance = new GpuMat<float>(distance.Rows, distance.Cols, 1, true))
                        using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                        {
                            Stopwatch w2 = Stopwatch.StartNew();
                            //hammingMatcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);
                            hammingMatcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k, null, true);
                            w2.Stop();
                            Trace.WriteLine(String.Format("Time for feature matching (excluding data transfer): {0} milli-sec",
                                                          w2.ElapsedMilliseconds));
                            //gpuTrainIdx.Download(trainIdx);
                            //gpuDistance.Download(distance);


                            Mat mask = new Mat(distance.Rows, 1, DepthType.Cv8U, 1);
                            mask.SetTo(new MCvScalar(255));
                            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                            if (nonZeroCount >= 4)
                            {
                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeypoints, observedKeypoints,
                                                                                           matches, mask, 1.5, 20);
                                if (nonZeroCount >= 4)
                                {
                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeypoints,
                                                                                                          observedKeypoints, matches, mask, 2);
                                }
                                nonZeroCount = CvInvoke.CountNonZero(mask);
                            }

                            stopwatch.Stop();
                            Trace.WriteLine(String.Format("Time for feature matching (including data transfer): {0} milli-sec",
                                                          stopwatch.ElapsedMilliseconds));
                        }
                }

                if (homography != null)
                {
                    Rectangle rect = box.ROI;
                    PointF[]  pts  = new PointF[] {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };

                    PointF[] points = CvInvoke.PerspectiveTransform(pts, homography);
                    //homography.ProjectPoints(points);

                    //Merge the object image and the observed image into one big image for display
                    Image <Gray, Byte> res = box.ConcateVertical(observedImage);

                    for (int i = 0; i < points.Length; i++)
                    {
                        points[i].Y += box.Height;
                    }
                    res.DrawPolyline(Array.ConvertAll <PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);
                    //ImageViewer.Show(res);
                }
            }
        }
Example #13
 // Use this for initialization
 public void Init(int threshold)
 {
     _detector   = new FastDetector(threshold, true, FastDetector.DetectorType.Type7_12);
     _descriptor = new BriefDescriptorExtractor();
     Homography  = Matrix4x4.identity;
 }
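FastDetector.DetectorType selects the Bresenham circle FAST samples around each candidate pixel (Type5_8, Type7_12, Type9_16); Type9_16 is the variant from the original FAST paper and OpenCV's default. A hedged variant of Init that exposes the choice (the extra parameter is an assumption):

 public void Init(int threshold, FastDetector.DetectorType type = FastDetector.DetectorType.Type9_16)
 {
     // Smaller circles such as Type7_12 are cheaper per pixel; Type9_16 is the standard choice.
     _detector   = new FastDetector(threshold, true, type);
     _descriptor = new BriefDescriptorExtractor();
     Homography  = Matrix4x4.identity;
 }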
Example #14
        public static Image <Bgr, Byte> FAST(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage)
        {
            bool isFound = false;

            long      matchTime;
            Stopwatch watch;

            HomographyMatrix homography = null;

            FastDetector     fastCPU = new FastDetector(10, true);
            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;

            BriefDescriptorExtractor descriptor = new BriefDescriptorExtractor();

            Matrix <byte> mask;
            int           k = 2;
            double        uniquenessThreshold = 0.8;

            watch = Stopwatch.StartNew();

            //extract features from the object image
            modelKeyPoints = fastCPU.DetectKeyPointsRaw(modelImage, null);
            Matrix <Byte> modelDescriptors = descriptor.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

            // extract features from the observed image
            observedKeyPoints = fastCPU.DetectKeyPointsRaw(observedImage, null);
            Matrix <Byte>            observedDescriptors = descriptor.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
            BruteForceMatcher <Byte> matcher             = new BruteForceMatcher <Byte>(DistanceType.L2);

            matcher.Add(modelDescriptors);

            indices = new Matrix <int>(observedDescriptors.Rows, k);
            using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
            {
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
                mask = new Matrix <byte>(dist.Rows, 1);
                mask.SetValue(255);
                Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
            }

            int nonZeroCount = CvInvoke.cvCountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(
                        modelKeyPoints, observedKeyPoints, indices, mask, 2);
                }
            }

            watch.Stop();

            //Draw the matched keypoints
            Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

            #region draw the projected region on the image
            if (homography != null)
            {  //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                if (CvInvoke.cvCountNonZero(mask) >= 10)
                {
                    isFound = true;
                }


                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.LightGreen), 5);
            }
            #endregion

            matchTime = watch.ElapsedMilliseconds;
            _richTextBox1.Clear();
            _richTextBox1.AppendText("objek ditemukan: " + isFound + "\n");
            _richTextBox1.AppendText("waktu pendeteksian FAST: " + matchTime + "ms\n");
            _richTextBox1.AppendText("fitur model yang terdeteksi: " + modelKeyPoints.Size + "\n");
            _richTextBox1.AppendText("match yang ditemukan: " + CvInvoke.cvCountNonZero(mask).ToString());

            return(result);
        }
Example #15
        public static Image <Bgr, byte> Draw(Image <Gray, byte> modelImage, Image <Gray, byte> observedImage)
        {
            Mat              homography     = null;
            FastDetector     fastCpu        = new FastDetector(10, true);
            VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint();
            VectorOfKeyPoint observedPoints = new VectorOfKeyPoint();

            BriefDescriptorExtractor descriptors = new BriefDescriptorExtractor();

            UMat modelDescriptors          = new UMat();
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
            Mat    mask;
            int    k = 2;
            double uniquenessThreshold = 0.8;

            try
            {
                // Extract features from the object image (fastCpu only detects keypoints;
                // the BRIEF extractor computes descriptors for them in a separate step).
                fastCpu.DetectRaw(modelImage, modelKeyPoints, null);
                descriptors.Compute(modelImage, modelKeyPoints, modelDescriptors);
            }
            catch (Exception e)
            {
                Console.Write("debug" + e.Message);
            }

            // Extract features from the observed image the same way.
            UMat observedDescriptors = new UMat();
            fastCpu.DetectRaw(observedImage, observedPoints, null);
            descriptors.Compute(observedImage, observedPoints, observedDescriptors);

            BFMatcher matcher = new BFMatcher(DistanceType.L2);

            matcher.Add(modelDescriptors);

            // Match the observed descriptors against the model and keep only unique matches.
            matcher.KnnMatch(observedDescriptors, matches, k, null);
            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
            int nonZeroCount = CvInvoke.CountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedPoints, matches, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedPoints, matches, mask, 2);
                }
            }
            Mat result = new Mat();

            //Draw the matched keypoints
            Features2DToolbox.DrawKeypoints(modelImage, modelKeyPoints, result, new Bgr(255, 255, 255), Features2DToolbox.KeypointDrawType.Default);

            #region draw the projected region on the image
            if (homography != null)
            {
                Rectangle rect = modelImage.ROI;
                //PointF differs from Point in that its coordinates are floats
                PointF[] pts = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                pts = CvInvoke.PerspectiveTransform(pts, homography);
                //convert an array of one type into an array of another type
                Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);

                using (VectorOfPoint vp = new VectorOfPoint(points))
                {
                    //draw one or more polygon curves
                    CvInvoke.Polylines(modelImage, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                }
            }
            #endregion

            return(result.ToImage <Bgr, byte>());
        }
Example #16
    public Image <Bgr, Byte> ObjectDetector(Image <Bgr, Byte> modelImage, string filepath)
    {
        Stopwatch                watch;
        HomographyMatrix         homography = null;
        SURFDetector             surfCPU    = new SURFDetector(500, false);
        FastDetector             fastCPU    = new FastDetector(10, true);
        VectorOfKeyPoint         modelKeyPoints;
        BriefDescriptorExtractor descriptor = new BriefDescriptorExtractor();
        Image <Gray, byte>       grayImage  = new Image <Gray, Byte>(filepath);

        modelKeyPoints = fastCPU.DetectKeyPointsRaw(grayImage, null);
        Matrix <byte> modelDescriptors = descriptor.ComputeDescriptorsRaw(grayImage, null, modelKeyPoints);

        Image <Bgr, Byte> result = Features2DToolbox.DrawKeypoints(grayImage, modelKeyPoints, new Bgr(0, 0, 255), Features2DToolbox.KeypointDrawType.DRAW_RICH_KEYPOINTS);

        result.Save("C:\\Users\\Sandeep\\Documents\\What_Are_Those\\Assets\\picture645.jpg");
        //Image<Bgr, Byte> result = modelImage;

        MKeyPoint[]   modelpoints = modelKeyPoints.ToArray();
        List <PointF> points      = new List <PointF>();
        //List<PointF> boundarypointsList = new List<PointF>();
        Dictionary <float, float> boundaryPoints           = new Dictionary <float, float>();
        Dictionary <float, float> boundaryPointshorizontal = new Dictionary <float, float>();
        Dictionary <float, float> boundaryPointsModified   = new Dictionary <float, float>();
        Dictionary <float, float> boundaryPointsRed        = new Dictionary <float, float>();

        for (int i = 0; i < modelpoints.Length; i++)
        {
            points.Add(modelpoints[i].Point);
            //print("X is " + points.ToArray()[i].X + "Y is " + points.ToArray()[i].Y);
        }
        points.Sort((a, b) => a.X.CompareTo(b.X));
        float x = points.ToArray()[0].X;
        float y = points.ToArray()[0].Y;
        float nextx, nexty;
        float miny = grayImage.Height;
        float maxx = grayImage.Width;

        for (int i = 0; i < points.ToArray().Length - 1; i++)
        {
            x     = points.ToArray()[i].X;
            y     = points.ToArray()[i].Y;
            nextx = points.ToArray()[i + 1].X;
            nexty = points.ToArray()[i + 1].Y;
            if (x == nextx)
            {
                miny = Mathf.Min(y, nexty);
            }
            else
            {
                boundaryPoints.Add(x, miny);

                //boundarypointsList.Add(new PointF(x, miny));
            }
            //print("X is " + points.ToArray()[i].X + " Y is " + points.ToArray()[i].Y);
        }
        int lastindex = points.ToArray().Length - 1;

        if (x != points.ToArray()[lastindex].X)
        {
            PointF lastpoint = points.ToArray()[lastindex];
            boundaryPoints.Add(lastpoint.X, lastpoint.Y);
        }
        points.Sort((a, b) => a.Y.CompareTo(b.Y));
        for (int i = 0; i < points.ToArray().Length - 1; i++)
        {
            x     = points.ToArray()[i].X;
            y     = points.ToArray()[i].Y;
            nextx = points.ToArray()[i + 1].X;
            nexty = points.ToArray()[i + 1].Y;
            if (y == nexty)
            {
                maxx = Mathf.Max(x, nextx);
            }
            else
            {
                boundaryPointshorizontal.Add(y, maxx);

                //boundarypointsList.Add(new PointF(x, miny));
            }
            //print("X is " + points.ToArray()[i].X + " Y is " + points.ToArray()[i].Y);
        }
        lastindex = points.ToArray().Length - 1;
        if (y != points.ToArray()[lastindex].Y)
        {
            PointF lastpoint = points.ToArray()[lastindex];
            boundaryPointshorizontal.Add(lastpoint.X, lastpoint.Y);
        }
        var min  = boundaryPoints.ElementAt(0);
        var max  = boundaryPoints.ElementAt(0);
        var hmax = boundaryPoints.ElementAt(0);

        for (int i = 0; i < boundaryPoints.Count; i++)
        {
            var   item      = boundaryPoints.ElementAt(i);
            float itemKey   = item.Key;
            float itemValue = item.Value;
            if (itemValue < min.Value)
            {
                min = item;
            }
            if (itemValue > max.Value || max.Value == result.Rows)
            {
                max = item;
            }
            //print("X is " + itemKey + " Y is " + itemValue);
        }
        for (int i = 0; i < boundaryPointshorizontal.Count; i++)
        {
            var   item      = boundaryPointshorizontal.ElementAt(i);
            float itemKey   = item.Key;
            float itemValue = item.Value;
            if (itemValue < min.Value)
            {
                min = item;
            }
            if (itemValue > hmax.Value || hmax.Value == result.Cols)
            {
                hmax = item;
            }
            // print("horizontal Y is " + itemKey + " horizontal X is " + itemValue);
        }
        //print("MIN is " + min.Key + " " + min.Value);
        //print("MAX is " + max.Key + " " + max.Value);
        //print("HMAX is " + hmax.Key + " " + hmax.Value);

        float prev = boundaryPoints.ElementAt(0).Value;
        int   mid  = 0;

        for (int i = 0; i < boundaryPoints.ElementAt(0).Key; i++)
        {
            boundaryPointsModified[(float)i] = boundaryPoints.ElementAt(0).Value;
        }
        for (int i = 0; i < boundaryPoints.Count && boundaryPoints.ElementAt(i).Key != boundaryPointshorizontal.ElementAt(1).Value; i++)
        {
            var   item      = boundaryPoints.ElementAt(i);
            float itemKey   = item.Key;
            float itemValue = item.Value;

            //print("itemKey "+itemKey+ " itemValue " + itemValue + " prev " + prev);

            if (itemValue > prev)
            {
                boundaryPointsModified[itemKey] = prev;
            }
            else if ((prev - itemValue < 80 && prev != result.Rows) || (prev == result.Rows && prev - itemValue > 0))
            {
                boundaryPointsModified[itemKey] = itemValue;
                prev = itemValue;
            }
            else
            {
                boundaryPointsModified[itemKey] = prev;
            }
            mid = i;
        }
        for (int i = mid + 1; i < boundaryPoints.Count; i++)
        {
            var   item      = boundaryPoints.ElementAt(i);
            float itemKey   = item.Key;
            float itemValue = item.Value;
            boundaryPointsModified[itemKey] = 0;
        }
        for (int i = 0; i < boundaryPointsModified.Count - 1; i++)
        {
            var item      = boundaryPointsModified.ElementAt(i);
            var itemKey   = item.Key;
            var itemValue = item.Value;

            //print("X modified is " + itemKey + " Y modified is " + itemValue);
        }



        byte[,,] data       = result.Data;
        byte[,,] data_model = modelImage.Data;

        int xstop = (int)boundaryPointsModified.ElementAt(0).Key;
        int ystop = (int)boundaryPointsModified.ElementAt(2).Value;



        /*     print("xstop is " + xstop + " ystop is "+ystop);
         *    for (int i = 0; i <= xstop; i++)
         *    {
         *        for (int j = 0; j <= ystop; j++)
         *        {
         *        data_model[j, i, 0] = 255;
         *        data_model[j, i, 1] = 255;
         *        data_model[j, i, 2] = 255;
         *        }
         *    }
         *    modelImage.Data = data_model; */



        for (int run = 19; run >= 0; run--)
        {
            for (int i = 0; i <= modelImage.Cols - 1; i++)
            {
                for (int j = 0; j <= modelImage.Rows - 1; j++)
                {
                    if (boundaryPoints.ContainsKey((float)i))
                    {
                        float stoppingPoint = boundaryPointsModified[(float)i];
                        //print("Stoppping Point is " + stoppingPoint);
                        if ((float)j <= stoppingPoint)
                        {
                            //print("j is "+j+" i is "+i+" red "+result[j, i].Red);
                            data_model[j, i, 0] = 246;
                            data_model[j, i, 1] = 246;
                            data_model[j, i, 2] = 246;
                        }

                        /*    else if (i == 600 || i == 612){
                         *      data[j, i, 0] = 255;
                         *      data[j, i, 1] = 0;
                         *      data[j, i, 2] = 0;
                         *  } */
                    }
                    else
                    {
                        float stoppingPoint = 0;
                        //print(" i is " + i);
                        if (i < boundaryPointsModified.Count)
                        {
                            stoppingPoint = boundaryPointsModified.ElementAt(i).Value;
                        }
                        //print("Stoppping Point is " + stoppingPoint);

                        if ((float)j <= stoppingPoint)
                        {
                            //print("j is "+j+" i is "+i+" red "+result[j, i].Red);
                            data_model[j, i, 0] = 246;
                            data_model[j, i, 1] = 246;
                            data_model[j, i, 2] = 246;
                        }
                    }
                }
            }
            modelImage.Data = data_model;
        }

        //  for (int run = 19; run >= 0; run--)
        //  {

        if (min.Key < mid)
        {
            mid = (int)min.Value;
        }

        //print("mid is " + mid);
        for (int i = result.Cols - 1; i >= mid; i--)
        {
            for (int j = 0; j <= result.Rows - 1; j++)
            {
                //      if (boundaryPointshorizontal.ContainsKey((float)i))
                //      {
                //float startingPoint = boundaryPointshorizontal[(float)i];
                // print("Stoppping Point is " + stoppingPoint);
                /*startingPoint <= j */

                /*            if (data[j, i, 2] < 180)
                 *          {
                 *              data[j, i, 0] = 255;
                 *              data[j, i, 1] = 255;
                 *              data[j, i, 2] = 255;
                 *
                 *          }
                 *          else
                 *          {
                 *          break;
                 *          } */

                if (data[j, i, 2] >= 240)
                {
                    boundaryPointsRed.Add(i, j);
                    //print("i is " + i + " j is " + j);
                    break;
                }


                //             }
            }
        }
        //result.Data = data;
        //     }

        int maxredx = 0;
        int maxredy = 0;

        for (int run = 19; run >= 0; run--)
        {
            for (int i = result.Cols - 1; i >= mid; i--)
            {
                for (int j = 0; j <= result.Rows - 1; j++)
                {
                    if (boundaryPointsRed.ContainsKey(i))
                    {
                        if (i > maxredx)
                        {
                            maxredx = i;
                        }
                        if (j > maxredy)
                        {
                            maxredy = j;
                        }
                        float stoppingPoint = boundaryPointsRed[i];

                        if ((float)j <= stoppingPoint /* && i != 600 && i != 612 */)
                        {
                            //print("j is "+j+" i is "+i+" red "+result[j, i].Red);
                            data_model[j, i, 0] = 246;
                            data_model[j, i, 1] = 246;
                            data_model[j, i, 2] = 246;
                        }
                    }
                }
            }
            modelImage.Data = data_model;
        }

        for (int run = 19; run >= 0; run--)
        {
            for (int i = maxredy; i >= 0; i--)
            {
                for (int j = result.Cols - 1; j >= maxredx; j--)
                {
                    data_model[i, j, 0] = 246;
                    data_model[i, j, 1] = 246;
                    data_model[i, j, 2] = 246;
                }
            }
            modelImage.Data = data_model;
        }


        for (int run = 19; run >= 0; run--)
        {
            for (int i = result.Rows - 1; i >= max.Value; i--)
            {
                for (int j = 0; j <= result.Cols - 1; j++)
                {
                    data_model[i, j, 0] = 246;
                    data_model[i, j, 1] = 246;
                    data_model[i, j, 2] = 246;
                }
            }
            modelImage.Data = data_model;
        }

        for (int run = 19; run >= 0; run--)
        {
            for (int i = result.Cols - 1; i >= hmax.Value; i--)
            {
                for (int j = 0; j <= result.Rows - 1; j++)
                {
                    data_model[j, i, 0] = 246;
                    data_model[j, i, 1] = 246;
                    data_model[j, i, 2] = 246;
                }
            }
            modelImage.Data = data_model;
        }



        return(modelImage);
    }
Example #17
    public Image <Bgr, Byte> Drawtwo(Image <Gray, Byte> modelImage, Image <Gray, byte> observedImage)
    {
        HomographyMatrix homography = null;

        FastDetector     fastCPU = new FastDetector(10, true);
        VectorOfKeyPoint modelKeyPoints;
        VectorOfKeyPoint observedKeyPoints;
        Matrix <int>     indices;

        BriefDescriptorExtractor descriptor = new BriefDescriptorExtractor();

        Matrix <byte> mask;
        int           k = 2;
        double        uniquenessThreshold = 0.8;

        //extract features from the object image
        modelKeyPoints = fastCPU.DetectKeyPointsRaw(modelImage, null);
        Matrix <Byte> modelDescriptors = descriptor.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

        // extract features from the observed image
        observedKeyPoints = fastCPU.DetectKeyPointsRaw(observedImage, null);
        Matrix <Byte>            observedDescriptors = descriptor.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
        BruteForceMatcher <Byte> matcher             = new BruteForceMatcher <Byte>(DistanceType.L2);

        matcher.Add(modelDescriptors);

        indices = new Matrix <int>(observedDescriptors.Rows, k);
        using (Matrix <float> dist = new Matrix <float>(observedDescriptors.Rows, k))
        {
            matcher.KnnMatch(observedDescriptors, indices, dist, k, null);
            mask = new Matrix <byte>(dist.Rows, 1);
            mask.SetValue(255);
            Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
        }

        nonZeroCount = CvInvoke.cvCountNonZero(mask);
        //print("nonZeroCount is "+nonZeroCount);
        if (nonZeroCount >= 4)
        {
            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
            if (nonZeroCount >= 4)
            {
                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(
                    modelKeyPoints, observedKeyPoints, indices, mask, 2);
            }
        }

        //Draw the matched keypoints
        Image <Bgr, Byte> result = Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                 indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);

        #region draw the projected region on the image
        if (homography != null)
        {  //draw a rectangle along the projected model
            Rectangle rect = modelImage.ROI;
            PointF[]  pts  = new PointF[] {
                new PointF(rect.Left, rect.Bottom),
                new PointF(rect.Right, rect.Bottom),
                new PointF(rect.Right, rect.Top),
                new PointF(rect.Left, rect.Top)
            };
            homography.ProjectPoints(pts);
            //area = Math.Abs((rect.Top - rect.Bottom) * (rect.Right - rect.Left));
            result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(System.Drawing.Color.Red), 5);
        }
        #endregion



        return(result);
    }
Example #18
        public static void FindMatches(Mat modelImage, Mat observedImage, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography, MatchingTechnique matchingTechnique, float keyPointFilter = 1, double detectorParameter = -1)
        {
            int    k = 2;
            double uniquenessThreshold = 0.8;

            homography        = null;
            modelKeyPoints    = new VectorOfKeyPoint();
            observedKeyPoints = new VectorOfKeyPoint();


            Feature2D    detector;
            Feature2D    descriptor;
            DistanceType distanceType;

            if (matchingTechnique == MatchingTechnique.FAST)
            {
                if (detectorParameter <= 0)
                {
                    detectorParameter = 20;
                }

                detector     = new FastDetector((int)detectorParameter);
                descriptor   = new BriefDescriptorExtractor();
                distanceType = DistanceType.Hamming;
            }
            else if (matchingTechnique == MatchingTechnique.ORB)
            {
                if (detectorParameter <= 0)
                {
                    detectorParameter = 100000;
                }

                detector     = new ORBDetector((int)detectorParameter);
                descriptor   = detector;
                distanceType = DistanceType.Hamming;
            }
            else if (matchingTechnique == MatchingTechnique.SURF)
            {
                if (detectorParameter <= 0)
                {
                    detectorParameter = 300;
                }

                detector     = new SURF(detectorParameter);
                descriptor   = detector;
                distanceType = DistanceType.L2;
            }
            else
            {
                throw new NotImplementedException($"{matchingTechnique} not supported.");
            }

            // Extract features from model image.
            UMat modelDescriptors = new UMat();

            detector.DetectRaw(modelImage, modelKeyPoints, null);
            Console.WriteLine($"modelKeyPoints: {modelKeyPoints.Size}");
            if (keyPointFilter < 2)
            {
                modelKeyPoints = GetBestKeypointsPercent(modelKeyPoints, keyPointFilter);
            }
            else
            {
                modelKeyPoints = GetBestKeypointsCount(modelKeyPoints, (int)keyPointFilter);
            }
            descriptor.Compute(modelImage, modelKeyPoints, modelDescriptors);

            // Extract features from observed image.
            UMat observedDescriptors = new UMat();

            detector.DetectRaw(observedImage, observedKeyPoints, null);
            Console.WriteLine($"observedKeyPoints: {observedKeyPoints.Size}");
            if (keyPointFilter < 2)
            {
                observedKeyPoints = GetBestKeypointsPercent(observedKeyPoints, keyPointFilter);
            }
            else
            {
                observedKeyPoints = GetBestKeypointsCount(observedKeyPoints, (int)keyPointFilter);
            }
            descriptor.Compute(observedImage, observedKeyPoints, observedDescriptors);

            // Match keypoints.
            BFMatcher matcher = new BFMatcher(distanceType);

            matcher.Add(modelDescriptors);
            matcher.KnnMatch(observedDescriptors, matches, k, null);

            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            int nonZeroCount = CvInvoke.CountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                }
            }
        }
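A hedged call-site sketch for FindMatches (the file names are placeholders; MatchingTechnique and the GetBestKeypoints* helpers belong to the same class and are not shown here):

            Mat model = CvInvoke.Imread("model.png", Emgu.CV.CvEnum.ImreadModes.Grayscale);
            Mat scene = CvInvoke.Imread("scene.png", Emgu.CV.CvEnum.ImreadModes.Grayscale);
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();

            FindMatches(model, scene, out VectorOfKeyPoint modelKp, out VectorOfKeyPoint sceneKp,
                        matches, out Mat mask, out Mat homography, MatchingTechnique.FAST);

            // A non-null homography means enough consistent matches survived both voting steps.
            if (homography != null)
                Console.WriteLine("Model located in the scene.");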