Code Example #1
        public static UMat Run(Mat img)
        {
            var modelKeyPoints = new VectorOfKeyPoint();
            var result         = new UMat();

            using (UMat uModelImage = img.ToUMat(AccessType.Read))
            using (FastDetector fastCPU = new FastDetector(10, true))
            {
                // detect FAST keypoints and draw them onto the result image
                fastCPU.DetectRaw(uModelImage, modelKeyPoints);
                Features2DToolbox.DrawKeypoints(img, modelKeyPoints, result, new Bgr(Color.Red), Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);
            }

            return result;
        }
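A minimal usage sketch for the helper above (the file name "building.jpg" is a hypothetical placeholder):

        // Load an image, run FAST detection, and display the annotated result.
        Mat img = CvInvoke.Imread("building.jpg", ImreadModes.Color);
        using (UMat annotated = Run(img))
        {
            CvInvoke.Imshow("FAST keypoints", annotated);
            CvInvoke.WaitKey(0);
        }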
Code Example #2
        public void TestBruteForceHammingDistance()
        {
            if (CudaInvoke.HasCuda)
            {
                Image <Gray, byte>       box   = new Image <Gray, byte>("box.png");
                FastDetector             fast  = new FastDetector(100, true);
                BriefDescriptorExtractor brief = new BriefDescriptorExtractor(32);

                #region extract features from the object image
                Stopwatch        stopwatch      = Stopwatch.StartNew();
                VectorOfKeyPoint modelKeypoints = new VectorOfKeyPoint();
                fast.DetectRaw(box, modelKeypoints);
                Mat modelDescriptors = new Mat();
                brief.Compute(box, modelKeypoints, modelDescriptors);
                stopwatch.Stop();
                Trace.WriteLine(String.Format("Time to extract feature from model: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                #endregion

                Image <Gray, Byte> observedImage = new Image <Gray, byte>("box_in_scene.png");

                #region extract features from the observed image
                stopwatch.Reset(); stopwatch.Start();
                VectorOfKeyPoint observedKeypoints = new VectorOfKeyPoint();
                fast.DetectRaw(observedImage, observedKeypoints);
                Mat observedDescriptors = new Mat();
                brief.Compute(observedImage, observedKeypoints, observedDescriptors);
                stopwatch.Stop();
                Trace.WriteLine(String.Format("Time to extract feature from image: {0} milli-sec", stopwatch.ElapsedMilliseconds));
                #endregion

                Mat homography = null;
                using (GpuMat <Byte> gpuModelDescriptors = new GpuMat <byte>(modelDescriptors)) // initialization of the GPU code may take longer
                {
                    stopwatch.Reset(); stopwatch.Start();
                    CudaBFMatcher hammingMatcher = new CudaBFMatcher(DistanceType.Hamming);

                    int k = 2;

                    using (GpuMat <Byte> gpuObservedDescriptors = new GpuMat <byte>(observedDescriptors))
                        using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
                        {
                            Stopwatch w2 = Stopwatch.StartNew();
                            hammingMatcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k, null, true);
                            w2.Stop();
                            Trace.WriteLine(String.Format("Time for feature matching (excluding data transfer): {0} milli-sec",
                                                          w2.ElapsedMilliseconds));

                            // one mask row per observed keypoint; every match starts out accepted
                            Mat mask = new Mat(observedKeypoints.Size, 1, DepthType.Cv8U, 1);
                            mask.SetTo(new MCvScalar(255));
                            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

                            int nonZeroCount = CvInvoke.CountNonZero(mask);
                            if (nonZeroCount >= 4)
                            {
                                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeypoints, observedKeypoints,
                                                                                           matches, mask, 1.5, 20);
                                if (nonZeroCount >= 4)
                                {
                                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeypoints,
                                                                                                          observedKeypoints, matches, mask, 2);
                                }
                                nonZeroCount = CvInvoke.CountNonZero(mask);
                            }

                            stopwatch.Stop();
                            Trace.WriteLine(String.Format("Time for feature matching (including data transfer): {0} milli-sec",
                                                          stopwatch.ElapsedMilliseconds));
                        }
                }

                if (homography != null)
                {
                    Rectangle rect = box.ROI;
                    PointF[]  pts  = new PointF[] {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };

                    PointF[] points = CvInvoke.PerspectiveTransform(pts, homography);
                    //homography.ProjectPoints(points);

                    //Merge the object image and the observed image into one big image for display
                    Image <Gray, Byte> res = box.ConcateVertical(observedImage);

                    for (int i = 0; i < points.Length; i++)
                    {
                        points[i].Y += box.Height;
                    }
                    res.DrawPolyline(Array.ConvertAll <PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);
                    //ImageViewer.Show(res);
                }
            }
        }
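When CudaInvoke.HasCuda is false, the test above is skipped entirely. For reference, a minimal CPU-only sketch of the same Hamming matching step (assuming the modelDescriptors and observedDescriptors mats computed above) looks like this:

        // CPU fallback sketch: brute-force Hamming matching without CUDA.
        using (BFMatcher matcher = new BFMatcher(DistanceType.Hamming))
        using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
        {
            matcher.Add(modelDescriptors);
            matcher.KnnMatch(observedDescriptors, matches, 2, null);

            Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
        }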
Code Example #3
File: FeatureTracker.cs  Project: red-pencil/ISWC18
    public void AddFrame(Image <Gray, byte> frame)
    {
        Mat observedDescriptors = new Mat();
        Mat mask;
        VectorOfKeyPoint observedKeyPoints = new VectorOfKeyPoint();


        if (_isFirst)
        {
            // first frame: detect and describe it as the initial model
            _detector.DetectRaw(frame, modelKeyPoints);
            _descriptor.Compute(frame, modelKeyPoints, _modelDescriptors);
            if (modelKeyPoints.Size == 0)
            {
                return;
            }

            _width  = frame.Width;
            _height = frame.Height;

            _matcher = new BFMatcher(DistanceType.L2);
            _matcher.Add(_modelDescriptors);
            _isFirst = false;
            return;
        }
        else
        {
            _detector.DetectRaw(frame, observedKeyPoints);
            _descriptor.Compute(frame, observedKeyPoints, observedDescriptors);
        }

        _matches.Clear();
        _matcher.KnnMatch(observedDescriptors, _matches, k, null);

        _matcher = new BFMatcher(DistanceType.L2);         // rebuild the matcher so the current frame becomes the new model
        _matcher.Add(observedDescriptors);

        mask = new Mat(_matches.Size, 1, Emgu.CV.CvEnum.DepthType.Cv8U, 1);
        mask.SetTo(new MCvScalar(255));
        Features2DToolbox.VoteForUniqueness(_matches, uniquenessThresh, mask);

        Stopwatch stopwatch = Stopwatch.StartNew();
        int nonZeroCount = CvInvoke.CountNonZero(mask);

        if (nonZeroCount >= 4)
        {
            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, _matches, mask, 1.5, 20);

            if (nonZeroCount >= 4)
            {
                _homography   = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, _matches, mask, 2);
                double[,] arr = new double[3, 3];
                _homography.CopyTo(arr);
                // copy the 2x2 linear part of the homography into the Unity matrix;
                // the translation, normalized by the frame size, is stored below
                Homography.SetRow(0, new Vector4((float)arr[0, 0], (float)arr[0, 1], 0, 0));
                Homography.SetRow(1, new Vector4((float)arr[1, 0], (float)arr[1, 1], 0, 0));
                Homography.SetRow(2, new Vector4(0, 0, 1, 0));

                Translation.Set((float)arr[0, 2] / (float)_width, (float)arr[1, 2] / (float)_height, 0);
            }
        }
        stopwatch.Stop();
        UnityEngine.Debug.Log("Matcher required time:" + stopwatch.ElapsedMilliseconds + " Count: " + nonZeroCount + "/" + _matches.Size);

        _matchesPointsA.Clear();
        _matchesPointsB.Clear();

        // Lowe ratio test: keep a match only when the best candidate is
        // clearly closer than the second best
        for (int i = 0; i < _matches.Size; i++)
        {
            if (_matches [i] [0].Distance < _matches [i] [1].Distance * 0.7f)
            {
                try {
                    int idx = _matches [i] [0].TrainIdx;
                    _matchesPointsA.Add(new Vector2(modelKeyPoints [idx].Point.X, modelKeyPoints [idx].Point.Y));
                    idx = _matches [i] [0].QueryIdx;
                    if (idx < observedKeyPoints.Size)
                    {
                        _matchesPointsB.Add(new Vector2(observedKeyPoints [idx].Point.X, observedKeyPoints [idx].Point.Y));
                    }
                    else
                    {
                        UnityEngine.Debug.Log("Exceed length!");
                    }
                } catch (Exception e)
                {
                    UnityEngine.Debug.Log(e.Message);
                }
            }
        }

        if (ShowImage)
        {
            ShowImage = false;

            Image <Bgr, Byte> result = frame.Mat.ToImage <Bgr, Byte> ();
            //	Features2DToolbox.DrawMatches (frame, modelKeyPoints, _storedImage, observedKeyPoints, _matches, result, new MCvScalar (255, 255, 255), new MCvScalar (0, 0, 255), mask, Features2DToolbox.KeypointDrawType.Default);

            var kpts = observedKeyPoints.ToArray();
            for (int i = 0; i < kpts.Length; ++i)
            {
                var p = kpts [i];
                result.Draw(new CircleF(p.Point, p.Size), new Bgr(255, 0, 0), 1);
            }

            //Emgu.CV.UI.ImageViewer.Show(result,"Result");
        }

        modelKeyPoints    = observedKeyPoints;
        _modelDescriptors = observedDescriptors;

        _storedImage = frame.Mat.Clone();
    }
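The method above references fields that are defined elsewhere in FeatureTracker.cs. A plausible set of declarations is sketched below; KAZE is an assumption (its float descriptors are consistent with the DistanceType.L2 matcher used above), and the actual project may initialize these fields differently:

    // Hypothetical field declarations consistent with the usage above.
    private Feature2D _detector   = new KAZE();
    private Feature2D _descriptor = new KAZE();
    private VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint();
    private Mat _modelDescriptors = new Mat();
    private VectorOfVectorOfDMatch _matches = new VectorOfVectorOfDMatch();
    private BFMatcher _matcher;
    private Mat _homography;
    private Mat _storedImage;
    private bool _isFirst = true;
    private int _width, _height;
    private const int k = 2;
    private const double uniquenessThresh = 0.8;

    public Matrix4x4 Homography;
    public Vector3 Translation;
    public bool ShowImage;
    private List<Vector2> _matchesPointsA = new List<Vector2>();
    private List<Vector2> _matchesPointsB = new List<Vector2>();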
Code Example #4
        public static Image <Bgr, byte> Draw(Image <Gray, byte> modelImage, Image <Gray, byte> observedImage)
        {
            Mat              homography     = null;
            FastDetector     fastCpu        = new FastDetector(10, true);
            VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint();
            VectorOfKeyPoint observedPoints = new VectorOfKeyPoint();

            BriefDescriptorExtractor descriptors = new BriefDescriptorExtractor();

            UMat modelDescriptors          = new UMat();
            VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
            Mat    mask;
            int    k = 2;
            double uniquenessThreshold = 0.8;

            try
            {
                // extract features from the object image (fastCpu detects the
                // keypoints; it cannot compute descriptors itself, so the BRIEF
                // extractor is used for that step)
                fastCpu.DetectRaw(modelImage, modelKeyPoints, null);
                descriptors.Compute(modelImage, modelKeyPoints, modelDescriptors);
            }
            catch (Exception e)
            {
                Console.Write("debug" + e.Message);
            }

            // extract features from the observed image the same way
            UMat observedDescriptors = new UMat();
            fastCpu.DetectRaw(observedImage, observedPoints, null);
            descriptors.Compute(observedImage, observedPoints, observedDescriptors);

            // BRIEF descriptors are binary, so Hamming distance is the appropriate metric
            BFMatcher matcher = new BFMatcher(DistanceType.Hamming);

            matcher.Add(modelDescriptors);

            matcher.KnnMatch(observedDescriptors, matches, k, null);
            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
            int nonZeroCount = CvInvoke.CountNonZero(mask);

            if (nonZeroCount >= 4)
            {
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedPoints, matches, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedPoints, matches, mask, 2);
                }
            }
            Mat result = new Mat();

            // Draw the observed image with its keypoints; the projected model
            // region is outlined on this same image below
            Features2DToolbox.DrawKeypoints(observedImage, observedPoints, result, new Bgr(255, 255, 255), Features2DToolbox.KeypointDrawType.Default);

            #region draw the projected region on the image
            if (homography != null)
            {
                Rectangle rect = modelImage.ROI;
                // PointF differs from Point in that its coordinates are floats
                PointF[] pts = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                pts = CvInvoke.PerspectiveTransform(pts, homography);
                // convert the array from one element type to another
                Point[] points = Array.ConvertAll <PointF, Point>(pts, Point.Round);

                using (VectorOfPoint vp = new VectorOfPoint(points))
                {
                    // draw the projected region as a closed polyline
                    CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
                }
            }
            #endregion

            return result.ToImage <Bgr, byte>();
        }
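A minimal usage sketch for the Draw helper, reusing the box.png / box_in_scene.png pair from Code Example #2:

        // Match the model image against the scene and display the result.
        Image <Gray, byte> model = new Image <Gray, byte>("box.png");
        Image <Gray, byte> scene = new Image <Gray, byte>("box_in_scene.png");
        Image <Bgr, byte> drawn = Draw(model, scene);
        CvInvoke.Imshow("Result", drawn);
        CvInvoke.WaitKey(0);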