Example #1
 public StopSignDetector(Image <Bgr, Byte> stopSignModel)
 {
     _detector = new SURFDetector(500, false);
     using (Image <Gray, Byte> redMask = GetRedPixelMask(stopSignModel))
     {
         ImageFeature <float>[] features = _detector.DetectFeatures(redMask, null);
         if (features.Length == 0)
         {
             throw new Exception("No image feature has been found in the stop sign model");
         }
         _tracker = new Features2DTracker <float>(features);
     }
     _octagonStorage = new MemStorage();
     _octagon        = new Contour <Point>(_octagonStorage);
     _octagon.PushMulti(new Point[] {
         new Point(1, 0),
         new Point(2, 0),
         new Point(3, 1),
         new Point(3, 2),
         new Point(2, 3),
         new Point(1, 3),
         new Point(0, 2),
         new Point(0, 1)
     },
                        Emgu.CV.CvEnum.BACK_OR_FRONT.FRONT);
 }
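Several of these examples call a GetRedPixelMask helper that is not shown. A plausible implementation, patterned on the Emgu CV stop-sign sample, is sketched below; the HSV thresholds (hue outside [20, 160], saturation of at least 10) come from that sample and are assumptions, not something fixed by the code above.

 private static Image <Gray, Byte> GetRedPixelMask(Image <Bgr, byte> image)
 {
     //convert to HSV; red hue wraps around 0, so keep hue < 20 or hue > 160
     using (Image <Hsv, Byte> hsv = image.Convert <Hsv, Byte>())
     {
         Image <Gray, Byte>[] channels = hsv.Split();
         try
         {
             //channel 0 becomes the mask for hue less than 20 or larger than 160
             CvInvoke.cvInRangeS(channels[0], new MCvScalar(20), new MCvScalar(160), channels[0]);
             channels[0]._Not();
             //channel 1 masks saturation of at least 10, mainly to filter out white pixels
             channels[1]._ThresholdBinary(new Gray(10), new Gray(255.0));
             CvInvoke.cvAnd(channels[0], channels[1], channels[0], IntPtr.Zero);
         }
         finally
         {
             channels[1].Dispose();
             channels[2].Dispose();
         }
         return channels[0];
     }
 }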
Example #2
 private void CreateSurfaceTracker()
 {
     surfaceParameters = new SURFDetector(500, false);
     using (Image <Bgr, Byte> stopSignModel = new Image <Bgr, Byte>(Properties.Resources.SignModel))
         using (Image <Gray, Byte> redMask = GetRedPixelMask(stopSignModel))
         {
             featureTracker = new Features2DTracker <float>(surfaceParameters.DetectFeatures(redMask, null));
         }
 }
        private void button2_Click(object sender, EventArgs e)
        {
            Application.Idle += new EventHandler(delegate(object sender1, EventArgs e1)
            {
                observedImage = camera.QueryFrame().Convert <Rgb, byte>();

                //  Stopwatch watch = Stopwatch.StartNew();
                // extract features from the observed image
                ImageFeature <float>[] imageFeatures = surfParam.DetectFeatures(observedImage.Convert <Gray, byte>(), null);

                Features2DTracker <float> .MatchedImageFeature[] matchedFeatures = tracker.MatchFeature(imageFeatures, 2);
                matchedFeatures             = Features2DTracker <float> .VoteForUniqueness(matchedFeatures, 0.8);
                matchedFeatures             = Features2DTracker <float> .VoteForSizeAndOrientation(matchedFeatures, 1.5, 20);
                HomographyMatrix homography = Features2DTracker <float> .GetHomographyMatrixFromMatchedFeatures(matchedFeatures);
                // watch.Stop();

                //Merge the object image and the observed image into one image for display
                //**** Image<Rgb, Byte> res = modelImage.ConcateVertical(observedImage);


                Image <Rgb, Byte> res = modelImage.ConcateHorizontal(observedImage);
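                //note: 'res' is composed here but never displayed; this handler only shows 'observedImage' below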


                //#region draw lines between the matched features
                //foreach (Features2DTracker<float>.MatchedImageFeature matchedFeature in matchedFeatures)
                //{
                //    PointF p = matchedFeature.ObservedFeature.KeyPoint.Point;
                //    p.Y += modelImage.Height;
                //    res.Draw(new LineSegment2DF(matchedFeature.SimilarFeatures[0].Feature.KeyPoint.Point, p), new Rgb(0,255,0), 1);

                //}
                //#endregion


                #region draw the projected region on the image
                if (homography != null)
                {   //draw a rectangle along the projected model
                    Rectangle rect = modelImage.ROI;
                    PointF[] pts   = new PointF[] {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    homography.ProjectPoints(pts);

                    //for (int i = 0; i < pts.Length; i++)
                    //   pts[i].Y += modelImage.Height;

                    observedImage.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Rgb(255, 20, 100), 5);
                }
                #endregion

                imageBox1.Image = observedImage;
            });
        }
        public Homography()
        {
            InitializeComponent();
            camera        = new Capture(@"F:\Working\Final phase\DataSet\20150409_13-34-33.asf");
            surfParam     = new SURFDetector(500, false);
            modelImage    = new Image <Rgb, byte>(@"C:\Users\ahmed nady\Pictures\modelImage.bmp");
            modelFeatures = surfParam.DetectFeatures(modelImage.Convert <Gray, byte>(), null);

            //Create a Feature Tracker
            tracker = new Features2DTracker <float>(modelFeatures);
        }
Example #5
        public Surf()
        {
            InitializeComponent();
            camera     = new Capture(0);
            surfParam  = new SURFDetector(500, false);
            modelImage = new Image <Rgb, byte>(@"C:\Users\ahmed nady\Desktop\hand.jpg");
            //extract features from the object image
            modelFeatures = surfParam.DetectFeatures(modelImage.Convert <Gray, byte>(), null);

            //Create a Feature Tracker
            tracker = new Features2DTracker <float>(modelFeatures);
        }
        public SurfStopSignDetector()
        {
            detector = new SURFDetector(500, false);


            Image <Bgr, Byte>  stopSignModel = new Image <Bgr, Byte>("models\\stop-sign-model.png");
            Image <Gray, Byte> redMask       = GetRedPixelMask(stopSignModel);

            tracker = new Features2DTracker <float>(detector.DetectFeatures(redMask, null));

            octagonStorage = new MemStorage();
            octagon        = new Contour <Point>(octagonStorage);
            octagon.PushMulti(new Point[] { new Point(1, 0), new Point(2, 0), new Point(3, 1), new Point(3, 2),
                                            new Point(2, 3), new Point(1, 3), new Point(0, 2), new Point(0, 1) }, Emgu.CV.CvEnum.BACK_OR_FRONT.FRONT);
        }
        private void button1_Click(object sender, EventArgs e)
        {
            if (capture)
            {
                button1.Text = "start";
                //extract features from the object image
                modelFeatures = surfParam.DetectFeatures(modelImage.Convert <Gray, byte>(), null);

                //Create a Feature Tracker
                tracker = new Features2DTracker <float>(modelFeatures);

                Application.Idle -= ProcessFrame;
            }
            else
            {
                button1.Text = "pause";

                Application.Idle += ProcessFrame;
            }
            capture = !capture;
        }
Example #8
        void ProcessFrame(object sender, EventArgs arg)
        {
            observedImage = camera.QueryFrame().Convert <Rgb, byte>();


            // extract features from the observed image
            ImageFeature <float>[] imageFeatures = surfParam.DetectFeatures(observedImage.Convert <Gray, byte>(), null);

            Features2DTracker <float> .MatchedImageFeature[] matchedFeatures = tracker.MatchFeature(imageFeatures, 2);
            matchedFeatures = Features2DTracker <float> .VoteForUniqueness(matchedFeatures, 0.8);

            matchedFeatures = Features2DTracker <float> .VoteForSizeAndOrientation(matchedFeatures, 1.5, 20);

            HomographyMatrix homography = Features2DTracker <float> .GetHomographyMatrixFromMatchedFeatures(matchedFeatures);


            Image <Rgb, Byte> res = modelImage.ConcateHorizontal(observedImage);


            #region draw the projected region on the image
            if (homography != null)
            {       //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                //for (int i = 0; i < pts.Length; i++)
                //   pts[i].Y += modelImage.Height;

                observedImage.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Rgb(255, 20, 100), 5);
            }
            #endregion
            imageBox2.Image = res;
            imageBox1.Image = observedImage;
        }
Example #9
        public DetectorResult Process(Image <Bgr, byte> rawFrame, Image <Gray, byte> grayFrame)
        {
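            //note: the detector, model image and tracker below are rebuilt on every
            //frame; in a real pipeline they would be created once and reused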
            var surfParam = new SurfDetector(900, true);

            var modelImage    = new Image <Gray, byte>("iphone\\signin.jpg");
            var modelFeatures = surfParam.DetectFeatures(modelImage, null);
            var tracker       = new Features2DTracker(modelFeatures);


            var imageFeatures    = surfParam.DetectFeatures(grayFrame, null);
            var homographyMatrix = tracker.Detect(imageFeatures, 100.0);

            Image <Bgr, Byte> processedImage = modelImage.Convert <Bgr, Byte>().ConcateVertical(rawFrame);

            if (homographyMatrix != null)
            {
                var rect = modelImage.ROI;
                var pts  = new[]
                {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homographyMatrix.ProjectPoints(pts);

                for (int i = 0; i < pts.Length; i++)
                {
                    pts[i].Y += modelImage.Height;
                }

                processedImage.DrawPolyline(Array.ConvertAll(pts, Point.Round), true, new Bgr(Color.DarkOrange), 1);
            }
            return(new DetectorResult()
            {
                RawImage = rawFrame, ProcessedImage = processedImage
            });
        }
Example #10
 public StopSignDetector(Image <Bgr, Byte> stopSignModel)
 {
     _detector = new SURFDetector(500, false);
     using (Image <Gray, Byte> redMask = GetRedPixelMask(stopSignModel))
     {
         ImageFeature <float>[] temp = _detector.DetectFeatures(redMask, null);
         _tracker = new Features2DTracker <float>(temp);
     }
     _octagonStorage = new MemStorage();
     _octagon        = new Contour <Point>(_octagonStorage);
     _octagon.PushMulti(new Point[] {
         new Point(1, 0),
         new Point(2, 0),
         new Point(3, 1),
         new Point(3, 2),
         new Point(2, 3),
         new Point(1, 3),
         new Point(0, 2),
         new Point(0, 1)
     },
                        Emgu.CV.CvEnum.BACK_OR_FRONT.FRONT
                        );
 }
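The unit octagon pushed into _octagon above is a reference shape for contour matching rather than something that is drawn. A minimal sketch of how such a detector might use it, assuming a candidate Contour<Point> extracted from the red mask (the helper name and the 0.1 cutoff are illustrative, not part of the original class):

 //Sketch: true when 'candidate' is roughly octagonal; CV_CONTOURS_MATCH_I3
 //compares Hu moments, so the comparison tolerates scale and rotation
 private bool IsOctagonal(Contour <Point> candidate)
 {
     double ratio = CvInvoke.cvMatchShapes(_octagon, candidate,
                                           Emgu.CV.CvEnum.CONTOURS_MATCH_TYPE.CV_CONTOURS_MATCH_I3, 0);
     return ratio <= 0.1;
 }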
Example #11
        private void ProcessFrame(object sender, EventArgs arg)
        {
            Image <Bgr, Byte>  frame         = _capture.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            Image <Gray, Byte> grayframe     = frame.Convert <Gray, Byte>();
            Image <Gray, Byte> modelImage    = new Image <Gray, byte>("DataPlate/" + 10 + ".jpg");
            Image <Gray, Byte> observedImage = grayframe;
            Stopwatch          watch;
            HomographyMatrix   homography = null;
            SURFDetector       surfCPU    = new SURFDetector(500, false);
            VectorOfKeyPoint   modelKeyPoints;
            VectorOfKeyPoint   observedKeyPoints;
            Matrix <int>       indices;
            Matrix <float>     dist;
            Matrix <byte>      mask;


            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
                using (GpuImage <Gray, Byte> gpuModelImage = new GpuImage <Gray, byte>(modelImage))

                    #region SURF
                    //extract features from the object image
                    using (GpuMat <float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat <float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (GpuBruteForceMatcher matcher = new GpuBruteForceMatcher(GpuBruteForceMatcher.DistanceType.L2))
                            {
                                modelKeyPoints = new VectorOfKeyPoint();
                                surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (GpuImage <Gray, Byte> gpuObservedImage = new GpuImage <Gray, byte>(observedImage))
                                    using (GpuMat <float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat <float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuObservedDescriptors.Size.Height, 2, 1))
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuMatchIndices.Size, 1))
                                                {
                                                    observedKeyPoints = new VectorOfKeyPoint();
                                                    surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);
                                                    matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, 2, null);
                                                    indices = new Matrix <int>(gpuMatchIndices.Size);
                                                    dist    = new Matrix <float>(indices.Size);
                                                    gpuMatchIndices.Download(indices);
                                                    gpuMatchDist.Download(dist);

                                                    mask = new Matrix <byte>(dist.Rows, 1);

                                                    mask.SetValue(255);

                                                    Features2DTracker.VoteForUniqueness(dist, 0.8, mask);

                                                    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                                                    if (nonZeroCount >= 4)
                                                    {
                                                        nonZeroCount = Features2DTracker.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                                                        if (nonZeroCount >= 4)
                                                        {
                                                            homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
                                                        }
                                                    }

                                                    watch.Stop();
                                                }
                            }
                #endregion
            }
            else
            {
                //extract features from the object image
                modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
                //MKeyPoint[] kpts = modelKeyPoints.ToArray();
                Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);
                watch = Stopwatch.StartNew();

                // extract features from the observed image
                observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
                Matrix <float>    observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);
                BruteForceMatcher matcher             = new BruteForceMatcher(BruteForceMatcher.DistanceType.L2F32);
                matcher.Add(modelDescriptors);
                int k = 2;
                indices = new Matrix <int>(observedDescriptors.Rows, k);

                dist = new Matrix <float>(observedDescriptors.Rows, k);
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);

                mask = new Matrix <byte>(dist.Rows, 1);

                mask.SetValue(255);

                Features2DTracker.VoteForUniqueness(dist, 0.8, mask);

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 20)
                {
                    nonZeroCount = Features2DTracker.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 20)
                    {
                        homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
                        XMLData();
                    }
                    else
                    {
                        textBox1.Text = string.Empty;
                        textBox2.Text = string.Empty;
                        textBox3.Text = string.Empty;
                        textBox4.Text = string.Empty;
                        textBox5.Text = string.Empty;
                    }
                }
                watch.Stop();
                #region draw the projected region on the image
                if (homography != null)
                {  //draw a rectangle along the projected model
                    Rectangle rect = modelImage.ROI;
                    PointF[]  pts  = new PointF[] {
                        new PointF(rect.Left, rect.Bottom),
                        new PointF(rect.Right, rect.Bottom),
                        new PointF(rect.Right, rect.Top),
                        new PointF(rect.Left, rect.Top)
                    };
                    homography.ProjectPoints(pts);
                    frame.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 2);
                }
                #endregion
                CaptureImageBox.Image = frame;
                DataImageBox.Image    = modelImage;
            }
        }
Example #12
        static void Run()
        {
            SURFDetector surfParam = new SURFDetector(500, false);

            Image <Gray, Byte> modelImage = new Image <Gray, byte>("box.png");

            //extract features from the object image
            ImageFeature[] modelFeatures = surfParam.DetectFeatures(modelImage, null);

            //Create a Feature Tracker
            Features2DTracker tracker = new Features2DTracker(modelFeatures);

            Image <Gray, Byte> observedImage = new Image <Gray, byte>("box_in_scene.png");

            Stopwatch watch = Stopwatch.StartNew();

            // extract features from the observed image
            ImageFeature[] imageFeatures = surfParam.DetectFeatures(observedImage, null);

            Features2DTracker.MatchedImageFeature[] matchedFeatures = tracker.MatchFeature(imageFeatures, 2, 20);
            matchedFeatures = Features2DTracker.VoteForUniqueness(matchedFeatures, 0.8);
            matchedFeatures = Features2DTracker.VoteForSizeAndOrientation(matchedFeatures, 1.5, 20);
            HomographyMatrix homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(matchedFeatures);

            watch.Stop();

            //Merge the object image and the observed image into one image for display
            Image <Gray, Byte> res = modelImage.ConcateVertical(observedImage);

            #region draw lines between the matched features
            foreach (Features2DTracker.MatchedImageFeature matchedFeature in matchedFeatures)
            {
                PointF p = matchedFeature.ObservedFeature.KeyPoint.Point;
                p.Y += modelImage.Height;
                res.Draw(new LineSegment2DF(matchedFeature.SimilarFeatures[0].Feature.KeyPoint.Point, p), new Gray(0), 1);
            }
            #endregion

            #region draw the projected region on the image
            if (homography != null)
            { //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                for (int i = 0; i < pts.Length; i++)
                {
                    pts[i].Y += modelImage.Height;
                }

                res.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Gray(255.0), 5);
            }
            #endregion

            ImageViewer.Show(res, String.Format("Matched in {0} milliseconds", watch.ElapsedMilliseconds));
        }
Example #13
        //public static string DoImageRegonition(string FullFeaFName, Stream ImgStream)

        public static Dictionary <string, string> DoImageRegonition(string[] SevenFeas, string[] CocaFeas, Stream ImgStream)
        {
            Dictionary <string, string> regres = new Dictionary <string, string>(4);
            StringBuilder sb       = new StringBuilder();
            const int     Seven_DV = 400;
            const int     Coca_DV  = 300;

            try
            {
                SIFTDetector siftdector = new SIFTDetector();
                //the following code is unnecessary because the features are extracted beforehand;
                //otherwise the image would first have to be converted to gray-scale.
                //this code needs to be refactored
                // Image<Gray, Byte> modelImage = new Image<Gray, byte>(FullMoldeImg);
                //Image<Gray, Byte> modelImage = new Image<Gray, byte>(FullMoldeImgName);
                BinaryFormatter _bf      = new BinaryFormatter();
                int             sevenlen = SevenFeas.Length;
                int             cocalen  = CocaFeas.Length;
                //initialize the result dictionary

                regres.Add("seven", "no");
                regres.Add("coca", "no");
                regres.Add("ma", "none");
                regres.Add("excep", "none");

                //Initialize the image to be compared
                Image <Gray, Byte> observedImage = GetCVImage(ImgStream);
                MKeyPoint[]        objmkps       = siftdector.DetectKeyPoints(observedImage);
                ImageFeature[]     imageFeatures = siftdector.ComputeDescriptors(observedImage, objmkps);

                //PointF[] _obimgPA = GetPointFfromFeatures(imageFeatures, imageFeatures.Length);
                //int _obimgPN = _obimgPA.Length;

                //Doing seven matching

                for (int idx = 0; idx < sevenlen; idx++)
                {
                    //Get the feature file
                    Stream stream = File.Open(SevenFeas[idx], FileMode.Open);
                    //Deserialize the file to get the features
                    ImageFeature[] sevenFeatures = (ImageFeature[])_bf.Deserialize(stream);
                    stream.Dispose();
                    int slen = sevenFeatures.Length;
                    //PointF[] sevenPA = GetPointFfromFeatures(sevenFeatures, _obimgPN);


                    //set up the tracker
                    Features2DTracker seventrac = new Features2DTracker(sevenFeatures);
                    //Doing the Seven matching
                    Features2DTracker.MatchedImageFeature[] sevenmatchedfea = seventrac.MatchFeature(imageFeatures, 2, 20);
                    sevenmatchedfea = Features2DTracker.VoteForUniqueness(sevenmatchedfea, 0.8);
                    sevenmatchedfea = Features2DTracker.VoteForSizeAndOrientation(sevenmatchedfea, 1.5, 20);

                    //Get matching result matrix
                    HomographyMatrix sevenhomography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(sevenmatchedfea);
                    //Matrix<float>  sevenhomography =  CameraCalibration.FindHomography(sevenPA,_obimgPA,HOMOGRAPHY_METHOD.RANSAC,3).Convert<float>();
                    //sevenmatchedfea.
                    //fill result into dictionary variable
                    if (sevenhomography != null)
                    {
                        if (Math.Abs(sevenhomography.Sum) > Seven_DV)
                        {
                            regres["seven"] = "yes";
                        }

                        sb.Append("ssum:");
                        sb.Append(sevenhomography.Sum.ToString());
                        //sb.Append("| sidx:");
                        // sb.Append(idx.ToString());

                        break;
                    }
                }

                //Doing Coca image matching
                for (int idx2 = 0; idx2 < cocalen; idx2++)
                {
                    //Get the feature file
                    Stream stream = File.Open(CocaFeas[idx2], FileMode.Open);
                    //Deserialize the file to get the features
                    ImageFeature[] cocaFeatures = (ImageFeature[])_bf.Deserialize(stream);
                    stream.Dispose();
                    //PointF[] cocaPA = GetPointFfromFeatures(cocaFeatures, _obimgPN);
                    //cocaFeatures.CopyTo(cocaPA, 0);

                    //Matrix<float> cocahomography = CameraCalibration.FindHomography(cocaPA, _obimgPA, HOMOGRAPHY_METHOD.RANSAC, 3).Convert<float>();
                    //set up the tracker
                    Features2DTracker cocatrac = new Features2DTracker(cocaFeatures);
                    //Doing the Coca matching
                    Features2DTracker.MatchedImageFeature[] cocamatchedfea = cocatrac.MatchFeature(imageFeatures, 2, 20);
                    cocamatchedfea = Features2DTracker.VoteForUniqueness(cocamatchedfea, 0.8);
                    cocamatchedfea = Features2DTracker.VoteForSizeAndOrientation(cocamatchedfea, 1.5, 20);
                    //Get matching result matrix
                    HomographyMatrix cocahomography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(cocamatchedfea);
                    //fill result into dictionary variable
                    if (cocahomography != null)
                    {
                        if (Math.Abs(cocahomography.Sum) > Coca_DV)
                        {
                            regres["coca"] = "yes";
                        }
                        sb.Append("#csum:");
                        sb.Append(cocahomography.Sum.ToString());
                        //sb.Append(",cidx:");
                        //sb.Append(idx2.ToString());
                        break;
                    }
                }
            }
            catch (Exception err)
            {
                regres["excep"] = err.Message;
                Console.WriteLine(err.Message);
            }
            if (sb.Length > 0)
            {
                regres["ma"] = sb.ToString();
                sb           = null;
            }
            return(regres);
        }
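The feature files that DoImageRegonition deserializes have to be produced in a matching format. A minimal sketch of that offline step, mirroring the SIFTDetector and BinaryFormatter calls used above (the method name and file paths are placeholders):

        //Sketch: extract SIFT features from a model image and persist them,
        //producing the files that SevenFeas/CocaFeas point to
        public static void SaveModelFeatures(string modelImagePath, string featureFilePath)
        {
            SIFTDetector siftdector = new SIFTDetector();
            using (Image <Gray, Byte> modelImage = new Image <Gray, Byte>(modelImagePath))
            {
                MKeyPoint[]    keyPoints = siftdector.DetectKeyPoints(modelImage);
                ImageFeature[] features  = siftdector.ComputeDescriptors(modelImage, keyPoints);
                using (Stream stream = File.Open(featureFilePath, FileMode.Create))
                {
                    new BinaryFormatter().Serialize(stream, features);
                }
            }
        }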
Example #14
        public List <ImageRecord> QueryImage(string queryImagePath, SurfSettings surfSetting = null)
        {
            List <ImageRecord> rtnImageList = new List <ImageRecord>();

            var observerFeatureSets = SurfRepository.GetSurfRecordList();

            #region Surf Detector Region
            double hessianThresh       = 500;
            double uniquenessThreshold = 0.8;
            int    minGoodMatchPercent = 50;

            if (surfSetting != null)
            {
                hessianThresh       = surfSetting.HessianThresh.Value;
                uniquenessThreshold = surfSetting.UniquenessThreshold.Value;
                minGoodMatchPercent = surfSetting.GoodMatchThreshold.Value;
            }

            SURFDetector surfDectector = new SURFDetector(hessianThresh, false);
            #endregion

            using (Image <Gray, byte> modelImage = new Image <Gray, byte>(queryImagePath))
            {
                ImageFeature <float>[] modelFeatures = surfDectector.DetectFeatures(modelImage, null);

                if (modelFeatures.Length < 4)
                {
                    throw new InvalidOperationException("Model image didn't have any significant features to detect");
                }

                Features2DTracker <float> tracker = new Features2DTracker <float>(modelFeatures);
                foreach (var surfRecord in observerFeatureSets)
                {
                    string queryImageName = System.IO.Path.GetFileName(queryImagePath);
                    string modelImageName = surfRecord.ImageName;

                    Features2DTracker <float> .MatchedImageFeature[] matchedFeatures = tracker.MatchFeature(surfRecord.observerFeatures, 2);

                    Features2DTracker <float> .MatchedImageFeature[] uniqueFeatures = Features2DTracker <float> .VoteForUniqueness(matchedFeatures, uniquenessThreshold);

                    Features2DTracker <float> .MatchedImageFeature[] uniqueRotOriFeatures = Features2DTracker <float> .VoteForSizeAndOrientation(uniqueFeatures, 1.5, 20);

                    int goodMatchCount = 0;
                    goodMatchCount = uniqueRotOriFeatures.Length;
                    bool isMatch = false;

                    double totalnumberOfModelFeature = modelFeatures.Length;
                    double matchPercentage           = ((totalnumberOfModelFeature - (double)goodMatchCount) / totalnumberOfModelFeature);
                    matchPercentage = (1 - matchPercentage) * 100;
                    matchPercentage = Math.Round(matchPercentage);
                    if (matchPercentage >= minGoodMatchPercent)
                    {
                        HomographyMatrix homography =
                            Features2DTracker <float> .GetHomographyMatrixFromMatchedFeatures(uniqueRotOriFeatures);

                        if (homography != null)
                        {
                            isMatch = homography.IsValid(5);
                            if (isMatch)
                            {
                                surfRecord.Distance = matchPercentage;
                                rtnImageList.Add((ImageRecord)surfRecord);
                            }
                        }
                    }

                    //bool isMatch = false;
                    //if (uniqueFeatures.Length > 4)
                    //{
                    //    HomographyMatrix homography =
                    //        Features2DTracker<float>.GetHomographyMatrixFromMatchedFeatures(uniqueRotOriFeatures);
                    //    if (homography != null)
                    //    {
                    //        isMatch = homography.IsValid(5);
                    //    }
                    //}

                    //if (isMatch)
                    //{
                    //    surfRecord.Distance = goodMatchCount;
                    //    rtnImageList.Add((ImageRecord)surfRecord);
                    //}

                    //int goodMatchCount = 0;
                    //foreach (Features2DTracker<float>.MatchedImageFeature ms in matchedFeatures)
                    //{
                    //    if (ms.SimilarFeatures[0].Distance < uniquenessThreshold)
                    //        goodMatchCount++;
                    //}



                    //double totalnumberOfModelFeature = modelFeatures.Length;
                    //double matchPercentage = ((totalnumberOfModelFeature - (double)goodMatchCount) / totalnumberOfModelFeature);
                    //matchPercentage = (1 - matchPercentage) * 100;
                    //matchPercentage = Math.Round(matchPercentage);
                    //if (matchPercentage >= minGoodMatchPercent)
                    //{
                    //    surfRecord.Distance = matchPercentage;
                    //    rtnImageList.Add((ImageRecord)surfRecord);
                    //}
                }
            }
            rtnImageList = rtnImageList.OrderByDescending(x => x.Distance).ToList();
            return(rtnImageList);
        }
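Example #14 depends on a project-specific SurfRepository and its SurfRecord entries. The record type is not shown; a hypothetical shape, inferred only from the members the query code touches (ImageName, observerFeatures, Distance, and the cast to ImageRecord), might be:

        //Hypothetical: the minimal shape implied by the code above
        public class ImageRecord
        {
            public string ImageName { get; set; }
            public double Distance  { get; set; }  //reused above to carry the match percentage
        }

        public class SurfRecord : ImageRecord
        {
            //features pre-extracted from the stored image, matched against the query model
            public ImageFeature <float>[] observerFeatures { get; set; }
        }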
Example #15
        static void Run()
        {
            Image <Gray, Byte> modelImage    = new Image <Gray, byte>("box.png");
            Image <Gray, Byte> observedImage = new Image <Gray, byte>("box_in_scene.png");
            Stopwatch          watch;
            HomographyMatrix   homography = null;

            SURFDetector surfCPU = new SURFDetector(500, false);

            VectorOfKeyPoint modelKeyPoints;
            VectorOfKeyPoint observedKeyPoints;
            Matrix <int>     indices;
            Matrix <float>   dist;
            Matrix <byte>    mask;

            if (GpuInvoke.HasCuda)
            {
                GpuSURFDetector surfGPU = new GpuSURFDetector(surfCPU.SURFParams, 0.01f);
                using (GpuImage <Gray, Byte> gpuModelImage = new GpuImage <Gray, byte>(modelImage))
                    //extract features from the object image
                    using (GpuMat <float> gpuModelKeyPoints = surfGPU.DetectKeyPointsRaw(gpuModelImage, null))
                        using (GpuMat <float> gpuModelDescriptors = surfGPU.ComputeDescriptorsRaw(gpuModelImage, null, gpuModelKeyPoints))
                            using (GpuBruteForceMatcher matcher = new GpuBruteForceMatcher(GpuBruteForceMatcher.DistanceType.L2))
                            {
                                modelKeyPoints = new VectorOfKeyPoint();
                                surfGPU.DownloadKeypoints(gpuModelKeyPoints, modelKeyPoints);
                                watch = Stopwatch.StartNew();

                                // extract features from the observed image
                                using (GpuImage <Gray, Byte> gpuObservedImage = new GpuImage <Gray, byte>(observedImage))
                                    using (GpuMat <float> gpuObservedKeyPoints = surfGPU.DetectKeyPointsRaw(gpuObservedImage, null))
                                        using (GpuMat <float> gpuObservedDescriptors = surfGPU.ComputeDescriptorsRaw(gpuObservedImage, null, gpuObservedKeyPoints))
                                            using (GpuMat <int> gpuMatchIndices = new GpuMat <int>(gpuObservedDescriptors.Size.Height, 2, 1))
                                                using (GpuMat <float> gpuMatchDist = new GpuMat <float>(gpuMatchIndices.Size, 1))
                                                {
                                                    observedKeyPoints = new VectorOfKeyPoint();
                                                    surfGPU.DownloadKeypoints(gpuObservedKeyPoints, observedKeyPoints);

                                                    matcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, gpuMatchIndices, gpuMatchDist, 2, null);

                                                    indices = new Matrix <int>(gpuMatchIndices.Size);
                                                    dist    = new Matrix <float>(indices.Size);
                                                    gpuMatchIndices.Download(indices);
                                                    gpuMatchDist.Download(dist);

                                                    mask = new Matrix <byte>(dist.Rows, 1);

                                                    mask.SetValue(255);

                                                    Features2DTracker.VoteForUniqueness(dist, 0.8, mask);

                                                    int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                                                    if (nonZeroCount >= 4)
                                                    {
                                                        nonZeroCount = Features2DTracker.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                                                        if (nonZeroCount >= 4)
                                                        {
                                                            homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
                                                        }
                                                    }

                                                    watch.Stop();
                                                }
                            }
            }
            else
            {
                //extract features from the object image
                modelKeyPoints = surfCPU.DetectKeyPointsRaw(modelImage, null);
                //MKeyPoint[] kpts = modelKeyPoints.ToArray();
                Matrix <float> modelDescriptors = surfCPU.ComputeDescriptorsRaw(modelImage, null, modelKeyPoints);

                watch = Stopwatch.StartNew();

                // extract features from the observed image
                observedKeyPoints = surfCPU.DetectKeyPointsRaw(observedImage, null);
                Matrix <float> observedDescriptors = surfCPU.ComputeDescriptorsRaw(observedImage, null, observedKeyPoints);

                BruteForceMatcher matcher = new BruteForceMatcher(BruteForceMatcher.DistanceType.L2F32);
                matcher.Add(modelDescriptors);
                int k = 2;
                indices = new Matrix <int>(observedDescriptors.Rows, k);
                dist    = new Matrix <float>(observedDescriptors.Rows, k);
                matcher.KnnMatch(observedDescriptors, indices, dist, k, null);

                mask = new Matrix <byte>(dist.Rows, 1);

                mask.SetValue(255);

                Features2DTracker.VoteForUniqueness(dist, 0.8, mask);

                int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DTracker.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, indices, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, indices, mask, 3);
                    }
                }

                watch.Stop();
            }

            //Draw the matched keypoints
            Image <Bgr, Byte> result = Features2DTracker.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
                                                                     indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DTracker.KeypointDrawType.NOT_DRAW_SINGLE_POINTS);

            #region draw the projected region on the image
            if (homography != null)
            { //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                result.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Bgr(Color.Red), 5);
            }
            #endregion

            ImageViewer.Show(result, String.Format("Matched using {0} in {1} milliseconds", GpuInvoke.HasCuda ? "GPU" : "CPU", watch.ElapsedMilliseconds));
        }