Example #1
0
 /// <summary>
 /// Draws a match line for every matched SURF feature onto a vertically
 /// concatenated visualization image (model image on top, observed frame below).
 /// </summary>
 /// <param name="modelImage">The model image occupying the top half of <paramref name="processedImage"/>; its height is the vertical offset of the observed half.</param>
 /// <param name="matchedFeatures">Model/observed feature pairs produced by SURFTracker matching.</param>
 /// <param name="processedImage">The concatenated image the lines are drawn onto (modified in place).</param>
 private void DrawLines(Image<Gray, byte> modelImage, SURFTracker.MatchedSURFFeature[] matchedFeatures, Image<Bgr, byte> processedImage)
 {
     foreach (SURFTracker.MatchedSURFFeature matchedFeature in matchedFeatures)
     {
         // Shift the observed point into the bottom (observed) half of the concatenated image.
         PointF p = matchedFeature.ObservedFeature.Point.pt;
         p.Y += modelImage.Height;
         // FIX: connect the matched MODEL feature (top half) to the observed feature
         // (bottom half). The original used the observed point for both endpoints,
         // drawing a meaningless vertical segment instead of a match line — compare
         // the working examples elsewhere in this file, which use
         // SimilarFeatures[0].Feature.Point.pt as the model-side endpoint.
         processedImage.Draw(new LineSegment2DF(matchedFeature.SimilarFeatures[0].Feature.Point.pt, p), new Bgr(Color.DarkOrange), 1);
     }
 }
Example #2
0
 /// <summary>
 /// Builds the detector: extracts SURF features from the red pixels of the
 /// stop-sign model image and prepares an octagon contour for shape matching.
 /// </summary>
 public StopSignDetector()
 {
     _surfParam = new MCvSURFParams(500, false);

     // The model image and its red mask are only needed to seed the tracker.
     using (Image<Bgr, Byte> stopSignModel = new Image<Bgr, Byte>("stop-sign-model.png"))
     using (Image<Gray, Byte> redMask = GetRedPixelMask(stopSignModel))
     {
         _tracker = new SURFTracker(redMask.ExtractSURF(ref _surfParam));
     }

     _octagonStorage = new MemStorage();
     _octagon = new Contour<Point>(_octagonStorage);

     // Unit octagon vertices, pushed in order to form the reference shape.
     Point[] octagonVertices = new Point[]
     {
         new Point(1, 0), new Point(2, 0),
         new Point(3, 1), new Point(3, 2),
         new Point(2, 3), new Point(1, 3),
         new Point(0, 2), new Point(0, 1)
     };
     _octagon.PushMulti(octagonVertices, Emgu.CV.CvEnum.BACK_OR_FRONT.FRONT);
 }
Example #3
0
        /// <summary>
        /// Matches SURF features of the model image against the current gray frame
        /// and, when a homography is found, outlines the projected model region on
        /// the raw color frame.
        /// </summary>
        /// <param name="modelImage">Grayscale model (template) image.</param>
        /// <param name="grayFrame">Grayscale version of the current frame.</param>
        /// <param name="rawFrame">Color frame that the detection outline is drawn onto (modified in place).</param>
        /// <returns>The raw frame, with the detected region outlined when a homography was found.</returns>
        private static Image<Bgr, byte> ProcessSurf(Image<Gray, byte> modelImage, Image<Gray, byte> grayFrame, Image<Bgr, byte> rawFrame)
        {
            SURFFeature[] modelFeatures = modelImage.ExtractSURF(ref _surfParam);
            SURFFeature[] observedFeatures = grayFrame.ExtractSURF(ref _surfParam);

            // Match observed features against the model, then prune ambiguous and
            // scale/rotation-inconsistent candidates before estimating a homography.
            SURFTracker surfTracker = new SURFTracker(modelFeatures);
            SURFTracker.MatchedSURFFeature[] matches = surfTracker.MatchFeature(observedFeatures, 2, 20);
            matches = SURFTracker.VoteForUniqueness(matches, 0.8);
            matches = SURFTracker.VoteForSizeAndOrientation(matches, 1.5, 20);
            HomographyMatrix homography = SURFTracker.GetHomographyMatrixFromMatchedFeatures(matches);

            Image<Bgr, byte> result = rawFrame;

            if (homography == null)
            {
                return result;
            }

            // Project the model image's corners into frame coordinates and outline them.
            Rectangle roi = modelImage.ROI;
            PointF[] corners = new PointF[]
            {
                new PointF(roi.Left, roi.Bottom),
                new PointF(roi.Right, roi.Bottom),
                new PointF(roi.Right, roi.Top),
                new PointF(roi.Left, roi.Top)
            };
            homography.ProjectPoints(corners);

            result.DrawPolyline(Array.ConvertAll(corners, Point.Round), true, new Bgr(Color.DarkOrange), 1);
            return result;
        }
Example #4
0
        /// <summary>
        /// Detects the model image inside the current frame via SURF feature
        /// matching and draws the projected model outline on the raw color frame.
        /// </summary>
        /// <param name="modelImage">Grayscale model (template) image.</param>
        /// <param name="grayFrame">Grayscale version of the current frame.</param>
        /// <param name="rawFrame">Color frame to annotate (modified in place).</param>
        /// <returns>The annotated raw frame.</returns>
        private static Image<Bgr, byte> ProcessSurf(Image<Gray, byte> modelImage, Image<Gray, byte> grayFrame, Image<Bgr, byte> rawFrame)
        {
            SURFFeature[] templateFeatures = modelImage.ExtractSURF(ref _surfParam);
            SURFFeature[] frameFeatures = grayFrame.ExtractSURF(ref _surfParam);

            SURFTracker featureTracker = new SURFTracker(templateFeatures);

            // Raw matches -> uniqueness filter -> size/orientation consensus filter.
            SURFTracker.MatchedSURFFeature[] candidates = featureTracker.MatchFeature(frameFeatures, 2, 20);
            candidates = SURFTracker.VoteForUniqueness(candidates, 0.8);
            candidates = SURFTracker.VoteForSizeAndOrientation(candidates, 1.5, 20);
            HomographyMatrix homography = SURFTracker.GetHomographyMatrixFromMatchedFeatures(candidates);

            Image<Bgr, byte> annotated = rawFrame;

            if (homography != null)
            {
                // Map the model's bounding rectangle through the homography and
                // draw the resulting quadrilateral onto the frame.
                Rectangle bounds = modelImage.ROI;
                PointF[] quad =
                {
                    new PointF(bounds.Left, bounds.Bottom),
                    new PointF(bounds.Right, bounds.Bottom),
                    new PointF(bounds.Right, bounds.Top),
                    new PointF(bounds.Left, bounds.Top)
                };
                homography.ProjectPoints(quad);

                annotated.DrawPolyline(Array.ConvertAll(quad, Point.Round), true, new Bgr(Color.DarkOrange), 1);
            }

            return annotated;
        }
        /// <summary>
        /// Matches SURF features of a model image against a scene image, draws
        /// lines between matched features and the projected model region, and
        /// returns the side-by-side (model | scene) visualization image.
        /// </summary>
        /// <param name="hessianTresh">SURF hessian threshold (e.g. 500).</param>
        /// <param name="extended">Whether to use extended (128-element) SURF descriptors.</param>
        /// <param name="neighbours">Number of nearest neighbours per feature when matching (e.g. 2).</param>
        /// <param name="emax">k-d tree search parameter emax (e.g. 20).</param>
        /// <param name="uniquenessThreshold">Uniqueness ratio threshold (e.g. 0.8).</param>
        /// <param name="scaleIncrement">Scale bin increment for size/orientation voting (e.g. 1.5).</param>
        /// <param name="rotBins">Number of rotation bins for size/orientation voting (e.g. 20).</param>
        /// <param name="show">When true, displays the result in an ImageViewer.</param>
        /// <returns>The horizontally concatenated visualization image.</returns>
        public Image<Gray, Byte> Run(int hessianTresh, bool extended, 
            int neighbours, int emax, 
            double uniquenessThreshold,
            double scaleIncrement, int rotBins, 
            bool show)
        {
            SURFDetector detector = new SURFDetector(hessianTresh, extended); //hessianThresh=500, extended=false

            // FIX (leak): the source images are IDisposable and were never disposed;
            // the usings release them once the concatenated result has been produced.
            using (Image<Bgr, Byte> modelImageBgr = new Image<Bgr, byte>(@"images\640x480\3_purple_oval_full_cropped.bmp"))
            using (Image<Gray, Byte> modelImage = modelImageBgr.Convert<Gray, Byte>())
            using (Image<Gray, Byte> observedImage = new Image<Gray, byte>(@"images\640x480\scene1.png"))
            {
                //extract features from the object image
                SURFFeature[] modelFeatures = modelImage.ExtractSURF(ref detector);
                // extract features from the observed image
                SURFFeature[] imageFeatures = observedImage.ExtractSURF(ref detector);

                //Create a SURF Tracker using k-d Tree
                SURFTracker tracker = new SURFTracker(modelFeatures);

                SURFTracker.MatchedSURFFeature[] matchedFeatures = tracker.MatchFeature(imageFeatures, neighbours, emax); //neighbours=2, emax=20
                matchedFeatures = SURFTracker.VoteForUniqueness(matchedFeatures, uniquenessThreshold);//uniquenessThreshold=0.8
                matchedFeatures = SURFTracker.VoteForSizeAndOrientation(matchedFeatures, scaleIncrement, rotBins); //scaleIncrement=1.5, rotBins=20
                HomographyMatrix homography = SURFTracker.GetHomographyMatrixFromMatchedFeatures(matchedFeatures);

                //Merge the object image and the observed image into one image for display
                Image<Gray, Byte> res = modelImage.ConcateHorizontal(observedImage);

                #region draw lines between the matched features
                foreach (SURFTracker.MatchedSURFFeature matchedFeature in matchedFeatures)
                {
                    PointF p = matchedFeature.ObservedFeature.Point.pt;
                    p.X += modelImage.Width; // shift the observed point into the right half
                    res.Draw(new LineSegment2DF(matchedFeature.SimilarFeatures[0].Feature.Point.pt, p), new Gray(0), 1);
                }
                #endregion

                #region draw the project region on the image
                if (homography != null)
                {  //draw a rectangle along the projected model
                    Rectangle rect = modelImage.ROI;
                    PointF[] pts = new PointF[] {
                   new PointF(rect.Left, rect.Bottom),
                   new PointF(rect.Right, rect.Bottom),
                   new PointF(rect.Right, rect.Top),
                   new PointF(rect.Left, rect.Top)};
                    homography.ProjectPoints(pts);

                    // FIX: the images are concatenated HORIZONTALLY (scene to the right
                    // of the model), so the projected corners must be shifted in X by
                    // the model width — the original shifted Y by the model height,
                    // drawing the outline in the wrong place. The match-line loop above
                    // already shifts X, confirming the intended layout.
                    for (int i = 0; i < pts.Length; i++)
                        pts[i].X += modelImage.Width;

                    res.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Gray(255.0), 5);
                }
                #endregion

                if (show)
                {
                    ImageViewer.Show(res);
                }

                return res;
            }
        }
Example #6
0
        /// <summary>
        /// SURF detection demo: locates "box.png" inside "box_in_scene.png",
        /// visualizes the feature matches and the detected region on a stacked
        /// (model over scene) image, and displays it with the match timing.
        /// </summary>
        static void Run()
        {
            MCvSURFParams surfParam = new MCvSURFParams(500, false);

            Image<Gray, Byte> modelImage = new Image<Gray, byte>("box.png");

            // Extract features from the model and build the tracker up front.
            SURFFeature[] modelFeatures = modelImage.ExtractSURF(ref surfParam);
            SURFTracker tracker = new SURFTracker(modelFeatures);

            Image<Gray, Byte> observedImage = new Image<Gray, byte>("box_in_scene.png");

            Stopwatch watch = Stopwatch.StartNew();

            // Timed section: scene feature extraction, matching, filtering,
            // and homography estimation.
            SURFFeature[] observedFeatures = observedImage.ExtractSURF(ref surfParam);

            SURFTracker.MatchedSURFFeature[] matches = tracker.MatchFeature(observedFeatures, 2, 20);
            matches = SURFTracker.VoteForUniqueness(matches, 0.8);
            matches = SURFTracker.VoteForSizeAndOrientation(matches, 1.5, 20);
            HomographyMatrix homography = SURFTracker.GetHomographyMatrixFromMatchedFeatures(matches);

            watch.Stop();

            // Stack the model on top of the scene for display.
            Image<Gray, Byte> res = modelImage.ConcateVertical(observedImage);

            // Connect each model feature to its matched scene feature
            // (scene points are shifted down into the bottom half).
            foreach (SURFTracker.MatchedSURFFeature match in matches)
            {
                PointF observedPoint = match.ObservedFeature.Point.pt;
                observedPoint.Y += modelImage.Height;
                res.Draw(new LineSegment2DF(match.SimilarFeatures[0].Feature.Point.pt, observedPoint), new Gray(0), 1);
            }

            // Outline the projected model region inside the scene half.
            if (homography != null)
            {
                Rectangle rect = modelImage.ROI;
                PointF[] corners = new PointF[]
                {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(corners);

                for (int i = 0; i < corners.Length; i++)
                {
                    corners[i].Y += modelImage.Height;
                }

                res.DrawPolyline(Array.ConvertAll<PointF, Point>(corners, Point.Round), true, new Gray(255.0), 5);
            }

            ImageViewer.Show(res, String.Format("Matched in {0} milliseconds", watch.ElapsedMilliseconds));
        }