public StopSignDetector(Image <Bgr, Byte> stopSignModel)
 {
     _detector = new SURFDetector(500, false);
     using (Image <Gray, Byte> redMask = GetRedPixelMask(stopSignModel))
     {
         ImageFeature <float>[] features = _detector.DetectFeatures(redMask, null);
         if (features.Length == 0)
         {
             throw new Exception("No image features were found in the stop sign model");
         }
         _tracker = new Features2DTracker <float>(features);
     }
     _octagonStorage = new MemStorage();
     _octagon        = new Contour <Point>(_octagonStorage);
     _octagon.PushMulti(new Point[] {
         new Point(1, 0),
         new Point(2, 0),
         new Point(3, 1),
         new Point(3, 2),
         new Point(2, 3),
         new Point(1, 3),
         new Point(0, 2),
         new Point(0, 1)
     }, Emgu.CV.CvEnum.BACK_OR_FRONT.FRONT);
 }
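
The GetRedPixelMask helper called above is not shown in this listing. Below is a minimal sketch, modeled on the Emgu CV traffic-sign example this detector appears to derive from: it keeps pixels whose hue falls outside [20, 160] (red wraps around the hue axis) and whose saturation is at least 10.

 private static Image <Gray, Byte> GetRedPixelMask(Image <Bgr, Byte> image)
 {
     using (Image <Hsv, Byte> hsv = image.Convert <Hsv, Byte>())
     {
         Image <Gray, Byte>[] channels = hsv.Split();
         try
         {
             //hue inside [20, 160] is non-red; invert the range mask to keep red hues
             CvInvoke.cvInRangeS(channels[0], new MCvScalar(20), new MCvScalar(160), channels[0]);
             channels[0]._Not();

             //saturation of at least 10 filters out near-white pixels
             channels[1]._ThresholdBinary(new Gray(10), new Gray(255.0));

             //keep pixels that are both red-hued and sufficiently saturated
             CvInvoke.cvAnd(channels[0], channels[1], channels[0], IntPtr.Zero);
         }
         finally
         {
             channels[1].Dispose();
             channels[2].Dispose();
         }
         return channels[0];
     }
 }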
        private int GetMatchedFeatureCount(Image <Gray, Byte> contourImage)
        {
            Features2DTracker <float> .MatchedImageFeature[] matchedFeatures;
            try
            {
                ImageFeature <float>[] features = surfaceParameters.DetectFeatures(contourImage, null);
                matchedFeatures = featureTracker.MatchFeature(features, 2);
            }
            catch (AccessViolationException)
            {
                Console.WriteLine("Access Violation Exception encountered");
                return(0);
            }
            catch (Exception)
            {
                return(0);
            }

            int matchedFeatureCount = 0;

            foreach (Features2DTracker <float> .MatchedImageFeature feature in matchedFeatures)
            {
                if (feature.SimilarFeatures[0].Distance < 0.5)
                {
                    matchedFeatureCount++;
                }
            }

            return(matchedFeatureCount);
        }
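
GetMatchedFeatureCount depends on two instance fields that this listing never declares; they are initialized in CreateSurfaceTracker further down. Assumed declarations, with names and types inferred from usage:

        private SURFDetector surfaceParameters;
        private Features2DTracker <float> featureTracker;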
Example #3
        public void IndexFiles(FileInfo[] imageFiles, System.ComponentModel.BackgroundWorker IndexBgWorker,
                               Action <string> logWriter,
                               SurfSettings surfSetting = null)
        {
            #region SURF Detector Region
            double hessianThresh       = 500;
            double uniquenessThreshold = 0.8;

            if (surfSetting != null)
            {
                hessianThresh       = surfSetting.HessianThresh.Value;
                uniquenessThreshold = surfSetting.UniquenessThreshold.Value;
            }

            SURFDetector surfDetector = new SURFDetector(hessianThresh, false);
            #endregion

            List <SURFRecord2> surfRecord2List = new List <SURFRecord2>();
            Stopwatch          sw1, sw2;

            sw1 = Stopwatch.StartNew();
            logWriter("Index started...");
            int totalFileCount = imageFiles.Length;
            for (int i = 0; i < totalFileCount; i++)
            {
                var fi = imageFiles[i];
                using (Image <Gray, byte> observerImage = new Image <Gray, byte>(fi.FullName))
                {
                    ImageFeature <float>[] observerFeatures = surfDetector.DetectFeatures(observerImage, null);

                    if (observerFeatures.Length > 4)
                    {
                        SURFRecord2 record = new SURFRecord2
                        {
                            Id               = i,
                            ImageName        = fi.Name,
                            ImagePath        = fi.FullName,
                            observerFeatures = observerFeatures
                        };
                        surfRecord2List.Add(record);
                    }
                    else
                    {
                        Debug.WriteLine(fi.Name + " skipped from index because it didn't have significant features");
                    }
                }
                IndexBgWorker.ReportProgress(i);
            }
            SurfRepository.AddSURFRecord2List(surfRecord2List);
            sw1.Stop();
            logWriter(string.Format("Index Complete, it tooked {0} ms. Saving Repository...", sw1.ElapsedMilliseconds));

            sw2 = Stopwatch.StartNew();
            SurfRepository.SaveRepository(SurfAlgo.Linear);
            sw2.Stop();

            logWriter(string.Format("Index tooked {0} ms. Saving Repository tooked {1} ms", sw1.ElapsedMilliseconds, sw2.ElapsedMilliseconds));
        }
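
A hypothetical caller for IndexFiles, showing one way to wire up the BackgroundWorker and log writer. The directory path and the indexer instance are placeholders, and note that IndexFiles reports the file index (not a percentage) through ReportProgress:

        var worker = new System.ComponentModel.BackgroundWorker { WorkerReportsProgress = true };
        worker.DoWork += (s, e) =>
        {
            FileInfo[] files = new DirectoryInfo(@"C:\images").GetFiles("*.jpg");
            indexer.IndexFiles(files, worker, msg => Console.WriteLine(msg));
        };
        worker.ProgressChanged += (s, e) => Console.WriteLine("Indexed file #" + e.ProgressPercentage);
        worker.RunWorkerAsync();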
        private void FindStopSign(Image <Bgr, byte> img, List <Image <Gray, Byte> > stopSignList, List <Rectangle> boxList, Contour <Point> contours)
        {
            for (; contours != null; contours = contours.HNext)
            {
                contours.ApproxPoly(contours.Perimeter * 0.02, 0, contours.Storage);
                if (contours.Area > 200)
                {
                    double ratio = CvInvoke.cvMatchShapes(_octagon, contours, Emgu.CV.CvEnum.CONTOURS_MATCH_TYPE.CV_CONTOURS_MATCH_I3, 0);

                    if (ratio > 0.1) //not a good match of contour shape
                    {
                        Contour <Point> child = contours.VNext;
                        if (child != null)
                        {
                            FindStopSign(img, stopSignList, boxList, child);
                        }
                        continue;
                    }

                    Rectangle box = contours.BoundingRectangle;

                    Image <Gray, Byte> candidate;
                    using (Image <Bgr, Byte> tmp = img.Copy(box))
                        candidate = tmp.Convert <Gray, byte>();

                    //set the value of pixels not in the contour region to zero
                    using (Image <Gray, Byte> mask = new Image <Gray, byte>(box.Size))
                    {
                        mask.Draw(contours, new Gray(255), new Gray(255), 0, -1, new Point(-box.X, -box.Y));

                        double mean = CvInvoke.cvAvg(candidate, mask).v0;
                        candidate._ThresholdBinary(new Gray(mean), new Gray(255.0));
                        candidate._Not();
                        mask._Not();
                        candidate.SetValue(0, mask);
                    }

                    ImageFeature[] features = _detector.DetectFeatures(candidate, null);

                    Features2DTracker.MatchedImageFeature[] matchedFeatures = _tracker.MatchFeature(features, 2);

                    int goodMatchCount = 0;
                    foreach (Features2DTracker.MatchedImageFeature ms in matchedFeatures)
                    {
                        if (ms.SimilarFeatures[0].Distance < 0.5)
                        {
                            goodMatchCount++;
                        }
                    }

                    if (goodMatchCount >= 10)
                    {
                        boxList.Add(box);
                        stopSignList.Add(candidate);
                    }
                }
            }
        }
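
FindStopSign expects a contour tree extracted from the red-pixel mask. A sketch of the public entry point that would produce it, again modeled on the Emgu CV traffic-sign example (Gaussian smoothing, red masking, dilate/erode cleanup, Canny edges, then contour extraction):

        public void DetectStopSign(Image <Bgr, byte> img, List <Image <Gray, Byte> > stopSignList, List <Rectangle> boxList)
        {
            Image <Bgr, Byte>  smoothImg       = img.SmoothGaussian(5, 5, 1.5, 1.5);
            Image <Gray, Byte> smoothedRedMask = GetRedPixelMask(smoothImg);

            //Dilate followed by Erode closes small gaps in the candidate contours
            smoothedRedMask._Dilate(1);
            smoothedRedMask._Erode(1);

            using (Image <Gray, Byte> canny = smoothedRedMask.Canny(new Gray(100), new Gray(50)))
                using (MemStorage stor = new MemStorage())
                {
                    Contour <Point> contours = canny.FindContours(
                        Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE,
                        Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_TREE,
                        stor);
                    FindStopSign(img, stopSignList, boxList, contours);
                }
        }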
 private void CreateSurfaceTracker()
 {
     surfaceParameters = new SURFDetector(500, false);
     using (Image <Bgr, Byte> stopSignModel = new Image <Bgr, Byte>(Properties.Resources.SignModel))
         using (Image <Gray, Byte> redMask = GetRedPixelMask(stopSignModel))
         {
             featureTracker = new Features2DTracker <float>(surfaceParameters.DetectFeatures(redMask, null));
         }
 }
        public Homography()
        {
            InitializeComponent();
            camera        = new Capture(@"F:\Working\Final phase\DataSet\20150409_13-34-33.asf");
            surfParam     = new SURFDetector(500, false);
            modelImage    = new Image <Rgb, byte>(@"C:\Users\ahmed nady\Pictures\modelImage.bmp");
            modelFeatures = surfParam.DetectFeatures(modelImage.Convert <Gray, byte>(), null);

            //Create a Feature Tracker
            tracker = new Features2DTracker <float>(modelFeatures);
        }
        private void button1_Click(object sender, EventArgs e)
        {
            if (capture)
            {
                button1.Text = "start";
                //extract features from the object image
                modelFeatures = surfParam.DetectFeatures(modelImage.Convert <Gray, byte>(), null);

                //Create a Feature Tracker
                tracker = new Features2DTracker <float>(modelFeatures);

                Application.Idle -= ProcessFrame;
            }
            else
            {
                button1.Text = "pause";

                Application.Idle += ProcessFrame;
            }
            capture = !capture;
        }
Example #8
        public Surf()
        {
            InitializeComponent();
            camera     = new Capture(0);
            surfParam  = new SURFDetector(500, false);
            modelImage = new Image <Rgb, byte>(@"C:\Users\ahmed nady\Desktop\hand.jpg");
            //extract features from the object image
            modelFeatures = surfParam.DetectFeatures(modelImage.Convert <Gray, byte>(), null);

            //Create a Feature Tracker
            tracker = new Features2DTracker <float>(modelFeatures);
        }
Example #9
        void ProcessFrame(object sender, EventArgs arg)
        {
            observedImage = camera.QueryFrame().Convert <Rgb, byte>();

            // extract features from the observed image
            ImageFeature <float>[] imageFeatures = surfParam.DetectFeatures(observedImage.Convert <Gray, byte>(), null);

            Features2DTracker <float> .MatchedImageFeature[] matchedFeatures = tracker.MatchFeature(imageFeatures, 2);
            matchedFeatures = Features2DTracker <float> .VoteForUniqueness(matchedFeatures, 0.8);

            matchedFeatures = Features2DTracker <float> .VoteForSizeAndOrientation(matchedFeatures, 1.5, 20);

            HomographyMatrix homography = Features2DTracker <float> .GetHomographyMatrixFromMatchedFeatures(matchedFeatures);

            Image <Rgb, Byte> res = modelImage.ConcateHorizontal(observedImage);

            #region draw the project region on the image
            if (homography != null)
            {       //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                observedImage.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Rgb(255, 20, 100), 5);
            }
            #endregion
            imageBox2.Image = res;
            imageBox1.Image = observedImage;
        }
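
The Homography and Surf form snippets above share several fields that the listing never declares. Assumed declarations, inferred from usage:

        private Capture camera;
        private SURFDetector surfParam;
        private Image <Rgb, byte> modelImage;
        private Image <Rgb, byte> observedImage;
        private ImageFeature <float>[] modelFeatures;
        private Features2DTracker <float> tracker;
        private bool capture; //toggled by button1_Click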
        public SurfStopSignDetector()
        {
            detector = new SURFDetector(500, false);

            Image <Bgr, Byte>  stopSignModel = new Image <Bgr, Byte>("models\\stop-sign-model.png");
            Image <Gray, Byte> redMask       = GetRedPixelMask(stopSignModel);

            tracker = new Features2DTracker <float>(detector.DetectFeatures(redMask, null));

            octagonStorage = new MemStorage();
            octagon        = new Contour <Point>(octagonStorage);
            octagon.PushMulti(new Point[] { new Point(1, 0), new Point(2, 0), new Point(3, 1), new Point(3, 2),
                                            new Point(2, 3), new Point(1, 3), new Point(0, 2), new Point(0, 1) }, Emgu.CV.CvEnum.BACK_OR_FRONT.FRONT);
        }
Example #11
 public StopSignDetector(Image <Bgr, Byte> stopSignModel)
 {
     _detector = new SURFDetector(500, false);
     using (Image <Gray, Byte> redMask = GetRedPixelMask(stopSignModel))
     {
         ImageFeature <float>[] temp = _detector.DetectFeatures(redMask, null);
         _tracker = new Features2DTracker <float>(temp);
     }
     _octagonStorage = new MemStorage();
     _octagon        = new Contour <Point>(_octagonStorage);
     _octagon.PushMulti(new Point[] {
         new Point(1, 0),
         new Point(2, 0),
         new Point(3, 1),
         new Point(3, 2),
         new Point(2, 3),
         new Point(1, 3),
         new Point(0, 2),
         new Point(0, 1)
     }, Emgu.CV.CvEnum.BACK_OR_FRONT.FRONT);
 }
Example #12
        static void Run()
        {
            SURFDetector surfParam = new SURFDetector(500, false);

            Image <Gray, Byte> modelImage = new Image <Gray, byte>("box.png");

            //extract features from the object image
            ImageFeature[] modelFeatures = surfParam.DetectFeatures(modelImage, null);

            //Create a Feature Tracker
            Features2DTracker tracker = new Features2DTracker(modelFeatures);

            Image <Gray, Byte> observedImage = new Image <Gray, byte>("box_in_scene.png");

            Stopwatch watch = Stopwatch.StartNew();

            // extract features from the observed image
            ImageFeature[] imageFeatures = surfParam.DetectFeatures(observedImage, null);

            Features2DTracker.MatchedImageFeature[] matchedFeatures = tracker.MatchFeature(imageFeatures, 2, 20);
            matchedFeatures = Features2DTracker.VoteForUniqueness(matchedFeatures, 0.8);
            matchedFeatures = Features2DTracker.VoteForSizeAndOrientation(matchedFeatures, 1.5, 20);
            HomographyMatrix homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(matchedFeatures);

            watch.Stop();

            //Merge the object image and the observed image into one image for display
            Image <Gray, Byte> res = modelImage.ConcateVertical(observedImage);

            #region draw lines between the matched features
            foreach (Features2DTracker.MatchedImageFeature matchedFeature in matchedFeatures)
            {
                PointF p = matchedFeature.ObservedFeature.KeyPoint.Point;
                p.Y += modelImage.Height;
                res.Draw(new LineSegment2DF(matchedFeature.SimilarFeatures[0].Feature.KeyPoint.Point, p), new Gray(0), 1);
            }
            #endregion

            #region draw the project region on the image
            if (homography != null)
            { //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[]  pts  = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)
                };
                homography.ProjectPoints(pts);

                for (int i = 0; i < pts.Length; i++)
                {
                    pts[i].Y += modelImage.Height;
                }

                res.DrawPolyline(Array.ConvertAll <PointF, Point>(pts, Point.Round), true, new Gray(255.0), 5);
            }
            #endregion

            ImageViewer.Show(res, String.Format("Matched in {0} milliseconds", watch.ElapsedMilliseconds));
        }
Example #13
        public List <ImageRecord> QueryImage(string queryImagePath, SurfSettings surfSetting = null)
        {
            List <ImageRecord> rtnImageList = new List <ImageRecord>();

            var observerFeatureSets = SurfRepository.GetSurfRecordList();

            #region SURF Detector Region
            double hessianThresh       = 500;
            double uniquenessThreshold = 0.8;
            int    minGoodMatchPercent = 50;

            if (surfSetting != null)
            {
                hessianThresh       = surfSetting.HessianThresh.Value;
                uniquenessThreshold = surfSetting.UniquenessThreshold.Value;
                minGoodMatchPercent = surfSetting.GoodMatchThreshold.Value;
            }

            SURFDetector surfDetector = new SURFDetector(hessianThresh, false);
            #endregion

            using (Image <Gray, byte> modelImage = new Image <Gray, byte>(queryImagePath))
            {
                ImageFeature <float>[] modelFeatures = surfDetector.DetectFeatures(modelImage, null);

                if (modelFeatures.Length < 4)
                {
                    throw new InvalidOperationException("Model image didn't have any significant features to detect");
                }

                Features2DTracker <float> tracker = new Features2DTracker <float>(modelFeatures);
                foreach (var surfRecord in observerFeatureSets)
                {
                    string queryImageName = System.IO.Path.GetFileName(queryImagePath);
                    string modelImageName = surfRecord.ImageName;

                    Features2DTracker <float> .MatchedImageFeature[] matchedFeatures = tracker.MatchFeature(surfRecord.observerFeatures, 2);

                    Features2DTracker <float> .MatchedImageFeature[] uniqueFeatures = Features2DTracker <float> .VoteForUniqueness(matchedFeatures, uniquenessThreshold);

                    Features2DTracker <float> .MatchedImageFeature[] uniqueRotOriFeatures = Features2DTracker <float> .VoteForSizeAndOrientation(uniqueFeatures, 1.5, 20);

                    int  goodMatchCount = uniqueRotOriFeatures.Length;
                    bool isMatch        = false;

                    //percentage of model features that survived the uniqueness and size/orientation voting
                    double totalNumberOfModelFeatures = modelFeatures.Length;
                    double matchPercentage            = Math.Round((double)goodMatchCount / totalNumberOfModelFeatures * 100);
                    if (matchPercentage >= minGoodMatchPercent)
                    {
                        HomographyMatrix homography =
                            Features2DTracker <float> .GetHomographyMatrixFromMatchedFeatures(uniqueRotOriFeatures);

                        if (homography != null)
                        {
                            isMatch = homography.IsValid(5);
                            if (isMatch)
                            {
                                surfRecord.Distance = matchPercentage;
                                rtnImageList.Add((ImageRecord)surfRecord);
                            }
                        }
                    }

                }
            }
            rtnImageList = rtnImageList.OrderByDescending(x => x.Distance).ToList();
            return(rtnImageList);
        }
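
A hypothetical call to QueryImage. The host instance, image path, and threshold values are placeholders; SurfSettings is assumed to expose settable nullable properties (matching the .Value accesses above), and the repository must already have been populated by IndexFiles:

        List <ImageRecord> matches = surfQueryService.QueryImage(@"C:\images\query.jpg",
            new SurfSettings
            {
                HessianThresh       = 400,
                UniquenessThreshold = 0.8,
                GoodMatchThreshold  = 50
            });
        //ImageName and Distance are assumed to be exposed on ImageRecord
        foreach (ImageRecord record in matches)
        {
            Console.WriteLine("{0} matched {1}%", record.ImageName, record.Distance);
        }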