/// <summary>
/// Demo: locate the model image ("box.png") inside the observed scene
/// ("box_in_scene.png") using SURF features, then display the matched
/// feature lines and the projected model outline in a merged image.
/// </summary>
static void Run()
{
    MCvSURFParams surfParam = new MCvSURFParams(500, false);

    // Images, the merged result and the homography are IDisposable;
    // the original version leaked all of them.
    using (Image<Gray, Byte> modelImage = new Image<Gray, byte>("box.png"))
    using (Image<Gray, Byte> observedImage = new Image<Gray, byte>("box_in_scene.png"))
    {
        //extract features from the object image
        SURFFeature[] modelFeatures = modelImage.ExtractSURF(ref surfParam);

        //Create a SURF Tracker
        SURFTracker tracker = new SURFTracker(modelFeatures);

        Stopwatch watch = Stopwatch.StartNew();

        // extract features from the observed image
        SURFFeature[] imageFeatures = observedImage.ExtractSURF(ref surfParam);

        // 2-nearest-neighbour matching, then prune ambiguous matches and
        // matches inconsistent in scale/rotation before estimating the homography
        SURFTracker.MatchedSURFFeature[] matchedFeatures = tracker.MatchFeature(imageFeatures, 2, 20);
        matchedFeatures = SURFTracker.VoteForUniqueness(matchedFeatures, 0.8);
        matchedFeatures = SURFTracker.VoteForSizeAndOrientation(matchedFeatures, 1.5, 20);
        HomographyMatrix homography = SURFTracker.GetHomographyMatrixFromMatchedFeatures(matchedFeatures);
        watch.Stop();

        //Merge the object image and the observed image into one image for display
        using (Image<Gray, Byte> res = modelImage.ConcateVertical(observedImage))
        {
            #region draw lines between the matched features
            foreach (SURFTracker.MatchedSURFFeature matchedFeature in matchedFeatures)
            {
                PointF p = matchedFeature.ObservedFeature.Point.pt;
                // observed image sits below the model image in the merged view
                p.Y += modelImage.Height;
                res.Draw(new LineSegment2DF(matchedFeature.SimilarFeatures[0].Feature.Point.pt, p), new Gray(0), 1);
            }
            #endregion

            #region draw the project region on the image
            if (homography != null)
            {
                //draw a rectangle along the projected model
                Rectangle rect = modelImage.ROI;
                PointF[] pts = new PointF[] {
                    new PointF(rect.Left, rect.Bottom),
                    new PointF(rect.Right, rect.Bottom),
                    new PointF(rect.Right, rect.Top),
                    new PointF(rect.Left, rect.Top)};
                homography.ProjectPoints(pts);

                // shift the projected outline into the observed half of the merged image
                for (int i = 0; i < pts.Length; i++)
                    pts[i].Y += modelImage.Height;

                res.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Gray(255.0), 5);

                homography.Dispose();
            }
            #endregion

            ImageViewer.Show(res, String.Format("Matched in {0} milliseconds",
                watch.ElapsedMilliseconds));
        }
    }
}
/// <summary>
/// Initializes the detector: builds a SURF tracker from the red pixels of
/// the stop-sign model image and prepares the octagon contour used later
/// for shape comparison.
/// </summary>
public StopSignDetector()
{
    _surfParam = new MCvSURFParams(500, false);

    using (Image<Bgr, Byte> model = new Image<Bgr, Byte>("stop-sign-model.png"))
    using (Image<Gray, Byte> redChannelMask = GetRedPixelMask(model))
    {
        // SURF features are extracted from the red mask only
        _tracker = new SURFTracker(redChannelMask.ExtractSURF(ref _surfParam));
    }

    _octagonStorage = new MemStorage();
    _octagon = new Contour<Point>(_octagonStorage);

    // eight vertices of a canonical octagon on a 3x3 grid
    Point[] octagonVertices =
    {
        new Point(1, 0), new Point(2, 0),
        new Point(3, 1), new Point(3, 2),
        new Point(2, 3), new Point(1, 3),
        new Point(0, 2), new Point(0, 1)
    };
    _octagon.PushMulti(octagonVertices, Emgu.CV.CvEnum.BACK_OR_FRONT.FRONT);
}
/// <summary>
/// Determines whether the <paramref name="reference"/> bitmap can be located
/// inside <paramref name="observedImage"/> via SURF feature matching.
/// </summary>
/// <param name="observedImage">The scene image to search in.</param>
/// <param name="reference">The model bitmap to search for.</param>
/// <returns>true if a homography mapping the model into the scene was found.</returns>
private static bool MatchImages(Image<Gray, Byte> observedImage, Bitmap reference)
{
    MCvSURFParams surfParam = new MCvSURFParams(500, false);

    // the model image was leaked in the original; dispose it deterministically
    using (Image<Gray, Byte> modelImage = new Image<Gray, byte>(reference))
    {
        //extract features from the object image
        SURFFeature[] modelFeatures = modelImage.ExtractSURF(ref surfParam);

        //Create a SURF Tracker
        SURFTracker tracker = new SURFTracker(modelFeatures);

        // extract features from the observed image
        SURFFeature[] imageFeatures = observedImage.ExtractSURF(ref surfParam);

        // 2-nearest-neighbour matching, then prune ambiguous and
        // scale/rotation-inconsistent matches.
        // NOTE(review): 'accuracy' is a member declared elsewhere in this class.
        SURFTracker.MatchedSURFFeature[] matchedFeatures = tracker.MatchFeature(imageFeatures, 2, 20);
        matchedFeatures = SURFTracker.VoteForUniqueness(matchedFeatures, accuracy);
        matchedFeatures = SURFTracker.VoteForSizeAndOrientation(matchedFeatures, 1.5, 20);

        // a non-null homography means enough consistent matches were found;
        // 'using' tolerates null and disposes the matrix when it exists
        using (HomographyMatrix homography = SURFTracker.GetHomographyMatrixFromMatchedFeatures(matchedFeatures))
        {
            return homography != null;
        }
    }
}
/// <summary>
/// Exercises SURF feature extraction, homography detection and
/// CamShift-based tracking, timing each stage and drawing the projected
/// model region onto a merged display image.
/// </summary>
public void TestSURF()
{
    for (int k = 0; k < 1; k++)
    {
        Image<Gray, Byte> modelImage = new Image<Gray, byte>("box.png");
        //Image<Gray, Byte> modelImage = new Image<Gray, byte>("stop.jpg");
        //modelImage = modelImage.Resize(400, 400, true);
        //modelImage._EqualizeHist();

        #region extract features from the object image
        Stopwatch stopwatch = Stopwatch.StartNew();
        MCvSURFParams param1 = new MCvSURFParams(500, false);
        SURFFeature[] modelFeatures = modelImage.ExtractSURF(ref param1);
        SURFTracker tracker = new SURFTracker(modelFeatures);
        stopwatch.Stop();
        Trace.WriteLine(String.Format("Time to extract feature from model: {0} milli-sec", stopwatch.ElapsedMilliseconds));
        #endregion

        //Image<Gray, Byte> observedImage = new Image<Gray, byte>("traffic.jpg");
        Image<Gray, Byte> observedImage = new Image<Gray, byte>("box_in_scene.png");
        //Image<Gray, Byte> observedImage = modelImage.Rotate(45, new Gray(0.0));
        //Image<Gray, Byte> observedImage = new Image<Gray, byte>("left.jpg");
        //image = image.Resize(400, 400, true);
        //observedImage._EqualizeHist();

        #region extract features from the observed image
        stopwatch.Reset();
        stopwatch.Start();
        MCvSURFParams param2 = new MCvSURFParams(500, false);
        SURFFeature[] observedFeatures = observedImage.ExtractSURF(ref param2);
        stopwatch.Stop();
        Trace.WriteLine(String.Format("Time to extract feature from image: {0} milli-sec", stopwatch.ElapsedMilliseconds));
        #endregion

        //Merge the object image and the observed image into one big image for display
        Image<Gray, Byte> res = modelImage.ConcateVertical(observedImage);

        // corners of the model image, reused to draw each projected outline
        Rectangle rect = modelImage.ROI;
        PointF[] pts = new PointF[] {
            new PointF(rect.Left, rect.Bottom),
            new PointF(rect.Right, rect.Bottom),
            new PointF(rect.Right, rect.Top),
            new PointF(rect.Left, rect.Top)};

        HomographyMatrix homography;

        stopwatch.Reset();
        stopwatch.Start();
        homography = tracker.Detect(observedFeatures, 0.8);
        stopwatch.Stop();
        Trace.WriteLine(String.Format("Time for feature matching: {0} milli-sec", stopwatch.ElapsedMilliseconds));

        if (homography != null)
        {
            PointF[] points = pts.Clone() as PointF[];
            homography.ProjectPoints(points);

            // shift the outline into the lower (observed) half of the merged image
            for (int i = 0; i < points.Length; i++)
                points[i].Y += modelImage.Height;

            res.DrawPolyline(Array.ConvertAll<PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);
        }

        stopwatch.Reset();
        stopwatch.Start();
        //set the initial region to be the whole image
        using (Image<Gray, Single> priorMask = new Image<Gray, float>(observedImage.Size))
        {
            priorMask.SetValue(1.0);
            homography = tracker.CamShiftTrack(
               observedFeatures,
               (RectangleF)observedImage.ROI,
               priorMask);
        }
        // BUGFIX: the stopwatch was never stopped before reporting this stage,
        // unlike every other timed stage in this method
        stopwatch.Stop();
        Trace.WriteLine(String.Format("Time for feature tracking: {0} milli-sec", stopwatch.ElapsedMilliseconds));

        if (homography != null) //set the initial tracking window to be the whole image
        {
            PointF[] points = pts.Clone() as PointF[];
            homography.ProjectPoints(points);

            for (int i = 0; i < points.Length; i++)
                points[i].Y += modelImage.Height;

            res.DrawPolyline(Array.ConvertAll<PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);
        }
    }
}