```csharp
public MatchedImageFeature[] MatchFeature(ImageFeature[] observedFeatures, int k, int emax)
```
| Parameter | Type | Description |
| --- | --- | --- |
| observedFeatures | Emgu.CV.Features2D.ImageFeature[] | The image features extracted from the observed image |
| k | int | The number of neighbors to find |
| emax | int | For k-d tree only: the maximum number of leaves to visit |
| Return value | MatchedImageFeature[] | The matched image features, each pairing an observed feature with its k most similar model features |
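The example below, adapted from the Emgu CV SURF feature sample, runs the full pipeline: detect SURF features on a model image ("box.png"), match them against an observed scene ("box_in_scene.png") with k = 2 and emax = 20, filter the matches, and draw the result: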
```csharp
static void Run()
{
    SURFDetector surfParam = new SURFDetector(350, false);

    Image<Gray, Byte> modelImage = new Image<Gray, byte>("box.png");
    // Extract features from the model image
    ImageFeature[] modelFeatures = surfParam.DetectFeatures(modelImage, null);

    // Create a feature tracker from the model features
    Features2DTracker tracker = new Features2DTracker(modelFeatures);

    Image<Gray, Byte> observedImage = new Image<Gray, byte>("box_in_scene.png");

    Stopwatch watch = Stopwatch.StartNew();

    // Extract features from the observed image
    ImageFeature[] imageFeatures = surfParam.DetectFeatures(observedImage, null);

    // Find the 2 nearest model features for each observed feature (emax = 20)
    Features2DTracker.MatchedImageFeature[] matchedFeatures = tracker.MatchFeature(imageFeatures, 2, 20);
    // Discard ambiguous matches (nearest-neighbor ratio test, threshold 0.8)
    matchedFeatures = Features2DTracker.VoteForUniqueness(matchedFeatures, 0.8);
    // Discard matches whose scale and rotation disagree with the majority
    matchedFeatures = Features2DTracker.VoteForSizeAndOrientation(matchedFeatures, 1.5, 20);
    HomographyMatrix homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(matchedFeatures);

    watch.Stop();

    // Merge the model image and the observed image into one image for display
    Image<Gray, Byte> res = modelImage.ConcateVertical(observedImage);

    #region draw lines between the matched features
    foreach (Features2DTracker.MatchedImageFeature matchedFeature in matchedFeatures)
    {
        PointF p = matchedFeature.ObservedFeature.KeyPoint.Point;
        p.Y += modelImage.Height;
        res.Draw(new LineSegment2DF(matchedFeature.SimilarFeatures[0].Feature.KeyPoint.Point, p), new Gray(0), 1);
    }
    #endregion

    #region draw the projected region on the image
    if (homography != null)
    {
        // Draw a rectangle along the projected model
        Rectangle rect = modelImage.ROI;
        PointF[] pts = new PointF[] {
            new PointF(rect.Left, rect.Bottom),
            new PointF(rect.Right, rect.Bottom),
            new PointF(rect.Right, rect.Top),
            new PointF(rect.Left, rect.Top)
        };
        homography.ProjectPoints(pts);

        for (int i = 0; i < pts.Length; i++)
            pts[i].Y += modelImage.Height;

        res.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Gray(255.0), 5);
    }
    #endregion

    ImageViewer.Show(res, String.Format("Matched in {0} milliseconds", watch.ElapsedMilliseconds));
}
```
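The VoteForUniqueness step above relies on having k = 2 neighbors per feature: a match is kept only when its best neighbor is clearly closer than its second best (Lowe's ratio test). For reference, here is a minimal sketch of that filter written against the same `MatchedImageFeature` type; it assumes `using System.Collections.Generic;` and is illustrative only — use the library method in practice:

```csharp
// Illustrative sketch of the ratio test that VoteForUniqueness applies;
// prefer the library method itself in real code.
static Features2DTracker.MatchedImageFeature[] FilterUnique(
    Features2DTracker.MatchedImageFeature[] matches, double uniquenessThreshold)
{
    var kept = new List<Features2DTracker.MatchedImageFeature>();
    foreach (var m in matches)
    {
        // A match is unambiguous when its best neighbor is clearly closer
        // than its second-best neighbor (requires k >= 2 when matching).
        if (m.SimilarFeatures.Length >= 2 &&
            m.SimilarFeatures[0].Distance <
            uniquenessThreshold * m.SimilarFeatures[1].Distance)
        {
            kept.Add(m);
        }
    }
    return kept.ToArray();
}
```

The next example applies the same match-and-vote pipeline to a small image-retrieval task: one query image is compared against a repository of pre-computed SURF feature sets, and every image whose match percentage and homography pass the configured thresholds is returned: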
```csharp
public List<ImageRecord> QueryImage(string queryImagePath, SurfSettings surfSetting = null)
{
    List<ImageRecord> rtnImageList = new List<ImageRecord>();

    var observerFeatureSets = SurfRepository.GetSurfRecordList();

    #region SURF detector settings
    double hessianThresh = 500;
    double uniquenessThreshold = 0.8;
    int minGoodMatchPercent = 50;

    if (surfSetting != null)
    {
        hessianThresh = surfSetting.HessianThresh.Value;
        uniquenessThreshold = surfSetting.UniquenessThreshold.Value;
        minGoodMatchPercent = surfSetting.GoodMatchThreshold.Value;
    }

    SURFDetector surfDetector = new SURFDetector(hessianThresh, false);
    #endregion

    using (Image<Gray, byte> modelImage = new Image<Gray, byte>(queryImagePath))
    {
        ImageFeature<float>[] modelFeatures = surfDetector.DetectFeatures(modelImage, null);
        if (modelFeatures.Length < 4)
            throw new InvalidOperationException(
                "The query image does not have enough significant features to match (at least 4 are required).");

        Features2DTracker<float> tracker = new Features2DTracker<float>(modelFeatures);

        foreach (var surfRecord in observerFeatureSets)
        {
            // Match the stored features against the query image, then filter
            // by uniqueness, scale and orientation
            Features2DTracker<float>.MatchedImageFeature[] matchedFeatures =
                tracker.MatchFeature(surfRecord.observerFeatures, 2);
            Features2DTracker<float>.MatchedImageFeature[] uniqueFeatures =
                Features2DTracker<float>.VoteForUniqueness(matchedFeatures, uniquenessThreshold);
            Features2DTracker<float>.MatchedImageFeature[] uniqueRotOriFeatures =
                Features2DTracker<float>.VoteForSizeAndOrientation(uniqueFeatures, 1.5, 20);

            // Percentage of model features that survived all the filters
            int goodMatchCount = uniqueRotOriFeatures.Length;
            double matchPercentage = Math.Round((double)goodMatchCount / modelFeatures.Length * 100);

            if (matchPercentage >= minGoodMatchPercent)
            {
                HomographyMatrix homography =
                    Features2DTracker<float>.GetHomographyMatrixFromMatchedFeatures(uniqueRotOriFeatures);
                // Accept the image only if the surviving matches also yield a
                // geometrically valid homography
                if (homography != null && homography.IsValid(5))
                {
                    surfRecord.Distance = matchPercentage;
                    rtnImageList.Add((ImageRecord)surfRecord);
                }
            }
        }
    }

    return rtnImageList.OrderByDescending(x => x.Distance).ToList();
}
```
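`SurfSettings`, `ImageRecord`, `SurfRecord`, and `SurfRepository` are application-side types, not part of Emgu CV, and the example does not define them. Below is a minimal sketch of what they might look like, with every name and member inferred from the usage above (the lowercase `observerFeatures` is kept as the example spells it); treat these as hypothetical placeholders for your own data layer. It assumes `using System.Collections.Generic;` and `using Emgu.CV.Features2D;`:

```csharp
// Hypothetical supporting types inferred from the QueryImage example above.
public class SurfSettings
{
    public double? HessianThresh { get; set; }        // SURF Hessian threshold
    public double? UniquenessThreshold { get; set; }  // ratio-test threshold
    public int? GoodMatchThreshold { get; set; }      // minimum match percentage
}

public class ImageRecord
{
    public string ImageName { get; set; }
    public double Distance { get; set; }  // used here as a match score
}

public class SurfRecord : ImageRecord
{
    // Pre-computed SURF features for one indexed image
    public ImageFeature<float>[] observerFeatures { get; set; }
}

public static class SurfRepository
{
    static readonly List<SurfRecord> _records = new List<SurfRecord>();

    // Add a pre-computed record for one indexed image (hypothetical helper)
    public static void AddSurfRecord(SurfRecord record) { _records.Add(record); }

    // Return the feature sets for all indexed images
    public static List<SurfRecord> GetSurfRecordList() { return _records; }
}
```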