/// <summary>
/// Matches every view of <paramref name="modelObject"/> against the BRISK features of
/// <paramref name="imageToSearch"/> and returns one <see cref="ImageSearchResult"/> per view
/// that survives the uniqueness and size/orientation voting stages.
/// </summary>
/// <param name="modelObject">Model whose views (may contain nulls, which are skipped) are matched.</param>
/// <param name="imageToSearch">Path of the image to detect features in.</param>
/// <returns>Results (view, homography, matches, observed features, mask) for each accepted view; empty list if none match.</returns>
public static List<ImageSearchResult> SearchImageForObjects(WorldObject modelObject, string imageToSearch)
{
    const int k = 2;                        // nearest neighbours per descriptor for the ratio test
    const double uniquenessThreshold = 0.8; // Lowe-style ratio threshold used by VoteForUniqueness
    const int nonZeroThreshold = 10;        // minimum surviving matches to keep considering a view
    // NOTE: the previous hessianThresh constant was removed — it was a leftover from a
    // SURF-based detector and was never read; feature detection here is BRISK.

    ObjectFeatures targetImageFeatures = DetectFeatures_Brisk(imageToSearch);
    List<ImageSearchResult> searchResults = new List<ImageSearchResult>();

    foreach (ObjectView view in modelObject.Views)
    {
        if (view == null)
        {
            continue;
        }

        VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
        // BFMatcher is IDisposable; dispose it once the knn-matches have been extracted.
        // Bug fix: the literal 2 previously passed to KnnMatch now uses the k constant.
        using (BFMatcher matcher = new BFMatcher(DistanceType.L2))
        {
            matcher.Add(view.Features.Descriptors);
            matcher.KnnMatch(targetImageFeatures.Descriptors, matches, k, null);
        }

        // One mask row per match, initialised to 255 (= candidate); voting stages zero out rejects.
        Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
        mask.SetTo(new MCvScalar(255));
        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

        int nonZeroCount = CvInvoke.CountNonZero(mask);
        if (nonZeroCount >= nonZeroThreshold)
        {
            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(
                view.Features.KeyPoints, targetImageFeatures.KeyPoints, matches, mask, 1.5, 20);
            if (nonZeroCount >= nonZeroThreshold)
            {
                Mat homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(
                    view.Features.KeyPoints, targetImageFeatures.KeyPoints, matches, mask, 2);
                searchResults.Add(new ImageSearchResult(view, homography, matches, targetImageFeatures, mask));
            }
        }
    }

    return searchResults;
}
/// <summary>
/// Creates a view of an object seen from a given perspective, immediately running
/// BRISK feature detection on the supplied image.
/// </summary>
/// <param name="perspective">Perspective this view was captured from.</param>
/// <param name="imagePath">Path to the view's image; features are detected from it on construction.</param>
public ObjectView(Perspective perspective, string imagePath)
{
    _imagePerspective = perspective;
    _imagePath = imagePath;
    Features = FeatureDetector.DetectFeatures_Brisk(imagePath);
}
/// <summary>
/// Bundles the outcome of matching one model view against an observed image:
/// the view, the estimated homography, the raw knn matches, the observed image's
/// features and the vote mask marking accepted matches.
/// </summary>
/// <param name="view">Model view that produced this match.</param>
/// <param name="homography">Homography mapping the view into the observed image.</param>
/// <param name="matches">Knn descriptor matches between view and observed image.</param>
/// <param name="observedFeatures">Features detected in the observed image.</param>
/// <param name="mask">Per-match mask; non-zero rows survived the voting stages.</param>
public ImageSearchResult(ObjectView view, Mat homography, VectorOfVectorOfDMatch matches, ObjectFeatures observedFeatures, Mat mask)
{
    MatchingView = view;
    Homography = homography;
    Matches = matches;
    ObservedFeatures = observedFeatures;
    Mask = mask;
}