/// <summary>
/// Loads the template image at <paramref name="path"/> and precomputes its ORB and KAZE
/// keypoints/descriptors (into this instance's keyPointsOrb/descriptorOrb and
/// keyPointsSurf/descriptorSurf fields) so the template can be matched repeatedly
/// without re-running detection.
/// </summary>
/// <param name="path">File path of the template image to load.</param>
/// <param name="correspondingValue">Value this template stands for when it is matched.</param>
/// <param name="orbDetector">ORB detector used to fill keyPointsOrb/descriptorOrb.</param>
/// <param name="featureDetector">KAZE detector used to fill keyPointsSurf/descriptorSurf.</param>
public ImageData(string path, string correspondingValue, Emgu.CV.Features2D.ORBDetector orbDetector, KAZE featureDetector)
{
    this.path = path;
    this.image = new Image<Bgr, byte>(path);

    orbDetector.DetectAndCompute(image, null, keyPointsOrb, descriptorOrb, false);

    // KAZE is run on a UMat so OpenCL acceleration can kick in when available.
    // The UMat is an IDisposable native handle — the original version leaked it;
    // dispose it as soon as detection is done.
    using (UMat uImage = image.Mat.GetUMat(AccessType.Read))
    {
        featureDetector.DetectAndCompute(uImage, null, keyPointsSurf, descriptorSurf, false);
    }

    this.correspondingValue = correspondingValue;
}
/// <summary>
/// Matches the precomputed ORB descriptors of <paramref name="template"/> against the
/// features detected in <paramref name="observedImage"/>. Returns the number of matches
/// that survive both the uniqueness vote and the size/orientation vote, or 0.0 when
/// there are no keypoints, no matches, or fewer than four unique matches.
/// </summary>
/// <param name="observedImage">Image to search for the template in.</param>
/// <param name="template">Template whose ORB keypoints/descriptors were precomputed.</param>
/// <returns>Count of surviving matches as a double; 0.0 means "not detected".</returns>
private double DetectTemplate(Mat observedImage, TemplateContainer.ImageData template)
{
    orbDetector.DetectAndCompute(observedImage, null, observedKeyPoints, observedDescriptor, false);

    // Guard clause: nothing to match against.
    if (template.keyPointsOrb.Size == 0 || observedKeyPoints.Size == 0)
    {
        return 0.0;
    }

    // NOTE(review): ORB produces binary descriptors, for which DistanceType.Hamming is
    // the documented metric; L2 is kept here only to preserve the existing scores —
    // confirm and switch to Hamming.
    // BFMatcher, the match vector and the mask are all IDisposable native handles;
    // the original version leaked all three on every call.
    using (BFMatcher matcher = new BFMatcher(DistanceType.L2))
    using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
    {
        matcher.Add(template.descriptorOrb);
        matcher.KnnMatch(observedDescriptor, matches, 2, null);

        // Hoisted above the mask work: the original only checked this after voting,
        // but that branch returned before the mask was ever read.
        if (matches.Size == 0)
        {
            return 0.0;
        }

        using (Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1))
        {
            mask.SetTo(new MCvScalar(255));
            Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

            int nonZeroCount = CvInvoke.CountNonZero(mask);
            if (nonZeroCount > 3)
            {
                // NOTE(review): a normalized score (count / template keypoint count) was
                // computed in the original but never returned; the dead assignments were
                // removed and the raw count is returned to preserve caller-visible behavior.
                nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(template.keyPointsOrb, observedKeyPoints, matches, mask, 1.8, 18);
                return nonZeroCount;
            }

            return 0.0;
        }
    }
}