public override void ViewDidLoad()
{
    base.ViewDidLoad();
    ButtonText = "Match";

    // When the button is pressed, run the KAZE feature-matching demo:
    // match "box.png" against "box_in_scene.png", scale the annotated
    // result to fit the current view, and display it with the timing.
    base.OnButtonClick += delegate
    {
        long elapsedMs;
        Size viewSize = FrameSize;

        using (Mat model = CvInvoke.Imread("box.png", Emgu.CV.CvEnum.ImreadModes.Grayscale))
        using (Mat scene = CvInvoke.Imread("box_in_scene.png", Emgu.CV.CvEnum.ImreadModes.Grayscale))
        using (Emgu.CV.Features2D.KAZE detector = new Emgu.CV.Features2D.KAZE())
        using (Mat matchResult = DrawMatches.Draw(model, scene, detector, out elapsedMs))
        using (Mat scaled = new Mat())
        {
            // Uniform scale factor that fits the match image inside the
            // view while preserving its aspect ratio.
            double scaleX = ((double)viewSize.Width) / matchResult.Width;
            double scaleY = ((double)viewSize.Height) / matchResult.Height;
            double scale = Math.Min(scaleX, scaleY);
            CvInvoke.Resize(matchResult, scaled, Size.Empty, scale, scale);
            //image.Resize(frameSize.Width, frameSize.Height, Emgu.CV.CvEnum.Inter.Nearest, true)

            MessageText = String.Format("Matching Time: {0} milliseconds.", elapsedMs);
            SetImage(scaled);
        }
    };
}
//https://www.meridium.se/sv/blogg/imagematching-using-opencv/
/// <summary>
/// Detects KAZE features in both images, matches them with a FLANN
/// KdTree matcher, and estimates a homography from the surviving matches.
/// </summary>
/// <param name="modelImage">Template image to find.</param>
/// <param name="observedImage">Scene image to search in.</param>
/// <param name="modelKeyPoints">Receives the key points detected in the model image.</param>
/// <param name="observedKeyPoints">Receives the key points detected in the observed image.</param>
/// <param name="matches">Caller-owned collection filled with the k-nearest-neighbour matches (k = 2).</param>
/// <param name="mask">Receives the per-match validity mask (255 = kept) after the uniqueness vote; caller disposes.</param>
/// <param name="homography">Receives the model-to-scene homography, or null when fewer than 4 matches survive voting.</param>
/// <param name="score">Receives the total number of raw knn match candidates (counted before masking).</param>
public static void FindMatch(Mat modelImage, Mat observedImage, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography, out long score)
{
    int k = 2;                          // neighbours per query descriptor
    double uniquenessThreshold = 0.80;  // Lowe-style ratio threshold for VoteForUniqueness
    homography = null;
    modelKeyPoints = new VectorOfKeyPoint();
    observedKeyPoints = new VectorOfKeyPoint();

    using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
    using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
    // KAZE and the descriptor Mats wrap native OpenCV memory; dispose them
    // (the original leaked all three on every call).
    using (var featureDetector = new Emgu.CV.Features2D.KAZE())
    using (Mat modelDescriptors = new Mat())
    using (Mat observedDescriptors = new Mat())
    {
        featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
        featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

        // KdTree index for faster results / less accuracy.
        using (var ip = new Emgu.CV.Flann.KdTreeIndexParams())
        using (var sp = new Emgu.CV.Flann.SearchParams())
        using (var matcher = new Emgu.CV.Features2D.FlannBasedMatcher(ip, sp))
        {
            matcher.Add(modelDescriptors);
            matcher.KnnMatch(observedDescriptors, matches, k, null);

            mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
            mask.SetTo(new MCvScalar(255));
            Emgu.CV.Features2D.Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

            // Score = raw count of knn candidates. Note: deliberately ignores
            // the uniqueness mask (the masked variant is left commented out).
            score = 0;
            for (int i = 0; i < matches.Size; i++)
            {
                // if (mask.GetData(i)[0] == 0) continue;
                foreach (var e in matches[i].ToArray())
                {
                    ++score;
                }
            }

            // A homography needs at least 4 point correspondences; re-check
            // after the size/orientation vote prunes the mask further.
            int nonZeroCount = CvInvoke.CountNonZero(mask);
            if (nonZeroCount >= 4)
            {
                nonZeroCount = Emgu.CV.Features2D.Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                if (nonZeroCount >= 4)
                {
                    homography = Emgu.CV.Features2D.Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                }
            }
        }
    }
}