private static void BForceMatcherSample()
{
    // Detects FAST keypoints on two images, computes BRIEF descriptors,
    // brute-force kNN-matches them, and displays the drawn matches.
    var src1 = new Mat("data/match1.png");
    var src2 = new Mat("data/match2.png");
    var gray1 = new Mat();
    var gray2 = new Mat();
    Cv2.CvtColor(src1, gray1, ColorConversion.BgrToGray);
    Cv2.CvtColor(src2, gray2, ColorConversion.BgrToGray);

    var fast = new FastFeatureDetector(10);
    var descriptorExtractor = new BriefDescriptorExtractor(32);

    var descriptors1 = new Mat();
    var descriptors2 = new Mat();
    KeyPoint[] keypoints1 = fast.Run(gray1, null);
    descriptorExtractor.Compute(gray1, ref keypoints1, descriptors1);
    KeyPoint[] keypoints2 = fast.Run(gray2, null);
    descriptorExtractor.Compute(gray2, ref keypoints2, descriptors2);

    // BUGFIX: BRIEF descriptors are packed bit strings; they must be compared
    // with Hamming distance. L2 on binary descriptors produces meaningless
    // match rankings (see OpenCV BFMatcher docs: use NORM_HAMMING for
    // ORB/BRIEF/BRISK). Also removed the dead `bfMatches.ToString();` call.
    var bfMatcher = new BFMatcher(NormType.Hamming, false);
    DMatch[][] bfMatches = bfMatcher.KnnMatch(descriptors1, descriptors2, 3, null, false);

    var view = new Mat();
    Cv2.DrawMatches(src1, keypoints1, src2, keypoints2, bfMatches, view);
    Window.ShowImages(view);
}
/// <summary>
/// Runs a feature detector selected by <paramref name="mode"/> over the
/// grayscale conversion of <paramref name="image"/>.
/// Mode 0 = GFTT (Good Features To Track), 1 = BRISK, 2 = FAST.
/// </summary>
/// <returns>The detected keypoints, or null for an unrecognized mode.</returns>
public MKeyPoint[] Maspointer(Image<Bgr, byte> image, int mode)
{
    switch (mode)
    {
        case 0:
        {
            // Shi–Tomasi style corner detector with the sample's fixed parameters.
            var gfttDetector = new GFTTDetector(40, 0.01, 5, 3, true);
            return gfttDetector.Detect(image.Convert<Gray, byte>().Mat);
        }

        case 1:
        {
            var briskDetector = new Brisk();
            return briskDetector.Detect(image.Convert<Gray, byte>().Mat);
        }

        case 2:
        {
            var fastDetector = new FastFeatureDetector();
            return fastDetector.Detect(image.Convert<Gray, byte>().Mat);
        }

        default:
            // Unknown mode: nothing to detect with.
            return null;
    }
}
public void TestFreak()
{
    // FAST keypoints described by FREAK must be distinctive enough for the
    // shared tracker helper to recover a homography between image pairs.
    FastFeatureDetector fastDetector = new FastFeatureDetector(10, true);
    Freak freakExtractor = new Freak(true, true, 22.0f, 4);

    EmguAssert.IsTrue(
        TestFeature2DTracker(fastDetector, freakExtractor),
        "Unable to find homography matrix");
}
public void TestFAST()
{
    // FAST corners paired with BRIEF descriptors must let the shared tracker
    // helper recover a homography between image pairs.
    FastFeatureDetector fastDetector = new FastFeatureDetector(10, true);
    BriefDescriptorExtractor briefExtractor = new BriefDescriptorExtractor(32);

    EmguAssert.IsTrue(
        TestFeature2DTracker(fastDetector, briefExtractor),
        "Unable to find homography matrix");
}
public void Detect()
{
    // FAST is a pure corner detector (no descriptors), so Detect is the whole API surface here.
    KeyPoint[] keyPoints;

    using (var gray = Image("lenna.png", 0))
    {
        // NOTE: renamed from `orb` — this is a FastFeatureDetector, not ORB.
        using (var detector = FastFeatureDetector.Create())
        {
            keyPoints = detector.Detect(gray);
        }
    }

    Console.WriteLine($"KeyPoint has {keyPoints.Length} items.");
}
/// <summary>
/// Runs the FAST corner detector over the currently loaded input image,
/// lists each keypoint's properties in the grid, and shows the annotated image.
/// </summary>
/// <param name="threshold">FAST intensity-difference threshold (default 10).</param>
private void ApplyFASTFeatureDetector(int threshold = 10)
{
    try
    {
        // Nothing loaded yet — bail out quietly.
        if (imgList["Input"] == null)
        {
            return;
        }

        var sourceImage = imgList["Input"].Clone();
        var grayImage = sourceImage.Convert<Gray, byte>();

        FastFeatureDetector fastDetector = new FastFeatureDetector(threshold);
        var keypoints = fastDetector.Detect(grayImage);

        dt.Rows.Clear();
        lab_notes.Text = "Number of corners: " + keypoints.Length.ToString();

        // One grid row per detected keypoint.
        foreach (MKeyPoint keypoint in keypoints)
        {
            dt.Rows.Add(
                keypoint.ClassId,
                keypoint.Point.ToString(),
                keypoint.Angle,
                keypoint.Size,
                keypoint.Octave,
                keypoint.Response);
        }

        // Draw the keypoints in red on a copy for display.
        Mat annotated = new Mat();
        Features2DToolbox.DrawKeypoints(sourceImage, new VectorOfKeyPoint(keypoints), annotated, new Bgr(0, 0, 255));

        imageBoxEx1.Image = annotated.ToBitmap();
        dataGridView1.DataSource = dt;
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
/// <summary>
/// End-to-end CUDA feature-matching test: extracts FAST+BRIEF features from a
/// model image and a scene image, kNN-matches them on the GPU with Hamming
/// distance, votes matches down to a homography, and (when found) projects the
/// model's corners into the scene for display. Skipped entirely when no CUDA
/// device is available.
/// </summary>
public void TestBruteForceHammingDistance()
{
    if (CudaInvoke.HasCuda)
    {
        Image<Gray, byte> box = new Image<Gray, byte>("box.png");
        FastFeatureDetector fast = new FastFeatureDetector(100, true);
        BriefDescriptorExtractor brief = new BriefDescriptorExtractor(32);

        #region extract features from the object image
        Stopwatch stopwatch = Stopwatch.StartNew();
        VectorOfKeyPoint modelKeypoints = new VectorOfKeyPoint();
        fast.DetectRaw(box, modelKeypoints);
        Mat modelDescriptors = new Mat();
        // BRIEF yields binary descriptors — matched below with Hamming distance.
        brief.Compute(box, modelKeypoints, modelDescriptors);
        stopwatch.Stop();
        Trace.WriteLine(String.Format("Time to extract feature from model: {0} milli-sec", stopwatch.ElapsedMilliseconds));
        #endregion

        Image<Gray, Byte> observedImage = new Image<Gray, byte>("box_in_scene.png");

        #region extract features from the observed image
        stopwatch.Reset(); stopwatch.Start();
        VectorOfKeyPoint observedKeypoints = new VectorOfKeyPoint();
        fast.DetectRaw(observedImage, observedKeypoints);
        Mat observedDescriptors = new Mat();
        brief.Compute(observedImage, observedKeypoints, observedDescriptors);
        stopwatch.Stop();
        Trace.WriteLine(String.Format("Time to extract feature from image: {0} milli-sec", stopwatch.ElapsedMilliseconds));
        #endregion

        Mat homography = null;
        // Uploads the model descriptors to GPU memory.
        using (GpuMat<Byte> gpuModelDescriptors = new GpuMat<byte>(modelDescriptors)) //initialization of GPU code might took longer time.
        {
            stopwatch.Reset(); stopwatch.Start();
            CudaBFMatcher hammingMatcher = new CudaBFMatcher(DistanceType.Hamming);

            //BFMatcher hammingMatcher = new BFMatcher(BFMatcher.DistanceType.Hamming, modelDescriptors);
            int k = 2; // two nearest neighbours per query, needed for the ratio/uniqueness vote below
            Matrix<int> trainIdx = new Matrix<int>(observedKeypoints.Size, k);
            Matrix<float> distance = new Matrix<float>(trainIdx.Size);

            using (GpuMat<Byte> gpuObservedDescriptors = new GpuMat<byte>(observedDescriptors))
            //using (GpuMat<int> gpuTrainIdx = new GpuMat<int>(trainIdx.Rows, trainIdx.Cols, 1, true))
            //using (GpuMat<float> gpuDistance = new GpuMat<float>(distance.Rows, distance.Cols, 1, true))
            using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
            {
                // Inner stopwatch isolates the match itself from the data transfer above.
                Stopwatch w2 = Stopwatch.StartNew();
                //hammingMatcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k);
                hammingMatcher.KnnMatch(gpuObservedDescriptors, gpuModelDescriptors, matches, k, null, true);
                w2.Stop();
                Trace.WriteLine(String.Format("Time for feature matching (excluding data transfer): {0} milli-sec", w2.ElapsedMilliseconds));
                //gpuTrainIdx.Download(trainIdx);
                //gpuDistance.Download(distance);

                // Start with every match enabled; the vote steps clear bad entries.
                Mat mask = new Mat(distance.Rows, 1, DepthType.Cv8U, 1);
                mask.SetTo(new MCvScalar(255));
                Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);

                // A homography needs at least 4 correspondences.
                int nonZeroCount = CvInvoke.CountNonZero(mask);
                if (nonZeroCount >= 4)
                {
                    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeypoints, observedKeypoints, matches, mask, 1.5, 20);
                    if (nonZeroCount >= 4)
                    {
                        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeypoints, observedKeypoints, matches, mask, 2);
                    }
                    nonZeroCount = CvInvoke.CountNonZero(mask);
                }

                stopwatch.Stop();
                Trace.WriteLine(String.Format("Time for feature matching (including data transfer): {0} milli-sec", stopwatch.ElapsedMilliseconds));
            }
        }

        if (homography != null)
        {
            // Project the model image's corner rectangle into the scene.
            Rectangle rect = box.ROI;
            PointF[] pts = new PointF[] {
                new PointF(rect.Left, rect.Bottom),
                new PointF(rect.Right, rect.Bottom),
                new PointF(rect.Right, rect.Top),
                new PointF(rect.Left, rect.Top) };
            PointF[] points = CvInvoke.PerspectiveTransform(pts, homography);
            //homography.ProjectPoints(points);

            //Merge the object image and the observed image into one big image for display
            Image<Gray, Byte> res = box.ConcateVertical(observedImage);

            // Shift projected points down by the model image's height so they
            // land on the scene half of the stacked display image.
            for (int i = 0; i < points.Length; i++)
            {
                points[i].Y += box.Height;
            }
            res.DrawPolyline(Array.ConvertAll<PointF, Point>(points, Point.Round), true, new Gray(255.0), 5);
            //ImageViewer.Show(res);
        }
    }
}
public void CreateAndDispose()
{
    // Smoke test: a factory-created FAST detector (renamed from the misleading
    // `surf`) must be disposable without throwing.
    using (var detector = FastFeatureDetector.Create())
    {
        // Dispose happens when the using block exits.
    }
}