/// <summary>
/// Matches SURF features between a circle template and a scene image, estimates a
/// homography (RHO method), warps the inverted template into the scene and renders
/// it as a translucent red overlay onto this.Result.
/// </summary>
private void FindAndDrawHomo()
{
    using (var template = new Mat("Images\\Circle_Template.bmp", ImreadModes.Color))
    using (var surf = SURF.Create(1000))
    using (var templateDescriptors = new Mat())
    {
        surf.DetectAndCompute(template, null, out KeyPoint[] templateKeyPoints, templateDescriptors);
        using (var image = new Mat("Images\\Circle.bmp", ImreadModes.Color))
        using (var imageDescriptors = new Mat())
        {
            surf.DetectAndCompute(image, null, out KeyPoint[] imageKeyPoints, imageDescriptors);
            using (var matcher = new BFMatcher())
            {
                // Query = scene image descriptors, train = template descriptors.
                var matches = matcher.Match(imageDescriptors, templateDescriptors);
                var goodMatches = matches; // distance filtering disabled: .Where(m => m.Distance < 0.2).ToArray();
                using (var srcPoints = InputArray.Create(goodMatches.Select(m => templateKeyPoints[m.TrainIdx].Pt)))
                using (var dstPoints = InputArray.Create(goodMatches.Select(m => imageKeyPoints[m.QueryIdx].Pt)))
                // BUGFIX: the homography variable name was garbled ("h**o") and did not compile.
                using (var homography = Cv2.FindHomography(srcPoints, dstPoints, HomographyMethods.Rho))
                using (var tmp = image.Overlay())
                {
                    // Invert so the template's circle becomes non-zero, then project it into scene space.
                    Cv2.BitwiseNot(template, template);
                    Cv2.WarpPerspective(template, tmp, homography, tmp.Size());
                    using (var overlay = tmp.Overlay())
                    {
                        for (var r = 0; r < tmp.Rows; r++)
                        {
                            for (var c = 0; c < tmp.Cols; c++)
                            {
                                // Transparent where the warped template is empty, translucent red elsewhere.
                                // NOTE(review): At<int> reads 4 bytes per pixel — assumes Overlay() yields
                                // a 4-channel (BGRA) Mat; confirm against the Overlay() extension.
                                overlay.Set(r, c, tmp.At<int>(r, c) == 0 ? new Vec4b(0, 0, 0, 0) : new Vec4b(0, 0, 255, 150));
                            }
                        }
                        this.Result.Source = overlay.ToBitmapSource();
                    }
                }
            }
        }
    }
}
/// <summary>
/// Compares BFMatcher and FlannBasedMatcher on SURF descriptors of two colour
/// images and shows both match visualisations until a key is pressed.
/// </summary>
private void MatchBySurf(Mat src1, Mat src2)
{
    using var grayA = new Mat();
    using var grayB = new Mat();
    Cv2.CvtColor(src1, grayA, ColorConversionCodes.BGR2GRAY);
    Cv2.CvtColor(src2, grayB, ColorConversionCodes.BGR2GRAY);

    // Extended (128-dim) SURF descriptors, hessian threshold 200.
    using var detector = SURF.Create(200, 4, 2, true);
    using var descA = new Mat<float>();
    using var descB = new Mat<float>();
    detector.DetectAndCompute(grayA, null, out KeyPoint[] kpA, descA);
    detector.DetectAndCompute(grayB, null, out KeyPoint[] kpB, descB);

    // Run both matcher implementations over the same descriptors.
    using var bruteForce = new BFMatcher(NormTypes.L2, false);
    using var flann = new FlannBasedMatcher();
    DMatch[] bruteMatches = bruteForce.Match(descA, descB);
    DMatch[] flannMatches = flann.Match(descA, descB);

    // Render each matcher's result into its own view.
    using var bfView = new Mat();
    Cv2.DrawMatches(grayA, kpA, grayB, kpB, bruteMatches, bfView);
    using var flannView = new Mat();
    Cv2.DrawMatches(grayA, kpA, grayB, kpB, flannMatches, flannView);

    using (new Window("SURF matching (by BFMather)", WindowMode.AutoSize, bfView))
    using (new Window("SURF matching (by FlannBasedMatcher)", WindowMode.AutoSize, flannView))
    {
        Cv2.WaitKey();
    }
}
/// <summary>
/// Matches SURF descriptors between the circle image and the circle template and
/// draws the match lines onto this.Result.
/// </summary>
private void FindAndDrawMatches()
{
    using var template = new Mat("Images\\Circle_Template.bmp", ImreadModes.GrayScale);
    using var detector = SURF.Create(1000);
    using var templateDescriptors = new Mat();
    detector.DetectAndCompute(template, null, out KeyPoint[] templateKeyPoints, templateDescriptors);

    using var image = new Mat("Images\\Circle.bmp", ImreadModes.GrayScale);
    using var imageDescriptors = new Mat();
    detector.DetectAndCompute(image, null, out KeyPoint[] imageKeyPoints, imageDescriptors);

    using var matcher = new BFMatcher();
    // Query = scene image descriptors, train = template descriptors.
    var matches = matcher.Match(imageDescriptors, templateDescriptors);

    using var overLay = image.Overlay();
    Cv2.DrawMatches(image, imageKeyPoints, template, templateKeyPoints, matches, overLay);
    this.Result.Source = overLay.ToBitmapSource();
}
public void New2()
{
    // Constructing a BoW extractor from a SURF extractor and a BF matcher
    // must succeed without throwing.
    var extractor = SURF.Create(100);
    var matcher = new BFMatcher();
    _ = new BOWImgDescriptorExtractor(extractor, matcher);
}
/// <summary>
/// Detects SURF keypoints in two grayscale images, matches their descriptors with
/// either a brute-force or a FLANN matcher, and draws the matches into dst.
/// </summary>
/// <param name="gray1">First grayscale input image.</param>
/// <param name="gray2">Second grayscale input image.</param>
/// <param name="dst">Output image the matches are drawn into.</param>
/// <param name="hessianThreshold">SURF hessian threshold; higher keeps fewer, stronger features.</param>
/// <param name="useBFMatcher">true = BFMatcher (L2 norm), false = FlannBasedMatcher.</param>
public void Run(Mat gray1, Mat gray2, Mat dst, int hessianThreshold, bool useBFMatcher)
{
    // Extended (128-dim) descriptors, 4 octaves, 2 layers per octave.
    // BUGFIX: SURF, the descriptor Mats and the matchers wrap native memory and
    // were never disposed; using declarations release them deterministically.
    using var surf = SURF.Create(hessianThreshold, 4, 2, true);
    using var descriptors1 = new Mat();
    using var descriptors2 = new Mat();
    surf.DetectAndCompute(gray1, null, out KeyPoint[] keypoints1, descriptors1);
    surf.DetectAndCompute(gray2, null, out KeyPoint[] keypoints2, descriptors2);

    // Occasionally an image yields no features — matching would then throw.
    // (The original checked Rows for one branch and Width for the other; both
    // are zero exactly when the descriptor Mat is empty.)
    if (descriptors1.Rows <= 0 || descriptors2.Rows <= 0)
    {
        return;
    }

    if (useBFMatcher)
    {
        using var bfMatcher = new BFMatcher(NormTypes.L2, false);
        DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);
        Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, dst);
    }
    else
    {
        using var flannMatcher = new FlannBasedMatcher();
        DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);
        Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, dst);
    }
}
public void New4()
{
    // Every object here wraps native OpenCV state; dispose in reverse creation order.
    using var ip = new LinearIndexParams();
    using var sp = new SearchParams();
    using var descriptorExtractor = SURF.Create(100);
    using var descriptorMatcher = new FlannBasedMatcher(ip, sp);
    using var bow = new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher);
}
public void Detect()
{
    // These parameters should reproduce the result of
    // http://opencv.jp/wordpress/wp-content/uploads/lenna_SURF-150x150.png
    using var gray = Image("lenna.png", 0);
    using var surf = SURF.Create(500, 4, 2, true);

    KeyPoint[] detected = surf.Detect(gray);

    Console.WriteLine($"KeyPoint has {detected.Length} items.");
}
public void New4()
{
    // BUGFIX: all of these wrap native OpenCV resources and were never
    // disposed; using declarations release them (matching the disposing
    // variant of this test elsewhere in the suite).
    using var descriptorExtractor = SURF.Create(100);
    using var ip = new LinearIndexParams();
    using var sp = new SearchParams();
    using var descriptorMatcher = new FlannBasedMatcher(ip, sp);
    using (new BOWImgDescriptorExtractor(descriptorExtractor, descriptorMatcher))
    {
    }
}
public void New2Flann()
{
    using var extractor = SURF.Create(100);
    using var matcher = new FlannBasedMatcher();

    // Construct (and dispose) two BoW extractors in sequence to verify the
    // extractor/matcher pair survives reuse.
    using (new BOWImgDescriptorExtractor(extractor, matcher))
    {
    }
    using (new BOWImgDescriptorExtractor(extractor, matcher))
    {
    }
}
public void DetectAndCompute()
{
    using var gray = Image("lenna.png", ImreadModes.Grayscale);
    using var surf = SURF.Create(500);
    using var descriptor = new Mat();

    // One call both detects the keypoints and fills the descriptor Mat.
    surf.DetectAndCompute(gray, null, out var keyPoints, descriptor);

    Console.WriteLine($"keyPoints has {keyPoints.Length} items.");
    Console.WriteLine($"descriptor has {descriptor.Rows} items.");
}
public void Compute()
{
    using var color = Image("lenna.png", ImreadModes.Color);
    using var gray = Image("lenna.png", ImreadModes.GrayScale);
    using var descriptors = new Mat();
    using var latch = LATCH.Create();
    using var surf = SURF.Create(500);

    // SURF locates the keypoints on the grayscale image; LATCH then describes
    // them on the colour image.
    KeyPoint[] keypoints = surf.Detect(gray);
    latch.Compute(color, ref keypoints, descriptors);
}
public void SURFTest()
{
    using var source = new Mat(ThreeSquaresWB, ImreadModes.GrayScale);
    using var detector = SURF.Create(200);

    var keyPoints = detector.Detect(source);

    // Draw the detected keypoints onto a white 300x300 canvas and show both.
    using var canvas = new Mat(300, 300, MatType.CV_8UC3, Scalar.White);
    Cv2.DrawKeypoints(source, keyPoints, canvas);
    Window.ShowImages(new[] { source, canvas }, new[] { "Original", "Result" });
}
public void DescriptorSize()
{
    using var alg = SURF.Create(300);

    // Extended mode doubles the descriptor length: 64 -> 128.
    bool extendedBefore = alg.Extended;
    Assert.Equal(extendedBefore ? 128 : 64, alg.DescriptorSize);

    // Flipping the flag must flip the reported size accordingly.
    alg.Extended = !extendedBefore;
    bool extendedAfter = alg.Extended;
    Assert.NotEqual(extendedBefore, extendedAfter);
    Assert.Equal(extendedAfter ? 128 : 64, alg.DescriptorSize);
}
/// <summary>
/// Demo: detects SURF keypoints in a stereo pair, computes BRIEF descriptors,
/// brute-force matches them and displays the matches until a key is pressed.
/// </summary>
static void Main(string[] args)
{
    // Load the pair as grayscale and show the inputs.
    using var img1 = new Mat(@"..\..\Images\left.png", ImreadModes.GrayScale);
    Cv2.ImShow("Left", img1);
    Cv2.WaitKey(1); // do events
    using var img2 = new Mat(@"..\..\Images\right.png", ImreadModes.GrayScale);
    Cv2.ImShow("Right", img2);
    Cv2.WaitKey(1); // do events

    // Detect keypoints. Other detectors: FastFeatureDetector, StarDetector, SIFT,
    // ORB, BRISK, MSER, GFTTDetector, DenseFeatureDetector, SimpleBlobDetector.
    // SURF = Speeded Up Robust Features; a good hessian threshold is 300-500,
    // depending on image contrast.
    // BUGFIX: detector, extractor, descriptor Mats, matcher and imgMatches wrap
    // native memory and were never disposed.
    using var detector = SURF.Create(hessianThreshold: 400);
    var keypoints1 = detector.Detect(img1);
    var keypoints2 = detector.Detect(img2);

    // Compute descriptors. BRIEF = Binary Robust Independent Elementary Features.
    using var extractor = BriefDescriptorExtractor.Create();
    using var descriptors1 = new Mat();
    using var descriptors2 = new Mat();
    extractor.Compute(img1, ref keypoints1, descriptors1);
    extractor.Compute(img2, ref keypoints2, descriptors2);

    // Match descriptors brute-force.
    using var matcher = new BFMatcher();
    var matches = matcher.Match(descriptors1, descriptors2);

    // Draw and display the matches.
    using var imgMatches = new Mat();
    Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, matches, imgMatches);
    Cv2.ImShow("Matches", imgMatches);
    Cv2.WaitKey(1); // do events

    Cv2.WaitKey(0);
    Cv2.DestroyAllWindows();
}
/// <summary>
/// Detects SURF keypoints in the test image, reports the detection time in the
/// status bar, and renders the keypoints onto this.Result.
/// </summary>
public SURFView()
{
    this.InitializeComponent();

    using var image = new Mat("Images//3SquaresWB.bmp", ImreadModes.GrayScale);
    var stopwatch = Stopwatch.StartNew();
    using var surf = SURF.Create(200);

    var keyPoints = surf.Detect(image);
    this.Status.Text = $"{stopwatch.ElapsedMilliseconds} ms";

    using var overLay = image.Overlay();
    Cv2.DrawKeypoints(image, keyPoints, overLay);
    this.Result.Source = overLay.ToBitmapSource();
}
/// <summary>
/// Detects SURF features in both images, brute-force matches their descriptors,
/// logs each match distance and the share of matches under the threshold, then
/// draws the matches in a window.
/// </summary>
/// <param name="img1">First image (shown as "Matches1").</param>
/// <param name="img2">Second image (shown as "Matches2").</param>
private void image_maatching(Mat img1, Mat img2)
{
    Cv2.ImShow("Matches1", img1);
    Cv2.ImShow("Matches2", img2);

    // A good hessian threshold is 300-500, depending on image contrast.
    // BUGFIX: the detector, descriptor Mats and matcher were never disposed.
    using var detector = SURF.Create(hessianThreshold: 300, 4, 2, true, false);
    using var descriptors1 = new Mat();
    using var descriptors2 = new Mat();
    detector.DetectAndCompute(img1, null, out KeyPoint[] keypoints1, descriptors1);
    detector.DetectAndCompute(img2, null, out KeyPoint[] keypoints2, descriptors2);

    using var matcher = new BFMatcher();
    var matches = matcher.Match(descriptors1, descriptors2);

    // Count matches whose distance (scaled by 100) is under the threshold.
    float max_dist = 50;
    int cntSuccessPoint = 0;
    for (int i = 0; i < matches.Length; i++)
    {
        log_write("matches[i].Distance:" + Convert.ToString(max_dist) + "--" + Convert.ToString(matches[i].Distance));
        if ((matches[i].Distance * 100) < max_dist)
        {
            cntSuccessPoint = cntSuccessPoint + 1;
        }
    }

    // BUGFIX: guard against zero matches (division by zero) and keep the
    // fractional part of the percentage (integer division truncated it).
    double rate = matches.Length == 0 ? 0 : cntSuccessPoint * 100.0 / matches.Length;
    log_write("유사율:" + Convert.ToString(rate) + "---" + Convert.ToString(cntSuccessPoint) + "/" + Convert.ToString(matches.Length));

    var imgMatches = new Mat();
    Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, matches, imgMatches);
    Cv2.ImShow("Matches3", imgMatches);
}
/// <summary>
/// Builds an ImageModel for the given image: loads it as grayscale, computes
/// SURF descriptors for up to 1000 keypoints, and packages them with the
/// image's hash and path.
/// </summary>
/// <param name="imagePath">Path of the image file to describe.</param>
/// <returns>A populated ImageModel for the image.</returns>
public ImageModel ConvertImagePathToImageModel(string imagePath)
{
    // BUGFIX: the Mats and the SURF detector wrap native memory and were
    // never disposed; using declarations release them when the method returns.
    using Mat imgObject = Cv2.ImRead(imagePath, ImreadModes.Grayscale);
    using Mat descriptorsObject = new Mat();
    double minHessian = 400;
    // extended: false -> 64-dimensional descriptors.
    using SURF detector = SURF.Create(minHessian, extended: false);

    // Cap the keypoint count so the descriptor payload stays bounded.
    var keypointsObject = detector.Detect(imgObject).Take(1000).ToArray();
    detector.Compute(imgObject, ref keypointsObject, descriptorsObject);

    var imageModel = new ImageModel()
    {
        ImagePath = imagePath,
        ImageHash = ImageHelper.GetImageHash(imagePath),
        Descriptor = ConvertOpenCVTypes.ConvertMatToFloatArray(descriptorsObject)
    };
    return imageModel;
}
/// <summary>
/// Wires the SURF detection stage: ProcessFunction converts the incoming image
/// pair to grayscale, computes SURF keypoints/descriptors, and returns them;
/// PostProcessAction reports the elapsed time.
/// </summary>
public V1()
{
    ProcessFunction = (object obj) =>
    {
        // The payload carries the two source images as dynamic members.
        dynamic images = obj as dynamic;
        var source1 = images.src1;
        var source2 = images.src2;

        var gray1 = new Mat();
        var gray2 = new Mat();
        Cv2.CvtColor(source1, gray1, ColorConversionCodes.BGR2GRAY);
        Cv2.CvtColor(source2, gray2, ColorConversionCodes.BGR2GRAY);

        // Extended (128-dim) SURF descriptors, hessian threshold 200.
        var surf = SURF.Create(200, 4, 2, true);
        var descriptors1 = new MatOfFloat();
        var descriptors2 = new MatOfFloat();
        surf.DetectAndCompute(gray1, null, out KeyPoint[] keypoints1, descriptors1);
        surf.DetectAndCompute(gray2, null, out KeyPoint[] keypoints2, descriptors2);

        return new { gray1, gray2, descriptors1, descriptors2, keypoints1, keypoints2 };
    };

    PostProcessAction = () =>
    {
        Console.WriteLine("V1 terminou de processar!! Tempo gasto: {0}" + Environment.NewLine, ElapsedTime);
    };
}
public SURFDescriptor()
{
    // ORB alternative (faster, lower quality):
    // _featureDetector = ORB.Create(nFeatures: 200, edgeThreshold: 40, patchSize: 40, wtaK: 3);
    _featureDetector = SURF.Create(1200); // better matches, but a lot slower
}
// Opens a file picker, loads the chosen fingerprint image as grayscale, and compares
// four fixed regions of interest (termination, bifurcation, independency, island)
// against every image in Resources\Images. A region "matches" when SURF finds the
// same number of keypoints in both crops. Four region matches plus specific
// keypoint counts map to "ADMIN"/"DIRETOR"; four matches alone yield "OK";
// otherwise "Wrong". Cancelling the dialog yields "Canceled".
// NOTE(review): matcher.Match(...) is fed the raw image crops, not SURF descriptor
// Mats — BFMatcher expects descriptors, so the DrawMatches branches likely fail and
// are silently swallowed by the empty catch blocks. Verify before relying on them.
// NOTE(review): equality of keypoint COUNTS is a very weak similarity test, and the
// magic counts (201/169/127/143 and 174/169/133/154) are tied to specific enrolled
// images — confirm they still hold for the current resource set.
public Task <string> AuthenticateBio(bool?check)
{
    int matchesCounter = 0;
    OpenFileDialog openFileDialog = new OpenFileDialog();
    openFileDialog.Filter = "Image files (*.png;*.jpeg;*.jpg)|*.png;*.jpeg;*.jpg";
    if (openFileDialog.ShowDialog() == true)
    {
        // files from the images folder
        var files = Directory.GetFiles(Path.GetDirectoryName(System.Diagnostics.Process.GetCurrentProcess().MainModule.FileName) + "\\Resources\\Images");
        // Source -- the chosen file, loaded as grayscale
        Mat src = new Mat(Path.GetFullPath(openFileDialog.FileName), ImreadModes.Grayscale);
        // SURF - Speeded Up Robust Features
        var detector = SURF.Create(hessianThreshold: 400);
        // created at run-time; the garbage collector takes care of them later.
        var imgMatches = new Mat();
        // this is the matcher -- the COMPARATOR
        var matcher = new BFMatcher();
        for (int i = 0; i < 4; i++)
        {
            switch (i)
            {
                case 0:
                    Mat resSrcTermination = new Mat();
                    Mat resDstTermination = new Mat();
                    // resize src into resSrc
                    Cv2.Resize(src, resSrcTermination, new Size(450, 450));
                    // take the region of interest
                    var srcTerminacao = new Mat(resSrcTermination, new Rect(75, 75, 150, 150));
                    foreach (var item in files)
                    {
                        // target file
                        Mat dst = new Mat(item, ImreadModes.Grayscale);
                        // resize dst into resDst
                        Cv2.Resize(dst, resDstTermination, new Size(450, 450));
                        // take the region of interest
                        var resTerminacao = new Mat(resDstTermination, new Rect(75, 75, 150, 150));
                        // Keypoints - the little dots
                        var keypoints1 = detector.Detect(srcTerminacao);
                        var keypoints2 = detector.Detect(resTerminacao);
                        // --------------------
                        if (keypoints1.Length == keypoints2.Length)
                        {
                            firstkp = keypoints1.Length;
                            matchesCounter++;
                            if (check ?? false)
                            {
                                // Match the filtered images
                                var matches = matcher.Match(srcTerminacao, resTerminacao);
                                try
                                {
                                    // draw lines between the keypoints
                                    Cv2.DrawMatches(srcTerminacao, keypoints1, resTerminacao, keypoints2, matches, imgMatches);
                                    // show the matches
                                    Cv2.ImShow("Terminação", imgMatches);
                                }
                                catch { }
                            }
                            break;
                        }
                    }
                    break;
                case 1:
                    Mat resSrcBifurcation = new Mat();
                    Mat resDstBifurcation = new Mat();
                    // resize src into resSrc
                    Cv2.Resize(src, resSrcBifurcation, new Size(450, 450));
                    //
                    var srcBifurcacao = new Mat(resSrcBifurcation, new Rect(75, 250, 150, 150));
                    foreach (var item in files)
                    {
                        // target file
                        Mat dst = new Mat(item, ImreadModes.Grayscale);
                        // resize dst into resDst
                        Cv2.Resize(dst, resDstBifurcation, new Size(450, 450));
                        // take the region of interest
                        var resBifurcacao = new Mat(resDstBifurcation, new Rect(75, 250, 150, 150));
                        // Keypoints - the little dots
                        var keypoints1 = detector.Detect(srcBifurcacao);
                        var keypoints2 = detector.Detect(resBifurcacao);
                        // --------------------
                        if (keypoints1.Length == keypoints2.Length)
                        {
                            matchesCounter++;
                            secondkp = keypoints1.Length;
                            if (check ?? false)
                            {
                                // Match the filtered images
                                var matches = matcher.Match(srcBifurcacao, resBifurcacao);
                                try
                                {
                                    // draw lines between the keypoints
                                    Cv2.DrawMatches(srcBifurcacao, keypoints1, resBifurcacao, keypoints2, matches, imgMatches);
                                    // show the matches
                                    Cv2.ImShow("Bifurcação", imgMatches);
                                }
                                catch { }
                            }
                            break;
                        }
                    }
                    break;
                case 2:
                    Mat resSrcIndependency = new Mat();
                    Mat resDstIndependency = new Mat();
                    // resize src into resSrc
                    Cv2.Resize(src, resSrcIndependency, new Size(450, 450));
                    // take the region of interest
                    var srcIndependency = new Mat(resSrcIndependency, new Rect(235, 250, 150, 120));
                    foreach (var item in files)
                    {
                        // target file
                        Mat dst = new Mat(item, ImreadModes.Grayscale);
                        // resize dst into resDst
                        Cv2.Resize(dst, resDstIndependency, new Size(450, 450));
                        // take the region of interest
                        var resIndependency = new Mat(resDstIndependency, new Rect(235, 250, 150, 120));
                        // Keypoints - the little dots
                        var keypoints1 = detector.Detect(srcIndependency);
                        var keypoints2 = detector.Detect(resIndependency);
                        // --------------------
                        if (keypoints1.Length == keypoints2.Length)
                        {
                            thirdkp = keypoints1.Length;
                            matchesCounter++;
                            if (check ?? false)
                            {
                                // Match the filtered images
                                var matches = matcher.Match(srcIndependency, resIndependency);
                                try
                                {
                                    // draw lines between the keypoints
                                    Cv2.DrawMatches(srcIndependency, keypoints1, resIndependency, keypoints2, matches, imgMatches);
                                    // show the matches
                                    Cv2.ImShow("Independente", imgMatches);
                                }
                                catch { }
                            }
                            break;
                        }
                    }
                    break;
                case 3:
                    Mat resSrcIsland = new Mat();
                    Mat resDstIsland = new Mat();
                    // resize src into resSrc
                    Cv2.Resize(src, resSrcIsland, new Size(450, 450));
                    // take the region of interest
                    var srcIlha = new Mat(resSrcIsland, new Rect(220, 220, 150, 130));
                    foreach (var item in files)
                    {
                        // target file
                        Mat dst = new Mat(item, ImreadModes.Grayscale);
                        // resize dst into resDst
                        Cv2.Resize(dst, resDstIsland, new Size(450, 450));
                        // take the region of interest
                        var resIlha = new Mat(resDstIsland, new Rect(220, 220, 150, 130));
                        // Keypoints - the little dots
                        var keypoints1 = detector.Detect(srcIlha);
                        var keypoints2 = detector.Detect(resIlha);
                        // --------------------
                        if (keypoints1.Length == keypoints2.Length)
                        {
                            fourthkp = keypoints1.Length;
                            matchesCounter++;
                            if (check ?? false)
                            {
                                // Match the filtered images
                                var matches = matcher.Match(srcIlha, resIlha);
                                try
                                {
                                    // draw lines between the keypoints
                                    Cv2.DrawMatches(srcIlha, keypoints1, resIlha, keypoints2, matches, imgMatches);
                                    // show the matches
                                    Cv2.ImShow("Ilha", imgMatches);
                                }
                                catch { }
                            }
                            break;
                        }
                    }
                    break;
                default:
                    return(Task.FromResult("Canceled"));
            }
        }
        if (matchesCounter == 4 && firstkp == 201 && secondkp == 169 && thirdkp == 127 && fourthkp == 143)
        {
            return(Task.FromResult("ADMIN"));
        }
        else if (matchesCounter == 4 && firstkp == 174 && secondkp == 169 && thirdkp == 133 && fourthkp == 154)
        {
            return(Task.FromResult("DIRETOR"));
        }
        else if (matchesCounter == 4)
        {
            return(Task.FromResult("OK"));
        }
        else
        {
            return(Task.FromResult("Wrong"));
        }
    }
    else
    {
        return(Task.FromResult("Canceled"));
    }
}
// NOTE(review): the "match success" log literal below appears split across a line
// break in the extracted source — presumably a single literal (e.g. $@"…") in the
// real file; it is reconstructed as one line here. Confirm against the original.
// NOTE(review): both distance loops iterate matSrcRet.Rows — this assumes Match()
// returns exactly one DMatch per query descriptor row; verify, otherwise this can
// throw IndexOutOfRange (which the catch-all below would swallow into a log line).
/// <summary>
/// Surf match
/// </summary>
/// <param name="wantBitmap">Want match bitmap</param>
/// <param name="bitmap">target bitmap</param>
/// <param name="surfMatch">surf match option</param>
/// <param name="cancellationToken">cancellationToken</param>
/// <returns>Target bitmap location</returns>
private async Task <Rectangle?> SurfMatchLocation(Bitmap wantBitmap, Bitmap bitmap, SurfMatch surfMatch, CancellationToken cancellationToken)
{
    return(await Task.Run(() =>
    {
        try
        {
            using (var matSrc = bitmap.ToMat())
            using (var matTo = wantBitmap.ToMat())
            using (var matSrcRet = new Mat())
            using (var matToRet = new Mat())
            {
                cancellationToken.ThrowIfCancellationRequested();
                KeyPoint[] keyPointsSrc, keyPointsTo;
                using (var surf = SURF.Create(surfMatch.HessianThreshold, 4, 3, true, true))
                {
                    surf.DetectAndCompute(matSrc, null, out keyPointsSrc, matSrcRet);
                    surf.DetectAndCompute(matTo, null, out keyPointsTo, matToRet);
                }
                cancellationToken.ThrowIfCancellationRequested();
                using (var flnMatcher = new FlannBasedMatcher())
                {
                    var matches = flnMatcher.Match(matSrcRet, matToRet);
                    cancellationToken.ThrowIfCancellationRequested();
                    // find the minimum and maximum match distances
                    var minDistance = 1000d;
                    // approached from the opposite direction
                    var maxDistance = 0d;
                    for (int i = 0; i < matSrcRet.Rows; i++)
                    {
                        var distance = matches[i].Distance;
                        if (distance > maxDistance)
                        {
                            maxDistance = distance;
                        }
                        if (distance < minDistance)
                        {
                            minDistance = distance;
                        }
                    }
                    // Keep only matches well under twice the best distance (floor 0.02).
                    var pointsSrc = new List <Point2f>();
                    var pointsDst = new List <Point2f>();
                    for (int i = 0; i < matSrcRet.Rows; i++)
                    {
                        double distance = matches[i].Distance;
                        if (distance < Math.Max(minDistance * 2, 0.02))
                        {
                            pointsSrc.Add(keyPointsSrc[matches[i].QueryIdx].Pt);
                            pointsDst.Add(keyPointsTo[matches[i].TrainIdx].Pt);
                        }
                    }
                    if (pointsSrc.Count > 0 && pointsDst.Count > 0)
                    {
                        // The offset of the first good pair anchors the target rectangle.
                        var location = pointsSrc[0] - pointsDst[0];
                        var rectangle = new Rectangle?(new Rectangle((int)location.X, (int)location.Y, wantBitmap.Width, wantBitmap.Height));
                        WindowsApi.WriteLog(
                            $"{nameof(SurfMatchLocation)} match success, {nameof(maxDistance)}:{maxDistance};{nameof(minDistance)}:{minDistance} match count:{pointsSrc.Count}, {rectangle}");
                        return rectangle;
                    }
                    else
                    {
                        WindowsApi.WriteLog(
                            $"{nameof(SurfMatchLocation)} match failed, {nameof(maxDistance)}:{maxDistance};{nameof(minDistance)}:{minDistance}");
                    }
                }
            }
        }
        catch (Exception ex)
        {
            WindowsApi.WriteLog($"{nameof(SurfMatchLocation)} ErrorMessage:{ex.Message}");
        }
        return null;
    }, cancellationToken));
}
// Matches SURF features between the current originalImg and the image at filepath,
// keeps FLANN knn matches whose best distance is under 3x the observed minimum,
// estimates a homography (RANSAC), warps the target image into the original's
// frame and stores the result in processImg (also shown in a "Cropped" window).
// NOTE(review): the min/max loop indexes flannMatches[i][1] for i < descriptors1.Rows
// — this assumes KnnMatch returned one entry with 2 neighbours per query descriptor;
// verify, otherwise this can throw IndexOutOfRange.
public void MatchBySurf(string filepath)
{
    Mat src1 = new Mat();
    originalImg.CopyTo(src1);
    Mat src2 = new Mat(filepath);
    // resize
    //Cv2.Resize(src2, src2, new Size(src2.Size().Width / 2, src2.Size().Height / 2));
    //Cv2.Resize(src1, src1, new Size(src2.Size().Width / 2, src2.Size().Height / 2));
    var gray1 = new Mat();
    var gray2 = new Mat();
    Cv2.CvtColor(src1, gray1, ColorConversionCodes.BGR2GRAY);
    Cv2.CvtColor(src2, gray2, ColorConversionCodes.BGR2GRAY);
    var surf = SURF.Create(1000);
    // Detect the keypoints and generate their descriptors using SURF
    KeyPoint[] keypoints1, keypoints2;
    var descriptors1 = new Mat();
    var descriptors2 = new Mat();
    surf.DetectAndCompute(gray1, null, out keypoints1, descriptors1);
    surf.DetectAndCompute(gray2, null, out keypoints2, descriptors2);
    // Match descriptor vectors
    OpenCvSharp.Flann.SearchParams sParam = new OpenCvSharp.Flann.SearchParams(50);
    var flannMatcher = new FlannBasedMatcher(null, sParam);
    DMatch[][] flannMatches = flannMatcher.KnnMatch(descriptors1, descriptors2, 2);
    // Scan the second-nearest-neighbour distances for their min and max.
    double max_dist = 0;
    double min_dist = 100;
    for (int i = 0; i < descriptors1.Rows; i++)
    {
        double dist = flannMatches[i][1].Distance;
        if (dist < min_dist)
        {
            min_dist = dist;
        }
        if (dist > max_dist)
        {
            max_dist = dist;
        }
    }
    //find goodMatches
    float ratio_thresh = 3 * (float)min_dist;
    List <DMatch> goodMatches = new List <DMatch>();
    for (int i = 0; i < flannMatches.Length; i++)
    {
        if ((flannMatches[i][0].Distance < ratio_thresh)) //* flannMatches[i][1].Distance))// && (flannMatches[i][0].Distance > 0.4 * flannMatches[i][1].Distance))
        {
            goodMatches.Add(flannMatches[i][0]);
        }
    }
    // Draw matches
    var flannView = new Mat();
    Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, goodMatches, flannView, new Scalar(0, 255, 0), new Scalar(255, 255, 0), null, DrawMatchesFlags.Default);
    // Collect the matched point pairs for homography estimation.
    List <Point2f> obj = new List <Point2f>();
    List <Point2f> scene = new List <Point2f>();
    if (goodMatches.Count == 0)
    {
        return;
    }
    for (int i = 0; i < goodMatches.Count; i++)
    {
        obj.Add(keypoints1[goodMatches[i].QueryIdx].Pt);
        scene.Add(keypoints2[goodMatches[i].TrainIdx].Pt);
    }
    // Project the corners of gray1 into the scene through the homography.
    List <Point2d> obj_corners = new List <Point2d>();
    obj_corners.Add(new Point(0, 0));
    obj_corners.Add(new Point(gray1.Cols, 0));
    obj_corners.Add(new Point(gray1.Cols, gray1.Rows));
    obj_corners.Add(new Point(0, gray1.Rows));
    List <Point2d> scene_corners = new List <Point2d>();
    Mat H = Cv2.FindHomography(obj.ConvertAll(Point2fToPoint2d), scene.ConvertAll(Point2fToPoint2d), HomographyMethods.Ransac);
    if (H.Empty())
    {
        return;
    }
    scene_corners = Cv2.PerspectiveTransform(obj_corners, H).ToList();
    // Invert the mapping (scene corners -> original corners) and warp src2 back
    // into src1's frame.
    Mat h2 = Cv2.GetPerspectiveTransform(scene_corners.ConvertAll(Point2dToPoint2f), obj_corners.ConvertAll(Point2dToPoint2f));
    Mat crop = new Mat();
    Cv2.WarpPerspective(src2, crop, h2, src1.Size());
    //Cv2.ImShow("original", src1);
    new Window("Cropped", WindowMode.AutoSize, crop);
    crop.CopyTo(processImg);
}
public void CreateAndDispose()
{
    // Creating and disposing a SURF instance must not throw.
    using (SURF.Create(400))
    {
    }
}