static void Main(string[] args) { var img1 = new Mat(@"..\..\Images\left.png", LoadMode.GrayScale); Cv2.ImShow("Left", img1); Cv2.WaitKey(1); // do events var img2 = new Mat(@"..\..\Images\right.png", LoadMode.GrayScale); Cv2.ImShow("Right", img2); Cv2.WaitKey(1); // do events // detecting keypoints // FastFeatureDetector, StarDetector, SIFT, SURF, ORB, BRISK, MSER, GFTTDetector, DenseFeatureDetector, SimpleBlobDetector // SURF = Speeded Up Robust Features var detector = new SURF(hessianThreshold: 400); //A good default value could be from 300 to 500, depending from the image contrast. var keypoints1 = detector.Detect(img1); var keypoints2 = detector.Detect(img2); // computing descriptors, BRIEF, FREAK // BRIEF = Binary Robust Independent Elementary Features var extractor = new BriefDescriptorExtractor(); var descriptors1 = new Mat(); var descriptors2 = new Mat(); extractor.Compute(img1, ref keypoints1, descriptors1); extractor.Compute(img2, ref keypoints2, descriptors2); // matching descriptors var matcher = new BFMatcher(); var matches = matcher.Match(descriptors1, descriptors2); // drawing the results var imgMatches = new Mat(); Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, matches, imgMatches); Cv2.ImShow("Matches", imgMatches); Cv2.WaitKey(1); // do events Cv2.WaitKey(0); Cv2.DestroyAllWindows(); img1.Dispose(); img2.Dispose(); }
private void MatchBySurf(Mat src1, Mat src2)
{
    var gray1 = new Mat();
    var gray2 = new Mat();
    Cv2.CvtColor(src1, gray1, ColorConversion.BgrToGray);
    Cv2.CvtColor(src2, gray2, ColorConversion.BgrToGray);

    var surf = new SURF(500, 4, 2, true);

    // Detect the keypoints and generate their descriptors using SURF
    KeyPoint[] keypoints1, keypoints2;
    var descriptors1 = new MatOfFloat();
    var descriptors2 = new MatOfFloat();
    surf.Run(gray1, null, out keypoints1, descriptors1);
    surf.Run(gray2, null, out keypoints2, descriptors2);

    // Match descriptor vectors
    var bfMatcher = new BFMatcher(NormType.L2, false);
    var flannMatcher = new FlannBasedMatcher();
    DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);
    DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);

    // Draw matches
    var bfView = new Mat();
    Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, bfView);
    var flannView = new Mat();
    Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, flannView);

    using (new Window("SURF matching (by BFMatcher)", WindowMode.AutoSize, bfView))
    using (new Window("SURF matching (by FlannBasedMatcher)", WindowMode.AutoSize, flannView))
    {
        Cv2.WaitKey();
    }
}
public void Run(Mat gray1, Mat gray2, Mat dst, int hessianThreshold, bool useBFMatcher)
{
    var surf = SURF.Create(hessianThreshold, 4, 2, true);

    KeyPoint[] keypoints1, keypoints2;
    var descriptors1 = new Mat();
    var descriptors2 = new Mat();
    surf.DetectAndCompute(gray1, null, out keypoints1, descriptors1);
    surf.DetectAndCompute(gray2, null, out keypoints2, descriptors2);

    if (useBFMatcher)
    {
        if (descriptors1.Rows > 0 && descriptors2.Rows > 0) // occasionally there is nothing to match!
        {
            var bfMatcher = new BFMatcher(NormTypes.L2, false);
            DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);
            Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, dst);
        }
    }
    else
    {
        var flannMatcher = new FlannBasedMatcher();
        if (descriptors1.Width > 0 && descriptors2.Width > 0)
        {
            DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);
            Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, dst);
        }
    }
}
public void Run(Mat gray1, Mat gray2, Mat dst2, bool useBFMatcher, int pointsToMatch)
{
    var sift = SIFT.Create(pointsToMatch);

    KeyPoint[] keypoints1, keypoints2;
    var descriptors1 = new Mat();
    var descriptors2 = new Mat();
    sift.DetectAndCompute(gray1, null, out keypoints1, descriptors1);
    sift.DetectAndCompute(gray2, null, out keypoints2, descriptors2);

    if (useBFMatcher)
    {
        var bfMatcher = new BFMatcher(NormTypes.L2, false);
        DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);
        Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, dst2);
    }
    else
    {
        var flannMatcher = new FlannBasedMatcher();
        DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);
        Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, dst2);
    }

    kp1 = keypoints1;
    kp2 = keypoints2;
}
private void MatchBySift(Mat src1, Mat src2)
{
    using var gray1 = new Mat();
    using var gray2 = new Mat();
    Cv2.CvtColor(src1, gray1, ColorConversionCodes.BGR2GRAY);
    Cv2.CvtColor(src2, gray2, ColorConversionCodes.BGR2GRAY);

    using var sift = SIFT.Create();

    // Detect the keypoints and generate their descriptors using SIFT
    using var descriptors1 = new Mat<float>();
    using var descriptors2 = new Mat<float>();
    sift.DetectAndCompute(gray1, null, out var keypoints1, descriptors1);
    sift.DetectAndCompute(gray2, null, out var keypoints2, descriptors2);

    // Match descriptor vectors
    using var bfMatcher = new BFMatcher(NormTypes.L2, false);
    using var flannMatcher = new FlannBasedMatcher();
    DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);
    DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);

    // Draw matches
    using var bfView = new Mat();
    Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, bfView);
    using var flannView = new Mat();
    Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, flannView);

    using (new Window("SIFT matching (by BFMatcher)", bfView))
    using (new Window("SIFT matching (by FlannBasedMatcher)", flannView))
    {
        Cv2.WaitKey();
    }
}
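// A minimal sketch, not part of the example above: FlannBasedMatcher's default
// constructor already uses a KD-tree index suited to SIFT/SURF float descriptors,
// but the index can also be configured explicitly. The parameter values below
// (4 trees, 50 checks) and the descriptors1/descriptors2 arguments are
// illustrative assumptions, not taken from the original code.
static DMatch[] FlannMatchSift(Mat descriptors1, Mat descriptors2)
{
    using var flann = new FlannBasedMatcher(
        new OpenCvSharp.Flann.KDTreeIndexParams(4), // number of randomized KD-trees
        new OpenCvSharp.Flann.SearchParams(50));    // leaves to visit per query; more = slower but more accurate
    return flann.Match(descriptors1, descriptors2);
}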
public V2()
{
    ProcessFunction = (object obj) =>
    {
        dynamic prt = obj as dynamic;
        var descriptors1 = prt.descriptors1;
        var descriptors2 = prt.descriptors2;
        var keypoints1 = prt.keypoints1;
        var keypoints2 = prt.keypoints2;
        Mat gray1 = prt.gray1;
        Mat gray2 = prt.gray2;

        // Match descriptor vectors
        var bfMatcher = new BFMatcher(NormTypes.L2, false);
        var flannMatcher = new FlannBasedMatcher();
        DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);
        DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);

        // Draw matches
        var bfView = new Mat();
        Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, bfView);
        var flannView = new Mat();
        Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, flannView);

        return new { bfView, flannView };
    };

    PostProcessAction = () =>
    {
        Console.WriteLine("V2 finished processing! Elapsed time: {0}" + Environment.NewLine, ElapsedTime);
    };
}
private void FindAndDrawMatches()
{
    using (var template = new Mat("Images\\Circle_Template.bmp", ImreadModes.GrayScale))
    using (var surf = SURF.Create(1000))
    using (var templateDescriptors = new Mat())
    {
        surf.DetectAndCompute(template, null, out KeyPoint[] templateKeyPoints, templateDescriptors);
        using (var image = new Mat("Images\\Circle.bmp", ImreadModes.GrayScale))
        using (var imageDescriptors = new Mat())
        {
            surf.DetectAndCompute(image, null, out KeyPoint[] imageKeyPoints, imageDescriptors);
            using (var matcher = new BFMatcher())
            {
                var matches = matcher.Match(imageDescriptors, templateDescriptors);
                using (var overLay = image.Overlay())
                {
                    Cv2.DrawMatches(image, imageKeyPoints, template, templateKeyPoints, matches, overLay);
                    this.Result.Source = overLay.ToBitmapSource();
                }
            }
        }
    }
}
/// <summary> /// To avoid NaN's when best match has zero distance we will use inversed ratio. /// KNN match will return 2 nearest matches for each query descriptor /// </summary> List <DMatch> GetMatches(BFMatcher matcher, Mat queryDescriptors, Mat trainDescriptors) { List <DMatch> matchesList = new List <DMatch>(); if (enableRatioTest) { float minRatio = 1.0f / 1.5f; DMatch[][] dm = matcher.KnnMatch(queryDescriptors, trainDescriptors, 2); for (int i = 0; i < dm.Length; i++) { DMatch bestMatch = dm[i][0]; DMatch betterMatch = dm[i][1]; float distanceRatio = bestMatch.Distance / betterMatch.Distance; if (distanceRatio < minRatio) { matchesList.Add(bestMatch); } } } else { matchesList.AddRange(matcher.Match(queryDescriptors, trainDescriptors)); } return(matchesList); }
private void FindAndDrawHomo()
{
    using (var template = new Mat("Images\\Circle_Template.bmp", ImreadModes.Color))
    using (var surf = SURF.Create(1000))
    using (var templateDescriptors = new Mat())
    {
        surf.DetectAndCompute(template, null, out KeyPoint[] templateKeyPoints, templateDescriptors);
        using (var image = new Mat("Images\\Circle.bmp", ImreadModes.Color))
        using (var imageDescriptors = new Mat())
        {
            surf.DetectAndCompute(image, null, out KeyPoint[] imageKeyPoints, imageDescriptors);
            using (var matcher = new BFMatcher())
            {
                var matches = matcher.Match(imageDescriptors, templateDescriptors);
                var goodMatches = matches; //.Where(m => m.Distance < 0.2).ToArray();
                using (var srcPoints = InputArray.Create(goodMatches.Select(m => templateKeyPoints[m.TrainIdx].Pt)))
                using (var dstPoints = InputArray.Create(goodMatches.Select(m => imageKeyPoints[m.QueryIdx].Pt)))
                using (var homography = Cv2.FindHomography(srcPoints, dstPoints, HomographyMethods.Rho))
                {
                    ////using (var overlay = image.Overlay())
                    ////{
                    ////    DrawBox(template, homography, overlay);
                    ////    this.Result.Source = overlay.ToBitmapSource();
                    ////}
                    using (var tmp = image.Overlay())
                    {
                        Cv2.BitwiseNot(template, template);
                        Cv2.WarpPerspective(template, tmp, homography, tmp.Size());
                        using (var overlay = tmp.Overlay())
                        {
                            for (var r = 0; r < tmp.Rows; r++)
                            {
                                for (var c = 0; c < tmp.Cols; c++)
                                {
                                    overlay.Set(r, c, tmp.At<int>(r, c) == 0 ? new Vec4b(0, 0, 0, 0) : new Vec4b(0, 0, 255, 150));
                                }
                            }
                            this.Result.Source = overlay.ToBitmapSource();
                        }
                    }
                }
            }
        }
    }
}
private void MatchBySurf(Mat src1, Mat src2)
{
    Mat gray1 = new Mat();
    Mat gray2 = new Mat();
    Cv2.CvtColor(src1, gray1, ColorConversion.BgrToGray);
    Cv2.CvtColor(src2, gray2, ColorConversion.BgrToGray);

    SURF surf = new SURF(500, 4, 2, true);

    // Detect the keypoints and generate their descriptors using SURF
    KeyPoint[] keypoints1, keypoints2;
    MatOfFloat descriptors1 = new MatOfFloat();
    MatOfFloat descriptors2 = new MatOfFloat();
    surf.Run(gray1, null, out keypoints1, descriptors1);
    surf.Run(gray2, null, out keypoints2, descriptors2);

    // Matching descriptor vectors with a brute force matcher
    BFMatcher matcher = new BFMatcher(NormType.L2, false);
    DMatch[] matches = matcher.Match(descriptors1, descriptors2);

    // Draw matches
    Mat view = new Mat();
    Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, matches, view);

    using (new Window("SURF matching", WindowMode.AutoSize, view))
    {
        Cv2.WaitKey();
    }
}
private void MatchBySurf(Mat src1, Mat src2)
{
    using var gray1 = new Mat();
    using var gray2 = new Mat();
    Cv2.CvtColor(src1, gray1, ColorConversionCodes.BGR2GRAY);
    Cv2.CvtColor(src2, gray2, ColorConversionCodes.BGR2GRAY);

    using var surf = SURF.Create(200, 4, 2, true);

    // Detect the keypoints and generate their descriptors using SURF
    KeyPoint[] keypoints1, keypoints2;
    using var descriptors1 = new Mat<float>();
    using var descriptors2 = new Mat<float>();
    surf.DetectAndCompute(gray1, null, out keypoints1, descriptors1);
    surf.DetectAndCompute(gray2, null, out keypoints2, descriptors2);

    // Match descriptor vectors
    using var bfMatcher = new BFMatcher(NormTypes.L2, false);
    using var flannMatcher = new FlannBasedMatcher();
    DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);
    DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);

    // Draw matches
    using var bfView = new Mat();
    Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, bfView);
    using var flannView = new Mat();
    Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, flannView);

    using (new Window("SURF matching (by BFMatcher)", WindowMode.AutoSize, bfView))
    using (new Window("SURF matching (by FlannBasedMatcher)", WindowMode.AutoSize, flannView))
    {
        Cv2.WaitKey();
    }
}
private Mat MatchBySurf(Mat src1, Mat src2)
{
    using var gray1 = new Mat();
    using var gray2 = new Mat();
    Cv2.CvtColor(src1, gray1, ColorConversionCodes.BGR2GRAY);
    Cv2.CvtColor(src2, gray2, ColorConversionCodes.BGR2GRAY);

    //using var detector = SURF.Create(200, 4, 2, true);
    using var detector = AKAZE.Create();

    // Detect the keypoints and generate their descriptors using AKAZE
    using var descriptors1 = new Mat();
    using var descriptors2 = new Mat();
    detector.DetectAndCompute(gray1, null, out var keypoints1, descriptors1);
    detector.DetectAndCompute(gray2, null, out var keypoints2, descriptors2);

    // Match descriptor vectors. AKAZE's default MLDB descriptors are binary, so use
    // Hamming distance (switch back to NormTypes.L2 if using SURF's float descriptors)
    using var bfMatcher = new BFMatcher(NormTypes.Hamming, false);
    DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);

    // Draw matches
    var bfView = new Mat();
    Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, bfView, flags: DrawMatchesFlags.NotDrawSinglePoints);
    return bfView;
}
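// A minimal sketch, not part of the example above: FLANN can also match binary
// descriptors (ORB/AKAZE/BRISK) if it is built with an LSH index instead of the
// default KD-tree, which only supports float descriptors. The LSH parameter values
// (table number 12, key size 20, multi-probe level 2) are commonly quoted defaults,
// assumed here for illustration.
static DMatch[] FlannMatchBinary(Mat descriptors1, Mat descriptors2)
{
    using (var matcher = new FlannBasedMatcher(
        new OpenCvSharp.Flann.LshIndexParams(12, 20, 2),
        new OpenCvSharp.Flann.SearchParams(50)))
    {
        return matcher.Match(descriptors1, descriptors2);
    }
}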
static IEnumerable <DMatch> BFMatch(Mat image1, Mat image2) { Mat dst1 = new Mat(); Mat dst2 = new Mat(); var orb = ORB.Create(); orb.DetectAndCompute(image1, null, out var kp1, dst1); orb.DetectAndCompute(image2, null, out var kp2, dst2); BFMatcher matcher = new BFMatcher(); return(matcher.Match(dst1, dst2)); }
public static List <System.Drawing.Point> func(Bitmap bitmap1, Bitmap bitmap2) { //Mat img1 = new Mat(@"roll/0.png", ImreadModes.Unchanged); //Mat img2 = new Mat(@"roll/1.png", ImreadModes.Unchanged); Mat img1 = BitmapToMat(bitmap1); Mat img2 = BitmapToMat(bitmap2); SIFT sift = SIFT.Create(20); //KeyPoint[] k = sift.Detect(img1); // Detect the keypoints and generate their descriptors using SIFT KeyPoint[] keypoints1, keypoints2; var descriptors1 = new Mat <float>(); var descriptors2 = new Mat <float>(); sift.DetectAndCompute(img1, null, out keypoints1, descriptors1); sift.DetectAndCompute(img2, null, out keypoints2, descriptors2); // Match descriptor vectors var bfMatcher = new BFMatcher(NormTypes.L2, false); var flannMatcher = new FlannBasedMatcher(); DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2); DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2); // Draw matches var bfView = new Mat(); Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, bfMatches, bfView); var flannView = new Mat(); Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, flannMatches, flannView); using (new Window("SIFT matching (by BFMather)", bfView)) using (new Window("SIFT matching (by FlannBasedMatcher)", flannView)) { Cv2.WaitKey(); } List <System.Drawing.Point> points = new List <System.Drawing.Point>(); foreach (DMatch match in bfMatches) { System.Drawing.Point p = new System.Drawing.Point(); p.X = (int)(keypoints1[match.QueryIdx].Pt.X - keypoints2[match.TrainIdx].Pt.X); p.Y = (int)(keypoints1[match.QueryIdx].Pt.Y - keypoints2[match.TrainIdx].Pt.Y); points.Add(p); } return(points); }
static void Main(string[] args) { var img1 = new Mat(@"..\..\Images\left.png", ImreadModes.GrayScale); Cv2.ImShow("Left", img1); Cv2.WaitKey(1); // do events var img2 = new Mat(@"..\..\Images\right.png", ImreadModes.GrayScale); Cv2.ImShow("Right", img2); Cv2.WaitKey(1); // do events // detecting keypoints // FastFeatureDetector, StarDetector, SIFT, SURF, ORB, BRISK, MSER, GFTTDetector, DenseFeatureDetector, SimpleBlobDetector // SURF = Speeded Up Robust Features var detector = SURF.Create(hessianThreshold: 400); //A good default value could be from 300 to 500, depending from the image contrast. var keypoints1 = detector.Detect(img1); var keypoints2 = detector.Detect(img2); // computing descriptors, BRIEF, FREAK // BRIEF = Binary Robust Independent Elementary Features var extractor = BriefDescriptorExtractor.Create(); var descriptors1 = new Mat(); var descriptors2 = new Mat(); extractor.Compute(img1, ref keypoints1, descriptors1); extractor.Compute(img2, ref keypoints2, descriptors2); // matching descriptors var matcher = new BFMatcher(); var matches = matcher.Match(descriptors1, descriptors2); // drawing the results var imgMatches = new Mat(); Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, matches, imgMatches); Cv2.ImShow("Matches", imgMatches); Cv2.WaitKey(1); // do events Cv2.WaitKey(0); Cv2.DestroyAllWindows(); img1.Dispose(); img2.Dispose(); }
public override void RunTest()
{
    using var img1 = new Mat(ImagePath.Match1, ImreadModes.Color);
    using var img2 = new Mat(ImagePath.Match2, ImreadModes.Color);

    using var orb = ORB.Create(1000);
    using var descriptors1 = new Mat();
    using var descriptors2 = new Mat();
    orb.DetectAndCompute(img1, null, out var keyPoints1, descriptors1);
    orb.DetectAndCompute(img2, null, out var keyPoints2, descriptors2);

    using var bf = new BFMatcher(NormTypes.Hamming, crossCheck: true);
    var matches = bf.Match(descriptors1, descriptors2);
    var goodMatches = matches
        .OrderBy(x => x.Distance)
        .Take(10)
        .ToArray();

    var srcPts = goodMatches.Select(m => keyPoints1[m.QueryIdx].Pt).Select(p => new Point2d(p.X, p.Y));
    var dstPts = goodMatches.Select(m => keyPoints2[m.TrainIdx].Pt).Select(p => new Point2d(p.X, p.Y));
    using var homography = Cv2.FindHomography(srcPts, dstPts, HomographyMethods.Ransac, 5, null);

    int h = img1.Height, w = img1.Width;
    var img2Bounds = new[]
    {
        new Point2d(0, 0),
        new Point2d(0, h - 1),
        new Point2d(w - 1, h - 1),
        new Point2d(w - 1, 0),
    };
    var img2BoundsTransformed = Cv2.PerspectiveTransform(img2Bounds, homography);

    using var view = img2.Clone();
    var drawingPoints = img2BoundsTransformed.Select(p => (Point)p).ToArray();
    Cv2.Polylines(view, new[] { drawingPoints }, true, Scalar.Red, 3);

    using (new Window("view", view))
    {
        Cv2.WaitKey();
    }
}
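// A minimal sketch building on the example above: FindHomography can also return a
// RANSAC inlier mask, so you can keep only matches consistent with the estimated
// homography instead of the ad-hoc "10 best by distance" cut. The helper assumes
// srcPts/dstPts were built from matches in the same order; names are illustrative.
static DMatch[] InlierMatches(IEnumerable<Point2d> srcPts, IEnumerable<Point2d> dstPts, DMatch[] matches)
{
    using (var mask = new Mat())
    using (Cv2.FindHomography(srcPts, dstPts, HomographyMethods.Ransac, 5, mask))
    {
        // the mask has one row per input point; non-zero rows are RANSAC inliers
        return matches.Where((m, i) => mask.At<byte>(i) != 0).ToArray();
    }
}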
public async Task <FingerprintModel> CompareImages(Image input) { BFMatcher bF = new BFMatcher(DistanceType.Hamming); VectorOfDMatch matches = new VectorOfDMatch(); var descriptorToCompare = FingerprintDescriptor( BitConvert.GetMatFromImage(input) ); var AllFingerPrints = await _fingerPrintData.GetAll(); foreach (FingerprintModel fingerprintDatabase in AllFingerPrints) { var descriptorDatabase = FingerprintDescriptor( BitConvert.GetMatFromImage( fingerprintDatabase.GetFingerPrintImage() ) ); //Here you put the firgerPrint's Mat you want to compare. bF.Match(descriptorToCompare, descriptorDatabase, matches); //Algorithm to Compare fingerprints //Calculate score float score = 0; foreach (MDMatch match in matches.ToArray()) { score += match.Distance; } float score_threshold = 33; if (score / matches.ToArray().Length < score_threshold) { return(fingerprintDatabase); } else { continue; } } return(null); }
private void image_matching(Mat img1, Mat img2)
{
    Cv2.ImShow("Matches1", img1);
    Cv2.ImShow("Matches2", img2);

    var detector = SURF.Create(hessianThreshold: 300, 4, 2, true, false); // a good default is 300 to 500, depending on the image contrast

    KeyPoint[] keypoints1 = null;
    KeyPoint[] keypoints2 = null;
    Mat descriptors1 = new Mat();
    Mat descriptors2 = new Mat();
    detector.DetectAndCompute(img1, null, out keypoints1, descriptors1);
    detector.DetectAndCompute(img2, null, out keypoints2, descriptors2);

    var matcher = new BFMatcher();
    var matches = matcher.Match(descriptors1, descriptors2);

    float max_dist = 50;
    int cntSuccessPoint = 0;
    for (int i = 0; i < matches.Length; i++)
    {
        log_write("matches[i].Distance:" + Convert.ToString(max_dist) + "--" + Convert.ToString(matches[i].Distance));
        if ((matches[i].Distance * 100) < max_dist)
        {
            cntSuccessPoint = cntSuccessPoint + 1;
        }
    }

    double rate = (cntSuccessPoint * 100) / matches.Length;
    log_write("Similarity rate:" + Convert.ToString(rate) + "---" + Convert.ToString(cntSuccessPoint) + "/" + Convert.ToString(matches.Length));

    var imgMatches = new Mat();
    Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, matches, imgMatches);
    Cv2.ImShow("Matches3", imgMatches);
}
public void siftcharacterors(Mat src1, Mat src2)
{
    Mat gray1 = new Mat();
    Mat gray2 = new Mat();
    Cv2.CvtColor(src1, gray1, ColorConversionCodes.BGR2GRAY);
    Cv2.CvtColor(src2, gray2, ColorConversionCodes.BGR2GRAY);

    var siftdemo = SIFT.Create();

    // Detect the keypoints and compute their descriptors
    KeyPoint[] keypoints1, keypoints2;
    var descriptors1 = new MatOfFloat();
    var descriptors2 = new MatOfFloat();
    siftdemo.DetectAndCompute(gray1, null, out keypoints1, descriptors1);
    siftdemo.DetectAndCompute(gray2, null, out keypoints2, descriptors2);

    // Match descriptor vectors
    var bfMatcher = new BFMatcher(NormTypes.L2, false);
    var flannMatcher = new FlannBasedMatcher();
    DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);
    DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);

    // Draw matches
    var bfView = new Mat();
    Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, bfView);
    var flannView = new Mat();
    Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, flannMatches, flannView);

    using (new Window("SIFT matching (by BFMatcher)", WindowMode.AutoSize, bfView))
    //using (new Window("SIFT matching (by FlannBasedMatcher)", WindowMode.AutoSize, flannView))
    {
        Cv2.WaitKey();
    }
}
/// <summary> /// Orb特征提取 /// </summary> void OnOrb() { Mat image01 = Cv2.ImRead(Application.streamingAssetsPath + "/Textures/p1.jpg"); Mat image02 = Cv2.ImRead(Application.streamingAssetsPath + "/Textures/p2.jpg"); //灰度图转换 Mat image1 = new Mat(), image2 = new Mat(); Cv2.CvtColor(image01, image1, ColorConversionCodes.RGB2GRAY); Cv2.CvtColor(image02, image2, ColorConversionCodes.RGB2GRAY); KeyPoint[] keyPoint1 = null; KeyPoint[] keyPoint2 = null; using (ORB orb = ORB.Create(500)) using (Mat descriptor1 = new Mat()) using (Mat descriptor2 = new Mat()) using (var matcher = new BFMatcher()) { //特征点提取并计算 orb.DetectAndCompute(image1, new Mat(), out keyPoint1, descriptor1); orb.DetectAndCompute(image2, new Mat(), out keyPoint2, descriptor2); Debug.Log($"keyPoints has {keyPoint1.Length},{keyPoint2.Length} items."); Debug.Log($"descriptor has {descriptor1.Rows},{descriptor2.Rows} items."); //特征点匹配 DMatch[] matchePoints = null; matchePoints = matcher.Match(descriptor1, descriptor2); dstMat = new Mat(); Cv2.DrawMatches(image01, keyPoint1, image02, keyPoint2, matchePoints, dstMat); t2d = Utils.MatToTexture2D(dstMat); } Sprite dst_sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero); m_srcImage.sprite = dst_sp; m_srcImage.preserveAspect = true; }
private static void Surf(IplImage img1, IplImage img2)
{
    Mat src = new Mat(img1, true);
    Mat src2 = new Mat(img2, true);

    // Detect the keypoints and generate their descriptors using SURF
    SURF surf = new SURF(500, 4, 2, true);
    KeyPoint[] keypoints1, keypoints2;
    MatOfFloat descriptors1 = new MatOfFloat();
    MatOfFloat descriptors2 = new MatOfFloat();
    surf.Run(src, null, out keypoints1, descriptors1);
    surf.Run(src2, null, out keypoints2, descriptors2);

    // Matching descriptor vectors with a brute force matcher
    BFMatcher matcher = new BFMatcher(NormType.L2, false);
    DMatch[] matches = matcher.Match(descriptors1, descriptors2); // this call can throw an exception

    Mat view = new Mat();
    Cv2.DrawMatches(src, keypoints1, src2, keypoints2, matches, view);
    Window.ShowImages(view);
}
public async Task Update()
{
    // Declare the server ID and folder path variables
    string serverfolder = $@"R:\Project\RUMM.warehouse\{Context.Guild.Id}";
    string datafolder = $@"{serverfolder}\Data";
    string datafolder_recenter = $@"{datafolder}\Recenter";
    string datafolder_trimmode = $@"{datafolder}\Trimmode";
    string uploadedfolder = $@"{serverfolder}\Uploaded";
    string uploadedfolder_map = $@"{uploadedfolder}\UploadedMap";
    string trimedfolder = $@"{serverfolder}\Trimed";
    string trimedfolder_map = $@"{trimedfolder}\TrimedMap";
    string trimedfolder_map_pre = $@"{trimedfolder}\TrimedMap[Pre]";
    string trimedfolder_map_backup = $@"{trimedfolder}\TrimedMap[Backup]";

    // Paths of the data text files
    string recenter_txt = $@"{datafolder_recenter}\recenter.txt";
    string trimmode_txt = $@"{datafolder_trimmode}\trimmode.txt";

    // Check whether an image is attached to the message
    if (!Context.Message.Attachments.Any())
    {
        await Context.Channel.SendErrorAsync("Error", "No image attached! Always send an image together with the command!");
        return;
    }

    // Get the message sent to Discord and the files attached to it
    var attachments = Context.Message.Attachments;

    // Create a new WebClient instance
    WebClient myWebClient = new WebClient();

    // Set the save path and the URL
    string uploadedmap = $@"{uploadedfolder_map}\uploadedmap.png";
    string url = attachments.ElementAt(0).Url;

    // Download the file
    myWebClient.DownloadFile(url, uploadedmap);

    string trimedmap_pre = $@"{trimedfolder_map_pre}\trimedmap[pre].png";
    Call.Device(uploadedmap, trimedmap_pre);
    Graphic.Resize_Own(trimedmap_pre, 384);

    var comparemap = Directory.EnumerateFiles(trimedfolder_map, "*", SearchOption.AllDirectories);

    float ImageMatch(Mat mat1, Mat mat2, bool show)
    {
        using (var descriptors1 = new Mat())
        using (var descriptors2 = new Mat())
        {
            // Detect features with AKAZE
            var akaze = AKAZE.Create();

            // Detect keypoints and compute descriptors
            akaze.DetectAndCompute(mat1, null, out KeyPoint[] keyPoints1, descriptors1);
            akaze.DetectAndCompute(mat2, null, out KeyPoint[] keyPoints2, descriptors2);

            // Match the descriptors of the two images
            var matcher = new BFMatcher(NormTypes.Hamming, false);
            var matches = matcher.Match(descriptors1, descriptors2);

            // Return the mean distance (smaller means more similar)
            var sum = matches.Sum(x => x.Distance);
            return sum / matches.Length;
        }
    }

    foreach (string comparemapnum in comparemap)
    {
        string mapxcoord = comparemapnum.Split(',')[0].Replace(trimedfolder_map + "\\", "");
        string mapzcoord = comparemapnum.Split(',')[1].Replace(".png", "");
        string trimedmap = $@"{trimedfolder_map}\{mapxcoord},{mapzcoord}.png";
        string trimedmap_await = $@"{trimedfolder_map_pre}\{mapxcoord},{mapzcoord}[await].png";
        using (var mat1 = new Mat(trimedmap_pre))
        using (var mat2 = new Mat(trimedmap))
        {
            // Compare the two images (the mean match distance is used as the score)
            float score = ImageMatch(mat1, mat2, true);
            Console.WriteLine(score);
            if (score < 75)
            {
                File.Copy(trimedmap_pre, trimedmap_await, true);
            }
        }
    }

    List<string> coordslist = new List<string>();
    string searchfileword = @"*await*.png";
    string[] comparemap2 = Directory.GetFiles(trimedfolder_map_pre, searchfileword);
    foreach (string mapnum in comparemap2)
    {
        coordslist.Add(mapnum);
    }

    string[] filelist = Directory.GetFiles(trimedfolder_map_pre, searchfileword);
    if (coordslist.Count() == 1)
    {
        foreach (string premapnum in filelist)
        {
            string mapxcoord = premapnum.Split(',')[0].Replace(trimedfolder_map_pre + "\\", "");
            string mapzcoord = premapnum.Split(',')[1].Replace("[await].png", "");
            string trimedmap = $@"{trimedfolder_map}\{mapxcoord},{mapzcoord}.png";
            string trimedfolder_map_backup_foreach = $@"{trimedfolder_map_backup}\{mapxcoord},{mapzcoord}";
            string trimedmap_backup = $@"{trimedfolder_map_backup_foreach}\{DateTime.Now.ToString("yyyyMMdd")}.png";
            Directory.CreateDirectory(trimedfolder_map_backup_foreach);
            File.Copy(premapnum, trimedmap, true);
            File.Copy(premapnum, trimedmap_backup, true);
            File.Delete(premapnum);
        }
        await Context.Channel.SendSuccessAsync("Done", "Image trimmed successfully!");
    }
    else if (coordslist.Count() > 1 || coordslist.Count() == 0)
    {
        foreach (string premapnum in filelist)
        {
            File.Delete(premapnum);
        }
        await Context.Channel.SendErrorAsync("Error", "Not similar enough to any of the registered maps!");
    }
}
public Mat Stitch()
{
    Mat src1Gray = new Mat();
    Mat src2Gray = new Mat();
    Cv2.CvtColor(src1Color, src1Gray, ColorConversionCodes.BGR2GRAY);
    Cv2.CvtColor(src2Color, src2Gray, ColorConversionCodes.BGR2GRAY);

    // Setting hyperparameters
    int numBestMatch = 10;

    // Detect the keypoints and generate their descriptors using SIFT
    SIFT sift = SIFT.Create();
    KeyPoint[] keypoints1, keypoints2;
    MatOfFloat descriptors1 = new MatOfFloat();
    MatOfFloat descriptors2 = new MatOfFloat();
    sift.DetectAndCompute(src1Gray, null, out keypoints1, descriptors1);
    sift.DetectAndCompute(src2Gray, null, out keypoints2, descriptors2);

    // Matching descriptor vectors with a brute force matcher
    BFMatcher matcher = new BFMatcher();
    DMatch[] matches = matcher.Match(descriptors1, descriptors2);

    // Sort the match points by distance
    Comparison<DMatch> DMatchComparison = delegate(DMatch match1, DMatch match2)
    {
        return match1 < match2 ? -1 : 1;
    };
    Array.Sort(matches, DMatchComparison);

    // Get the best n match points
    int n = Math.Min(numBestMatch, keypoints1.Length);
    Point2f[] imagePoints1 = new Point2f[n];
    Point2f[] imagePoints2 = new Point2f[n];
    DMatch[] bestMatches = new DMatch[n];
    for (int i = 0; i < n; i++)
    {
        imagePoints1[i] = keypoints1[matches[i].QueryIdx].Pt;
        imagePoints2[i] = keypoints2[matches[i].TrainIdx].Pt;
        bestMatches[i] = matches[i];
    }

    // Visualize the match result
    Mat matchImg = new Mat();
    Cv2.DrawMatches(src1Color, keypoints1, src2Color, keypoints2, bestMatches, matchImg, Scalar.All(-1), Scalar.All(-1), null, DrawMatchesFlags.NotDrawSinglePoints);
    using (new OpenCvSharp.Window("SIFT matching", WindowMode.AutoSize, matchImg))
    {
        Cv2.WaitKey();
    }

    // Get the homography matrix that represents the transformation.
    // It is a 3x3 matrix, which can represent every possible matrix transformation in the 2-D plane.
    Mat homography = Cv2.FindHomography(InputArray.Create<Point2f>(imagePoints2), InputArray.Create<Point2f>(imagePoints1));

    // Calculate the transformed locations of the second image's corners;
    // use these values to compute the size of the result image
    Point2f[] transformedCorners = transfromConors(src2Color.Size(), homography);

    // Make sure the result image is large enough
    double maxWidth = src1Color.Width;
    double maxHeight = src1Color.Height;
    for (int i = 0; i < 4; i++)
    {
        if (transformedCorners[i].X > maxWidth) maxWidth = transformedCorners[i].X;
        if (transformedCorners[i].Y > maxHeight) maxHeight = transformedCorners[i].Y;
    }
    OpenCvSharp.Size resultSize = new OpenCvSharp.Size(maxWidth, maxHeight);

    // The position that the first image should be copied to in the final result
    int src1StartPositonY = 0;
    int src1StartPositonX = 0;

    // If some X coordinate is still less than 0, shift along the x-axis
    bool shouldShiftX = false;
    double shiftDistanceX = double.MinValue;
    for (int i = 0; i < 4; i++)
    {
        if (transformedCorners[i].X < 0)
        {
            shouldShiftX = true;
            shiftDistanceX = Math.Max(shiftDistanceX, -transformedCorners[i].X);
        }
    }
    if (shouldShiftX)
    {
        /*
         * matrix for shifting along the x-axis
         * 1 0 d
         * 0 1 0
         * 0 0 1
         */
        Mat shiftMatrix = new Mat(3, 3, homography.Type());
        shiftMatrix.Set<double>(0, 0, 1);
        shiftMatrix.Set<double>(0, 1, 0);
        shiftMatrix.Set<double>(0, 2, shiftDistanceX);
        shiftMatrix.Set<double>(1, 0, 0);
        shiftMatrix.Set<double>(1, 1, 1);
        shiftMatrix.Set<double>(1, 2, 0);
        shiftMatrix.Set<double>(2, 0, 0);
        shiftMatrix.Set<double>(2, 1, 0);
        shiftMatrix.Set<double>(2, 2, 1);
        homography = shiftMatrix * homography;
        resultSize.Width = resultSize.Width + (int)shiftDistanceX;
        src1StartPositonX = (int)shiftDistanceX;
    }

    // If some Y coordinate is still less than 0, shift along the y-axis
    bool shouldShiftY = false;
    double shiftDistanceY = double.MinValue;
    for (int i = 0; i < 4; i++)
    {
        if (transformedCorners[i].Y < 0)
        {
            shouldShiftY = true;
            shiftDistanceY = Math.Max(shiftDistanceY, -transformedCorners[i].Y);
        }
    }
    if (shouldShiftY)
    {
        /*
         * matrix for shifting along the y-axis
         * 1 0 0
         * 0 1 d
         * 0 0 1
         */
        Mat shiftMatrix = new Mat(3, 3, homography.Type());
        shiftMatrix.Set<double>(0, 0, 1);
        shiftMatrix.Set<double>(0, 1, 0);
        shiftMatrix.Set<double>(0, 2, 0);
        shiftMatrix.Set<double>(1, 0, 0);
        shiftMatrix.Set<double>(1, 1, 1);
        shiftMatrix.Set<double>(1, 2, shiftDistanceY);
        shiftMatrix.Set<double>(2, 0, 0);
        shiftMatrix.Set<double>(2, 1, 0);
        shiftMatrix.Set<double>(2, 2, 1);
        homography = shiftMatrix * homography;
        resultSize.Height = resultSize.Height + (int)shiftDistanceY;
        src1StartPositonY = (int)shiftDistanceY;
    }

    Mat result = new Mat();
    Cv2.WarpPerspective(src2Color, result, homography, resultSize);
    src1Color.CopyTo(new Mat(result, new OpenCvSharp.Rect(src1StartPositonX, src1StartPositonY, src1Gray.Cols, src1Gray.Rows)));
    return result;
}
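// The transfromConors helper called above is not shown in the example; this is a
// hedged sketch of what it presumably does, implemented with Cv2.PerspectiveTransform.
// The name and signature follow the call site; treat this as an illustrative
// reconstruction, not the original author's code.
static Point2f[] TransformCorners(OpenCvSharp.Size size, Mat homography)
{
    var corners = new[]
    {
        new Point2f(0, 0),
        new Point2f(size.Width, 0),
        new Point2f(size.Width, size.Height),
        new Point2f(0, size.Height),
    };
    // map each corner of the source image through the homography
    return Cv2.PerspectiveTransform(corners, homography);
}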
public static Result Run(AirPhoto mainMap, AirPhoto image)
{
    SIFT sift = new SIFT();
    BFMatcher matcher = new BFMatcher();
    Result res = new Result(mainMap, image);

    Mat map = new Mat(res.MainMap.FileName);
    Mat singleImage = new Mat(res.SingleImage.FileName);
    sift.Run(map, null, out res.MainMap.keyPoints, res.MainMap.Descriptors);
    sift.Run(singleImage, null, out res.SingleImage.keyPoints, res.SingleImage.Descriptors);
    res.Matches = matcher.Match(res.MainMap.Descriptors, res.SingleImage.Descriptors);

    // Quick calculation of the min and max distances between matched keypoints
    for (int i = 0; i < res.MainMap.Descriptors.Rows; i++)
    {
        double dist = res.Matches[i].Distance;
        if (dist < res.Min_dist) res.Min_dist = dist;
        if (dist > res.Max_dist) res.Max_dist = dist;
    }

    // Keep only the "good" matches: distance at most max(2 * min distance, 0.25)
    for (int i = 0; i < res.MainMap.Descriptors.Rows; i++)
    {
        if (res.Matches[i].Distance <= Math.Max(2 * res.Min_dist, 0.25))
        {
            res.Good_matchesRAW.Add(res.Matches[i]);
            res.Current_good_matches.Add(res.Matches[i]);
        }
    }

    for (int i = 0; i < res.Good_matchesRAW.Count; i++)
    {
        res.MainMap.Keypoints_goodRAW.Add(res.MainMap.Keypoints[res.Good_matchesRAW[i].QueryIdx].Pt);
        res.SingleImage.Keypoints_goodRAW.Add(res.SingleImage.Keypoints[res.Good_matchesRAW[i].TrainIdx].Pt);
        res.MainMap.Current_good_keypoints.Add(res.MainMap.Keypoints[res.Good_matchesRAW[i].QueryIdx].Pt);
        res.SingleImage.Current_good_keypoints.Add(res.SingleImage.Keypoints[res.Good_matchesRAW[i].TrainIdx].Pt);
    }

    Mat view = new Mat();
    Mat map_draw = new Mat(res.MainMap.FileName);
    Mat singleImage_draw = new Mat(res.SingleImage.FileName);
    Cv2.DrawMatches(map, res.MainMap.Keypoints, singleImage, res.SingleImage.Keypoints, res.Current_good_matches, view);
    for (int i = 0; i < res.MainMap.Current_good_keypoints.Count; i++)
    {
        Cv.DrawCircle((IplImage)map_draw, new CvPoint(Convert.ToInt32(res.MainMap.Current_good_keypoints[i].X), Convert.ToInt32(res.MainMap.Current_good_keypoints[i].Y)), map_draw.Width / 500, CvColor.Red, 2);
    }
    for (int i = 0; i < res.SingleImage.Current_good_keypoints.Count; i++)
    {
        Cv.DrawCircle((IplImage)singleImage_draw, new CvPoint(Convert.ToInt32(res.SingleImage.Current_good_keypoints[i].X), Convert.ToInt32(res.SingleImage.Current_good_keypoints[i].Y)), singleImage_draw.Width / 100, CvColor.Red, 2);
    }
    res.MainMap.RAWwithKP = map_draw;
    res.SingleImage.RAWwithKP = singleImage_draw;
    res.RAWmatches = view;
    return res;
}
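// A minimal sketch restating the filter above with LINQ, for readability. The
// 2x-min-distance rule and the 0.25 floor come from the example; the helper's name
// and standalone form are illustrative assumptions.
static DMatch[] GoodMatches(DMatch[] matches)
{
    double minDist = matches.Min(m => m.Distance);
    double threshold = Math.Max(2 * minDist, 0.25);
    return matches.Where(m => m.Distance <= threshold).ToArray();
}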
public Task <string> AuthenticateBio(bool?check) { int matchesCounter = 0; OpenFileDialog openFileDialog = new OpenFileDialog(); openFileDialog.Filter = "Image files (*.png;*.jpeg;*.jpg)|*.png;*.jpeg;*.jpg"; if (openFileDialog.ShowDialog() == true) { //arquivos da pasta images var files = Directory.GetFiles(Path.GetDirectoryName(System.Diagnostics.Process.GetCurrentProcess().MainModule.FileName) + "\\Resources\\Images"); //Source -- arquivo que escolhi e transformo ele em cinza Mat src = new Mat(Path.GetFullPath(openFileDialog.FileName), ImreadModes.Grayscale); //SURF - Speeded Up Robust Features var detector = SURF.Create(hessianThreshold: 400); //variaveis criadas em run-time, garbage collector cuida deles depois. var imgMatches = new Mat(); //aqui é o matcher -- COMPARADOR var matcher = new BFMatcher(); for (int i = 0; i < 4; i++) { switch (i) { case 0: Mat resSrcTermination = new Mat(); Mat resDstTermination = new Mat(); //pega o src, dá resize e joga em resSrc Cv2.Resize(src, resSrcTermination, new Size(450, 450)); // pega a área de interesse var srcTerminacao = new Mat(resSrcTermination, new Rect(75, 75, 150, 150)); foreach (var item in files) { //arquivo destinatario Mat dst = new Mat(item, ImreadModes.Grayscale); //pega o dst, dá resize e joga em resDst Cv2.Resize(dst, resDstTermination, new Size(450, 450)); //pega a área de interesse var resTerminacao = new Mat(resDstTermination, new Rect(75, 75, 150, 150)); // Keypoints - são as bolinhas var keypoints1 = detector.Detect(srcTerminacao); var keypoints2 = detector.Detect(resTerminacao); // -------------------- if (keypoints1.Length == keypoints2.Length) { firstkp = keypoints1.Length; matchesCounter++; if (check ?? false) { //Match das imagens filtradas var matches = matcher.Match(srcTerminacao, resTerminacao); try { //desenha as linhas entre os keypoints Cv2.DrawMatches(srcTerminacao, keypoints1, resTerminacao, keypoints2, matches, imgMatches); //mostra os matches Cv2.ImShow("Terminação", imgMatches); } catch { } } break; } } break; case 1: Mat resSrcBifurcation = new Mat(); Mat resDstBifurcation = new Mat(); //pega o src, dá resize e joga em resSrc Cv2.Resize(src, resSrcBifurcation, new Size(450, 450)); // var srcBifurcacao = new Mat(resSrcBifurcation, new Rect(75, 250, 150, 150)); foreach (var item in files) { //arquivo destinatario Mat dst = new Mat(item, ImreadModes.Grayscale); //pega o dst, dá resize e joga em resDst Cv2.Resize(dst, resDstBifurcation, new Size(450, 450)); //pega a área de interesse var resBifurcacao = new Mat(resDstBifurcation, new Rect(75, 250, 150, 150)); // Keypoints - são as bolinhas var keypoints1 = detector.Detect(srcBifurcacao); var keypoints2 = detector.Detect(resBifurcacao); // -------------------- if (keypoints1.Length == keypoints2.Length) { matchesCounter++; secondkp = keypoints1.Length; if (check ?? 
false) { //Match das imagens filtradas var matches = matcher.Match(srcBifurcacao, resBifurcacao); try { //desenha as linhas entre os keypoints Cv2.DrawMatches(srcBifurcacao, keypoints1, resBifurcacao, keypoints2, matches, imgMatches); //mostra os matches Cv2.ImShow("Bifurcação", imgMatches); } catch { } } break; } } break; case 2: Mat resSrcIndependency = new Mat(); Mat resDstIndependency = new Mat(); //pega o src, dá resize e joga em resSrc Cv2.Resize(src, resSrcIndependency, new Size(450, 450)); // pega a área de interesse var srcIndependency = new Mat(resSrcIndependency, new Rect(235, 250, 150, 120)); foreach (var item in files) { //arquivo destinatario Mat dst = new Mat(item, ImreadModes.Grayscale); //pega o dst, dá resize e joga em resDst Cv2.Resize(dst, resDstIndependency, new Size(450, 450)); //pega a área de interesse var resIndependency = new Mat(resDstIndependency, new Rect(235, 250, 150, 120)); // Keypoints - são as bolinhas var keypoints1 = detector.Detect(srcIndependency); var keypoints2 = detector.Detect(resIndependency); // -------------------- if (keypoints1.Length == keypoints2.Length) { thirdkp = keypoints1.Length; matchesCounter++; if (check ?? false) { //Match das imagens filtradas var matches = matcher.Match(srcIndependency, resIndependency); try { //desenha as linhas entre os keypoints Cv2.DrawMatches(srcIndependency, keypoints1, resIndependency, keypoints2, matches, imgMatches); //mostra os matches Cv2.ImShow("Independente", imgMatches); } catch { } } break; } } break; case 3: Mat resSrcIsland = new Mat(); Mat resDstIsland = new Mat(); //pega o src, dá resize e joga em resSrc Cv2.Resize(src, resSrcIsland, new Size(450, 450)); // pega a área de interesse var srcIlha = new Mat(resSrcIsland, new Rect(220, 220, 150, 130)); foreach (var item in files) { //arquivo destinatario Mat dst = new Mat(item, ImreadModes.Grayscale); //pega o dst, dá resize e joga em resDst Cv2.Resize(dst, resDstIsland, new Size(450, 450)); //pega a área de interesse var resIlha = new Mat(resDstIsland, new Rect(220, 220, 150, 130)); // Keypoints - são as bolinhas var keypoints1 = detector.Detect(srcIlha); var keypoints2 = detector.Detect(resIlha); // -------------------- if (keypoints1.Length == keypoints2.Length) { fourthkp = keypoints1.Length; matchesCounter++; if (check ?? false) { //Match das imagens filtradas var matches = matcher.Match(srcIlha, resIlha); try { //desenha as linhas entre os keypoints Cv2.DrawMatches(srcIlha, keypoints1, resIlha, keypoints2, matches, imgMatches); //mostra os matches Cv2.ImShow("Ilha", imgMatches); } catch { } } break; } } break; default: return(Task.FromResult("Canceled")); } } if (matchesCounter == 4 && firstkp == 201 && secondkp == 169 && thirdkp == 127 && fourthkp == 143) { return(Task.FromResult("ADMIN")); } else if (matchesCounter == 4 && firstkp == 174 && secondkp == 169 && thirdkp == 133 && fourthkp == 154) { return(Task.FromResult("DIRETOR")); } else if (matchesCounter == 4) { return(Task.FromResult("OK")); } else { return(Task.FromResult("Wrong")); } } else { return(Task.FromResult("Canceled")); } }
/// <summary> /// Feature extraction and matching on the given 2 images. /// The result is the images concatenated together with features, /// matching and homography drawn on it. /// </summary> /// <param name="image1"></param> /// <param name="image2"></param> /// <param name="featureType"></param> /// <returns></returns> private IplImage Matching(IplImage image1, IplImage image2, FeatureType featureType) { Mat src1 = new Mat(image1); Mat src2 = new Mat(image2); KeyPoint[] keypoints1; KeyPoint[] keypoints2; MatOfFloat descriptors1 = new MatOfFloat(); MatOfFloat descriptors2 = new MatOfFloat(); // extract features with different feature-extration methods switch (featureType) { case FeatureType.Sift: sift.Run(src1, null, out keypoints1, descriptors1); sift.Run(src2, null, out keypoints2, descriptors2); break; case FeatureType.Surf: surf.Run(src1, null, out keypoints1, descriptors1); surf.Run(src2, null, out keypoints2, descriptors2); break; default: throw new NotSupportedException("Sorry, missing feature type."); } // matching descriptor vectors with a brute force matcher DMatch[] matches; switch (FeatureMatcher) { case MatcherType.BruteForce: matches = bruteForceMatcher.Match(descriptors1, descriptors2); break; case MatcherType.FlannBased: matches = flannBasedMatcher.Match(descriptors1, descriptors2); break; default: throw new NotSupportedException("Sorry, missing matcher type."); } // get only "good" matches, only good matches will be drawn List <DMatch> goodMatches; // // check to get only good matches or all matches if (IsGoodMatching) { // quick calculation of max and min distances between keypoints IEnumerable <float> distances = matches.Select(i => i.Distance); double maxDistance = 0; double minDistance = 100; double newMinDistance = distances.Min(); double newMaxDistance = distances.Max(); minDistance = (newMinDistance < minDistance) ? newMinDistance : minDistance; maxDistance = (newMaxDistance > maxDistance) ? 
newMaxDistance : maxDistance; goodMatches = matches.Where(i => i.Distance <= GoodMatchingThreshold * minDistance).ToList(); } else { goodMatches = matches.ToList(); } // draw matches Mat view = new Mat(); Cv2.DrawMatches(src1, keypoints1, src2, keypoints2, goodMatches, view); // homography need at least 4 points or more if (IsHomography && goodMatches.Count > 4) { // get good keypoints (localize the object) List <Point2d> goodKeypoints1 = new List <Point2d>(); List <Point2d> goodKeypoints2 = new List <Point2d>(); Point2f pt; // get the keypoints from the good matches for (int i = 0; i < goodMatches.Count; i++) { pt = keypoints1[goodMatches[i].QueryIdx].Pt; goodKeypoints1.Add(new Point2d(pt.X, pt.Y)); pt = keypoints2[goodMatches[i].TrainIdx].Pt; goodKeypoints2.Add(new Point2d(pt.X, pt.Y)); } // find the homography Mat homography = Cv2.FindHomography(goodKeypoints2, goodKeypoints1, HomographyMethod.Ransac); // get the corners from image1 InputArray corners1 = InputArray.Create(new Point2f[] { new Point2f(0, 0), new Point2f(src1.Cols, 0), new Point2f(src1.Cols, src1.Rows), new Point2f(0, src1.Rows), }.ToList()); OutputArray corners2 = OutputArray.Create(new Point2f[] { new Point2f(0, 0), new Point2f(0, 0), new Point2f(0, 0), new Point2f(0, 0), }.ToList()); InputArray perspectiveMatrix = InputArray.Create(homography); Cv2.PerspectiveTransform(corners1, corners2, perspectiveMatrix); Mat corners2Matrix = corners2.GetMat(); Point2f point1 = corners2Matrix.At <Point2f>(0, 0); Point2f point2 = corners2Matrix.At <Point2f>(1, 0); Point2f point3 = corners2Matrix.At <Point2f>(2, 0); Point2f point4 = corners2Matrix.At <Point2f>(3, 0); Scalar color = new Scalar(0, 200, 253); // draw lines between the corners Cv2.Line(view, point1, point2, color, 4); Cv2.Line(view, point2, point3, color, 4); Cv2.Line(view, point3, point4, color, 4); Cv2.Line(view, point4, point1, color, 4); } IplImage result = view.ToIplImage(); return(result); }
private void bn_Match_Click(object sender, RoutedEventArgs e)
{
    if (listImage.Count > 0)
    {
        SubWindow.Win_Matching win = new SubWindow.Win_Matching(listImage);
        if (win.ShowDialog() == true)
        {
            int mode = win.cb_Mode.SelectedIndex;
            int idxSrc = win.cb_Src.SelectedIndex;
            int idxTmpl = win.cb_Tmpl.SelectedIndex;
            string strTitle = listImage[_nSelWin].Title;
            Mat matSrc = listImage[idxSrc].fn_GetImage();
            Mat matTmpl = listImage[idxTmpl].fn_GetImage();
            Mat matDst = new Mat();
            int width = matSrc.Cols;
            int height = matSrc.Rows;
            timeStart = DateTime.Now;
            if (mode == 0) // Template
            {
                Mat matResult = new Mat();
                Cv2.MatchTemplate(matSrc, matTmpl, matResult, TemplateMatchModes.SqDiffNormed);
                OpenCvSharp.Point matchLoc = new OpenCvSharp.Point();
                unsafe
                {
                    // Find the location of the minimum score (the best SqDiffNormed match)
                    float* pData = (float*)matResult.DataPointer;
                    float fMin = 1.0f;
                    for (int stepY = 0; stepY < matResult.Rows; stepY++)
                    {
                        for (int stepX = 0; stepX < matResult.Cols; stepX++)
                        {
                            if (fMin >= pData[stepY * matResult.Cols + stepX])
                            {
                                fMin = pData[stepY * matResult.Cols + stepX];
                                matchLoc.X = stepX;
                                matchLoc.Y = stepY;
                            }
                        }
                    }
                }
                matDst = matSrc.Clone();
                Cv2.CvtColor(matDst, matDst, ColorConversionCodes.GRAY2BGR);
                Cv2.Rectangle(matDst, new OpenCvSharp.Rect(matchLoc.X, matchLoc.Y, matTmpl.Cols, matTmpl.Rows), new Scalar(0, 255, 0));
            }
            else if (mode == 1) // SIFT
            {
                OpenCvSharp.Features2D.SIFT detector = OpenCvSharp.Features2D.SIFT.Create();
                KeyPoint[] keypoint1, keypoint2;
                Mat matDescriptor1 = new Mat();
                Mat matDescriptor2 = new Mat();
                detector.DetectAndCompute(matTmpl, new Mat(), out keypoint1, matDescriptor1);
                detector.DetectAndCompute(matSrc, new Mat(), out keypoint2, matDescriptor2);
                BFMatcher matcher = new BFMatcher();
                DMatch[] dMatches = matcher.Match(matDescriptor1, matDescriptor2);
                if (dMatches.Length > 0)
                {
                    Array.Sort(dMatches); // sort by distance so the best matches come first
                    int GOOD = Math.Min(50, (int)(dMatches.Length * 0.1));
                    DMatch[] dGood = new DMatch[GOOD];
                    for (int step = 0; step < GOOD; step++)
                    {
                        dGood[step] = dMatches[step];
                    }
                    Cv2.DrawMatches(matTmpl, keypoint1, matSrc, keypoint2, dGood, matDst, Scalar.All(-1), Scalar.All(-1), new List<byte>(), DrawMatchesFlags.NotDrawSinglePoints);
                }
            }
            else if (mode == 2) // SURF
            {
                OpenCvSharp.XFeatures2D.SURF detector = OpenCvSharp.XFeatures2D.SURF.Create(800);
                KeyPoint[] keypoint1, keypoint2;
                Mat matDescriptor1 = new Mat();
                Mat matDescriptor2 = new Mat();
                detector.DetectAndCompute(matTmpl, new Mat(), out keypoint1, matDescriptor1);
                detector.DetectAndCompute(matSrc, new Mat(), out keypoint2, matDescriptor2);
                BFMatcher matcher = new BFMatcher();
                DMatch[] dMatches = matcher.Match(matDescriptor1, matDescriptor2);
                if (dMatches.Length > 0)
                {
                    Array.Sort(dMatches); // sort by distance so the best matches come first
                    int GOOD = Math.Min(50, (int)(dMatches.Length * 0.1));
                    DMatch[] dGood = new DMatch[GOOD];
                    for (int step = 0; step < GOOD; step++)
                    {
                        dGood[step] = dMatches[step];
                    }
                    Cv2.DrawMatches(matTmpl, keypoint1, matSrc, keypoint2, dGood, matDst, Scalar.All(-1), Scalar.All(-1), new List<byte>(), DrawMatchesFlags.NotDrawSinglePoints);
                }
            }
            fn_WriteLog($"[Matching] {strTitle} ({(DateTime.Now - timeStart).TotalMilliseconds} ms)");
            fn_NewImage(matDst, $"Matching {mode}");
        }
    }
}
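// A minimal sketch showing that the unsafe pointer scan in the template branch above
// can be replaced by Cv2.MinMaxLoc, which returns the location of the best (minimum)
// SqDiffNormed score directly. matSrc/matTmpl are assumed to be the same Mats as in
// the example; the helper's name and standalone form are illustrative.
static OpenCvSharp.Point FindBestMatchLocation(Mat matSrc, Mat matTmpl)
{
    using (var matResult = new Mat())
    {
        Cv2.MatchTemplate(matSrc, matTmpl, matResult, TemplateMatchModes.SqDiffNormed);
        // For SqDiffNormed, the minimum value marks the best match
        Cv2.MinMaxLoc(matResult, out double minVal, out double maxVal, out OpenCvSharp.Point minLoc, out OpenCvSharp.Point maxLoc);
        return minLoc;
    }
}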