public override void RunTest()
{
    // Load the sample image once in grayscale and run KAZE and AKAZE on it side by side.
    var gray = new Mat(ImagePath.Lenna, ImreadModes.Grayscale);

    var kaze = KAZE.Create();
    var akaze = AKAZE.Create();

    KeyPoint[] kazeKeyPoints = null;
    KeyPoint[] akazeKeyPoints = null;
    var kazeDescriptors = new Mat();
    var akazeDescriptors = new Mat();

    // Time each detector's combined detect + compute pass.
    var kazeTime = MeasureTime(() =>
        kaze.DetectAndCompute(gray, null, out kazeKeyPoints, kazeDescriptors));
    var akazeTime = MeasureTime(() =>
        akaze.DetectAndCompute(gray, null, out akazeKeyPoints, akazeDescriptors));

    // Render the detected keypoints on top of the source image.
    var dstKaze = new Mat();
    var dstAkaze = new Mat();
    Cv2.DrawKeypoints(gray, kazeKeyPoints, dstKaze);
    Cv2.DrawKeypoints(gray, akazeKeyPoints, dstAkaze);

    // Show both results (window titles include the elapsed milliseconds) until a key is pressed.
    using (new Window(String.Format("KAZE [{0:F2}ms]", kazeTime.TotalMilliseconds), dstKaze))
    using (new Window(String.Format("AKAZE [{0:F2}ms]", akazeTime.TotalMilliseconds), dstAkaze))
    {
        Cv2.WaitKey();
    }
}
public void TestBOWKmeansTrainer()
{
    // Build a 100-word bag-of-visual-words vocabulary from AKAZE descriptors,
    // then re-describe the same image against that vocabulary.
    Image <Gray, byte> boxImage = EmguAssert.LoadImage <Gray, byte>("box.png");

    AKAZE akaze = new AKAZE();
    VectorOfKeyPoint keyPoints = new VectorOfKeyPoint();
    Mat rawDescriptors = new Mat();
    akaze.DetectAndCompute(boxImage, null, keyPoints, rawDescriptors, false);

    // Cluster the raw descriptors into the vocabulary (k-means++, 3 attempts).
    BOWKMeansTrainer bowTrainer = new BOWKMeansTrainer(100, new MCvTermCriteria(), 3, CvEnum.KMeansInitType.PPCenters);
    bowTrainer.Add(rawDescriptors);
    Mat vocabulary = new Mat();
    bowTrainer.Cluster(vocabulary);

    // Compute the image's BOW histogram descriptor using the new vocabulary.
    BFMatcher bruteForceMatcher = new BFMatcher(DistanceType.L2);
    BOWImgDescriptorExtractor bowExtractor = new BOWImgDescriptorExtractor(akaze, bruteForceMatcher);
    bowExtractor.SetVocabulary(vocabulary);
    Mat bowDescriptors = new Mat();
    bowExtractor.Compute(boxImage, keyPoints, bowDescriptors);
}
/// <summary>
/// Detects AKAZE features in both images, brute-force matches them, and
/// returns an image visualizing the matches.
/// </summary>
/// <param name="src1">First input image (BGR).</param>
/// <param name="src2">Second input image (BGR).</param>
/// <returns>A new Mat showing the matched keypoints between the two grayscale images.</returns>
private Mat MatchBySurf(Mat src1, Mat src2)
{
    using var gray1 = new Mat();
    using var gray2 = new Mat();
    Cv2.CvtColor(src1, gray1, ColorConversionCodes.BGR2GRAY);
    Cv2.CvtColor(src2, gray2, ColorConversionCodes.BGR2GRAY);

    //using var surf = SURF.Create(200, 4, 2, true);
    using var surf = AKAZE.Create();

    // Detect the keypoints and generate their descriptors.
    // BUG FIX: AKAZE's default MLDB descriptor is binary (CV_8U). The original
    // forced the output into Mat<float> and matched with the L2 norm, which is
    // only valid for float descriptors such as SURF's. Binary descriptors must
    // be kept as-is and matched with the Hamming norm.
    using var descriptors1 = new Mat();
    using var descriptors2 = new Mat();
    surf.DetectAndCompute(gray1, null, out var keypoints1, descriptors1);
    surf.DetectAndCompute(gray2, null, out var keypoints2, descriptors2);

    // Match descriptor vectors (Hamming distance for binary descriptors).
    using var bfMatcher = new BFMatcher(NormTypes.Hamming, false);
    DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);

    // Draw matches
    var bfView = new Mat();
    Cv2.DrawMatches(gray1, keypoints1, gray2, keypoints2, bfMatches, bfView,
        flags: DrawMatchesFlags.NotDrawSinglePoints);

    return bfView;
}
public void TestAkaze()
{
    // AKAZE serves as both the keypoint detector and the descriptor extractor.
    AKAZE akaze = new AKAZE();
    //ParamDef[] parameters = akaze.GetParams();
    EmguAssert.IsTrue(TestFeature2DTracker(akaze, akaze), "Unable to find homography matrix");
}
/// <summary>
/// Matches AKAZE features between the Src and Target images.
/// </summary>
public void RunMutching()
{
    // Extract AKAZE keypoints and descriptors from both images.
    var akaze = AKAZE.Create();
    var descriptorSrc = new Mat();
    var descriptorTarget = new Mat();
    akaze.DetectAndCompute(SrcMat, null, out KeyPtsSrc, descriptorSrc);
    akaze.DetectAndCompute(TargetMat, null, out KeyPtsTarget, descriptorTarget);

    // Brute-force match the two descriptor sets.
    var matcher = DescriptorMatcher.Create("BruteForce");
    var matches = matcher.Match(descriptorSrc, descriptorTarget);

    // Sort matches by ascending distance and keep only the top UseRate fraction.
    SelectedMatched = matches
        .OrderBy(m => m.Distance)
        .Take((int)(matches.Length * UseRate));

    // Render the Src - Target correspondences into the result image.
    Cv2.DrawMatches(SrcMat, KeyPtsSrc, TargetMat, KeyPtsTarget, SelectedMatched, MatchingResultMat);
}
public void TestMSER()
{
    // MSER provides the regions/keypoints; AKAZE computes their descriptors.
    MSERDetector mser = new MSERDetector();
    AKAZE akaze = new AKAZE();
    //ParamDef[] parameters = mser.GetParams();
    TestFeature2DTracker(mser, akaze);
}
public void GetKeypoints(Mat gray)
{
    // Run AKAZE on the grayscale input and keep only the keypoints
    // (stored in the akazeKeyPoints field); descriptors are discarded.
    var detector = AKAZE.Create();
    var descriptors = new Mat();
    detector.DetectAndCompute(gray, null, out akazeKeyPoints, descriptors);
}
public void TestGFTTDetector()
{
    // Good-features-to-track finds up to 1000 corners; AKAZE describes them.
    GFTTDetector gftt = new GFTTDetector(1000, 0.01, 1, 3, false, 0.04);
    AKAZE akaze = new AKAZE();
    //ParamDef[] parameters = gftt.GetParams();
    TestFeature2DTracker(gftt, akaze);
}
/// <summary>
/// Tracks a point across frames: detects AKAZE features in the (grayscaled)
/// buffer, matches them against the previous frame, and shifts lastPoint by
/// the average displacement of the good matches.
/// </summary>
/// <param name="buffer">Current frame; converted to grayscale in place.</param>
/// <returns>The frame with keypoints/tracking circle drawn (or the raw buffer).</returns>
private Mat Process(ref Mat buffer)
{
    Mat img = new Mat();
    AKAZE akaze = AKAZE.Create();
    akaze.Threshold = 0.0001;
    KeyPoint[] keyPoints;
    DMatch[] matches;
    List <DMatch> goodMatches = new List <DMatch>();
    Mat descriptor = new Mat();
    DescriptorMatcher matcher = DescriptorMatcher.Create("BruteForce");

    Cv2.CvtColor(buffer, buffer, ColorConversionCodes.BGR2GRAY);
    akaze.DetectAndCompute(buffer, null, out keyPoints, descriptor);
    Cv2.DrawKeypoints(buffer, keyPoints, img, Scalar.Black);
    Cv2.ImShow("keyps", img);

    if (islastSeted)
    {
        // Keep only matches closer than the distance threshold.
        matches = matcher.Match(descriptor, lastDescriptor);
        for (int i = 0; i < matches.Length; i++)
        {
            if (matches[i].Distance < distanceStandard)
            {
                goodMatches.Add(matches[i]);
            }
        }
        //Cv2.DrawMatches(buffer, keyPoints, lastBuffer, lastkeyPoints, goodMatches, img);
        img = buffer;
        if (goodMatches.Count > 3)
        {
            // Average displacement of the good matches between frames.
            float[] average = new float[2];
            average[0] = 0;
            average[1] = 0;
            for (int i = 0; i < goodMatches.Count; i++)
            {
                // BUG FIX: the original indexed goodMatches[0] inside this loop,
                // accumulating the first match's displacement N times instead of
                // summing over all good matches.
                average[0] += keyPoints[goodMatches[i].QueryIdx].Pt.X - lastkeyPoints[goodMatches[i].TrainIdx].Pt.X;
                average[1] += keyPoints[goodMatches[i].QueryIdx].Pt.Y - lastkeyPoints[goodMatches[i].TrainIdx].Pt.Y;
            }
            lastPoint = new Point(lastPoint.X + average[0] / goodMatches.Count, lastPoint.Y + average[1] / goodMatches.Count);
            // NOTE(review): the previous-frame state is only refreshed when enough
            // good matches were found — confirm this is intentional.
            lastBuffer = buffer;
            lastDescriptor = descriptor;
            lastkeyPoints = keyPoints;
        }
        Cv2.Circle(img, lastPoint, 15, Scalar.Red, 3);
    }
    else
    {
        // First frame: initialize tracking state at the image center.
        islastSeted = true;
        img = buffer;
        lastPoint = new Point(buffer.Cols / 2, buffer.Rows / 2);
        lastBuffer = buffer;
        lastDescriptor = descriptor;
        lastkeyPoints = keyPoints;
    }
    return(img);
}
public void TestAkazeBlankImage()
{
    // AKAZE on a featureless (all-black) image should run without throwing.
    AKAZE akaze = new AKAZE();
    Image <Gray, Byte> blank = new Image <Gray, byte>(1024, 900);
    VectorOfKeyPoint keyPoints = new VectorOfKeyPoint();
    Mat descriptors = new Mat();
    akaze.DetectAndCompute(blank, null, keyPoints, descriptors, false);
}
public void TestLATCH()
{
    // Detect with AKAZE, describe with LATCH.
    //SURF surf = new SURF(300);
    AKAZE akazeDetector = new AKAZE();
    LATCH latchDescriptor = new LATCH();
    TestFeature2DTracker(akazeDetector, latchDescriptor);
    //EmguAssert.IsTrue(TestFeature2DTracker(akaze, latch), "Unable to find homography matrix");
}
public void TestDAISY()
{
    // Detect with AKAZE, describe with DAISY.
    //SURF surf = new SURF(300);
    AKAZE akazeDetector = new AKAZE();
    DAISY daisyDescriptor = new DAISY();
    TestFeature2DTracker(akazeDetector, daisyDescriptor);
    //EmguAssert.IsTrue(TestFeature2DTracker(akaze, daisy), "Unable to find homography matrix");
}
/// <summary>
/// Matches AKAZE features between src and target, draws the correspondences
/// into ResultMat, and computes the centroid of the matched points on each side
/// (PtSrc / PtTarget).
/// </summary>
/// <param name="src">Source image; cloned, not mutated.</param>
/// <param name="target">Target image; cloned, not mutated.</param>
public FeatureMatching(Mat src, Mat target)
{
    // Clone inputs so later drawing cannot mutate the caller's Mats.
    SrcMat = src.Clone();
    TargetMat = target.Clone();
    ResultMat = new Mat();

    // Centroids of the matched feature points, accumulated below.
    PtSrc = new System.Drawing.PointF(0.0f, 0.0f);
    PtTarget = new System.Drawing.PointF(0.0f, 0.0f);

    // Detect AKAZE keypoints and descriptors in both images.
    var akaze = AKAZE.Create();
    var descriptorSrc = new Mat();
    var descriptorTarget = new Mat();
    akaze.DetectAndCompute(SrcMat, null, out KeyPtsSrc, descriptorSrc);
    akaze.DetectAndCompute(TargetMat, null, out KeyPtsTarget, descriptorTarget);

    // Brute-force matching.
    var matcher = DescriptorMatcher.Create("BruteForce");
    var matches = matcher.Match(descriptorSrc, descriptorTarget);

    // Keep the best match by distance. PERF FIX: materialize the query once —
    // the original left it deferred, so the sort + take re-ran on every
    // enumeration (DrawMatches, both centroid loops, four Count() calls).
    var selectedMatches = matches
        .OrderBy(p => p.Distance)
        //.Take(matches.Length / 2);
        .Take(1)
        .ToArray();

    // Draw the Src - Target correspondence image.
    Cv2.DrawMatches(SrcMat, KeyPtsSrc, TargetMat, KeyPtsTarget, selectedMatches, ResultMat);

    // ROBUSTNESS: with no matches the original divided 0 by 0, leaving NaN
    // centroids; bail out and keep the (0, 0) defaults instead.
    if (selectedMatches.Length == 0)
    {
        return;
    }

    // Centroid of the matched feature points (Src side).
    foreach (var item in selectedMatches)
    {
        int idx = item.QueryIdx;
        PtSrc.X += KeyPtsSrc[idx].Pt.X;
        PtSrc.Y += KeyPtsSrc[idx].Pt.Y;
    }
    PtSrc.X /= (float)selectedMatches.Length;
    PtSrc.Y /= (float)selectedMatches.Length;

    // Centroid of the matched feature points (Target side).
    foreach (var item in selectedMatches)
    {
        int idx = item.TrainIdx;
        PtTarget.X += KeyPtsTarget[idx].Pt.X;
        PtTarget.Y += KeyPtsTarget[idx].Pt.Y;
    }
    PtTarget.X /= (float)selectedMatches.Length;
    PtTarget.Y /= (float)selectedMatches.Length;
}
public void ComputeImageFeaturesTest()
{
    // ComputeImageFeatures with an AKAZE finder should populate keypoints
    // and a non-empty descriptor matrix for a grayscale input.
    using var finder = AKAZE.Create();
    using var src = Image("abbey_road.jpg", ImreadModes.Grayscale);
    using var features = CvDetail.ComputeImageFeatures(finder, src);

    Assert.NotNull(features);
    Assert.NotEqual(0, features.ImgIdx);
    Assert.Equal(src.Size(), features.ImgSize);
    Assert.NotEmpty(features.Keypoints);
    Assert.NotNull(features.Descriptors);
    Assert.False(features.Descriptors.Empty());
}
static (KeyPoint[], Mat) FeatureCommand(Mat source)
{
    // AKAZE feature extraction: returns the detected keypoints together
    // with their descriptor matrix.
    var akaze = AKAZE.Create();
    var descriptors = new Mat();
    akaze.DetectAndCompute(source, null, out KeyPoint[] detectedKeyPoints, descriptors);
    return (detectedKeyPoints, descriptors);
}
public void AffineBestOf2NearestMatcherTest()
{
    // Match AKAZE features between the Tsukuba stereo pair and verify the
    // affine matcher yields matches, an inlier mask, a homography, and
    // positive confidence.
    using var finder = AKAZE.Create();
    using var leftImage = Image("tsukuba_left.png", ImreadModes.Grayscale);
    using var rightImage = Image("tsukuba_right.png", ImreadModes.Grayscale);
    using var leftFeatures = CvDetail.ComputeImageFeatures(finder, leftImage);
    using var rightFeatures = CvDetail.ComputeImageFeatures(finder, rightImage);

    using var matcher = new AffineBestOf2NearestMatcher();
    using var matchesInfo = matcher.Apply(leftFeatures, rightFeatures);

    Assert.NotEmpty(matchesInfo.Matches);
    Assert.NotEmpty(matchesInfo.InliersMask);
    Assert.False(matchesInfo.H.Empty());
    Assert.True(matchesInfo.Confidence > 0);
}
// Sketch of a bag-of-visual-words training pipeline. UNFINISHED by design:
// img is never loaded (detector.Detect(null) will throw at runtime) and the
// method ends with NotImplementedException.
private static void BowTest()
{
    // Matcher + AKAZE as both extractor and detector.
    DescriptorMatcher matcher = new BFMatcher();
    Feature2D extractor = AKAZE.Create();
    Feature2D detector = AKAZE.Create();

    // 200-word vocabulary, k-means stops after 10 iterations or eps 0.001.
    TermCriteria criteria = new TermCriteria(CriteriaType.Count | CriteriaType.Eps, 10, 0.001);
    BOWKMeansTrainer bowTrainer = new BOWKMeansTrainer(200, criteria, 1);
    BOWImgDescriptorExtractor bowDescriptorExtractor = new BOWImgDescriptorExtractor(extractor, matcher);

    // TODO: load a real training image here; null will crash Detect below.
    Mat img = null;
    KeyPoint[] keypoint = detector.Detect(img);
    Mat features = new Mat();
    extractor.Compute(img, ref keypoint, features);
    bowTrainer.Add(features);
    throw new NotImplementedException();
}
/// <summary>
/// Stitch images together
/// </summary>
/// <param name="images">The list of images to stitch</param>
/// <returns>A final stitched image</returns>
public static Mat StichImages(List <Mat> images)
{
    // Copy every input image into the vector the stitcher consumes.
    VectorOfMat matVector = new VectorOfMat();
    foreach (Mat img in images)
    {
        matVector.Push(img);
    }

    // Scan mode targets mostly-planar inputs such as document scans.
    Stitcher stitcher = new Stitcher(Stitcher.Mode.Scans);

    // AKAZE supplies the keypoints used for alignment.
    // Other detectors that also work here: Brisk, ORBDetector, KAZE.
    AKAZE detector = new AKAZE();
    stitcher.SetFeaturesFinder(detector);

    // Perform the stitch; works best with a small set (around 2-3 images,
    // no more than ~35).
    Mat output = new Mat();
    stitcher.Stitch(matVector, output);
    return(output);
}
private void btnAKAZE_Click(object sender, EventArgs e)
{
    // Detect AKAZE keypoints on the working image and draw each one as a
    // rectangle (sized by the keypoint scale) on a colour copy of the root image.
    var temproot = RootImg.Clone();
    var tempimg1 = WorkingImg.Clone();
    Image <Bgr, byte> colorimg = tempimg1.Convert <Bgr, byte>();
    Image <Bgr, byte> tempOriImg = temproot.Convert <Bgr, byte>();

    var akaze = new AKAZE(descriptorChannels: 1);
    var detectedKeyPoints = akaze.Detect(WorkingImg);
    foreach (var kp in detectedKeyPoints)
    {
        var rect = new Rectangle
        {
            X = (int)kp.Point.X,
            Y = (int)kp.Point.Y,
            Width = (int)kp.Size,
            Height = (int)kp.Size,
        };
        tempOriImg.Draw(rect, new Bgr(60, 200, 10), 2);
    }

    rtxLog.AppendText("btnAKAZE_Click" + Environment.NewLine);
    RegistHisroty(tempOriImg);
}
// http://docs.opencv.org/3.0-beta/modules/features2d/doc/features2d.html
// http://docs.opencv.org/3.0-beta/modules/features2d/doc/feature_detection_and_description.html
// http://docs.opencv.org/3.0-beta/doc/tutorials/features2d/akaze_matching/akaze_matching.html
/// <summary>
/// Compare images with a feature detection algorithm (AKAZE + KNN matching
/// with Lowe's ratio test). Best-effort: OpenCV failures are swallowed and
/// reported as 0 similarity rather than thrown.
/// </summary>
/// <param name="mat_image1"> 1st image (OpenCv Mat)</param>
/// <param name="mat_image2"> 2nd image (OpenCv Mat)</param>
/// <param name="feature_count">number of feature keypoints found</param>
/// <param name="match_count">number of matches founds</param>
/// <param name="view">image of the feature and good matches</param>
/// <returns>Similarity % (#good matches/ # matches)</returns>
private static double CompareFeatures(Mat mat_image1, Mat mat_image2, out double feature_count, out double match_count, out Bitmap view)
{
    // Out-parameters get safe defaults up front so every early return is valid.
    match_count = 0;
    feature_count = 0;
    int nmatch = 0;
    int ngmatch = 0;
    view = new Bitmap(1, 1);

    // stop here if one of the image does not seem to be valid
    if (mat_image1 == null) { return(0); }
    if (mat_image1.Empty()) { return(0); }
    if (mat_image2 == null) { return(0); }
    if (mat_image2.Empty()) { return(0); }

    try
    {
        // Detect the keypoints and generate their descriptors
        var detector = AKAZE.Create();
        //var detector = BRISK.Create();
        //var detector = ORB.Create(); // require grayscale

        /*
         * // grayscale
         * Cv2.CvtColor(mat_image1, mat_image1, ColorConversionCodes.BGR2GRAY);
         * Cv2.CvtColor(mat_image2, mat_image2, ColorConversionCodes.BGR2GRAY);
         * mat_image1.EqualizeHist();
         * mat_image2.EqualizeHist();
         */

        // NOTE(review): AKAZE's default descriptors are binary, yet they are
        // stored in MatOfFloat here while matched with the Hamming norm below —
        // confirm this combination behaves as intended.
        var descriptors1 = new MatOfFloat();
        var descriptors2 = new MatOfFloat();
        var keypoints1 = new KeyPoint[1];
        var keypoints2 = new KeyPoint[1];
        try
        {
            keypoints1 = detector.Detect(mat_image1);
            keypoints2 = detector.Detect(mat_image2);
            if (keypoints1 != null)
            {
                detector.Compute(mat_image1, ref keypoints1, descriptors1);
                if (descriptors1 == null) { return(0); }
            }
            if (keypoints2 != null)
            {
                detector.Compute(mat_image2, ref keypoints2, descriptors2);
                if (descriptors2 == null) { return(0); }
            }
        }
        // Deliberate best-effort: native-layer failures are ignored and the
        // method continues with whatever was computed so far.
        catch (System.AccessViolationException) { }
        catch (Exception) { }

        // Find good matches (Nearest neighbor matching ratio)
        float nn_match_ratio = 0.95f;
        var matcher = new BFMatcher(NormTypes.Hamming);
        var nn_matches = new DMatch[1][];
        try
        {
            // 2-NN per query descriptor, for the ratio test below.
            nn_matches = matcher.KnnMatch(descriptors1, descriptors2, 2);
        }
        catch (System.AccessViolationException) { }
        catch (Exception) { }

        var good_matches = new List <DMatch>();
        var matched1 = new List <KeyPoint>();
        var matched2 = new List <KeyPoint>();
        var inliers1 = new List <KeyPoint>();
        var inliers2 = new List <KeyPoint>();
        if (nn_matches != null && nn_matches.Length > 0)
        {
            for (int i = 0; i < nn_matches.GetLength(0); i++)
            {
                if (nn_matches[i].Length >= 2)
                {
                    // Lowe's ratio test: keep the best match only if it is
                    // clearly better than the second best.
                    DMatch first = nn_matches[i][0];
                    float dist1 = nn_matches[i][0].Distance;
                    float dist2 = nn_matches[i][1].Distance;
                    if (dist1 < nn_match_ratio * dist2)
                    {
                        good_matches.Add(first);
                        matched1.Add(keypoints1[first.QueryIdx]);
                        matched2.Add(keypoints2[first.TrainIdx]);
                    }
                }
            }
        }

        // Count matches & features
        feature_count = keypoints1.Length + keypoints2.Length;
        nmatch = nn_matches.Length;
        match_count = nmatch;
        ngmatch = good_matches.Count;

        // Draw matches view
        var mview = new Mat();
        // show images + good matchs
        if (keypoints1.Length > 0 && keypoints2.Length > 0)
        {
            Cv2.DrawMatches(mat_image1, keypoints1, mat_image2, keypoints2, good_matches.ToArray(), mview);
            view = BitmapConverter.ToBitmap(mview);
        }
        else
        {
            // no matchs
            view = new Bitmap(1, 1);
        }
    }
    catch (System.AccessViolationException e)
    {
        Console.Error.WriteLine("Access Error => CompareFeatures : \n{0}", e.Message);
    }
    catch (Exception)
    {
        // Console.Error.WriteLine("Error => CompareFeatures : \n{0}", e.Message);
    }

    // similarity = 0 when there was no feature or no match
    if (feature_count <= 0) { return(0); }
    if (nmatch <= 0) { return(0); }

    // similarity = ratio of good matches/ # matches
    var similarity = 100.0 * ngmatch / nmatch;
    return(similarity);
}
// Discord bot command: downloads a map image attached to the triggering
// message, trims/resizes it, compares it against the known map tiles with
// AKAZE feature matching, and replaces exactly one matching tile (backing up
// the previous version). Exactly-one-match is required; otherwise all
// candidates are discarded and an error is reported.
public async Task Update()
{
    // Folder layout rooted at the per-guild warehouse directory.
    string serverfolder = $@"R:\Project\RUMM.warehouse\{Context.Guild.Id}";
    string datafolder = $@"{serverfolder}\Data";
    string datafolder_recenter = $@"{datafolder}\Recenter";
    string datafolder_trimmode = $@"{datafolder}\Trimmode";
    string uploadedfolder = $@"{serverfolder}\Uploaded";
    string uploadedfolder_map = $@"{uploadedfolder}\UploadedMap";
    string trimedfolder = $@"{serverfolder}\Trimed";
    string trimedfolder_map = $@"{trimedfolder}\TrimedMap";
    string trimedfolder_map_pre = $@"{trimedfolder}\TrimedMap[Pre]";
    string trimedfolder_map_backup = $@"{trimedfolder}\TrimedMap[Backup]";

    // Data text files (currently unused in this method).
    string recenter_txt = $@"{datafolder_recenter}\recenter.txt";
    string trimmode_txt = $@"{datafolder_trimmode}\trimmode.txt";

    // Reject the command when no image is attached to the message.
    if (!Context.Message.Attachments.Any())
    {
        await Context.Channel.SendErrorAsync("エラー", "画像が添付されてないよ!必ずコマンドと併せて画像を送信してね!");
        return;
    }

    // Grab the attachments from the triggering Discord message.
    var attachments = Context.Message.Attachments;

    // NOTE(review): WebClient is legacy; HttpClient is the modern choice.
    WebClient myWebClient = new WebClient();

    // Download the first attachment to the upload folder.
    string uploadedmap = $@"{uploadedfolder_map}\uploadedmap.png";
    string url = attachments.ElementAt(0).Url;
    myWebClient.DownloadFile(url, uploadedmap);

    // Trim the uploaded map and resize it to 384px.
    string trimedmap_pre = $@"{trimedfolder_map_pre}\trimedmap[pre].png";
    Call.Device(uploadedmap, trimedmap_pre);
    Graphic.Resize_Own(trimedmap_pre, 384);

    // Every known map tile to compare the new image against.
    var comparemap = Directory.EnumerateFiles(trimedfolder_map, "*", SearchOption.AllDirectories);

    // Returns the mean AKAZE match distance between the two images
    // (smaller = more similar). The 'show' parameter is currently unused.
    float ImageMatch(Mat mat1, Mat mat2, bool show)
    {
        using (var descriptors1 = new Mat())
        using (var descriptors2 = new Mat())
        {
            // Detect keypoints and compute descriptors with AKAZE.
            var akaze = AKAZE.Create();
            akaze.DetectAndCompute(mat1, null, out KeyPoint[] keyPoints1, descriptors1);
            akaze.DetectAndCompute(mat2, null, out KeyPoint[] keyPoints2, descriptors2);

            // Brute-force Hamming match between the two descriptor sets.
            var matcher = new BFMatcher(NormTypes.Hamming, false);
            var matches = matcher.Match(descriptors1, descriptors2);

            // Mean distance over all matches.
            var sum = matches.Sum(x => x.Distance);
            return(sum / matches.Length);
        }
    }

    // Compare the new image against each stored tile; mark close matches
    // (score < 75) with an "[await]" copy in the pre folder.
    foreach (string comparemapnum in comparemap)
    {
        // Tile filenames encode coordinates as "x,z.png".
        string mapxcoord = comparemapnum.Split(',')[0].Replace(trimedfolder_map + "\\", "");
        string mapzcoord = comparemapnum.Split(',')[1].Replace(".png", "");
        string trimedmap = $@"{trimedfolder_map}\{mapxcoord},{mapzcoord}.png";
        string trimedmap_await = $@"{trimedfolder_map_pre}\{mapxcoord},{mapzcoord}[await].png";
        using (var mat1 = new Mat(trimedmap_pre))
        using (var mat2 = new Mat(trimedmap))
        {
            // Compare the two images (mean match distance as score).
            float score = ImageMatch(mat1, mat2, true);
            Console.WriteLine(score);
            if (score < 75)
            {
                File.Copy(trimedmap_pre, trimedmap_await, true);
            }
        }
    }

    // Collect the "[await]" candidates produced above.
    List <string> coordslist = new List <string>();
    string searchfileword = @"*await*.png";
    string[] comparemap2 = Directory.GetFiles(trimedfolder_map_pre, searchfileword);
    foreach (string mapnum in comparemap2)
    {
        coordslist.Add(mapnum);
    }
    string[] filelist = Directory.GetFiles(trimedfolder_map_pre, searchfileword);

    if (coordslist.Count() == 1)
    {
        // Exactly one tile matched: replace it and keep a dated backup.
        foreach (string premapnum in filelist)
        {
            string mapxcoord = premapnum.Split(',')[0].Replace(trimedfolder_map_pre + "\\", "");
            string mapzcoord = premapnum.Split(',')[1].Replace("[await].png", "");
            string trimedmap = $@"{trimedfolder_map}\{mapxcoord},{mapzcoord}.png";
            string trimedfolder_map_backup_foreach = $@"{trimedfolder_map_backup}\{mapxcoord},{mapzcoord}";
            string trimedmap_backup = $@"{trimedfolder_map_backup_foreach}\{DateTime.Now.ToString("yyyyMMdd")}.png";
            Directory.CreateDirectory(trimedfolder_map_backup_foreach);
            File.Copy(premapnum, trimedmap, true);
            File.Copy(premapnum, trimedmap_backup, true);
            File.Delete(premapnum);
        }
        await Context.Channel.SendSuccessAsync("完了", "正常に画像を切り取ったよ!");
    }
    else if (coordslist.Count() > 1 || coordslist.Count() == 0)
    {
        // Ambiguous (or no) match: discard all candidates and report failure.
        foreach (string premapnum in filelist)
        {
            File.Delete(premapnum);
        }
        await Context.Channel.SendErrorAsync("エラー", "定義されている地図と類似度が高くないよ!");
    }
}
// Loads a left/right picture pair, matches AKAZE features between them,
// warps the left image onto the right via a RANSAC homography, and
// highlights per-channel differences between the aligned images. Results
// are shown in pictureBox3..8 and saved under the "result" folder.
public void FindContours(string sLeftPictureFile, string sRightPictureFile)
{
    Mat tokuLeft = new Mat();
    Mat tokuRight = new Mat();
    Mat output = new Mat();
    AKAZE akaze = AKAZE.Create();
    KeyPoint[] keyPointsLeft;
    KeyPoint[] keyPointsRight;
    Mat descriptorLeft = new Mat();
    Mat descriptorRight = new Mat();
    DescriptorMatcher matcher;  // matching strategy
    DMatch[] matches;           // match results between the two descriptor sets

    // Load both input pictures in colour.
    Mat Lsrc = new Mat(sLeftPictureFile, ImreadModes.Color);
    Mat Rsrc = new Mat(sRightPictureFile, ImreadModes.Color);

    // Detect keypoints and compute their descriptors.
    akaze.DetectAndCompute(Lsrc, null, out keyPointsLeft, descriptorLeft);
    akaze.DetectAndCompute(Rsrc, null, out keyPointsRight, descriptorRight);

    // Draw and display/save the left image's keypoints.
    Cv2.DrawKeypoints(Lsrc, keyPointsLeft, tokuLeft);
    Image imageLeftToku = BitmapConverter.ToBitmap(tokuLeft);
    pictureBox3.SizeMode = PictureBoxSizeMode.Zoom;
    pictureBox3.Image = imageLeftToku;
    tokuLeft.SaveImage("result/LeftToku.jpg");

    // Draw and display/save the right image's keypoints.
    Cv2.DrawKeypoints(Rsrc, keyPointsRight, tokuRight);
    Image imageRightToku = BitmapConverter.ToBitmap(tokuRight);
    pictureBox4.SizeMode = PictureBoxSizeMode.Zoom;
    pictureBox4.Image = imageRightToku;
    tokuRight.SaveImage("result/RightToku.jpg");

    // Brute-force matching between the two descriptor sets.
    matcher = DescriptorMatcher.Create("BruteForce");
    matches = matcher.Match(descriptorLeft, descriptorRight);
    Cv2.DrawMatches(Lsrc, keyPointsLeft, Rsrc, keyPointsRight, matches, output);
    output.SaveImage(@"result\output.jpg");

    // Collect the matched point coordinates on both sides.
    int size = matches.Count();
    var getPtsSrc = new Vec2f[size];
    var getPtsTarget = new Vec2f[size];
    int count = 0;
    foreach (var item in matches)
    {
        var ptSrc = keyPointsLeft[item.QueryIdx].Pt;
        var ptTarget = keyPointsRight[item.TrainIdx].Pt;
        getPtsSrc[count][0] = ptSrc.X;
        getPtsSrc[count][1] = ptSrc.Y;
        getPtsTarget[count][0] = ptTarget.X;
        getPtsTarget[count][1] = ptTarget.Y;
        count++;
    }

    // Estimate the homography that maps Src onto Target (RANSAC for robustness).
    var hom = Cv2.FindHomography(
        InputArray.Create(getPtsSrc),
        InputArray.Create(getPtsTarget),
        HomographyMethods.Ransac);

    // Warp the left image into the right image's frame using the homography.
    Mat WarpedSrcMat = new Mat();
    Cv2.WarpPerspective(
        Lsrc, WarpedSrcMat, hom,
        new OpenCvSharp.Size(Rsrc.Width, Rsrc.Height));
    WarpedSrcMat.SaveImage(@"result\Warap.jpg");

    // Display the warped left image.
    Image imageLeftSyaei = BitmapConverter.ToBitmap(WarpedSrcMat);
    pictureBox5.SizeMode = PictureBoxSizeMode.Zoom;
    pictureBox5.Image = imageLeftSyaei;

    // Display the (unchanged) right image for comparison.
    Image imageRightSyaei = BitmapConverter.ToBitmap(Rsrc);
    pictureBox6.SizeMode = PictureBoxSizeMode.Zoom;
    pictureBox6.Image = imageRightSyaei;

    // Convert both images to 16-bit signed and split into channel planes.
    Mat LmatFloat = new Mat();
    WarpedSrcMat.ConvertTo(LmatFloat, MatType.CV_16SC3);
    Mat[] LmatPlanes = LmatFloat.Split();
    Mat RmatFloat = new Mat();
    Rsrc.ConvertTo(RmatFloat, MatType.CV_16SC3);
    Mat[] RmatPlanes = RmatFloat.Split();

    // Per-channel absolute difference, then median blur to suppress noise.
    Mat diff0 = new Mat();
    Mat diff1 = new Mat();
    Mat diff2 = new Mat();
    Cv2.Absdiff(LmatPlanes[0], RmatPlanes[0], diff0);
    Cv2.Absdiff(LmatPlanes[1], RmatPlanes[1], diff1);
    Cv2.Absdiff(LmatPlanes[2], RmatPlanes[2], diff2);
    Cv2.MedianBlur(diff0, diff0, 5);
    Cv2.MedianBlur(diff1, diff1, 5);
    Cv2.MedianBlur(diff2, diff2, 5);
    diff0.SaveImage("result/diff0.jpg");
    diff1.SaveImage("result/diff1.jpg");
    diff2.SaveImage("result/diff2.jpg");

    // Merge the channel differences into a single mask.
    Mat wiseMat = new Mat();
    Cv2.BitwiseOr(diff0, diff1, wiseMat);
    Cv2.BitwiseOr(wiseMat, diff2, wiseMat);
    wiseMat.SaveImage("result/wiseMat.jpg");

    // Morphological opening + dilation, then binarize at threshold 100.
    Mat openingMat = new Mat();
    Cv2.MorphologyEx(wiseMat, openingMat, MorphTypes.Open, new Mat());
    Mat dilationMat = new Mat();
    Cv2.Dilate(openingMat, dilationMat, new Mat());
    Cv2.Threshold(dilationMat, dilationMat, 100, 255, ThresholdTypes.Binary);
    dilationMat.SaveImage(@"result\dilationMat.jpg");

    Mat LaddMat = new Mat();
    Mat RaddMat = new Mat();
    Console.WriteLine(dilationMat.GetType());
    Console.WriteLine(Rsrc.GetType());

    // dilationMat is grayscale, so convert it to the same colour space as
    // the images it will be blended with.
    Mat dilationScaleMat = new Mat();
    Mat dilationColorMat = new Mat();
    Cv2.ConvertScaleAbs(dilationMat, dilationScaleMat);
    Cv2.CvtColor(dilationScaleMat, dilationColorMat, ColorConversionCodes.GRAY2RGB);

    // Overlay the difference mask on each image (30% image / 70% mask).
    Cv2.AddWeighted(WarpedSrcMat, 0.3, dilationColorMat, 0.7, 0, LaddMat);
    Cv2.AddWeighted(Rsrc, 0.3, dilationColorMat, 0.7, 0, RaddMat);

    // Display the overlays and save the final result.
    Image LaddImage = BitmapConverter.ToBitmap(LaddMat);
    pictureBox7.SizeMode = PictureBoxSizeMode.Zoom;
    pictureBox7.Image = LaddImage;
    Image RaddImage = BitmapConverter.ToBitmap(RaddMat);
    pictureBox8.SizeMode = PictureBoxSizeMode.Zoom;
    pictureBox8.Image = RaddImage;
    RaddMat.SaveImage(@"result\Result.jpg");
    MessageBox.Show("Done!");
}