// Wraps a DescriptorMatcher whose concrete type is taken from live configuration.
// <param name="settings">Options monitor supplying the matcher type to create.</param>
public DescriptorMatherWrapper(IOptionsMonitor<ConfigSettings> settings)
{
    _settings = settings;
    _indexIdMapImage = new Dictionary<int, string>();
    // Build the matcher from the currently active configuration snapshot.
    _matcher = DescriptorMatcher.Create(_settings.CurrentValue.DescriptorMatcherType);
}
// Unity Start(): prepares the marker image (ORB keypoints + descriptors) and the
// webcam helper so later frames can be matched against the marker.
private void Start()
{
    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
    webCamTextureToMatHelper.Initialize();
    grayMat = new Mat();
    // Grayscale buffer sized to the marker texture (Mat takes rows = height first).
    makerGrayMat = new Mat(originMakerTexture.height, originMakerTexture.width, CvType.CV_8UC1);
    // Work on a copy so the original marker texture stays untouched.
    makerTexture = new Texture2D(originMakerTexture.width, originMakerTexture.height);
    Graphics.CopyTexture(originMakerTexture, makerTexture);
    detector = ORB.create();
    extractor = ORB.create();
    // Get Key Points of Maker
    makerMat = new Mat(originMakerTexture.height, originMakerTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(makerTexture, makerMat, false);
    makerKeyPoints = new MatOfKeyPoint();
    makerDescriptors = new Mat();
    Imgproc.cvtColor(makerMat, makerGrayMat, Imgproc.COLOR_BGR2GRAY);
    detector.detect(makerGrayMat, makerKeyPoints);
    extractor.compute(makerGrayMat, makerKeyPoints, makerDescriptors);
    // Hamming-distance brute-force matcher suits ORB's binary descriptors.
    matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
}
/// <summary>
/// Matches features between Src and Target: AKAZE detection/description,
/// brute-force matching, then keeps the best <c>UseRate</c> fraction of
/// matches (ascending distance) and draws them into MatchingResultMat.
/// </summary>
public void RunMutching()
{
    // AKAZE feature extraction for both images.
    var akaze = AKAZE.Create();
    var descriptorSrc = new Mat();
    var descriptorTarget = new Mat();
    akaze.DetectAndCompute(SrcMat, null, out KeyPtsSrc, descriptorSrc);
    akaze.DetectAndCompute(TargetMat, null, out KeyPtsTarget, descriptorTarget);

    // Exhaustive (brute-force) matching.
    var matcher = DescriptorMatcher.Create("BruteForce");
    var matches = matcher.Match(descriptorSrc, descriptorTarget);

    // Sort ascending by distance and keep the top UseRate fraction.
    // FIX: materialize with ToArray() — the previous deferred query re-ran the
    // sort on every enumeration of SelectedMatched (here and in any later use).
    SelectedMatched = matches
        .OrderBy(p => p.Distance)
        .Take((int)(matches.Length * UseRate))
        .ToArray();

    // Draw the corresponding Src/Target feature pairs.
    Cv2.DrawMatches(
        SrcMat, KeyPtsSrc,
        TargetMat, KeyPtsTarget,
        SelectedMatched, MatchingResultMat);
}
/// <summary>
/// Initializes a new instance of the <see cref="PatternDetector"/> class.
/// Null detector/extractor/matcher arguments are replaced with defaults
/// (ORB capped at 1000 features, brute-force Hamming matcher).
/// </summary>
/// <param name="detector">Detector.</param>
/// <param name="extractor">Extractor.</param>
/// <param name="matcher">Matcher.</param>
/// <param name="ratioTest">If set to <c>true</c> ratio test.</param>
public PatternDetector(ORB detector, ORB extractor, DescriptorMatcher matcher, bool ratioTest = false)
{
    ORB chosenDetector = detector;
    if (chosenDetector == null)
    {
        chosenDetector = ORB.create();
        chosenDetector.setMaxFeatures(1000);
    }

    ORB chosenExtractor = extractor;
    if (chosenExtractor == null)
    {
        chosenExtractor = ORB.create();
        chosenExtractor.setMaxFeatures(1000);
    }

    m_detector = chosenDetector;
    m_extractor = chosenExtractor;
    m_matcher = matcher != null ? matcher : DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);

    enableRatioTest = ratioTest;
    enableHomographyRefinement = true;
    homographyReprojectionThreshold = 3;

    // Reusable per-query scratch state.
    m_queryKeypoints = new MatOfKeyPoint();
    m_queryDescriptors = new Mat();
    m_matches = new MatOfDMatch();
    m_knnMatches = new List<MatOfDMatch>();
    m_grayImg = new Mat();
    m_warpedImg = new Mat();
    m_roughHomography = new Mat();
    m_refinedHomography = new Mat();
}
// Use this for initialization
// Demo: matches ORB features between the "lena" texture and a randomly rotated
// copy of itself, then shows the drawn matches on this object's material.
void Start()
{
    Texture2D imgTexture = Resources.Load("lena") as Texture2D;

    // First image: straight copy of the texture (8-bit, 3-channel).
    Mat img1Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, img1Mat);
    Debug.Log("img1Mat dst ToString " + img1Mat.ToString());

    // Second image: same texture, then overwritten by the warp below.
    Mat img2Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, img2Mat);
    Debug.Log("img2Mat dst ToString " + img2Mat.ToString());

    // Rotate by a random angle about the image center, scale unchanged.
    float angle = UnityEngine.Random.Range(0, 360), scale = 1.0f;
    Point center = new Point(img2Mat.cols() * 0.5f, img2Mat.rows() * 0.5f);
    Mat affine_matrix = Imgproc.getRotationMatrix2D(center, angle, scale);
    Imgproc.warpAffine(img1Mat, img2Mat, affine_matrix, img2Mat.size());

    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    // Keypoints + descriptors for both images.
    MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
    Mat descriptors1 = new Mat();
    detector.detect(img1Mat, keypoints1);
    extractor.compute(img1Mat, keypoints1, descriptors1);

    MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
    Mat descriptors2 = new Mat();
    detector.detect(img2Mat, keypoints2);
    extractor.compute(img2Mat, keypoints2, descriptors2);

    // Brute-force Hamming (LUT variant) is the conventional matcher for ORB's binary descriptors.
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(descriptors1, descriptors2, matches);

    // Visualize matches side by side and push the result onto this GameObject's material.
    Mat resultImg = new Mat();
    Features2d.drawMatches(img1Mat, keypoints1, img2Mat, keypoints2, matches, resultImg);
    Texture2D texture = new Texture2D(resultImg.cols(), resultImg.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(resultImg, texture);
    gameObject.GetComponent<Renderer>().material.mainTexture = texture;
}
// Activity startup: wires the tab/pager UI, then runs an AKAZE feature-matching
// demo between two images loaded from disk.
protected override void OnCreate(Bundle savedInstanceState)
{
    base.OnCreate(savedInstanceState);

    // Tab / ViewPager wiring.
    adapter = new TabsAdapter(this, SupportFragmentManager);
    pager = FindViewById<ViewPager>(Resource.Id.viewpager);
    var tabs = FindViewById<TabLayout>(Resource.Id.tabs);
    pager.Adapter = adapter;
    tabs.SetupWithViewPager(pager);
    pager.OffscreenPageLimit = 3;
    // Notify the fragment that just became visible on page change.
    pager.PageSelected += (sender, args) =>
    {
        var fragment = adapter.InstantiateItem(pager, args.Position) as IFragmentVisible;
        fragment?.BecameVisible();
    };
    // Toolbar action opens AddItemActivity. (FIX: removed a stray doubled ';'
    // that produced an empty statement here.)
    Toolbar.MenuItemClick += (sender, e) =>
    {
        var intent = new Intent(this, typeof(AddItemActivity));
        StartActivity(intent);
    };
    SupportActionBar.SetDisplayHomeAsUpEnabled(false);
    SupportActionBar.SetHomeButtonEnabled(false);

    // AKAZE feature-matching demo between two source images.
    var src = new Mat[2];
    var dst = new Mat[2];
    var keyPoints1 = new MatOfKeyPoint();
    var keyPoints2 = new MatOfKeyPoint();
    var descripter1 = new Mat();
    var descripter2 = new Mat();
    var dmatch = new MatOfDMatch();
    var output = new Mat();
    src[0] = Imgcodecs.Imread("path/to/source/1.png");
    src[1] = Imgcodecs.Imread("path/to/source/2.png");
    dst[0] = new Mat();
    dst[1] = new Mat();
    // NOTE(review): the inputs are ordinary PNGs, yet the conversion uses a
    // Bayer demosaic code (COLORBayerGR2GRAY). For BGR images the usual choice
    // is a BGR->GRAY code — confirm against the capture pipeline before changing.
    Imgproc.CvtColor(src[0], dst[0], Imgproc.COLORBayerGR2GRAY);
    Imgproc.CvtColor(src[1], dst[1], Imgproc.COLORBayerGR2GRAY);
    var akaze = FeatureDetector.Create(FeatureDetector.Akaze);
    var executor = DescriptorExtractor.Create(DescriptorExtractor.Akaze);
    akaze.Detect(dst[0], keyPoints1);
    akaze.Detect(dst[1], keyPoints2);
    executor.Compute(dst[0], keyPoints1, descripter1);
    executor.Compute(dst[1], keyPoints2, descripter2);
    // AKAZE's binary MLDB descriptors -> Hamming-distance brute-force matcher.
    var matcher = DescriptorMatcher.Create(DescriptorMatcher.BruteforceHamming);
    matcher.Match(descripter1, descripter2, dmatch);
    Features2d.DrawMatches(src[0], keyPoints1, src[1], keyPoints2, dmatch, output);
}
// Sets up the default feature detection / description / matching pipeline.
public CVObjectDetector()
{
    // Detector options: Brisk, KAZE, AKAZE, ORBDetector, HarrisLaplaceFeatureDetector, FastDetector
    featureDetector = new Brisk();
    // Descriptor options: Brisk, ORBDetector, BriefDescriptorExtractor, Freak
    featureDescriptor = new Brisk();
    // FIX: BRISK produces binary (CV_8U) descriptors, so the brute-force
    // matcher must use Hamming distance. L2 is only valid for float descriptors
    // (e.g. SIFT/SURF/KAZE) and fails OpenCV's norm/descriptor-type check.
    featureMatcher = new BFMatcher(DistanceType.Hamming);
}
// Detects AKAZE features in `buffer`, matches them against the previous frame,
// and tracks a point by translating it with the mean displacement of the good
// matches. Returns the frame with keypoints / tracking circle drawn on it.
private Mat Process(ref Mat buffer)
{
    Mat img = new Mat();
    AKAZE akaze = AKAZE.Create();
    akaze.Threshold = 0.0001;
    KeyPoint[] keyPoints;
    DMatch[] matches;
    List<DMatch> goodMatches = new List<DMatch>();
    Mat descriptor = new Mat();
    DescriptorMatcher matcher = DescriptorMatcher.Create("BruteForce");
    // Note: converts `buffer` in place (caller's Mat becomes grayscale).
    Cv2.CvtColor(buffer, buffer, ColorConversionCodes.BGR2GRAY);
    akaze.DetectAndCompute(buffer, null, out keyPoints, descriptor);
    Cv2.DrawKeypoints(buffer, keyPoints, img, Scalar.Black);
    Cv2.ImShow("keyps", img);
    if (islastSeted)
    {
        matches = matcher.Match(descriptor, lastDescriptor);
        // Keep only matches under the distance threshold.
        for (int i = 0; i < matches.Length; i++)
        {
            if (matches[i].Distance < distanceStandard)
            {
                goodMatches.Add(matches[i]);
            }
        }
        //Cv2.DrawMatches(buffer, keyPoints, lastBuffer, lastkeyPoints, goodMatches, img);
        img = buffer;
        if (goodMatches.Count > 3)
        {
            // Mean displacement over all good matches.
            // BUG FIX: the loop previously summed goodMatches[0] on every
            // iteration instead of goodMatches[i], so the "average" was just
            // the first match's offset repeated Count times.
            float[] average = new float[2];
            average[0] = 0;
            average[1] = 0;
            for (int i = 0; i < goodMatches.Count; i++)
            {
                average[0] += keyPoints[goodMatches[i].QueryIdx].Pt.X - lastkeyPoints[goodMatches[i].TrainIdx].Pt.X;
                average[1] += keyPoints[goodMatches[i].QueryIdx].Pt.Y - lastkeyPoints[goodMatches[i].TrainIdx].Pt.Y;
            }
            lastPoint = new Point(lastPoint.X + average[0] / goodMatches.Count, lastPoint.Y + average[1] / goodMatches.Count);
            lastBuffer = buffer;
            lastDescriptor = descriptor;
            lastkeyPoints = keyPoints;
        }
        Cv2.Circle(img, lastPoint, 15, Scalar.Red, 3);
    }
    else
    {
        // First frame: seed tracker state at the frame center.
        islastSeted = true;
        img = buffer;
        lastPoint = new Point(buffer.Cols / 2, buffer.Rows / 2);
        lastBuffer = buffer;
        lastDescriptor = descriptor;
        lastkeyPoints = keyPoints;
    }
    return (img);
}
// Builds a FLANN (KD-tree) based matcher; each created object is registered
// for disposal immediately after construction.
public Matcher()
{
    ip = new Emgu.CV.Flann.KdTreeIndexParams();
    _disposables.Add(ip);

    sp = new SearchParams();
    _disposables.Add(sp);

    matcher = new FlannBasedMatcher(ip, sp);
    _disposables.Add(matcher);
}
// Loads the trained dataset and the reference sprites from `mainpath`,
// and prepares the descriptor/matcher pair used for searching.
public Searcher(string mainpath)
{
    _descriptor = new SURFDescriptor();
    _indexedDatset = Trainer.LoadFromDisk(mainpath);
    _descriptorMatcher = InitMatcher();

    // Reference images live under <mainpath>/data.
    string dataDir = Path.Combine(mainpath, "data");
    _starFull = new Mat(Path.Combine(dataDir, "starfull.png"));
    _closeButton = new Mat(Path.Combine(dataDir, "closebutton.png"));
}
// Captures one camera frame, gamma-corrects it, feature-matches it against
// `template`, estimates a homography, draws a marker rectangle plus the match
// lines, and returns the half-scale visualization.
static Mat MatcheWindow(Mat template, double gammaPower, double distanceThreshold)
{
    using var source = new Mat();
    // Grab a frame from the (static) camera.
    camera.Read(source);
    //Cv2.Resize(source, source, new Size(2592, 1944));
    Cv2.Rotate(source, source, RotateFlags.Rotate90Clockwise);
    // NOTE(review): `trim` aliases `source` (the SubMat crop is commented out),
    // so both `using` declarations dispose the same Mat — this relies on
    // Dispose being safe to call twice; confirm before re-enabling the crop.
    using var trim = source;//.SubMat(new Rect(20, 195, 1880, 873));
    // Gamma correction.
    using var gammaDst = AdjustGamma(trim, gammaPower);
    // Template image features (keypoints, descriptors).
    (var tempKey, var tempDesc) = FeatureCommand(template);
    // Camera image features.
    (var srcKey, var srcDesc) = FeatureCommand(gammaDst);
    // Brute-force matcher; k=2 neighbors so a distance/ratio filter can be applied.
    DescriptorMatcher matcher = DescriptorMatcher.Create("BruteForce");
    DMatch[][] matches = matcher.KnnMatch(tempDesc, srcDesc, 2);
    // Filter correspondences by the distance threshold.
    List<DMatch> goodMatches;
    List<Point2f> goodTemplateKeyPoints, goodSourceKeyPoints;
    (goodMatches, goodTemplateKeyPoints, goodSourceKeyPoints) = FilterMatchGoodScore(tempKey, srcKey, matches, distanceThreshold);
    Console.WriteLine("matches: {0},goodMatches: {1}", matches.Length, goodMatches.Count);
    // Robustly estimate the homography from the filtered correspondences.
    Mat homoGraphy = LookupHomoGraphy(goodTemplateKeyPoints, goodSourceKeyPoints);
    // Corners of the target object projected into the camera image.
    var cornerPoints = LookupCornerFromTargetObjectImage(template, homoGraphy);
    // Marker rectangle anchored at the first projected corner, half template size.
    var rect = new Rect(cornerPoints[0].X, cornerPoints[0].Y, template.Width / 2, template.Height / 2);
    // Draw the frame.
    gammaDst.Rectangle(rect, Scalar.Pink, 3);
    // Connect matched features with lines.
    using var output = new Mat();
    Cv2.DrawMatches(template, tempKey, gammaDst, srcKey, goodMatches, output);
    var output2 = new Mat();
    Cv2.Resize(output, output2, new Size(), 0.5, 0.5);
    return (output2);
}
// Smoke/diagnostic routine for the OpenCvSharp bindings: pokes the matcher,
// CUDA query, algorithm registry and SIFT parameter APIs, then loops forever
// shuffling rows of an image while sampling process memory (leak check).
private static void Run()
{
    var dm = DescriptorMatcher.Create("BruteForce");
    dm.Clear();
    Console.WriteLine(Cv2.GetCudaEnabledDeviceCount());
    // Enumerate every registered cv::Algorithm name.
    string[] algoNames = Algorithm.GetList();
    Console.WriteLine(String.Join("\n", algoNames));
    // Parameter access on a SIFT instance via the algorithm-info API.
    SIFT al1 = Algorithm.Create<SIFT>("Feature2D.SIFT");
    string[] ppp = al1.GetParams();
    Console.WriteLine(ppp);
    var t = al1.ParamType("contrastThreshold");
    double d = al1.GetDouble("contrastThreshold");
    t.ToString();
    d.ToString();
    var src = new Mat("img/lenna.png");
    var rand = new Random();
    var memory = new List<long>(100);
    // ROI Mats sharing src's data (kept alive to exercise reference handling).
    var a1 = new Mat(src, Rect.FromLTRB(0, 0, 30, 40));
    var a2 = new Mat(src, Rect.FromLTRB(0, 0, 30, 40));
    var a3 = new Mat(src, Rect.FromLTRB(0, 0, 30, 40));
    a3.ToString();
    // Intentionally endless: watch the reported working set for growth.
    for (long i = 0;; i++)
    {
        SIFT a = Algorithm.Create<SIFT>("Feature2D.SIFT");
        a.ToString();
        // Randomly swap row headers 200 times per iteration.
        for (int j = 0; j < 200; j++)
        {
            int c1 = rand.Next(100, 400);
            int c2 = rand.Next(100, 400);
            Mat temp = src.Row[c1];
            src.Row[c1] = src.Row[c2];
            src.Row[c2] = temp;
        }
        memory.Add(MyProcess.WorkingSet64);
        // Report the average working set every 100 samples, then force a GC.
        if (memory.Count >= 100)
        {
            double average = memory.Average();
            Console.WriteLine("{0:F3}MB", average / 1024.0 / 1024.0);
            memory.Clear();
            GC.Collect();
        }
    }
}
// Click handler: detects Harris interest points on both images, builds
// rotation-invariant descriptors, matches them, and renders the combined view.
private void FindPointButton_Click(object sender, EventArgs e)
{
    MinValueHarris = Convert.ToDouble(txb_minValue.Text);
    WindowSize = Convert.ToInt32(txb_WindowSize.Text);

    int gridSize = Convert.ToInt32(txb_gridSize.Text);
    int cellSize = Convert.ToInt32(txb_cellSize.Text);
    int binsCount = Convert.ToInt32(txb_binsCount.Text);

    // Point cap comes from the filter box only when filtering is enabled.
    int maxPoints = filter_checkBox.Checked == true
        ? Convert.ToInt32(txb_Filter.Text)
        : 5000;

    var pointsA = NonMaximumSuppression.FilterA(imageA, Harris.DoHarris(MinValueHarris, WindowSize, imageA), maxPoints);
    var pointsB = NonMaximumSuppression.FilterA(imageB, Harris.DoHarris(MinValueHarris, WindowSize, imageB), maxPoints);
    var descriptorsA = RotationInvariant.Calculate(imageA, pointsA);
    var descriptorsB = RotationInvariant.Calculate(imageB, pointsB);

    // NNDR matching only when its radio button is the active choice;
    // plain matching otherwise (including the no-selection fallback).
    List<ValueTuple<ForDescriptor.Descriptor, ForDescriptor.Descriptor>> match =
        !rbt_usual.Checked && rbt_NNDR.Checked
            ? DescriptorMatcher.Nndr(descriptorsA, descriptorsB)
            : DescriptorMatcher.Match(descriptorsA, descriptorsB);

    lbl_findPoints1.Text = "Найдено интересных точек(1): " + pointsA.Count;
    lbl_findPoints2.Text = "Найдено интересных точек(2): " + pointsB.Count;
    lbl_PairCount.Text = "Найдено пар точек: " + match.Count;

    var image = DrawHelper.DrawTwoImages(
        DrawHelper.DrawPoints(imageA, pointsA),
        DrawHelper.DrawPoints(imageB, pointsB),
        match);
    IOHelper.WriteImageToFile(image, "..\\..\\..\\..\\Output\\OutputPicture.png");
    pictureBox1.Image = image;
}
// Matches AKAZE features between `src` and `target`, draws the correspondences
// into ResultMat, and computes the centroid of the matched keypoints on each
// side (PtSrc / PtTarget).
public FeatureMatching(Mat src, Mat target)
{
    // Image state.
    SrcMat = src.Clone();
    TargetMat = target.Clone();
    ResultMat = new Mat();

    // Centroid accumulators start at the origin.
    PtSrc = new System.Drawing.PointF(0.0f, 0.0f);
    PtTarget = new System.Drawing.PointF(0.0f, 0.0f);

    // Feature extraction.
    var akaze = AKAZE.Create();
    var descriptorSrc = new Mat();
    var descriptorTarget = new Mat();
    akaze.DetectAndCompute(SrcMat, null, out KeyPtsSrc, descriptorSrc);
    akaze.DetectAndCompute(TargetMat, null, out KeyPtsTarget, descriptorTarget);

    // Brute-force matching.
    var matcher = DescriptorMatcher.Create("BruteForce");
    var matches = matcher.Match(descriptorSrc, descriptorTarget);

    // Sort ascending and keep only the single best match (was "top half").
    // FIX: materialized with ToArray() — the previous deferred LINQ query was
    // re-sorted on every one of the ~7 enumerations below (DrawMatches, two
    // foreach loops, four Count() calls).
    var selectedMatches = matches
        .OrderBy(p => p.Distance)
        //.Take(matches.Length / 2)
        .Take(1)
        .ToArray();

    // Draw the Src - Target correspondence image.
    Cv2.DrawMatches(SrcMat, KeyPtsSrc, TargetMat, KeyPtsTarget, selectedMatches, ResultMat);

    int selectedCount = selectedMatches.Length;

    // Centroid of the matched keypoints (Src side).
    foreach (var item in selectedMatches)
    {
        int idx = item.QueryIdx;
        PtSrc.X += KeyPtsSrc[idx].Pt.X;
        PtSrc.Y += KeyPtsSrc[idx].Pt.Y;
    }
    PtSrc.X /= (float)selectedCount;
    PtSrc.Y /= (float)selectedCount;

    // Centroid of the matched keypoints (Target side).
    foreach (var item in selectedMatches)
    {
        int idx = item.TrainIdx;
        PtTarget.X += KeyPtsTarget[idx].Pt.X;
        PtTarget.Y += KeyPtsTarget[idx].Pt.Y;
    }
    PtTarget.X /= (float)selectedCount;
    PtTarget.Y /= (float)selectedCount;
}
// Click handler: blob-based interest points + descriptors on both images,
// matched and rendered into the output picture box.
private void button1_Click(object sender, EventArgs e)
{
    IOHelper.DeletePictures();
    MinValueHarris = Convert.ToDouble(txb_minValue.Text);
    WindowSize = Convert.ToInt32(txb_WindowSize.Text);
    int maxPoints = Convert.ToInt32(txb_Filter.Text);
    int gridSize = Convert.ToInt32(txb_gridSize.Text);
    int cellSize = Convert.ToInt32(txb_cellSize.Text);
    int binsCount = Convert.ToInt32(txb_binsCount.Text);

    // Re-read the cap when filtering is on; otherwise fall back to 5000.
    maxPoints = filter_checkBox.Checked == true
        ? Convert.ToInt32(txb_Filter.Text)
        : 5000;

    var descriptorsA = BlobsFinder.FindBlobs(imageA, maxPoints);
    var descriptorsB = BlobsFinder.FindBlobs(imageB, maxPoints);

    // NNDR matching only when its radio button is the active choice;
    // plain matching otherwise (including the no-selection fallback).
    List<ValueTuple<ForDescriptor.Descriptor, ForDescriptor.Descriptor>> match =
        !rbt_usual.Checked && rbt_NNDR.Checked
            ? DescriptorMatcher.Nndr(descriptorsA, descriptorsB)
            : DescriptorMatcher.Match(descriptorsA, descriptorsB);

    lbl_findPoints1.Text = "Найдено интересных точек(1): " + descriptorsA.Count;
    lbl_findPoints2.Text = "Найдено интересных точек(2): " + descriptorsB.Count;
    lbl_PairCount.Text = "Найдено пар точек: " + match.Count;

    var image = DrawHelper.DrawTwoImages(
        DrawHelper.DrawPoints(imageA, descriptorsA),
        DrawHelper.DrawPoints(imageB, descriptorsB),
        match);
    // var image = DrawHelper.DrawPoints(imageA, descriptorsA);
    IOHelper.WriteImageToFile(image, "..\\..\\..\\..\\Output\\OutputPicture.png");
    pictureBox1.Image = image;
}
// Builds a detector with ORB features (capped at 1000) and a Hamming-distance
// brute-force matcher; homography refinement is on, the ratio test is optional.
public PatternDetector(bool ratioTest)
{
    m_detector = ORB.Create(1000);
    m_extractor = ORB.Create(1000);
    // Non-cross-check brute-force matcher over ORB's binary descriptors.
    m_matcher = new BFMatcher(NormTypes.Hamming);

    enableRatioTest = ratioTest;
    enableHomographyRefinement = true;
    homographyReprojectionThreshold = 3;

    // Reusable scratch buffers.
    m_queryDescriptors = new Mat();
    m_grayImg = new Mat();
    m_warpedImg = new Mat();
    m_roughHomography = new Mat();
    m_refinedHomography = new Mat();
}
// Matches Image1 against Image2 (k-NN on pre-computed descriptors), draws the
// matches, and — when a homography is found — outlines Image1's projected
// frame, warps (registers) Image1 onto Image2 and returns the difference
// image, appending the rendered artifacts to `resultList`. Without a
// homography, falls back to a direct unregistered difference.
public Image Match(List<ImageDataResult> resultList)
{
    DescriptorMatcher matcher = GetDescritorMatcher();
    matcher.Add(Image1.Descriptors);
    // k = 2 nearest neighbors per query descriptor; no explicit mask.
    matcher.KnnMatch(Image2.Descriptors, Matches, 2, null);
    var mask = GetMask();
    Mat homography = GetHomography(Image1.KeyPoints, Image2.KeyPoints, Matches, mask);
    Mat result = new Mat();
    Features2DToolbox.DrawMatches(Image1.CvMaterial, Image1.KeyPoints, Image2.CvMaterial, Image2.KeyPoints, Matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);
    Mat regged = new Mat();
    if (homography != null)
    {
        // Project Image1's corner rectangle through the homography and draw
        // the resulting quadrilateral on the match visualization.
        Rectangle rect = new System.Drawing.Rectangle(Point.Empty, Image1.CvMaterial.Size);
        PointF[] pts =
        {
            new PointF(rect.Left, rect.Bottom),
            new PointF(rect.Right, rect.Bottom),
            new PointF(rect.Right, rect.Top),
            new PointF(rect.Left, rect.Top)
        };
        pts = CvInvoke.PerspectiveTransform(pts, homography);
        Point[] points = Array.ConvertAll(pts, Point.Round);
        using (VectorOfPoint vp = new VectorOfPoint(points))
        {
            CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255));
        }
        // Register Image1 into Image2's frame and diff the two.
        CvInvoke.WarpPerspective(Image1.CvOriginal, regged, homography, Image1.Size);
        var image = ImageData.Subtract(regged.Bitmap, Image2.CvOriginal.Bitmap);
        resultList.Add(new ImageDataResult((Bitmap)image, result, regged));
        return (image);
    }
    // No homography: direct (unregistered) difference of the originals.
    return (ImageData.Subtract(Image1.CvOriginal.Bitmap, Image2.CvOriginal.Bitmap));
}
// Builds a template feature from `desc`: crop rectangle, a rectangular mask
// with a circular hole, optional colour filter, and a SIFT detector plus a
// linear FLANN matcher.
public SimpleFeature(SimpleFeatureDesc desc, SIFT featureDetector = null)
{
    size = desc.size;
    cropRect = new Rectangle(desc.cropPoint, size);
    mask = new Mat(desc.size, DepthType.Cv8U, 1);
    features = desc.features;
    // Mask: white region of interest with a black circular exclusion.
    CvInvoke.Rectangle(mask, desc.rect, new MCvScalar(255, 255, 255), -1);
    CvInvoke.Circle(mask, desc.maskPoint, 23, new MCvScalar(0, 0, 0), -1);
    //SimpleFeature sf = new SimpleFeature(new Point(842, 646), size, mask);
    if (desc.colorFilter != null)
    {
        SetColorFilter(desc.colorFilter.lowerBound, desc.colorFilter.upperBound);
    }
    //featureDetector = new SIFT(0,2,0.02,20,0.4);
    if (featureDetector == null)
    {
        this.featureDetector = new SIFT(0, 3, 0.04, 10, 0.8);
    }
    else
    {
        // BUG FIX: a caller-supplied detector was previously ignored because
        // it was never assigned to the field.
        this.featureDetector = featureDetector;
    }
    // Linear FLANN index == exhaustive search; fine for small descriptor sets.
    Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams();
    Emgu.CV.Flann.SearchParams sp = new SearchParams();
    matcher = new FlannBasedMatcher(ip, sp);
}
// Endless memory-leak probe: repeatedly swaps random row headers of an image
// Mat while sampling the process working set; prints the average every 100
// samples and forces a collection.
private static void Run()
{
    var dm = DescriptorMatcher.Create("BruteForce");
    dm.Clear();
    var src = new Mat("data/lenna.png");
    var rand = new Random();
    var memory = new List<long>(100);
    // ROI Mats sharing src's data (kept alive to exercise reference handling).
    var a1 = new Mat(src, Rect.FromLTRB(0, 0, 30, 40));
    var a2 = new Mat(src, Rect.FromLTRB(0, 0, 30, 40));
    var a3 = new Mat(src, Rect.FromLTRB(0, 0, 30, 40));
    a3.ToString();
    // Intentionally endless: watch the reported working set for growth.
    for (long i = 0; ; i++)
    {
        // Randomly swap row headers 200 times per iteration.
        for (int j = 0; j < 200; j++)
        {
            int c1 = rand.Next(100, 400);
            int c2 = rand.Next(100, 400);
            Mat temp = src.Row[c1];
            src.Row[c1] = src.Row[c2];
            src.Row[c2] = temp;
        }
        memory.Add(MyProcess.WorkingSet64);
        // Report the average working set every 100 samples, then force a GC.
        if (memory.Count >= 100)
        {
            double average = memory.Average();
            Console.WriteLine("{0:F3}MB", average / 1024.0 / 1024.0);
            memory.Clear();
            GC.Collect();
        }
    }
}
// Click handler: matches blob descriptors between the two bridge photos,
// estimates a transform with RANSAC, stitches them, then saves and shows the result.
private void button2_Click(object sender, EventArgs e)
{
    int maxPoints = Convert.ToInt32(txb_Filter.Text);
    string name = PathToWriteImage + "resultPicture.jpg";

    Bitmap bmp1 = new Bitmap(PathToReadImage + "bridge1.jpg");
    Bitmap bmp2 = new Bitmap(PathToReadImage + "bridge2.jpg");

    // Blob descriptors for both photos, matched with the NNDR criterion.
    var descriptorsABlobs = BlobsFinder.FindBlobs(IOHelper.ImageToMat(bmp1), maxPoints);
    var descriptorsBBlobs = BlobsFinder.FindBlobs(IOHelper.ImageToMat(bmp2), maxPoints);
    var match = DescriptorMatcher.Nndr(descriptorsABlobs, descriptorsBBlobs);

    lbl_findPoints1.Text = "Найдено интересных точек(1): " + descriptorsABlobs.Count;
    lbl_findPoints2.Text = "Найдено интересных точек(2): " + descriptorsBBlobs.Count;
    lbl_PairCount.Text = "Найдено пар точек: " + match.Count;

    // Robust transform estimate, then stitch.
    var (matrixA, matrixB) = Ransac.CalculateTransform(match);
    var result = Transformer.Transform(bmp1, bmp2, matrixA, matrixB);
    result.Save(name, ImageFormat.Jpeg);
    pictureBox1.Image = result;
}
// P/Invoke binding for the native descriptor-match entry point: matches
// queryDescriptors against trainDescriptors (with an optional mask) and
// writes the resulting pairs into `matches`.
internal static extern void cv_features2d_DescriptorMatcher_match(
    DescriptorMatcher matcher,
    Arr queryDescriptors,
    Arr trainDescriptors,
    DMatchCollection matches,
    Arr mask);
// Matches the ORB features of `base64image` against each candidate in
// `base64imageList`, picks the candidate with the most ratio-test survivors,
// warps the query onto the winner via homography, and returns the warped image
// (base64 PNG) plus the winner's index. Returns the original image with index
// -1 when the query has too few descriptors or no candidate wins.
public ImageString MatchFeatures(string base64image, List<string> base64imageList)
{
    List<MatOfDMatch> winnerMatches = new List<MatOfDMatch>();
    MatOfKeyPoint winnerKeyPoints = new MatOfKeyPoint();
    Mat winnerImage = new Mat();
    int winnerIndex = -1;
    int winnerValue = 0;
    Texture2D imgTexture = base64ImageToTexture(base64image);
    List<Texture2D> imgTextures = new List<Texture2D>();
    for (int i = 0; i < base64imageList.Count; i++)
    {
        imgTextures.Add(base64ImageToTexture(base64imageList[i]));
    }
    //Create Mat from texture
    Mat img1Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, img1Mat);
    MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
    Mat descriptors1 = new Mat();
    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);
    //Detect keypoints and compute descriptors from photo.
    detector.detect(img1Mat, keypoints1);
    extractor.compute(img1Mat, keypoints1, descriptors1);
    Debug.Log("Billede features: " + descriptors1.rows());
    // Bail out early when the query image yields too few descriptors.
    if (descriptors1.rows() < 10)
    {
        Debug.Log("ARRRRRRGH der er ikke mange descripters i mit original-billede");
        return (new ImageString(base64image, winnerIndex));
    }
    //Run through each image in list
    for (int i = 0; i < imgTextures.Count; i++)
    {
        Texture2D imgTexture2 = imgTextures[i];
        //Create Mat from texture
        Mat img2Mat = new Mat(imgTexture2.height, imgTexture2.width, CvType.CV_8UC3);
        Utils.texture2DToMat(imgTexture2, img2Mat);
        //Find keypoints and descriptors from image in list
        MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
        Mat descriptors2 = new Mat();
        detector.detect(img2Mat, keypoints2);
        extractor.compute(img2Mat, keypoints2, descriptors2);
        //Match photo with image from list
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
        Debug.Log("Billede2 features: " + descriptors2.rows());
        // Skip candidates with too few descriptors.
        if (descriptors2.rows() < 10)
        {
            Debug.Log("ARRRRRRGH der er ikke mange descripters i 
mit test billede: " + i);
            continue;
        }
        // k-NN (k=2) so Lowe's ratio test can be applied below.
        List<MatOfDMatch> matchList = new List<MatOfDMatch>();
        matcher.knnMatch(descriptors1, descriptors2, matchList, 2);
        //Find the good matches and put them ind a list
        List<MatOfDMatch> good = new List<MatOfDMatch>();
        foreach (MatOfDMatch match in matchList)
        {
            DMatch[] arrayDmatch = match.toArray();
            // Lowe's ratio test at 0.7.
            if (arrayDmatch[0].distance < 0.7f * arrayDmatch[1].distance)
            {
                good.Add(match);
            }
        }
        //Find the best match image based on the good lists
        if (good.Count > winnerThreshold && good.Count > winnerValue)
        {
            winnerImage = img2Mat;
            winnerMatches = good;
            winnerKeyPoints = keypoints2;
            winnerIndex = i;
            winnerValue = good.Count;
        }
    }
    Debug.Log("The winner is image: " + winnerIndex + " with a value of: " + winnerValue);
    //If no winner just return the original image
    if (winnerIndex == -1)
    {
        Debug.Log("No winner");
        return (new ImageString(base64image, winnerIndex));
    }
    // NOTE(review): this logs "No winner" even when a winner WAS found —
    // looks like leftover debug output; confirm before removing.
    Debug.Log("No winner");
    //Find the matching keypoints from the winner list.
    MatOfPoint2f queryPoints = new MatOfPoint2f();
    MatOfPoint2f matchPoints = new MatOfPoint2f();
    List<Point> queryPointsList = new List<Point>();
    List<Point> matchPointsList = new List<Point>();
    foreach (MatOfDMatch match in winnerMatches)
    {
        DMatch[] arrayDmatch = match.toArray();
        queryPointsList.Add(keypoints1.toList()[arrayDmatch[0].queryIdx].pt);
        matchPointsList.Add(winnerKeyPoints.toList()[arrayDmatch[0].trainIdx].pt);
    }
    queryPoints.fromList(queryPointsList);
    matchPoints.fromList(matchPointsList);
    //Calculate the homography of the best matching image
    Mat homography = Calib3d.findHomography(queryPoints, matchPoints, Calib3d.RANSAC, 5.0);
    Mat resultImg = new Mat();
    Imgproc.warpPerspective(img1Mat, resultImg, homography, winnerImage.size());
    //Show image
    Texture2D texture = new Texture2D(winnerImage.cols(), winnerImage.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(resultImg, texture);
    return (new ImageString(Convert.ToBase64String(texture.EncodeToPNG()), winnerIndex));
}
// Generalized-Hough style object localization: matches blob descriptors from
// `sample` to `image`, lets every match vote for a candidate pose (center x/y,
// scale, rotation) in a 4-D accumulator, and returns the outline of each
// local-maximum cell that collected enough voters.
public List<Point[]> Find()
{
    // Accumulator cell size: a fifth of the smaller image dimension.
    CoordinateStep = Math.Min(image.Width, image.Height) / 5;
    var cellsX = (int)Math.Ceiling(image.Width * 1D / CoordinateStep);
    var cellsY = (int)Math.Ceiling(image.Height * 1D / CoordinateStep);
    // Debug visualization: the image with the voting grid drawn over it.
    VotesImage = IOHelper.MatToImage(image);
    for (var i = 1; i < cellsX; i++)
    {
        var x = CoordinateStep * i;
        DrawHelper.DrawLine(VotesImage, x, 0, x, image.Height - 1);
    }
    for (var i = 1; i < cellsY; i++)
    {
        var y = CoordinateStep * i;
        DrawHelper.DrawLine(VotesImage, 0, y, image.Width - 1, y);
    }
    var descriptors1 = BlobsFinder.FindBlobs(sample, 500);
    var descriptors2 = BlobsFinder.FindBlobs(image, 500);
    // Match direction is image -> sample; ReverseMatches flips each pair.
    var matches = DescriptorMatcher.Nndr(descriptors2, descriptors1);
    ReverseMatches = matches.Select(x => new Match(x.Item2, x.Item1)).ToList();
    var sampleCenterX = sample.Width / 2.0;
    var sampleCenterY = sample.Height / 2.0;
    votes = new double[cellsX, cellsY, AngleCells, CellsScale];
    voters = new List<Match>[cellsX, cellsY, AngleCells, CellsScale];
    foreach (var match in matches)
    {
        var samplePoint = match.Item2.Point;
        var imagePoint = match.Item1.Point;
        // Relative scale from blob radii; relative rotation from blob angles.
        var scale = imagePoint.Radius / samplePoint.Radius;
        var angle = match.Item2.Angle - match.Item1.Angle;
        // Vector from the sample keypoint to the sample center, scaled and
        // rotated into image coordinates — its tip is the predicted center.
        var vectorX = scale * (sampleCenterX - samplePoint.getX());
        var vectorY = scale * (sampleCenterY - samplePoint.getY());
        var centerX = imagePoint.getX() + vectorX * Math.Cos(angle) - vectorY * Math.Sin(angle);
        var centerY = imagePoint.getY() + vectorX * Math.Sin(angle) + vectorY * Math.Cos(angle);
        Vote(match, centerX, centerY, scale, angle);
        DrawHelper.DrawLine(VotesImage, imagePoint.getX(), imagePoint.getY(), (int)Math.Round(centerX), (int)Math.Round(centerY));
    }
    // Scan the accumulator for strong local maxima; fit a transform to each
    // winning cell's voters. NOTE(review): voters cells are presumably only
    // non-null where votes were cast — confirm Vote() allocates every list
    // that IsVotesLocalMaximum can select.
    for (var x = 0; x < cellsX; x++)
    {
        for (var y = 0; y < cellsY; y++)
        {
            for (var a = 0; a < AngleCells; a++)
            {
                for (var s = 0; s < CellsScale; s++)
                {
                    if (IsVotesLocalMaximum(x, y, a, s) && voters[x, y, a, s].Count > VotersThreshold)
                    {
                        // DrawHelper.DrawPolygon(VotesImage, GetPreliminary(x, y, a, s), true);
                        objects.Add(GetLocation(Ransac.CalculateTransform(voters[x, y, a, s])));
                    }
                }
            }
        }
    }
    return (objects);
}
// Entry point: pushes the desired resolution/FPS to the capture device,
// retrying until the driver reports the values back (30 s timeout), then runs
// an endless loop that feature-matches a stored template against live frames
// and displays the connected matches.
static void Main(string[] args)
{
    var _videoCapture = new VideoCapture(1);
    var referenceTime = DateTime.Now;
    var cnt = 0;
    while (true)
    {
        /*
         * Normal capture: 1600 x 1200 @ 5 fps (limited by processing speed).
         * Template capture: 2592 x 1944 @ 2 fps (FA camera: 2592 x 1944 @ 6 fps).
         */
        var frameWidth = 2592;
        var frameHeight = 1944;
        var fps = 6;
        // Only write settings that differ from the current driver state.
        if (_videoCapture.Fps != fps)
        {
            _videoCapture.Fps = fps;
        }
        if (_videoCapture.FrameWidth != frameWidth)
        {
            _videoCapture.FrameWidth = frameWidth;
        }
        if (_videoCapture.FrameHeight != frameHeight)
        {
            _videoCapture.FrameHeight = frameHeight;
        }
        cnt++;
        Console.WriteLine("設定書き込み {0}回目 {1}秒", cnt, (DateTime.Now - referenceTime).TotalSeconds);
        // Done once the driver echoes the requested size and (roughly) the fps.
        if (_videoCapture.FrameWidth == frameWidth && _videoCapture.FrameHeight == frameHeight && Math.Abs(_videoCapture.Fps - fps) < 1)
        {
            break;
        }
        if ((DateTime.Now - referenceTime).TotalSeconds > 30)
        {
            throw new TimeoutException("videoCaptureのフレームサイズの設定がタイムアウトしました");
        }
    }
    // One frame period, in milliseconds.
    int posMsec = (int)(1000 * 1 / _videoCapture.Fps);
    _videoCapture.Set(VideoCaptureProperties.PosMsec, posMsec);
    // Brute-force feature matcher.
    DescriptorMatcher matcher = DescriptorMatcher.Create("BruteForce");
    // Template image for the matching loop.
    using var _temp = new Mat("escalator1st_20201119115525242.bmp");
    // Template keypoints + descriptors. NOTE(review): despite its name,
    // _temp_featureImage holds the descriptor matrix (it is passed to KnnMatch).
    (var _temp_keypoint, var _temp_featureImage) = FeatureCommand(_temp);
    while (true)
    {
        using var frame = new Mat();
        _videoCapture.Read(frame);
        // Frame keypoints + descriptors.
        (var _frame_keypoint, var _frame_descriptor) = FeatureCommand(frame);
        // Top-2 nearest neighbors per template descriptor.
        DMatch[][] matches = matcher.KnnMatch(_temp_featureImage, _frame_descriptor, 2);
        // Filter correspondences by distance/ratio threshold.
        List<DMatch> goodMatches;
        List<Point2f> goodTemplateKeyPoints, goodSourceKeyPoints;
        (goodMatches, goodTemplateKeyPoints, goodSourceKeyPoints) = FilterMatchGoodScore(_temp_keypoint, _frame_keypoint, matches);
        // Connect matched features with lines and wait for a key press.
        Mat output3 = new Mat();
        Cv2.DrawMatches(_temp, _temp_keypoint, frame, _frame_keypoint, goodMatches, output3);
        Cv2.ImShow("feature", output3);
        Cv2.WaitKey();
    }
}
// Feature method #3, old version (works but was abandoned as inefficient):
// matches the resource texture `targetName` against `RGB` with ORB features,
// outlines the located region, and writes the annotated frame to `cameraFeed`.
// Returns false when input is missing or the projected match is too small.
public bool descriptorsORB_Old(Mat RGB, Mat cameraFeed, string targetName)//找出特徵的顏色方法三(可運行但效率不佳放棄)
{
    if (RGB == null)
    {
        Debug.Log("RGB Mat is Null");
        return (false);
    }
    // Copy the incoming RGB frame into a working Mat.
    Mat SrcMat = new Mat();
    RGB.copyTo(SrcMat);
    // Comparison template loaded from Resources.
    Texture2D imgTexture = Resources.Load(targetName) as Texture2D;
    // Texture2D imgTexture2 = Resources.Load("lenaK") as Texture2D;
    // Texture2D -> Mat.
    Mat img1Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, img1Mat);
    // Create the ORB detector/extractor pair.
    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);
    // Keypoint/descriptor holders.
    MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
    Mat descriptors1 = new Mat();
    MatOfKeyPoint keypointsSrc = new MatOfKeyPoint();
    Mat descriptorsSrc = new Mat();
    // Features of image 1 (the template).
    detector.detect(img1Mat, keypoints1);
    extractor.compute(img1Mat, keypoints1, descriptors1);
    // Features of the source frame.
    detector.detect(SrcMat, keypointsSrc);
    extractor.compute(SrcMat, keypointsSrc, descriptorsSrc);
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(descriptors1, descriptorsSrc, matches);
    DMatch[] arrayDmatch = matches.toArray();
    for (int i = arrayDmatch.Length - 1; i >= 0; i--)
    {
        // Debug.Log("match " + i + ": " + arrayDmatch[i].distance);
    }
    // Filtering.
    double max_dist = 0;
    double min_dist = 100;
    //-- Quick calculation of max and min distances between keypoints
    double dist = new double();
    for (int i = 0; i < matches.rows(); i++)
    {
        dist = arrayDmatch[i].distance;
        if (dist < min_dist)
        {
            min_dist = dist;
        }
        if (dist > max_dist)
        {
            max_dist = dist;
        }
    }
    Debug.Log("Max dist :" + max_dist);
    Debug.Log("Min dist :" + min_dist);
    // Keep only the "good" matches.
    // NOTE(review): the distance filter below is entirely commented out, so
    // matchesGoodList stays empty and findHomography later receives empty
    // point sets — confirm this method is truly unused before relying on it.
    List<DMatch> matchesGoodList = new List<DMatch>();
    for (int i = 0; i < matches.rows(); i++)
    {
        //if (arrayDmatch[i].distance < RateDist.value * min_dist)
        //{
        //    //Debug.Log("match " + i + ": " + arrayDmatch[i].distance);
        //    matchesGoodList.Add(arrayDmatch[i]);
        //}
    }
    MatOfDMatch matchesGood = new MatOfDMatch();
    matchesGood.fromList(matchesGoodList);
    //Draw Keypoints
    Features2d.drawKeypoints(SrcMat, keypointsSrc, SrcMat);
    // Output conversion holders.
    Mat resultImg = new Mat();
    // Features2d.drawMatches(img1Mat, keypoints1, SrcMat, keypointsSrc, matchesGood, resultImg);
    List<Point> P1 = new List<Point>();
    // List<Point> P2 = new List<Point>();
    List<Point> pSrc = new List<Point>();
    Debug.Log("MatchCount" + matchesGoodList.Count);
    for (int i = 0; i < matchesGoodList.Count; i++)
    {
        P1.Add(new Point(keypoints1.toArray()[matchesGoodList[i].queryIdx].pt.x, keypoints1.toArray()[matchesGoodList[i].queryIdx].pt.y));
        pSrc.Add(new Point(keypointsSrc.toArray()[matchesGoodList[i].trainIdx].pt.x, keypointsSrc.toArray()[matchesGoodList[i].trainIdx].pt.y));
        //Debug.Log("ID = " + matchesGoodList[i].queryIdx );
        //Debug.Log("x,y =" + (int)keypoints1.toArray()[matchesGoodList[i].queryIdx].pt.x + "," + (int)keypoints1.toArray()[matchesGoodList[i].queryIdx].pt.y);
        //Debug.Log("x,y =" + (int)keypoints2.toArray()[matchesGoodList[i].trainIdx].pt.x + "," + (int)keypoints2.toArray()[matchesGoodList[i].trainIdx].pt.y);
    }
    MatOfPoint2f p2fTarget = new MatOfPoint2f(P1.ToArray());
    MatOfPoint2f p2fSrc = new MatOfPoint2f(pSrc.ToArray());
    // Homography from template points to source points (RANSAC, 3 px threshold).
    Mat matrixH = Calib3d.findHomography(p2fTarget, p2fSrc, Calib3d.RANSAC, 3);
    // Template corner rectangle.
    List<Point> srcPointCorners = new List<Point>();
    srcPointCorners.Add(new Point(0, 0));
    srcPointCorners.Add(new Point(img1Mat.width(), 0));
    srcPointCorners.Add(new Point(img1Mat.width(), img1Mat.height()));
    srcPointCorners.Add(new Point(0, img1Mat.height()));
    Mat originalRect = Converters.vector_Point2f_to_Mat(srcPointCorners);
    List<Point> srcPointCornersEnd = new List<Point>();
    srcPointCornersEnd.Add(new Point(0, img1Mat.height()));
    srcPointCornersEnd.Add(new Point(0, 0));
    srcPointCornersEnd.Add(new Point(img1Mat.width(), 0));
    srcPointCornersEnd.Add(new Point(img1Mat.width(), img1Mat.height()));
    Mat changeRect = Converters.vector_Point2f_to_Mat(srcPointCornersEnd);
    // Project the template corners into the source frame.
    Core.perspectiveTransform(originalRect, changeRect, matrixH);
    List<Point> srcPointCornersSave = new List<Point>();
    Converters.Mat_to_vector_Point(changeRect, srcPointCornersSave);
    // Reject degenerate (too small) projections.
    if ((srcPointCornersSave[2].x - srcPointCornersSave[0].x) < 5 || (srcPointCornersSave[2].y - srcPointCornersSave[0].y) < 5)
    {
        Debug.Log("Match Out Put image is to small");
        SrcMat.copyTo(cameraFeed);
        SrcMat.release();
        Imgproc.putText(cameraFeed, "X-S", new Point(10, 50), 0, 1, new Scalar(255, 255, 255), 2);
        return (false);
    }
    // Features2d.drawMatches(img1Mat, keypoints1, SrcMat, keypointsSrc, matchesGood, resultImg);
    // Outline the matched quadrilateral on the frame.
    Imgproc.line(SrcMat, srcPointCornersSave[0], srcPointCornersSave[1], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[1], srcPointCornersSave[2], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[2], srcPointCornersSave[3], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[3], srcPointCornersSave[0], new Scalar(255, 0, 0), 3);
    SrcMat.copyTo(cameraFeed);
    keypoints1.release();
    img1Mat.release();
    SrcMat.release();
    return (true);
}
//============================================================
//============ Functions below are no longer used ============
//============================================================
/// <summary>
/// Feature-based detection method 3 (ORB descriptor matching).
/// Detects the template named <paramref name="targetName"/> inside the
/// frame <paramref name="RGB"/>, draws the detected quad and its center
/// onto a copy of the frame and copies that into <paramref name="cameraFeed"/>.
/// </summary>
/// <param name="RGB">Input camera frame; a private copy is taken.</param>
/// <param name="cameraFeed">Output Mat the annotated frame is copied into.</param>
/// <param name="targetName">Resources path of the template texture to find.</param>
/// <returns><c>true</c> when a plausible match quad was found and drawn.</returns>
public bool descriptorsORB(Mat RGB, Mat cameraFeed, string targetName)
{
    if (RGB == null)
    {
        Debug.Log("RGB Mat is Null");
        return (false);
    }

    // Work on a private copy of the incoming frame.
    SrcMat = new Mat();
    RGB.copyTo(SrcMat);

    // Load the matching template and convert it to a Mat.
    Texture2D imgTexture = Resources.Load(targetName) as Texture2D;
    Mat targetMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, targetMat);

    // ORB keypoint detector and descriptor extractor.
    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    // Keypoints + descriptors for template and frame.
    MatOfKeyPoint keypointsTarget = new MatOfKeyPoint();
    Mat descriptorsTarget = new Mat();
    MatOfKeyPoint keypointsSrc = new MatOfKeyPoint();
    Mat descriptorsSrc = new Mat();
    detector.detect(targetMat, keypointsTarget);
    extractor.compute(targetMat, keypointsTarget, descriptorsTarget);
    detector.detect(SrcMat, keypointsSrc);
    extractor.compute(SrcMat, keypointsSrc, descriptorsSrc);

    // Brute-force Hamming matching (ORB descriptors are binary strings).
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(descriptorsTarget, descriptorsSrc, matches);
    DMatch[] arrayDmatch = matches.toArray();

    // Min/max distance over all matches (for the quality filter below).
    double max_dist = 0;
    double min_dist = 100;
    for (int i = 0; i < arrayDmatch.Length; i++)
    {
        double dist = arrayDmatch[i].distance;
        if (dist < min_dist)
        {
            min_dist = dist;
        }
        if (dist > max_dist)
        {
            max_dist = dist;
        }
    }
    Debug.Log("Max dist :" + max_dist);
    Debug.Log("Min dist :" + min_dist);

    // BUGFIX: the good-match list was declared but never populated, so the
    // homography below always received empty point sets. Keep matches within
    // the common 2*min_dist heuristic, floored at 30 so near-perfect matches
    // (min_dist == 0) are not all rejected.
    List <DMatch> matchesGoodList = new List <DMatch>();
    double goodThreshold = System.Math.Max(2 * min_dist, 30.0);
    for (int i = 0; i < arrayDmatch.Length; i++)
    {
        if (arrayDmatch[i].distance <= goodThreshold)
        {
            matchesGoodList.Add(arrayDmatch[i]);
        }
    }
    MatOfDMatch matchesGood = new MatOfDMatch();
    matchesGood.fromList(matchesGoodList);

    // Visualize the keypoints found in the camera frame.
    Features2d.drawKeypoints(SrcMat, keypointsSrc, SrcMat);

    Debug.Log("MatchCount" + matchesGoodList.Count);

    // findHomography needs at least 4 point correspondences.
    if (matchesGoodList.Count < 4)
    {
        SrcMat.copyTo(cameraFeed);
        keypointsTarget.release();
        targetMat.release();
        SrcMat.release();
        return (false);
    }

    // Collect the matched point pairs (query = template, train = frame).
    // Convert the keypoint Mats once, instead of once per match as before.
    KeyPoint[] targetKeyArray = keypointsTarget.toArray();
    KeyPoint[] srcKeyArray = keypointsSrc.toArray();
    List <Point> pTarget = new List <Point>();
    List <Point> pSrc = new List <Point>();
    for (int i = 0; i < matchesGoodList.Count; i++)
    {
        pTarget.Add(new Point(targetKeyArray[matchesGoodList[i].queryIdx].pt.x, targetKeyArray[matchesGoodList[i].queryIdx].pt.y));
        pSrc.Add(new Point(srcKeyArray[matchesGoodList[i].trainIdx].pt.x, srcKeyArray[matchesGoodList[i].trainIdx].pt.y));
    }
    MatOfPoint2f p2fTarget = new MatOfPoint2f(pTarget.ToArray());
    MatOfPoint2f p2fSrc = new MatOfPoint2f(pSrc.ToArray());

    // Robustly estimate the template -> frame homography with RANSAC.
    Mat matrixH = Calib3d.findHomography(p2fTarget, p2fSrc, Calib3d.RANSAC, 3);

    // Template corner rectangle, projected into the frame via the homography.
    List <Point> srcPointCorners = new List <Point>();
    srcPointCorners.Add(new Point(0, 0));
    srcPointCorners.Add(new Point(targetMat.width(), 0));
    srcPointCorners.Add(new Point(targetMat.width(), targetMat.height()));
    srcPointCorners.Add(new Point(0, targetMat.height()));
    Mat originalRect = Converters.vector_Point2f_to_Mat(srcPointCorners);
    List <Point> srcPointCornersEnd = new List <Point>();
    srcPointCornersEnd.Add(new Point(0, targetMat.height()));
    srcPointCornersEnd.Add(new Point(0, 0));
    srcPointCornersEnd.Add(new Point(targetMat.width(), 0));
    srcPointCornersEnd.Add(new Point(targetMat.width(), targetMat.height()));
    Mat changeRect = Converters.vector_Point2f_to_Mat(srcPointCornersEnd);
    Core.perspectiveTransform(originalRect, changeRect, matrixH);
    List <Point> srcPointCornersSave = new List <Point>();
    Converters.Mat_to_vector_Point(changeRect, srcPointCornersSave);

    // Reject degenerate (too small) projected quads.
    if ((srcPointCornersSave[2].x - srcPointCornersSave[0].x) < 5 || (srcPointCornersSave[2].y - srcPointCornersSave[0].y) < 5)
    {
        Debug.Log("Match Out Put image is to small");
        SrcMat.copyTo(cameraFeed);
        SrcMat.release();
        Imgproc.putText(cameraFeed, targetName, srcPointCornersSave[0], 0, 1, new Scalar(255, 255, 255), 2);
        return (false);
    }

    // Draw the detected quad.
    Imgproc.line(SrcMat, srcPointCornersSave[0], srcPointCornersSave[1], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[1], srcPointCornersSave[2], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[2], srcPointCornersSave[3], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[3], srcPointCornersSave[0], new Scalar(255, 0, 0), 3);

    // Mark the quad center (a zero-length thick line draws a dot).
    Point middlePoint = new Point((srcPointCornersSave[0].x + srcPointCornersSave[2].x) / 2, (srcPointCornersSave[0].y + srcPointCornersSave[2].y) / 2);
    Imgproc.line(SrcMat, middlePoint, middlePoint, new Scalar(0, 0, 255), 10);

    SrcMat.copyTo(cameraFeed);
    keypointsTarget.release();
    targetMat.release();
    SrcMat.release();
    return (true);
}
// 描画処理 private void videoRendering(object sender, NewFrameEventArgs eventArgs) { Bitmap img = (Bitmap)eventArgs.Frame.Clone(); // Debug.WriteLine(DateTime.Now + ":" + "描画更新"); // Debug.WriteLine(mode); try { //pictureBoxCamera.Image = img; temp = BitmapConverter.ToMat(img);//比較先画像 //特徴量の検出と特徴量ベクトルの計算 akaze.DetectAndCompute(temp, null, out key_point2, descriptor2); //画像2の特徴点をoutput2に出力 Cv2.DrawKeypoints(temp, key_point2, output2); //Cv2.ImShow("output2", output2); pictureBoxCamera.Image = BitmapConverter.ToBitmap(output2); matcher = DescriptorMatcher.Create("BruteForce"); matches = matcher.Match(descriptor1, descriptor2); //閾値以下の要素数のカウント for (int i = 0; i < key_point1.Length && i < key_point2.Length; ++i) { if (matches[i].Distance < threshold) { ++good_match_length; } } DMatch[] good_matches = new DMatch[good_match_length];//閾値以下の要素数で定義 //good_matchesに格納していく int j = 0; for (int i = 0; i < key_point1.Length && i < key_point2.Length; ++i) { if (matches[i].Distance < threshold) { good_matches[j] = matches[i]; ++j; } } //good_matchesの個数デバッグ表示 Debug.WriteLine(j); Invoke((MethodInvoker) delegate() { labelMatch.Text = j.ToString(); }); //類似点の数が多ければチェックボックスの状態に応じて非常停止 if (j >= 16) { //非常停止 if (checkBoxStop.Checked == true) { //WebRequest request = WebRequest.Create("https://maker.ifttt.com/trigger/raspberry/with/key/gHPH_xDKR664IVIr2YtRRj6BbQoQi-K0mCowIJCGPF3"); //WebResponse response = request.GetResponse(); } //アラート音 if (checkBoxAlert.Checked == true) { // _mediaPlayer.settings.volume = 20; _mediaPlayer.URL = @"D:\DCIM\app\AkazeAlert\PcCameraApp\Resources\decision1.mp3"; _mediaPlayer.controls.play(); } } Cv2.DrawMatches(mat, key_point1, temp, key_point2, good_matches, output3); //Cv2.ImShow("output3", output3); pictureBoxResult.Image = BitmapConverter.ToBitmap(output3); } catch { pictureBoxCamera.Image = img; } }
/// <summary>
/// ORB feature-matching demo: loads two textures from disk, brute-force
/// matches their ORB descriptors, filters the matches by distance, and
/// renders the side-by-side match visualization into m_dstImage.
/// </summary>
void Orb()
{
    p1Mat = Imgcodecs.imread(Application.dataPath + "/Textures/1.jpg", 1);
    p2Mat = Imgcodecs.imread(Application.dataPath + "/Textures/3.jpg", 1);
    Imgproc.cvtColor(p1Mat, p1Mat, Imgproc.COLOR_BGR2RGB);
    Imgproc.cvtColor(p2Mat, p2Mat, Imgproc.COLOR_BGR2RGB);
    // Match sizes so the side-by-side result lines up.
    Imgproc.resize(p2Mat, p2Mat, new Size(p1Mat.width(), p1Mat.height()));
    Debug.Log(p2Mat);

    ORB detector = ORB.create();
    ORB extractor = ORB.create();

    // Keypoints + descriptors for image 1.
    MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
    Mat descriptors1 = new Mat();
    detector.detect(p1Mat, keypoints1);
    extractor.compute(p1Mat, keypoints1, descriptors1);

    // Keypoints + descriptors for image 2.
    MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
    Mat descriptors2 = new Mat();
    detector.detect(p2Mat, keypoints2);
    extractor.compute(p2Mat, keypoints2, descriptors2);

    // First-pass brute-force matching (dense, unfiltered).
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(descriptors1, descriptors2, matches);

    // Find the smallest match distance. 15 is the original tuning's upper
    // bound: if every match is worse than 15, min_dist stays at 15.
    // (The old max_dist bookkeeping was dead code — its update was commented
    // out and the value was never used — so it has been removed, along with
    // the per-match Debug.Log spam.)
    double min_dist = 15;
    List <DMatch> matchesArray = matches.toList();
    for (int i = 0; i < matchesArray.Count; i++)
    {
        if (matchesArray[i].distance < min_dist)
        {
            min_dist = matchesArray[i].distance;
        }
    }
    Debug.Log("The min distance is: " + min_dist);

    // Keep matches within 2x the minimum distance.
    // BUGFIX: use <= so identical images (min_dist == 0) keep their
    // zero-distance matches instead of filtering everything out.
    List <DMatch> goodmatchesArray = new List <DMatch>();
    for (int i = 0; i < matchesArray.Count; i++)
    {
        if (matchesArray[i].distance <= 2 * min_dist)
        {
            goodmatchesArray.Add(matchesArray[i]);
        }
    }
    MatOfDMatch newMatches = new MatOfDMatch();
    newMatches.fromList(goodmatchesArray);
    Debug.Log(newMatches.toList().Count); // matches surviving the filter

    // Draw the filtered matches and present them as a sprite.
    dstMat = new Mat();
    Features2d.drawMatches(p1Mat, keypoints1, p2Mat, keypoints2, newMatches, dstMat);
    Texture2D t2d = new Texture2D(dstMat.width(), dstMat.height());
    Utils.matToTexture2D(dstMat, t2d);
    Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);
    m_dstImage.sprite = sp;
    m_dstImage.preserveAspect = true;
}
/// <summary>
/// Compares two textures using ORB features with a Lowe ratio test.
/// Both images are binarized (blur + threshold) before feature extraction,
/// and the intermediate thresholded images are dumped to disk for debugging.
/// </summary>
/// <param name="textParam">First image (camera photo).</param>
/// <param name="textParam2">Second image (screen capture).</param>
/// <returns><c>true</c> when at least 70% of the keypoints have a good match.</returns>
public bool verificaImagem(Texture2D textParam, Texture2D textParam2)
{
    var bytes = textParam.EncodeToJPG();
    Texture2D camFoto = textParam;
    Texture2D printTela = textParam2;

    // Grayscale (CV_8UC1) copies of both textures.
    Mat img1Mat = new Mat(camFoto.height, camFoto.width, CvType.CV_8UC1);
    Utils.texture2DToMat(camFoto, img1Mat);
    Mat img2Mat = new Mat(printTela.height, printTela.width, CvType.CV_8UC1);
    Utils.texture2DToMat(printTela, img2Mat);

    // Blur + binarize image 1; dump the intermediate result for debugging.
    Imgproc.GaussianBlur(img1Mat, img1Mat, new Size(5, 5), 0);
    Imgproc.threshold(img1Mat, img1Mat, 100, 255, Imgproc.THRESH_BINARY);
    Texture2D tex3 = new Texture2D(img1Mat.cols(), img1Mat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(img1Mat, tex3);
    bytes = tex3.EncodeToJPG();
    File.WriteAllBytes("imagem1_tratamento_threshold.png", bytes);

    // Blur + binarize image 2 (higher threshold for the screen capture).
    Imgproc.GaussianBlur(img2Mat, img2Mat, new Size(5, 5), 0);
    Imgproc.threshold(img2Mat, img2Mat, 240, 255, Imgproc.THRESH_BINARY);
    Texture2D tex4 = new Texture2D(img2Mat.cols(), img2Mat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(img2Mat, tex4);
    bytes = tex4.EncodeToJPG();
    File.WriteAllBytes("imagem2_tratamento_threshold.png", bytes);

    // ORB keypoints/descriptors for both images.
    ORB detector = ORB.create();
    ORB extractor = ORB.create();
    MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
    Mat descriptors1 = new Mat();
    detector.detect(img1Mat, keypoints1);
    extractor.compute(img1Mat, keypoints1, descriptors1);
    MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
    Mat descriptors2 = new Mat();
    detector.detect(img2Mat, keypoints2);
    extractor.compute(img2Mat, keypoints2, descriptors2);

    // k-NN matching (k = 2) for the Lowe ratio test.
    // BUGFIX: the old code also ran a plain match() and pre-seeded the knn
    // list with its result; knnMatch overwrites the output list, so that
    // work was dead and has been removed.
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    List <MatOfDMatch> lista = new List <MatOfDMatch>();
    matcher.knnMatch(descriptors1, descriptors2, lista, 2);

    // Lowe ratio test: count matches clearly better than their runner-up.
    long total = 0;
    foreach (MatOfDMatch item in lista)
    {
        List <DMatch> pair = item.toList();
        // BUGFIX: guard against entries with fewer than two neighbours,
        // which made item.toList()[1] throw.
        if (pair.Count >= 2 && pair[0].distance < 0.75 * pair[1].distance)
        {
            total++;
        }
    }

    // BUGFIX: elemSize() is the per-element byte size (a constant), not the
    // keypoint count — use total() to get the number of keypoints, and keep
    // the smaller of the two as before.
    long number_keypoints = System.Math.Min(keypoints1.total(), keypoints2.total());
    if (number_keypoints == 0)
    {
        return (false); // no features at all — nothing to compare
    }

    // BUGFIX: the old integer division (total / number_keypoints * 100)
    // truncated every partial match percentage down to 0.
    double percentual = total * 100.0 / number_keypoints;
    Debug.Log(percentual);
    return (percentual >= 70);
}
/// <summary>
/// Aligns the left picture onto the right picture via AKAZE feature matching
/// and a RANSAC homography, then highlights their per-channel differences.
/// Intermediate and final images are saved under "result/" and shown in
/// pictureBox3..pictureBox8.
/// </summary>
/// <param name="sLeftPictureFile">Path of the left (source) image.</param>
/// <param name="sRightPictureFile">Path of the right (target) image.</param>
public void FindContours(string sLeftPictureFile, string sRightPictureFile)
{
    Mat tokuLeft = new Mat();
    Mat tokuRight = new Mat();
    Mat output = new Mat();
    AKAZE akaze = AKAZE.Create();
    KeyPoint[] keyPointsLeft;
    KeyPoint[] keyPointsRight;
    Mat descriptorLeft = new Mat();
    Mat descriptorRight = new Mat();
    DescriptorMatcher matcher; // matching strategy
    DMatch[] matches;          // matching results between the two descriptor sets

    // Load both images (in color, despite the original comment mentioning grayscale).
    Mat Lsrc = new Mat(sLeftPictureFile, ImreadModes.Color);
    Mat Rsrc = new Mat(sRightPictureFile, ImreadModes.Color);

    // Detect keypoints and compute descriptor vectors for both images.
    akaze.DetectAndCompute(Lsrc, null, out keyPointsLeft, descriptorLeft);
    akaze.DetectAndCompute(Rsrc, null, out keyPointsRight, descriptorRight);

    // Render image 1's keypoints, show them and save to disk.
    Cv2.DrawKeypoints(Lsrc, keyPointsLeft, tokuLeft);
    Image imageLeftToku = BitmapConverter.ToBitmap(tokuLeft);
    pictureBox3.SizeMode = PictureBoxSizeMode.Zoom;
    pictureBox3.Image = imageLeftToku;
    tokuLeft.SaveImage("result/LeftToku.jpg");

    // Render image 2's keypoints, show them and save to disk.
    Cv2.DrawKeypoints(Rsrc, keyPointsRight, tokuRight);
    Image imageRightToku = BitmapConverter.ToBitmap(tokuRight);
    pictureBox4.SizeMode = PictureBoxSizeMode.Zoom;
    pictureBox4.Image = imageRightToku;
    tokuRight.SaveImage("result/RightToku.jpg");

    // Brute-force matching of the two descriptor sets.
    matcher = DescriptorMatcher.Create("BruteForce");
    matches = matcher.Match(descriptorLeft, descriptorRight);
    Cv2.DrawMatches(Lsrc, keyPointsLeft, Rsrc, keyPointsRight, matches, output);
    output.SaveImage(@"result\output.jpg");

    // Gather the matched point pairs (query = left image, train = right image).
    int size = matches.Count();
    var getPtsSrc = new Vec2f[size];
    var getPtsTarget = new Vec2f[size];
    int count = 0;
    foreach (var item in matches)
    {
        var ptSrc = keyPointsLeft[item.QueryIdx].Pt;
        var ptTarget = keyPointsRight[item.TrainIdx].Pt;
        getPtsSrc[count][0] = ptSrc.X;
        getPtsSrc[count][1] = ptSrc.Y;
        getPtsTarget[count][0] = ptTarget.X;
        getPtsTarget[count][1] = ptTarget.Y;
        count++;
    }

    // Estimate the transform hom that maps Src onto Target; robust estimation via RANSAC.
    var hom = Cv2.FindHomography(
        InputArray.Create(getPtsSrc),
        InputArray.Create(getPtsTarget),
        HomographyMethods.Ransac);

    // Apply the perspective transform hom to the source (left) image.
    Mat WarpedSrcMat = new Mat();
    Cv2.WarpPerspective(
        Lsrc, WarpedSrcMat, hom,
        new OpenCvSharp.Size(Rsrc.Width, Rsrc.Height));
    WarpedSrcMat.SaveImage(@"result\Warap.jpg");

    // Show the warped left image.
    Image imageLeftSyaei = BitmapConverter.ToBitmap(WarpedSrcMat);
    pictureBox5.SizeMode = PictureBoxSizeMode.Zoom;
    pictureBox5.Image = imageLeftSyaei;

    // Show the right image for comparison.
    Image imageRightSyaei = BitmapConverter.ToBitmap(Rsrc);
    pictureBox6.SizeMode = PictureBoxSizeMode.Zoom;
    pictureBox6.Image = imageRightSyaei;

    // Convert both images to signed 16-bit (so absdiff cannot clip) and
    // split into per-channel planes.
    Mat LmatFloat = new Mat();
    WarpedSrcMat.ConvertTo(LmatFloat, MatType.CV_16SC3);
    Mat[] LmatPlanes = LmatFloat.Split();
    Mat RmatFloat = new Mat();
    Rsrc.ConvertTo(RmatFloat, MatType.CV_16SC3);
    Mat[] RmatPlanes = RmatFloat.Split();

    // Per-channel absolute difference, median-filtered to suppress speckle.
    Mat diff0 = new Mat();
    Mat diff1 = new Mat();
    Mat diff2 = new Mat();
    Cv2.Absdiff(LmatPlanes[0], RmatPlanes[0], diff0);
    Cv2.Absdiff(LmatPlanes[1], RmatPlanes[1], diff1);
    Cv2.Absdiff(LmatPlanes[2], RmatPlanes[2], diff2);
    Cv2.MedianBlur(diff0, diff0, 5);
    Cv2.MedianBlur(diff1, diff1, 5);
    Cv2.MedianBlur(diff2, diff2, 5);
    diff0.SaveImage("result/diff0.jpg");
    diff1.SaveImage("result/diff1.jpg");
    diff2.SaveImage("result/diff2.jpg");

    // Combine the channel differences into one mask.
    Mat wiseMat = new Mat();
    Cv2.BitwiseOr(diff0, diff1, wiseMat);
    Cv2.BitwiseOr(wiseMat, diff2, wiseMat);
    wiseMat.SaveImage("result/wiseMat.jpg");

    // Morphological opening (remove small noise), dilation (grow regions),
    // then binarize the difference mask.
    Mat openingMat = new Mat();
    Cv2.MorphologyEx(wiseMat, openingMat, MorphTypes.Open, new Mat());
    Mat dilationMat = new Mat();
    Cv2.Dilate(openingMat, dilationMat, new Mat());
    Cv2.Threshold(dilationMat, dilationMat, 100, 255, ThresholdTypes.Binary);
    dilationMat.SaveImage(@"result\dilationMat.jpg");

    Mat LaddMat = new Mat();
    Mat RaddMat = new Mat();
    Console.WriteLine(dilationMat.GetType());
    Console.WriteLine(Rsrc.GetType());

    // dilationMat is grayscale, so convert it to the same color space as the
    // blend targets before overlaying.
    Mat dilationScaleMat = new Mat();
    Mat dilationColorMat = new Mat();
    Cv2.ConvertScaleAbs(dilationMat, dilationScaleMat);
    Cv2.CvtColor(dilationScaleMat, dilationColorMat, ColorConversionCodes.GRAY2RGB);

    // Overlay the difference mask on both images (30% image, 70% mask) and show.
    Cv2.AddWeighted(WarpedSrcMat, 0.3, dilationColorMat, 0.7, 0, LaddMat);
    Cv2.AddWeighted(Rsrc, 0.3, dilationColorMat, 0.7, 0, RaddMat);
    Image LaddImage = BitmapConverter.ToBitmap(LaddMat);
    pictureBox7.SizeMode = PictureBoxSizeMode.Zoom;
    pictureBox7.Image = LaddImage;
    Image RaddImage = BitmapConverter.ToBitmap(RaddMat);
    pictureBox8.SizeMode = PictureBoxSizeMode.Zoom;
    pictureBox8.Image = RaddImage;
    RaddMat.SaveImage(@"result\Result.jpg");
    MessageBox.Show("Done!");
}