public static Mat GetHomography(Mat mMain, Mat mSecondary)
{
    KeyPoint[] keypoints = null;
    KeyPoint[] keypoints2 = null;
    using (SIFT sIFT = SIFT.Create(1000))
    using (Mat mat = new Mat())
    using (Mat mat2 = new Mat())
    {
        sIFT.DetectAndCompute(mMain, new Mat(), out keypoints, mat);
        sIFT.DetectAndCompute(mSecondary, new Mat(), out keypoints2, mat2);

        FlannBasedMatcher flannBasedMatcher = new FlannBasedMatcher();
        DMatch[] array = flannBasedMatcher.Match(mat, mat2);

        List<Point2f> list = new List<Point2f>();
        List<Point2f> list2 = new List<Point2f>();
        for (int i = 0; i < array.Length; i++)
        {
            list.Add(keypoints[array[i].QueryIdx].Pt);
            list2.Add(keypoints2[array[i].TrainIdx].Pt);
        }
        return Cv2.FindHomography(InputArray.Create(list2), InputArray.Create(list), HomographyMethods.Ransac);
    }
}
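A minimal caller for the helper above (OpenCvSharp), sketched here as an illustration; the file names and the warp step are assumptions, not part of the original sample:

public static void AlignExample()
{
    // hypothetical input paths, not from the original sample
    using (Mat main = Cv2.ImRead("main.png", ImreadModes.Color))
    using (Mat secondary = Cv2.ImRead("secondary.png", ImreadModes.Color))
    using (Mat homography = GetHomography(main, secondary))
    using (Mat warped = new Mat())
    {
        // the returned matrix maps secondary-image points into the main image's frame
        Cv2.WarpPerspective(secondary, warped, homography, main.Size());
        Cv2.ImWrite("aligned.png", warped);
    }
}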
public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
{
    int k = 2;
    double uniquenessThreshold = 0.80;
    Stopwatch watch;
    homography = null;
    modelKeyPoints = new VectorOfKeyPoint();
    observedKeyPoints = new VectorOfKeyPoint();
    using (UMat uModelImage = modelImage.GetUMat(AccessType.Read))
    using (UMat uObservedImage = observedImage.GetUMat(AccessType.Read))
    {
        // SURF alternative, kept from the original but commented out along with its threshold
        //double hessianThresh = 100;
        //SURF surfCPU = new SURF(hessianThresh);
        SIFT siftCPU = new SIFT();

        // extract features from the object image
        UMat modelDescriptors = new UMat();
        //surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
        siftCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

        watch = Stopwatch.StartNew();

        // extract features from the observed image
        UMat observedDescriptors = new UMat();
        //surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
        siftCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

        BFMatcher matcher = new BFMatcher(DistanceType.L2);
        matcher.Add(modelDescriptors);
        matcher.KnnMatch(observedDescriptors, matches, k, null);

        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
        mask.SetTo(new MCvScalar(255));
        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
        //Features2DToolbox.VoteForUniqueness(matches, 1, mask);

        int nonZeroCount = CvInvoke.CountNonZero(mask);
        if (nonZeroCount >= 4)
        {
            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
            if (nonZeroCount >= 4)
            {
                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
            }
        }
        watch.Stop();
    }
    matchTime = watch.ElapsedMilliseconds;
}
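A hypothetical caller for the Emgu CV FindMatch above, showing how its outputs would typically feed into Features2DToolbox.DrawMatches; the image paths are placeholders:

public static void ShowMatches()
{
    // placeholder paths, not from the original sample
    Mat model = CvInvoke.Imread(@"model.png", ImreadModes.Grayscale);
    Mat observed = CvInvoke.Imread(@"observed.png", ImreadModes.Grayscale);
    using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
    {
        FindMatch(model, observed, out long matchTime, out VectorOfKeyPoint modelKeyPoints,
                  out VectorOfKeyPoint observedKeyPoints, matches, out Mat mask, out Mat homography);
        Mat result = new Mat();
        Features2DToolbox.DrawMatches(model, modelKeyPoints, observed, observedKeyPoints, matches, result,
                                      new MCvScalar(255, 255, 255), new MCvScalar(0, 0, 0), mask);
        CvInvoke.Imshow("Matches (" + matchTime + " ms)", result);
        CvInvoke.WaitKey(0);
    }
}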
public static void FindMatch(string modelFileName, string observedFileName, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask)
{
    int k = 2;
    double uniquenessThreshold = 0.8;
    modelKeyPoints = new VectorOfKeyPoint();
    observedKeyPoints = new VectorOfKeyPoint();
    using (UMat uModelImage = CvInvoke.Imread(modelFileName, ImreadModes.Color).GetUMat(AccessType.Read))
    using (UMat uObservedImage = CvInvoke.Imread(observedFileName, ImreadModes.Color).GetUMat(AccessType.Read))
    {
        SIFT sift = new SIFT();

        UMat modelDescriptors = new UMat();
        sift.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
        UMat observedDescriptors = new UMat();
        sift.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

        BFMatcher matcher = new BFMatcher(DistanceType.L2);
        matcher.Add(modelDescriptors);
        matcher.KnnMatch(observedDescriptors, matches, k, null);

        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
        mask.SetTo(new MCvScalar(255));
        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
    }
}
public static Mat Draw(Mat modelImage, Mat observedImage)
{
    var sift = new SIFT();
    var modelKeyPoints = new VectorOfKeyPoint();
    var observedKeyPoints = new VectorOfKeyPoint();
    UMat modelDescriptors = new UMat();
    UMat observedDescriptors = new UMat();
    sift.DetectAndCompute(modelImage, null, modelKeyPoints, modelDescriptors, false);
    sift.DetectAndCompute(observedImage, null, observedKeyPoints, observedDescriptors, false);

    BFMatcher matcher = new BFMatcher(DistanceType.L2);
    matcher.Add(modelDescriptors);
    var matches = new VectorOfVectorOfDMatch();
    matcher.KnnMatch(observedDescriptors, matches, 2, null);

    var mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
    mask.SetTo(new MCvScalar(255));
    Features2DToolbox.VoteForUniqueness(matches, 0.8, mask);
    Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
    var homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 10);

    var result = new Mat();
    Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints, matches, result,
                                  new MCvScalar(255, 255, 255), new MCvScalar(0, 0, 0), mask,
                                  Features2DToolbox.KeypointDrawType.NotDrawSinglePoints);

    // GetHomographyMatrixFromMatchedFeatures can return null when too few matches
    // survive the votes, so guard before projecting the model rectangle
    if (homography != null)
    {
        Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
        PointF[] pts =
        {
            new PointF(rect.Left, rect.Bottom),
            new PointF(rect.Right, rect.Bottom),
            new PointF(rect.Right, rect.Top),
            new PointF(rect.Left, rect.Top)
        };
        pts = CvInvoke.PerspectiveTransform(pts, homography);
        Point[] points = Array.ConvertAll<PointF, Point>(pts, Point.Round);
        using (VectorOfPoint vp = new VectorOfPoint(points))
        {
            CvInvoke.Polylines(result, vp, true, new MCvScalar(0, 255, 0, 55), 2);
        }
    }
    return result;
}
public static List<System.Drawing.Point> func(Bitmap bitmap1, Bitmap bitmap2)
{
    //Mat img1 = new Mat(@"roll/0.png", ImreadModes.Unchanged);
    //Mat img2 = new Mat(@"roll/1.png", ImreadModes.Unchanged);
    Mat img1 = BitmapToMat(bitmap1);
    Mat img2 = BitmapToMat(bitmap2);

    SIFT sift = SIFT.Create(20);
    //KeyPoint[] k = sift.Detect(img1);

    // Detect the keypoints and generate their descriptors using SIFT
    KeyPoint[] keypoints1, keypoints2;
    var descriptors1 = new Mat<float>();
    var descriptors2 = new Mat<float>();
    sift.DetectAndCompute(img1, null, out keypoints1, descriptors1);
    sift.DetectAndCompute(img2, null, out keypoints2, descriptors2);

    // Match descriptor vectors
    var bfMatcher = new BFMatcher(NormTypes.L2, false);
    var flannMatcher = new FlannBasedMatcher();
    DMatch[] bfMatches = bfMatcher.Match(descriptors1, descriptors2);
    DMatch[] flannMatches = flannMatcher.Match(descriptors1, descriptors2);

    // Draw matches
    var bfView = new Mat();
    Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, bfMatches, bfView);
    var flannView = new Mat();
    Cv2.DrawMatches(img1, keypoints1, img2, keypoints2, flannMatches, flannView);

    using (new Window("SIFT matching (by BFMatcher)", bfView))
    using (new Window("SIFT matching (by FlannBasedMatcher)", flannView))
    {
        Cv2.WaitKey();
    }

    // Collect the (x, y) offset between each matched keypoint pair
    List<System.Drawing.Point> points = new List<System.Drawing.Point>();
    foreach (DMatch match in bfMatches)
    {
        System.Drawing.Point p = new System.Drawing.Point();
        p.X = (int)(keypoints1[match.QueryIdx].Pt.X - keypoints2[match.TrainIdx].Pt.X);
        p.Y = (int)(keypoints1[match.QueryIdx].Pt.Y - keypoints2[match.TrainIdx].Pt.Y);
        points.Add(p);
    }
    return points;
}
public static List<CriteriaImageModel> CreateCriteriaArrays(FileInfo[] criteriaFiles)
{
    var criteriaImages = new List<CriteriaImageModel>();
    foreach (var o in criteriaFiles)
    {
        using (var image = CvInvoke.Imread(o.FullName, ImreadModes.Grayscale))
        {
            var mdlImage = new Mat();
            CvInvoke.Threshold(image, mdlImage, 127.0, 255.0, ThresholdType.BinaryInv);
            var uModelImage = mdlImage.GetUMat(AccessType.Read);
            var modelDescriptors = new Mat();
            var modelKeyPoints = new VectorOfKeyPoint();
            using (var featureDetector = new SIFT(0, 3, 0.04, 10.0, 1.6))
            {
                featureDetector.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);
            }
            criteriaImages.Add(new CriteriaImageModel
            {
                Info = o,
                Image = uModelImage,
                ModelDescriptors = modelDescriptors,
                ModelKeyPoints = modelKeyPoints
            });
        }
    }
    return criteriaImages;
}
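The snippet assumes a CriteriaImageModel DTO defined elsewhere in that project; a plausible shape, inferred only from the object initializer above, would be:

// inferred from the usage above; the real class in the source project may differ
public class CriteriaImageModel
{
    public FileInfo Info { get; set; }
    public UMat Image { get; set; }
    public Mat ModelDescriptors { get; set; }
    public VectorOfKeyPoint ModelKeyPoints { get; set; }
}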
static void test_match()
{
    Bitmap b1 = new Bitmap(@"C:\test\test_1\temp_menu.jpg");
    Bitmap sl = new Bitmap(@"C:\test\scroll_left.jpg");
    Image<Gray, Byte> slicon = new Image<Gray, byte>(sl);
    slicon = slicon.Not();
    slicon.Save("temp_1.jpg");
    Image<Gray, Byte> test = new Image<Gray, Byte>(b1);
    //long l;
    //Mat r = DrawMatches.Draw(slicon.Mat, test.Mat, out l);
    //r.Save("temp_2.jpg");
    //SURF surfCPU = new SURF(400);
    //Brisk surfCPU = new Brisk();
    SIFT surfCPU = new SIFT();
    VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint();
    VectorOfKeyPoint observedKeyPoints = new VectorOfKeyPoint();
    UMat modelDescriptors = new UMat();
    UMat observedDescriptors = new UMat();
    surfCPU.DetectAndCompute(slicon, null, modelKeyPoints, modelDescriptors, false);
    surfCPU.DetectAndCompute(test, null, observedKeyPoints, observedDescriptors, false);

    var indices = new Matrix<int>(observedDescriptors.Rows, 2);
    var dists = new Matrix<float>(observedDescriptors.Rows, 2);
    var flannIndex = new Index(modelDescriptors, new KMeansIndexParams());
    flannIndex.KnnSearch(observedDescriptors, indices, dists, 2);

    for (int i = 0; i < indices.Rows; i++)
    {
        // Lowe's ratio test: accept only if the best match is clearly better than the second best
        if (dists.Data[i, 0] < (0.6 * dists.Data[i, 1]))
        {
            int idx1 = indices[i, 0]; // best model match for observed keypoint i
            int idx2 = indices[i, 1]; // second-best model match (ratio test / logging only)
            Program.logIt(string.Format("{0}-{1}", idx1, idx2));
            MKeyPoint p1 = modelKeyPoints[idx1];
            // row i of the search results corresponds to observed keypoint i;
            // the original indexed observedKeyPoints with idx2, pairing unrelated points
            MKeyPoint p2 = observedKeyPoints[i];
            Program.logIt(string.Format("{0}-{1}", p1.Point, p2.Point));
        }
    }
}
public void ShowKeyPoints()
{
    lstMat.Clear();
    lstModelDescriptors.Clear();

    var featureDetector = new SIFT();
    Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams();
    Emgu.CV.Flann.SearchParams sp = new SearchParams();
    DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp);

    // mask: white 70x70 square with a black disc cut out of the middle
    Rectangle cropRect = new Rectangle(842, 646, 70, 70);
    Mat mask = new Mat(new Size(70, 70), DepthType.Cv8U, 1);
    CvInvoke.Rectangle(mask, new Rectangle(0, 0, 70, 70), new MCvScalar(255, 255, 255), -1);
    CvInvoke.Circle(mask, new Point(35, 37), 22, new MCvScalar(0, 0, 0), -1);
    lstMat.Add(mask);

    String[] folders = { @"Linage2\Main\PartyAuto", @"Linage2\Main\PartyManual" };
    foreach (String folder in folders)
    {
        DirectoryInfo imageFolder = new DirectoryInfo(folder);
        FileInfo[] files = Utils.GetFilesByExtensions(imageFolder, ".jpg", ".png").ToArray();
        foreach (FileInfo finfo in files)
        {
            Mat img = CvInvoke.Imread(finfo.FullName, ImreadModes.Color);
            Mat crop = CVUtil.crop_color_frame(img, cropRect);
            //lstMat.Add(crop);
            VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint();
            Mat modelDescriptors = new Mat();
            featureDetector.DetectAndCompute(crop, mask, modelKeyPoints, modelDescriptors, false);
            lstModelDescriptors.Add(modelDescriptors);
            Mat result = new Mat();
            Features2DToolbox.DrawKeypoints(crop, modelKeyPoints, result, new Bgr(Color.Red));
            lstMat.Add(result);
            //BOWImgDescriptorExtractor bow = new BOWImgDescriptorExtractor(featureDetector, matcher);
        }
    }
    /*
    BOWKMeansTrainer bowtrainer = new BOWKMeansTrainer(1000, new MCvTermCriteria(10, 0.001), 1, Emgu.CV.CvEnum.KMeansInitType.PPCenters);
    foreach (Mat m in lstModelDescriptors)
    {
        bowtrainer.Add(m);
    }
    Mat dict = new Mat();
    bowtrainer.Cluster();
    StringBuilder sb = new StringBuilder();
    Image<Bgr, Byte> imgsave = dict.ToImage<Bgr, Byte>();
    (new XmlSerializer(typeof(Image<Bgr, Byte>))).Serialize(new StringWriter(sb), imgsave);
    Console.WriteLine(sb.ToString());
    */
}
private void FillImageSet(List<ImageData> set, string prefix)
{
    UtilityHelper.refreshDirectory(prefix);
    if (dialog.ShowDialog() == DialogResult.OK)
    {
        var files = Directory.GetFiles(dialog.SelectedPath, "*.dcm");
        foreach (var file in files)
        {
            var ds = new DicomImage(file);
            var dsBones = new DicomImage(file)
            {
                WindowWidth = 100,
                WindowCenter = 500
            };
            var image = ds.RenderImage().AsBitmap();
            var imageBones = dsBones.RenderImage().AsBitmap();
            string newName = prefix + "/" + Path.GetFileName(file).Replace(".dcm", ".jpg");
            string newBonesName = prefix + "/" + Path.GetFileName(file).Replace(".dcm", "_bones.jpg");
            image.Save(newName);
            imageBones.Save(newBonesName);

            Feature2D s;
            switch (algorithm)
            {
            case Algo.ORB:
                s = new ORBDetector();
                break;
            case Algo.SURF:
                s = new SURF(0.8);
                break;
            default:
                s = new SIFT();
                break;
            }

            Mat mat = CvInvoke.Imread(newBonesName, ImreadModes.Grayscale);
            Mat matOrig = CvInvoke.Imread(newName, ImreadModes.Unchanged);
            var vec = new VectorOfKeyPoint();
            Mat modelDescriptors = new Mat();
            s.DetectAndCompute(mat, null, vec, modelDescriptors, false);
            ImageData id = new ImageData(matOrig, mat)
            {
                KeyPoints = vec,
                Descriptors = modelDescriptors
            };
            set.Add(id);
        }
    }
}
public KeyPoints SIFTDescriptor()
{
    KeyPoints result = new KeyPoints();
    //SIFT descriptor
    SIFT siftAlgo = null;
    VectorOfKeyPoint modelKeyPointsSift = null;
    try
    {
        siftAlgo = new SIFT();
        modelKeyPointsSift = new VectorOfKeyPoint();

        MKeyPoint[] siftPoints = siftAlgo.Detect(preProcessedImageInGrayScale);
        modelKeyPointsSift.Push(siftPoints);
        UMat siftDescriptors = new UMat();
        siftAlgo.DetectAndCompute(preProcessedImageInGrayScale, null, modelKeyPointsSift, siftDescriptors, true);

        Image<Gray, Byte> outputImage = new Image<Gray, byte>(
            preProcessedImageInGrayScale.Width,
            preProcessedImageInGrayScale.Height);
        Features2DToolbox.DrawKeypoints(
            preProcessedImageInGrayScale,
            modelKeyPointsSift,
            outputImage,
            new Bgr(255, 255, 255),
            Features2DToolbox.KeypointDrawType.Default);

        string folderName = @"C:\Projects\LeafService\SiftImage";
        string pathString = System.IO.Path.Combine(folderName, "Sift" + DateTime.UtcNow.Ticks);
        System.IO.Directory.CreateDirectory(pathString);
        if (Directory.Exists(pathString))
        {
            string newFilePath = Path.Combine(pathString, "SiftImage" + DateTime.UtcNow.Ticks);
            // save into the freshly created directory; the original wrote to
            // folderName + ".jpg" and left newFilePath unused
            outputImage.Save(newFilePath + ".jpg");
            outputImage.Save(@"C:\Projects\LeafService\SIFTgray.jpg");
        }
        //outputImage.Save("sift.jpg");

        result.Descriptor = siftDescriptors;
        result.Points = siftPoints;
        return result;
    }
    finally
    {
        siftAlgo.Dispose();
        modelKeyPointsSift.Dispose();
    }
}
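The KeyPoints result type is not shown in the snippet; judging by the two assignments at the end, a minimal definition would look like this (an assumption, not the original class):

// inferred from result.Descriptor / result.Points above
public class KeyPoints
{
    public UMat Descriptor { get; set; }
    public MKeyPoint[] Points { get; set; }
}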
static void Main(string[] args)
{
    Mat img0 = new Mat(@"D:\Desktop\SI4\testImg0.jpg");
    Mat img1 = new Mat(@"D:\Desktop\SI4\testImg1.jpg");

    FastDetector fd = new FastDetector();
    MKeyPoint[] points0 = fd.Detect(img0);
    MKeyPoint[] points1 = fd.Detect(img1);
    Console.WriteLine(points0.Length);
    Console.WriteLine(points1.Length);

    // The second parameter of Detect is an optional mask, not a second image;
    // the original passed img0 here, which is not a valid 8-bit mask for img1.
    MKeyPoint[] points2 = fd.Detect(img1, null);
    Console.WriteLine(points2.Length);

    // Compute SIFT descriptors for the FAST keypoints: descriptors go into a Mat
    // (not a GpuMat), the mask is null, and useProvidedKeyPoints is true so the
    // FAST keypoints are described rather than re-detected.
    SIFT sift = new SIFT();
    VectorOfKeyPoint keyPoints = new VectorOfKeyPoint(points2);
    Mat descriptors = new Mat();
    sift.DetectAndCompute(img1, null, keyPoints, descriptors, true);
}
public void FindMatches(Image<Rgb, byte> SubMap, out VectorOfKeyPoint VectorSubMapKeyPoint, out Mat SubMapDiscriptors, out VectorOfVectorOfDMatch matches, out Mat mask, out System.Drawing.Rectangle zone, out Mat homography, int k, double uniquenessThreshold, SIFTParametrs parametrs)
{
    VectorSubMapKeyPoint = new VectorOfKeyPoint();
    SubMapDiscriptors = new Mat();
    matches = new VectorOfVectorOfDMatch();
    zone = new System.Drawing.Rectangle();
    using (SIFT siftCPU = new SIFT(parametrs.nFeatures, parametrs.nOctaveLayers,
                                   parametrs.contrastThreshold, parametrs.edgeThreshold, parametrs.sigma))
    {
        siftCPU.DetectAndCompute(SubMap, null, VectorSubMapKeyPoint, SubMapDiscriptors, false);
    }
    using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
    using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
    using (Emgu.CV.Features2D.DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
    {
        matcher.Add(SubMapDiscriptors);
        matcher.KnnMatch(MapDiscriptors, matches, k, null);
    }

    mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
    mask.SetTo(new MCvScalar(255));
    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

    homography = null;
    int nonZeroCount = CvInvoke.CountNonZero(mask);
    if (nonZeroCount >= 4)
    {
        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(VectorSubMapKeyPoint, VectorMapKeyPoint, matches, mask, 1.5, 20);
        if (nonZeroCount >= 4)
        {
            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(
                VectorSubMapKeyPoint, VectorMapKeyPoint, matches, mask, 2);
        }
    }
}
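SIFTParametrs is defined elsewhere in that project; its fields map one-to-one onto Emgu's SIFT constructor parameters, so a plausible sketch is:

// assumed definition; field names and types taken from the constructor call above
public struct SIFTParametrs
{
    public int nFeatures;
    public int nOctaveLayers;
    public double contrastThreshold;
    public double edgeThreshold;
    public double sigma;
}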
public void DrawSIFTDescriptor(string inputFile, string outputFile)
{
    //SIFT descriptor
    SIFT siftAlgo = null;
    VectorOfKeyPoint modelKeyPointsSift = null;
    try
    {
        siftAlgo = new SIFT();
        modelKeyPointsSift = new VectorOfKeyPoint();
        using (Image<Bgr, byte> inputImage = new Image<Bgr, byte>(inputFile))
        {
            MKeyPoint[] siftPoints = siftAlgo.Detect(inputImage);
            modelKeyPointsSift.Push(siftPoints);
            UMat siftDescriptors = new UMat();
            siftAlgo.DetectAndCompute(inputImage, null, modelKeyPointsSift, siftDescriptors, true);
            using (Image<Gray, Byte> outputImage = new Image<Gray, byte>(
                       inputImage.Width, inputImage.Height))
            {
                Features2DToolbox.DrawKeypoints(
                    inputImage,
                    modelKeyPointsSift,
                    outputImage,
                    new Bgr(255, 255, 255),
                    Features2DToolbox.KeypointDrawType.Default);
                outputImage.Save(outputFile);
            }
        }
    }
    finally
    {
        siftAlgo.Dispose();
        modelKeyPointsSift.Dispose();
    }
}
private void testToolStripMenuItem_Click(object sender, EventArgs e)
{
    lstMat.Clear();
    VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
    Mat testImage = CvInvoke.Imread(@"Linage2\Main\PartyAuto\2e35av2fwbk.png", ImreadModes.Color);
    Mat modelImage = CVUtil.crop_color_frame(testImage, new Rectangle(842, 646, 70, 70));
    log(modelImage.ToString());
    Image<Bgr, Byte> img = modelImage.ToImage<Bgr, Byte>();
    // set the ROI via the property rather than the legacy C-API cvSetImageROI
    // (img is not used further in this test)
    img.ROI = new Rectangle(0, 0, 35, 35);
    //UMat uModelImage = modelImage.GetUMat(AccessType.Read);

    var featureDetector = new SIFT();
    Mat modelDescriptors = new Mat();
    VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint();
    VectorOfKeyPoint observedKeyPoints = new VectorOfKeyPoint();
    featureDetector.DetectAndCompute(modelImage, null, modelKeyPoints, modelDescriptors, false);
    log("model size = " + modelKeyPoints.Size);
    Mat observedDescriptors = new Mat();
    featureDetector.DetectAndCompute(testImage, null, observedKeyPoints, observedDescriptors, false);

    int k = 2;
    double uniquenessThreshold = 0.80;
    Mat mask;
    Mat homography = null;

    // LinearIndexParams gives a brute-force FLANN search: slower but more accurate.
    // A KD-tree index is faster with a slight loss in accuracy.
    using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
    using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
    using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
    {
        matcher.Add(modelDescriptors);
        matcher.KnnMatch(observedDescriptors, matches, k, null);
        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
        mask.SetTo(new MCvScalar(255));
        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
        int nonZeroCount = CvInvoke.CountNonZero(mask);
        if (nonZeroCount >= 4)
        {
            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
            if (nonZeroCount >= 4)
            {
                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                PointF[] src =
                {
                    new PointF(0, 0),
                    new PointF(0, modelImage.Height - 1),
                    new PointF(modelImage.Width - 1, modelImage.Height - 1),
                    new PointF(modelImage.Width - 1, 0)
                };
                PointF[] points = CvInvoke.PerspectiveTransform(src, homography);
                foreach (var p in points)
                {
                    Console.WriteLine(p.ToString());
                }
                Point[] ap = Array.ConvertAll(points, new Converter<PointF, Point>(CVUtil.PointFToPoint));
                CvInvoke.Polylines(testImage, ap, true, new MCvScalar(255, 0, 0));
                CvInvoke.Rectangle(testImage, new Rectangle(0, 0, 100, 100), new MCvScalar(255, 255, 0));
                CvInvoke.Circle(testImage, new Point(100, 100), 50, new MCvScalar(255, 255, 0), -1);
                lstMat.Add(testImage);
            }
            //Mat modelMatches = new Mat();
            //Features2DToolbox.DrawKeypoints(modelImage, modelKeyPoints, result, new Bgr(Color.Red));
            //Features2DToolbox.DrawKeypoints(testImage, observedKeyPoints, result, new Bgr(Color.Red));
            //Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, testImage, observedKeyPoints, matches, modelMatches,
            //    new MCvScalar(255, 0, 0), new MCvScalar(0, 255, 0));
            //lstMat.Add(modelMatches);
            //Mat model1 = new Mat();
            //Features2DToolbox.DrawKeypoints(modelImage, modelKeyPoints, model1, new Bgr(Color.Red));
            //lstMat.Add(model1);
            //modelMatches = crop_color_frame(testImage, new Rectangle(842, 646, 70, 70));
        }
    }
    log("Done " + mask.Size);
    Refresh();
}
public FeatureResult GetFeature(Bitmap bmpSrc)
{
    Mat matTest = CVUtil.BitmapToMat(bmpSrc);
    matTest = ProcessImage(matTest);
    /*
    Bitmap bmp = Utils.cropImage(bmpSrc, cropRect);
    Mat matTest = CVUtil.BitmapToMat(bmp);
    if (colorIndex != -1)
    {
        matTest = matTest.Split()[colorIndex];
    }
    */
    Mat observedDescriptors = new Mat();
    VectorOfKeyPoint observedKeyPoints = new VectorOfKeyPoint();
    featureDetector.DetectAndCompute(matTest, mask, observedKeyPoints, observedDescriptors, false);
    int k = 2;
    double uniquenessThreshold = 0.80;
    //Mat homography = null;

    // A KD-tree FLANN index trades a little accuracy for much faster matching than brute force
    using (Emgu.CV.Flann.KdTreeIndexParams ip = new Emgu.CV.Flann.KdTreeIndexParams())
    using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
    using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
    {
        VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
        foreach (SimpleFeatureData sd in this)
        {
            matcher.Add(sd.descriptors);
            //break;
        }
        matcher.KnnMatch(observedDescriptors, matches, k, null);
        lastMatches = matches;
        lastObserved = matTest;
        lastObservedKeyPoint = observedKeyPoints;
        //Mat mat = new Mat();
        //Features2DToolbox.DrawKeypoints(matTest, observedKeyPoints, mat, new Bgr(Color.Blue));
        //FormOpenCV.lstMat.Add(mat);
        //Console.WriteLine(CVUtil.ToString(observedDescriptors));
        //Console.WriteLine(CVUtil.ToString(observedKeyPoints));
        //Console.WriteLine(CVUtil.ToString(matches));
        //Console.WriteLine(MatchesToString(matches));

        Mat uniqueMask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
        uniqueMask.SetTo(new MCvScalar(255));
        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, uniqueMask);
        int nonZeroCount = CvInvoke.CountNonZero(uniqueMask);
        if (nonZeroCount > 4)
        {
            //Console.WriteLine(CVUtil.ToString(uniqueMask));
            String retLabel = GetLabelFromMatches(matches, uniqueMask);
            SimpleFeatureData mfd = lastMatchFeatureData;
            try
            {
                //int nonZeroCount2 = Features2DToolbox.VoteForSizeAndOrientation(mfd.keyPoints, observedKeyPoints, matches, uniqueMask, 1.5, 20);
                //Console.WriteLine("nonZeroCount2=" + nonZeroCount2);
                if (nonZeroCount > 4)
                {
                    Mat homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(mfd.keyPoints, observedKeyPoints, matches, uniqueMask, 2);
                    /*
                    Console.WriteLine(CVUtil.ToString(homography));
                    Rectangle rect = CVUtil.GetRect(mfd.keyPoints);
                    PointF[] src = {
                        new PointF(rect.X, rect.Y), new PointF(rect.X, rect.Y + rect.Height - 1),
                        new PointF(rect.X + rect.Width - 1, rect.Y + rect.Height - 1), new PointF(rect.X + rect.Width - 1, rect.Y)
                    };
                    PointF[] points = CvInvoke.PerspectiveTransform(src, homography);
                    foreach (var p in points)
                    {
                        Console.WriteLine(p.ToString());
                    }
                    Point[] ap = Array.ConvertAll(points, new Converter<PointF, Point>(CVUtil.PointFToPoint));
                    Mat testImage = matTest.Clone();
                    CvInvoke.Polylines(testImage, ap, true, new MCvScalar(255, 0, 0));
                    */
                    //CvInvoke.Rectangle(testImage, new Rectangle(0, 0, 100, 100), new MCvScalar(255, 255, 0));
                    //CvInvoke.Circle(testImage, new Point(100, 100), 50, new MCvScalar(255, 255, 0), -1);
                    //lstMat.Add(testImage);
                    FeatureResult ret = new FeatureResult();
                    ret.keyPoint = observedKeyPoints;
                    ret.label = retLabel;
                    ret.homography = homography;
                    ret.matchFeatureData = mfd;
                    return ret;
                }
            }
            catch (Exception)
            {
                // homography estimation failed; fall through and report no match
            }
        }
        return null;
    }
}
List<float> ExtractSiftFeatureVector(TaggedImage image, int keyPointCount, SiftSortingMethod sortingMethod, bool doDrawImage)
{
    // use the emgu functions to gather keypoints and their descriptors;
    // the original reused the drawing Mat as the descriptor target, which later
    // made the normalization below divide by the descriptor matrix size
    VectorOfKeyPoint vectorOfKeypoints = new VectorOfKeyPoint();
    Mat descriptors = new Mat();
    sift.DetectAndCompute(image.GetMat(), null, vectorOfKeypoints, descriptors, false);

    // put it into a useful data format
    List<MKeyPoint> keyPoints = new List<MKeyPoint>(vectorOfKeypoints.ToArray());

    // sort (descending)
    switch (sortingMethod)
    {
    case SiftSortingMethod.Response:
        keyPoints.Sort((p1, p2) => p1.Response < p2.Response ? 1 : (p1.Response == p2.Response ? 0 : -1));
        break;
    case SiftSortingMethod.Size:
        keyPoints.Sort((p1, p2) => p1.Size < p2.Size ? 1 : (p1.Size == p2.Size ? 0 : -1));
        break;
    case SiftSortingMethod.None:
    default:
        break;
    }

    // pad with empty keypoints or trim to the requested count
    while (keyPoints.Count < keyPointCount)
    {
        keyPoints.Add(new MKeyPoint());
    }
    if (keyPoints.Count > keyPointCount)
    {
        keyPoints.RemoveRange(keyPointCount, keyPoints.Count - keyPointCount);
    }

    // visualize
    if (doDrawImage)
    {
        Mat output = image.GetMat().Clone();
        vectorOfKeypoints = new VectorOfKeyPoint(keyPoints.ToArray());
        Features2DToolbox.DrawKeypoints(image.GetMat(), vectorOfKeypoints, output, new Bgr(0, 0, 255), Features2DToolbox.KeypointDrawType.DrawRichKeypoints);
        String win1 = "SIFT";          // the name of the window
        CvInvoke.NamedWindow(win1);    // create the window using the specific name
        CvInvoke.Imshow(win1, output); // show the image
        CvInvoke.WaitKey(0);           // wait for the key-press event
        CvInvoke.DestroyWindow(win1);  // destroy the window when a key is pressed
    }

    // convert to a flat list, normalizing coordinates by the image size
    System.Drawing.Size imageSize = image.GetMat().Size;
    List<float> result = new List<float>(5 * keyPointCount);
    for (int i = 0; i < keyPoints.Count; ++i)
    {
        MKeyPoint current = keyPoints[i];
        result.Add(current.Point.X / (float)imageSize.Width);
        result.Add(current.Point.Y / (float)imageSize.Height);
        result.Add(current.Size);
        result.Add(current.Angle);
        result.Add(current.Response);
    }
    return result;
}
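The SiftSortingMethod enum is external to the snippet; the switch above implies this minimal definition (an assumption, as is the TaggedImage wrapper with its GetMat() accessor):

// members inferred from the switch cases above
public enum SiftSortingMethod
{
    None,
    Response,
    Size
}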
public Bitmap DrawSift(Image<Rgb, byte> modelimage, Image<Rgb, byte> observedimage)
{
    int k = 2;
    double uniquenessThreshold = 0.80;
    VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint(), observedKeyPoints = new VectorOfKeyPoint();
    Mat modelDescriptors = new Mat();
    Mat observedDescriptors = new Mat();
    //observedKeyPoints = observedKeyPoints.Resize(1.0 / Compression, Inter.Area);
    using (SIFT siftCPU = new SIFT(0, 5, 0.04, 10.0, 1.6))
    {
        siftCPU.DetectAndCompute(modelimage, null, modelKeyPoints, modelDescriptors, false);
        observedKeyPoints = new VectorOfKeyPoint(siftCPU.Detect(observedimage));
        siftCPU.Compute(observedimage, observedKeyPoints, observedDescriptors);
    }

    VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
    using (Emgu.CV.Flann.LinearIndexParams ip = new Emgu.CV.Flann.LinearIndexParams())
    using (Emgu.CV.Flann.SearchParams sp = new SearchParams())
    using (Emgu.CV.Features2D.DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
    {
        matcher.Add(modelDescriptors);
        matcher.KnnMatch(observedDescriptors, matches, k, null);
    }

    Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
    mask.SetTo(new MCvScalar(255));
    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

    Mat homography = null;
    int nonZeroCount = CvInvoke.CountNonZero(mask);
    if (nonZeroCount >= 4)
    {
        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
        if (nonZeroCount >= 4)
        {
            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
        }
    }

    observedimage = new Image<Rgb, byte>(DrawZone(observedimage.Mat, observedKeyPoints, matches, mask).Bitmap);
    //modelKeyPoints.FilterByPixelsMask(new Image<Gray, byte>(mask.Bitmap));
    //observedKeyPoints.FilterByPixelsMask(new Image<Gray, byte>(mask.Bitmap));

    // draw the matched keypoints
    Mat result = new Mat();
    Features2DToolbox.DrawMatches(modelimage, modelKeyPoints, observedimage, observedKeyPoints, matches, result,
                                  new MCvScalar(0, 255, 0), new MCvScalar(255, 0, 0), mask);

    if (homography != null)
    {
        // draw a rectangle along the projected model
        SD.Rectangle rect = new SD.Rectangle(SD.Point.Empty, modelimage.Size);
        PointF[] pts = new PointF[]
        {
            new PointF(rect.Left, rect.Bottom),
            new PointF(rect.Right, rect.Bottom),
            new PointF(rect.Right, rect.Top),
            new PointF(rect.Left, rect.Top)
        };
        pts = CvInvoke.PerspectiveTransform(pts, homography);
#if NETFX_CORE
        Point[] points = Extensions.ConvertAll<PointF, Point>(pts, Point.Round);
#else
        SD.Point[] points = Array.ConvertAll<PointF, SD.Point>(pts, SD.Point.Round);
#endif
        using (VectorOfPoint vp = new VectorOfPoint(points))
        {
            CvInvoke.Polylines(result, vp, true, new MCvScalar(0, 0, 255), 2);
        }
    }
    return result.Bitmap;
}
public ConcurrentDictionary<string, MatOfFloat> CalcSiftHashes(IEnumerable<ImageInfo> infos, out Task result, int thumbSize = 100)
{
    Trace.WriteLine("CalcSiftHashes started");
    EnablePublishingProgress();
    var hashesDict = new ConcurrentDictionary<string, MatOfFloat>();
    var tasks = new List<Task>();
    foreach (ImageInfo info in infos)
    {
        var task = new Task(() =>
        {
            Thread.CurrentThread.Priority = ThreadPriority.Lowest;
            //Thread.Sleep(1);
            // (an earlier, commented-out experiment converted the BitmapImage to a
            // Mat by hand, resized it, and gray-scaled it; kept here for reference)
            //var mem = new MemoryStream();
            //// copy to byte array
            //int stride = ((BitmapImage)info.Image).PixelWidth * 4;
            //byte[] buffer = new byte[stride * ((BitmapImage)info.Image).PixelHeight];
            //((BitmapImage)info.Image).CopyPixels(buffer, stride, 0);
            //// create bitmap
            //Bitmap bitmap = new Bitmap(((BitmapImage)info.Image).PixelWidth, ((BitmapImage)info.Image).PixelHeight, PixelFormat.Format32bppArgb);
            //// lock bitmap data
            //BitmapData bitmapData = bitmap.LockBits(new Rectangle(0, 0, bitmap.Width, bitmap.Height), ImageLockMode.WriteOnly, bitmap.PixelFormat);
            //// copy byte array to bitmap data
            //Marshal.Copy(buffer, 0, bitmapData.Scan0, buffer.Length);
            //// unlock
            //bitmap.UnlockBits(bitmapData);
            //bitmap.Save(mem, ImageFormat.Bmp);
            //Mat sourceMat = Cv2.ImDecode(mem.GetBuffer(), ImreadModes.Unchanged);
            //var resizedMat = sourceMat.Resize(new OpenCvSharp.Size(thumbSize, thumbSize), 0, 0, InterpolationFlags.Nearest);
            //var scale = (double)thumbSize / Max(sourceMat.Width, sourceMat.Height);
            //var resizedMat = sourceMat.Resize(new OpenCvSharp.Size(0, 0), scale, scale, InterpolationFlags.Nearest);
            //var grayScaledMat = new Mat();
            //Cv2.CvtColor(resizedMat, grayScaledMat, ColorConversionCodes.BGR2GRAY);
            //var siftPoints = SURF.Create(400);

            SIFT siftPoints = SIFT.Create();
            MatOfFloat descriptors = new MatOfFloat();
            //var keypoints = siftPoints.Detect(info.StoredMat).ToArray();
            //siftPoints.Compute(info.StoredMat, ref keypoints, descriptors);

            // scale the longer side down to thumbSize before extracting descriptors
            double scale = Math.Min((float)thumbSize / info.StoredMat.Width, (float)thumbSize / info.StoredMat.Height);
            Mat resized = info.StoredMat.Resize(new OpenCvSharp.Size(0, 0), scale, scale, InterpolationFlags.Area);
            siftPoints.DetectAndCompute(resized, null, out KeyPoint[] keypoints, descriptors);
            resized.Release();

            if (!ValidateDescriptor(descriptors))
            {
                descriptors.Release();
                descriptors = new MatOfFloat(thumbSize, thumbSize);
            }
            hashesDict.TryAdd(info.FilePath, descriptors);
            //resizedMat?.Dispose();
            siftPoints.Dispose();
            //grayScaledMat.Dispose();
            //resizedMat.Release();
            //sourceMat.Release();
            UpdateIterationsCount();
        });
        tasks.Add(task);
    }
    SetProgressIterationsScope(tasks);
    foreach (var task in tasks)
    {
        task.Start();
    }
    result = Task.WhenAll(tasks.ToArray());
    result.ContinueWith(o => DisiblePublishingProgress());
    return hashesDict;
}
static void Main(string[] args)
{
    // Read images
    String modelImageLocation = @"C:\EMT\Image\EMT_Lab7\model.png";
    String observedImageLocation = @"C:\EMT\Image\EMT_Lab7\observed3.png";
    Mat modelImage = CvInvoke.Imread(modelImageLocation, ImreadModes.Grayscale);
    Mat observedImage = CvInvoke.Imread(observedImageLocation, ImreadModes.Grayscale);

    // Create the SIFT detector; the constructor argument is the number of
    // features to retain (nFeatures), not a detection threshold
    int nFeatures = 170;
    SIFT siftCPU = new SIFT(nFeatures);

    // Extract features from the model image,
    // storing the keypoints and descriptors
    VectorOfKeyPoint modelKeyPoints = new VectorOfKeyPoint();
    Mat modelDescriptors = new Mat();
    siftCPU.DetectAndCompute(modelImage, null, modelKeyPoints, modelDescriptors, false);

    // Extract features from the observed image,
    // storing the keypoints and descriptors
    VectorOfKeyPoint observedKeypoints = new VectorOfKeyPoint();
    Mat observedDescriptors = new Mat();
    siftCPU.DetectAndCompute(observedImage, null, observedKeypoints, observedDescriptors, false);

    // Create a brute-force matcher to match modelDescriptors to observedDescriptors
    // using DistanceType.L2 (squared Euclidean distance),
    // storing the results in matches
    BFMatcher matcher = new BFMatcher(DistanceType.L2);
    VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
    int k = 2; // number of nearest neighbours to search
    matcher.Add(modelDescriptors);
    matcher.KnnMatch(observedDescriptors, matches, k, null);

    // Eliminate the matched features whose scale and rotation
    // do not agree with the majority's scale and rotation.
    // Create a mask to store the matches
    Mat mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
    mask.SetTo(new MCvScalar(255));

    // Apply a uniqueness threshold, storing the results in the mask
    double uniquenessThreshold = 0.8;
    Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

    // Calculate the homography between the model and observed images
    Mat homography = null;
    int nonZeroCount = CvInvoke.CountNonZero(mask);
    if (nonZeroCount >= 4)
    {
        nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeypoints, matches, mask, 1.5, 20);
        if (nonZeroCount >= 4)
        {
            homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeypoints, matches, mask, 2);
        }
    }

    // Re-read the images in color format
    modelImage = CvInvoke.Imread(modelImageLocation, ImreadModes.AnyColor);
    observedImage = CvInvoke.Imread(observedImageLocation, ImreadModes.AnyColor);

    // Draw match lines between matched points in the model and observed image
    Mat result = new Mat();
    Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeypoints, matches, result,
                                  new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

    // Draw a rectangle around the matched object in the observed image
    if (homography != null)
    {
        Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
        PointF[] pts = new PointF[]
        {
            new PointF(rect.Left, rect.Bottom),
            new PointF(rect.Right, rect.Bottom),
            new PointF(rect.Right, rect.Top),
            new PointF(rect.Left, rect.Top)
        };
        pts = CvInvoke.PerspectiveTransform(pts, homography);
        Point[] points = Array.ConvertAll<PointF, Point>(pts, Point.Round);
        using (VectorOfPoint vp = new VectorOfPoint(points))
        {
            CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
        }
    }

    CvInvoke.Imshow("Keypoint Image", result);
    CvInvoke.WaitKey(0);
}
public Mat Stitch()
{
    Mat src1Gray = new Mat();
    Mat src2Gray = new Mat();
    Cv2.CvtColor(src1Color, src1Gray, ColorConversionCodes.BGR2GRAY);
    Cv2.CvtColor(src2Color, src2Gray, ColorConversionCodes.BGR2GRAY);

    // Setting hyperparameters
    int numBestMatch = 10;

    // Detect the keypoints and generate their descriptors using SIFT
    SIFT sift = SIFT.Create();
    KeyPoint[] keypoints1, keypoints2;
    MatOfFloat descriptors1 = new MatOfFloat();
    MatOfFloat descriptors2 = new MatOfFloat();
    sift.DetectAndCompute(src1Gray, null, out keypoints1, descriptors1);
    sift.DetectAndCompute(src2Gray, null, out keypoints2, descriptors2);

    // Match descriptor vectors with a brute-force matcher
    BFMatcher matcher = new BFMatcher();
    DMatch[] matches = matcher.Match(descriptors1, descriptors2);

    // Sort the match points by distance (best first)
    Comparison<DMatch> DMatchComparison = delegate(DMatch match1, DMatch match2)
    {
        if (match1 < match2)
        {
            return -1;
        }
        else
        {
            return 1;
        }
    };
    Array.Sort(matches, DMatchComparison);

    // Keep the best n match points
    int n = Math.Min(numBestMatch, keypoints1.Length);
    Point2f[] imagePoints1 = new Point2f[n];
    Point2f[] imagePoints2 = new Point2f[n];
    DMatch[] bestMatches = new DMatch[n];
    for (int i = 0; i < n; i++)
    {
        imagePoints1[i] = keypoints1[matches[i].QueryIdx].Pt;
        imagePoints2[i] = keypoints2[matches[i].TrainIdx].Pt;
        bestMatches[i] = matches[i];
    }

    // Visualize the match result
    Mat matchImg = new Mat();
    Cv2.DrawMatches(src1Color, keypoints1, src2Color, keypoints2, bestMatches, matchImg, Scalar.All(-1), Scalar.All(-1), null, DrawMatchesFlags.NotDrawSinglePoints);
    using (new OpenCvSharp.Window("SIFT matching", WindowMode.AutoSize, matchImg))
    {
        Cv2.WaitKey();
    }

    // Compute the homography that maps the second image into the first.
    // Such a matrix is 3x3 and can represent any projective transformation of the 2-D plane.
    Mat homography = Cv2.FindHomography(InputArray.Create<Point2f>(imagePoints2), InputArray.Create<Point2f>(imagePoints1));

    // Calculate the transformed locations of the second image's corners;
    // use these values to size the result image
    Point2f[] transformedCorners = transfromConors(src2Color.Size(), homography);

    // make sure the result image is large enough
    double maxWidth = src1Color.Width;
    double maxHeight = src1Color.Height;
    for (int i = 0; i < 4; i++)
    {
        if (transformedCorners[i].X > maxWidth)
        {
            maxWidth = transformedCorners[i].X;
        }
        if (transformedCorners[i].Y > maxHeight)
        {
            maxHeight = transformedCorners[i].Y;
        }
    }
    OpenCvSharp.Size resultSize = new OpenCvSharp.Size(maxWidth, maxHeight);

    // the position that the first image should be copied to in the final result
    int src1StartPositonY = 0;
    int src1StartPositonX = 0;

    // if some X coordinate is less than 0, shift everything along the x-axis
    bool shouldShiftX = false;
    double shiftDistanceX = double.MinValue;
    for (int i = 0; i < 4; i++)
    {
        if (transformedCorners[i].X < 0)
        {
            shouldShiftX = true;
            shiftDistanceX = Math.Max(shiftDistanceX, -transformedCorners[i].X);
        }
    }
    if (shouldShiftX)
    {
        /*
         * matrix for shifting along the x-axis:
         * 1 0 d
         * 0 1 0
         * 0 0 1
         */
        Mat shiftMatrix = new Mat(3, 3, homography.Type());
        shiftMatrix.Set<double>(0, 0, 1);
        shiftMatrix.Set<double>(0, 1, 0);
        shiftMatrix.Set<double>(0, 2, shiftDistanceX);
        shiftMatrix.Set<double>(1, 0, 0);
        shiftMatrix.Set<double>(1, 1, 1);
        shiftMatrix.Set<double>(1, 2, 0);
        shiftMatrix.Set<double>(2, 0, 0);
        shiftMatrix.Set<double>(2, 1, 0);
        shiftMatrix.Set<double>(2, 2, 1);
        homography = shiftMatrix * homography;
        resultSize.Width = resultSize.Width + (int)shiftDistanceX;
        src1StartPositonX = (int)shiftDistanceX;
    }

    // if some Y coordinate is less than 0, shift everything along the y-axis
    bool shouldShiftY = false;
    double shiftDistanceY = double.MinValue;
    for (int i = 0; i < 4; i++)
    {
        if (transformedCorners[i].Y < 0)
        {
            shouldShiftY = true;
            shiftDistanceY = Math.Max(shiftDistanceY, -transformedCorners[i].Y);
        }
    }
    if (shouldShiftY)
    {
        /*
         * matrix for shifting along the y-axis:
         * 1 0 0
         * 0 1 d
         * 0 0 1
         */
        Mat shiftMatrix = new Mat(3, 3, homography.Type());
        shiftMatrix.Set<double>(0, 0, 1);
        shiftMatrix.Set<double>(0, 1, 0);
        shiftMatrix.Set<double>(0, 2, 0);
        shiftMatrix.Set<double>(1, 0, 0);
        shiftMatrix.Set<double>(1, 1, 1);
        shiftMatrix.Set<double>(1, 2, shiftDistanceY);
        shiftMatrix.Set<double>(2, 0, 0);
        shiftMatrix.Set<double>(2, 1, 0);
        shiftMatrix.Set<double>(2, 2, 1);
        homography = shiftMatrix * homography;
        resultSize.Height = resultSize.Height + (int)shiftDistanceY;
        src1StartPositonY = (int)shiftDistanceY;
    }

    Mat result = new Mat();
    Cv2.WarpPerspective(src2Color, result, homography, resultSize);
    src1Color.CopyTo(new Mat(result, new OpenCvSharp.Rect(src1StartPositonX, src1StartPositonY, src1Gray.Cols, src1Gray.Rows)));
    return result;
}
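A note on why pre-multiplying by the shift matrix works (a sketch of the algebra, not part of the original): warping applies $x' = Hx$ in homogeneous coordinates, so composing a translation $T$ on the left moves every warped pixel by $(d_x, d_y)$, pulling all corners back to non-negative coordinates:

$$T = \begin{pmatrix} 1 & 0 & d_x \\ 0 & 1 & d_y \\ 0 & 0 & 1 \end{pmatrix}, \qquad (T H)\,x = T\,(H x).$$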
//public static long Classify(VectorOfKeyPoint modelKeyPoints, Mat modelDescriptors, Mat observedImage, double uniquenessThreshold, int k, int detectionType)
public static long Classify(Mat modelDescriptors, Mat observedImage, double uniquenessThreshold, int k, int detectionType)
{
    var score = 0L;
    using (var matches = new VectorOfVectorOfDMatch())
    {
        Mat mask = null;
        //Mat homography = null;
        var observedKeyPoints = new VectorOfKeyPoint();
        var obsImage = new Mat();
        CvInvoke.Threshold(observedImage, obsImage, 127.0, 255.0, ThresholdType.BinaryInv);
        using (UMat uObservedImage = obsImage.GetUMat(AccessType.Read))
        {
            switch (detectionType)
            {
            default:
                using (var featureDetector = new SIFT(0, 3, 0.04, 10.0, 1.6))
                {
                    var observedDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                    using (var ip = new KdTreeIndexParams())
                    using (var sp = new SearchParams())
                    using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                    {
                        matcher.Add(modelDescriptors);
                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                        mask.SetTo(new MCvScalar(255));
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                        // the score is the number of k-NN matches that survive the uniqueness vote
                        score = 0;
                        for (int i = 0; i < matches.Size; i++)
                        {
                            if (mask.GetData(i)[0] == 0)
                            {
                                continue;
                            }
                            foreach (var e in matches[i].ToArray())
                            {
                                ++score;
                            }
                        }
                        //var nonZeroCount = CvInvoke.CountNonZero(mask);
                        //if (nonZeroCount >= 4)
                        //{
                        //    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                        //    if (nonZeroCount >= 4)
                        //        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                        //}
                    }
                }
                break;

            case 1:
                using (var featureDetector = new KAZE())
                {
                    var observedDescriptors = new Mat();
                    featureDetector.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);
                    using (var ip = new KdTreeIndexParams())
                    using (var sp = new SearchParams())
                    using (DescriptorMatcher matcher = new FlannBasedMatcher(ip, sp))
                    {
                        matcher.Add(modelDescriptors);
                        matcher.KnnMatch(observedDescriptors, matches, k, null);
                        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
                        mask.SetTo(new MCvScalar(255));
                        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);
                        // same scoring as the SIFT branch
                        score = 0;
                        for (int i = 0; i < matches.Size; i++)
                        {
                            if (mask.GetData(i)[0] == 0)
                            {
                                continue;
                            }
                            foreach (var e in matches[i].ToArray())
                            {
                                ++score;
                            }
                        }
                        //var nonZeroCount = CvInvoke.CountNonZero(mask);
                        //if (nonZeroCount >= 4)
                        //{
                        //    nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
                        //    if (nonZeroCount >= 4)
                        //        homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
                        //}
                    }
                }
                break;
            }
        }
    }
    return score;
}