/// <summary>
/// Initializes a new instance of the <see cref="PatternDetector"/> class.
/// </summary>
/// <param name="detector">Keypoint detector; if null, a default ORB detector (1000 features) is created.</param>
/// <param name="extractor">Descriptor extractor; if null, a default ORB extractor (1000 features) is created.</param>
/// <param name="ratioTest">If set to <c>true</c>, enables the ratio test when matching.</param>
public PatternDetector(ORB detector, ORB extractor, bool ratioTest = false)
{
    if (detector == null)
    {
        detector = ORB.Create();
        detector.MaxFeatures = 1000;
    }
    if (extractor == null)
    {
        extractor = ORB.Create();
        extractor.MaxFeatures = 1000;
    }

    m_detector = detector;
    m_extractor = extractor;

    enableRatioTest = ratioTest;
    enableHomographyRefinement = true;
    homographyReprojectionThreshold = 3;

    m_queryKeypoints = new KeyPoint[] { };
    m_queryDescriptors = new Mat();
    m_grayImg = new Mat();
    m_warpedImg = new Mat();
    m_roughHomography = new Mat();
    m_refinedHomography = new Mat();
    warpedKeypoints = new KeyPoint[] { };
}
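The ratioTest flag above implies a Lowe-style ratio test somewhere in the matching stage. A minimal sketch of what that filtering step typically looks like with OpenCvSharp (the BFMatcher instance, the k = 2 KnnMatch, and the 0.75 threshold are illustrative assumptions, not members of the class above; requires System.Linq):

// Sketch: Lowe ratio test — keep a match only when its best distance is
// clearly smaller than the second-best distance.
static DMatch[] RatioTestFilter(Mat queryDescriptors, Mat trainDescriptors)
{
    using var matcher = new BFMatcher(NormTypes.Hamming); // ORB descriptors are binary
    DMatch[][] knnMatches = matcher.KnnMatch(queryDescriptors, trainDescriptors, 2);
    return knnMatches
        .Where(m => m.Length == 2 && m[0].Distance < 0.75f * m[1].Distance)
        .Select(m => m[0])
        .ToArray();
}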
public void Matching()
{
    using (var img1 = Image("tsukuba_left.png", ImreadModes.Grayscale))
    using (var img2 = Image("tsukuba_right.png", ImreadModes.Grayscale))
    using (var orb = ORB.Create(500))
    using (var descriptor1 = new Mat())
    using (var descriptor2 = new Mat())
    {
        KeyPoint[] keyPoints1, keyPoints2;
        orb.DetectAndCompute(img1, null, out keyPoints1, descriptor1);
        orb.DetectAndCompute(img2, null, out keyPoints2, descriptor2);

        // FLANN's default KD-tree index needs the descriptors to be of type CV_32F
        Assert.AreEqual(MatType.CV_8UC1, descriptor1.Type());
        Assert.AreEqual(MatType.CV_8UC1, descriptor2.Type());
        descriptor1.ConvertTo(descriptor1, MatType.CV_32F);
        descriptor2.ConvertTo(descriptor2, MatType.CV_32F);

        var matcher = new FlannBasedMatcher();
        DMatch[] matches = matcher.Match(descriptor1, descriptor2);

        /*
        using (var view = new Mat())
        using (var window = new Window())
        {
            Cv2.DrawMatches(img1, keyPoints1, img2, keyPoints2, matches, view);
            window.ShowImage(view);
            Cv2.WaitKey();
        }
        */
    }
}
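The parameterless FlannBasedMatcher above falls back to FLANN's KD-tree index, which only handles float descriptors — that is why the CV_32F conversion is required. A minimal sketch of the explicit equivalent (KDTreeIndexParams and SearchParams live in OpenCvSharp.Flann; 4 trees and 32 checks are FLANN's usual defaults, stated here as an assumption):

// Sketch: the explicit form of FlannBasedMatcher's default configuration.
static FlannBasedMatcher CreateDefaultFlannMatcher()
{
    var indexParams = new KDTreeIndexParams(4);   // randomized KD-trees; float descriptors only
    var searchParams = new SearchParams(32);      // leaf checks per query
    return new FlannBasedMatcher(indexParams, searchParams);
}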
public void MatchingWithLshIndexParams()
{
    using var img1 = Image("tsukuba_left.png", ImreadModes.Grayscale);
    using var img2 = Image("tsukuba_right.png", ImreadModes.Grayscale);
    using var orb = ORB.Create(500);
    using var descriptor1 = new Mat();
    using var descriptor2 = new Mat();
    orb.DetectAndCompute(img1, null, out _, descriptor1);
    orb.DetectAndCompute(img2, null, out _, descriptor2);

    using var indexParams = new LshIndexParams(12, 20, 2);

    Assert.Equal(MatType.CV_8UC1, descriptor1.Type());
    Assert.Equal(MatType.CV_8UC1, descriptor2.Type());

    // LshIndexParams requires binary descriptors, so they must NOT be converted to CV_32F.
    //descriptor1.ConvertTo(descriptor1, MatType.CV_32F);
    //descriptor2.ConvertTo(descriptor2, MatType.CV_32F);

    using var matcher = new FlannBasedMatcher(indexParams);
    DMatch[] matches = matcher.Match(descriptor1, descriptor2);
    Assert.NotEmpty(matches);
}
public KeyPoint[] GetKeyPoints(Mat camMat, int nKeyPoints)
{
    orb = ORB.Create(nKeyPoints);
    KeyPoint[] keyPoints = orb.Detect(camMat);
    return keyPoints;
}
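Note that each call above replaces the orb field with a fresh ORB instance without disposing the previous one (ORB is IDisposable). A minimal sketch of a disposal-safe variant, assuming the same field-based shape as the snippet:

// Sketch: dispose the previous detector before replacing it, and only
// rebuild when the requested feature count actually changes.
private ORB orb;
private int currentNKeyPoints = -1;

public KeyPoint[] GetKeyPointsSafe(Mat camMat, int nKeyPoints)
{
    if (orb == null || currentNKeyPoints != nKeyPoints)
    {
        orb?.Dispose();
        orb = ORB.Create(nKeyPoints);
        currentNKeyPoints = nKeyPoints;
    }
    return orb.Detect(camMat);
}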
void OnFast()
{
    Mat image01 = Cv2.ImRead(Application.streamingAssetsPath + "/bryce_01.jpg");
    Mat image02 = Cv2.ImRead(Application.streamingAssetsPath + "/bryce_02.jpg");

    // Cv2.ImRead loads images in BGR order, so convert with BGR2GRAY.
    Mat image1 = new Mat(), image2 = new Mat();
    Cv2.CvtColor(image01, image1, ColorConversionCodes.BGR2GRAY);
    Cv2.CvtColor(image02, image2, ColorConversionCodes.BGR2GRAY);

    KeyPoint[] keyPoint1 = Cv2.FAST(image1, 50, true);
    KeyPoint[] keyPoint2 = Cv2.FAST(image2, 50, true);
    using (Mat descriptor1 = new Mat())
    using (Mat descriptor2 = new Mat())
    using (var orb = ORB.Create(50))
    using (var matcher = new BFMatcher(NormTypes.Hamming)) // Hamming for binary ORB descriptors
    {
        orb.Compute(image1, ref keyPoint1, descriptor1);
        orb.Compute(image2, ref keyPoint2, descriptor2);
        Debug.Log(string.Format("keyPoints has {0},{1} items.", keyPoint1.Length, keyPoint2.Length));
        Debug.Log(string.Format("descriptor has {0},{1} items.", descriptor1.Rows, descriptor2.Rows));

        List<DMatch> goodMatchPoints = new List<DMatch>();
        var dm = matcher.KnnMatch(descriptor1, descriptor2, 2);

        // Only one ratio-test pass should run; running both would add
        // duplicate entries to goodMatchPoints.
        #region matched 175
        //for (int i = 0; i < dm.Length; i++)
        //{
        //    if (dm[i][0].Distance < 0.6 * dm[i][1].Distance)
        //    {
        //        goodMatchPoints.Add(dm[i][0]);
        //    }
        //}
        #endregion

        #region matched 90
        float minRatio = 1.0f / 1.5f;
        for (int i = 0; i < dm.Length; i++)
        {
            DMatch bestMatch = dm[i][0];
            DMatch betterMatch = dm[i][1];
            float distanceRatio = bestMatch.Distance / betterMatch.Distance;
            if (distanceRatio < minRatio)
            {
                goodMatchPoints.Add(bestMatch);
            }
        }
        #endregion

        var dstMat = new Mat();
        Debug.Log(string.Format("matchPoints has {0} items", goodMatchPoints.Count));
        Cv2.DrawMatches(image01, keyPoint1, image02, keyPoint2, goodMatchPoints, dstMat);
        t2d = Utils.MatToTexture2D(dstMat);
    }
    Sprite dst_sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);
    SrcSprite.sprite = dst_sp;
}
public void Detect()
{
    // These parameters should produce a result similar to
    // http://opencv.jp/wordpress/wp-content/uploads/lenna_SURF-150x150.png
    using var gray = Image("lenna.png", 0);
    using var orb = ORB.Create(500);

    var keyPoints = orb.Detect(gray);
    Console.WriteLine($"KeyPoint has {keyPoints.Length} items.");
}
/// <summary>
/// Test function
/// </summary>
void Detect()
{
    using (var gray = new Mat(Application.streamingAssetsPath + "/Textures/p1.jpg", ImreadModes.Grayscale))
    using (var orb = ORB.Create(500))
    {
        KeyPoint[] keyPoints = orb.Detect(gray);
        Debug.Log($"KeyPoint has {keyPoints.Length} items.");
    }
}
static private void CreateORB(Mat imgGray, KeyPoint[] keypoints, out Mat descriptors)
{
    // ORB produces binary descriptors (CV_8UC1, 32 bytes per keypoint),
    // so a plain Mat is the right container rather than MatOfFloat.
    // Note: Compute may filter the keypoint array; the reassignment does
    // not propagate to the caller unless keypoints is passed by ref.
    descriptors = new Mat();
    using (ORB orb1 = ORB.Create())
    {
        orb1.Compute(imgGray, ref keypoints, descriptors);
    }
}
void Detect()
{
    using (var gray = new Mat(Application.streamingAssetsPath + "/bryce_01.jpg", ImreadModes.Grayscale))
    using (var orb = ORB.Create(500))
    {
        KeyPoint[] keyPoints = orb.Detect(gray);
        Debug.Log(string.Format("KeyPoint has {0} items.", keyPoints.Length));
    }
}
public void DetectAndCompute()
{
    using (var gray = Image("lenna.png", ImreadModes.Grayscale))
    using (var orb = ORB.Create(500))
    using (Mat descriptor = new Mat())
    {
        orb.DetectAndCompute(gray, null, out var keyPoints, descriptor);
        Console.WriteLine($"keyPoints has {keyPoints.Length} items.");
        Console.WriteLine($"descriptor has {descriptor.Rows} items.");
    }
}
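The two counts printed above are linked: DetectAndCompute emits exactly one descriptor row per keypoint. A minimal sketch of the shape invariants (the 32-byte width assumes ORB's default WTA_K = 2 configuration):

// Sketch: sanity checks on ORB output. Each keypoint yields one CV_8UC1
// descriptor row of 32 bytes (256 bits).
static bool IsValidOrbOutput(KeyPoint[] keyPoints, Mat descriptor)
{
    return descriptor.Type() == MatType.CV_8UC1
        && descriptor.Cols == 32
        && descriptor.Rows == keyPoints.Length;
}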
static IEnumerable<DMatch> BFMatch(Mat image1, Mat image2)
{
    using (Mat dst1 = new Mat())
    using (Mat dst2 = new Mat())
    using (var orb = ORB.Create())
    {
        orb.DetectAndCompute(image1, null, out var kp1, dst1);
        orb.DetectAndCompute(image2, null, out var kp2, dst2);

        // ORB descriptors are binary, so match with the Hamming norm
        // rather than the BFMatcher default (L2).
        using (BFMatcher matcher = new BFMatcher(NormTypes.Hamming))
        {
            return matcher.Match(dst1, dst2);
        }
    }
}
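An alternative to ratio-test filtering with BFMatcher is its cross-check mode, which keeps only matches that are mutual nearest neighbours in both directions. A minimal sketch (descriptor Mats assumed to come from ORB as above):

// Sketch: mutual-nearest-neighbour matching via crossCheck. With
// crossCheck enabled, use Match rather than KnnMatch with k > 1.
static DMatch[] CrossCheckMatch(Mat descriptors1, Mat descriptors2)
{
    using var matcher = new BFMatcher(NormTypes.Hamming, crossCheck: true);
    return matcher.Match(descriptors1, descriptors2);
}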
void DetectAndCompute()
{
    using (var gray = new Mat(Application.streamingAssetsPath + "/bryce_01.jpg", ImreadModes.Grayscale))
    using (var orb = ORB.Create(500))
    using (Mat descriptor = new Mat())
    {
        // Pass null for the mask instead of allocating an empty Mat.
        orb.DetectAndCompute(gray, null, out KeyPoint[] keyPoints, descriptor);
        Debug.Log(string.Format("keyPoints has {0} items.", keyPoints.Length));
        Debug.Log(string.Format("descriptor has {0} items.", descriptor.Rows));
    }
}
void DetectAndCompute()
{
    using (var gray = new Mat(Application.streamingAssetsPath + "/Textures/p1.jpg", ImreadModes.Grayscale))
    using (var orb = ORB.Create(500))
    using (Mat descriptor = new Mat())
    {
        orb.DetectAndCompute(gray, null, out KeyPoint[] keyPoints, descriptor);
        Debug.Log($"keyPoints has {keyPoints.Length} items.");
        Debug.Log($"descriptor has {descriptor.Rows} items.");
    }
}
public override void RunTest()
{
    using var img1 = new Mat(ImagePath.Match1, ImreadModes.Color);
    using var img2 = new Mat(ImagePath.Match2, ImreadModes.Color);

    using var orb = ORB.Create(1000);
    using var descriptors1 = new Mat();
    using var descriptors2 = new Mat();
    orb.DetectAndCompute(img1, null, out var keyPoints1, descriptors1);
    orb.DetectAndCompute(img2, null, out var keyPoints2, descriptors2);

    using var bf = new BFMatcher(NormTypes.Hamming, crossCheck: true);
    var matches = bf.Match(descriptors1, descriptors2);

    var goodMatches = matches
        .OrderBy(x => x.Distance)
        .Take(10)
        .ToArray();

    var srcPts = goodMatches.Select(m => keyPoints1[m.QueryIdx].Pt).Select(p => new Point2d(p.X, p.Y));
    var dstPts = goodMatches.Select(m => keyPoints2[m.TrainIdx].Pt).Select(p => new Point2d(p.X, p.Y));

    using var homography = Cv2.FindHomography(srcPts, dstPts, HomographyMethods.Ransac, 5, null);

    // Corners of img1, to be projected into img2 via the homography.
    int h = img1.Height, w = img1.Width;
    var img1Bounds = new[]
    {
        new Point2d(0, 0),
        new Point2d(0, h - 1),
        new Point2d(w - 1, h - 1),
        new Point2d(w - 1, 0),
    };
    var img1BoundsTransformed = Cv2.PerspectiveTransform(img1Bounds, homography);

    using var view = img2.Clone();
    var drawingPoints = img1BoundsTransformed.Select(p => (Point)p).ToArray();
    Cv2.Polylines(view, new[] { drawingPoints }, true, Scalar.Red, 3);

    using (new Window("view", view))
    {
        Cv2.WaitKey();
    }
}
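Besides projecting corner points, the same homography can render one image into the other's coordinate frame. A minimal sketch under the names above (Cv2.WarpPerspective is the standard call; blending the two images is omitted):

// Sketch: warp img1 into img2's frame with the recovered homography.
static Mat WarpToMatchFrame(Mat img1, Mat img2, Mat homography)
{
    var warped = new Mat();
    Cv2.WarpPerspective(img1, warped, homography, new Size(img2.Width, img2.Height));
    return warped; // aligns with img2 wherever the planar match holds
}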
public PatternDetector(bool ratioTest)
{
    m_detector = ORB.Create(1000);
    m_extractor = ORB.Create(1000);

    //BFMatcher bfMatcher = new BFMatcher(NormTypes.Hamming, true);
    m_matcher = new BFMatcher(NormTypes.Hamming);
    //m_matcher = DescriptorMatcher.Create("BRUTEFORCE_HAMMING");

    enableRatioTest = ratioTest;
    enableHomographyRefinement = true;
    homographyReprojectionThreshold = 3;

    //m_queryKeypoints = new MatOfKeyPoint();
    m_queryDescriptors = new Mat();
    //m_matches = new MatOfDMatch();
    //m_knnMatches = new List<MatOfDMatch>();
    m_grayImg = new Mat();
    m_warpedImg = new Mat();
    m_roughHomography = new Mat();
    m_refinedHomography = new Mat();
}
public void Run()
{
    var gray = new Mat(FilePath.Image.Lenna, ImreadModes.Grayscale);
    var dst = new Mat(FilePath.Image.Lenna, ImreadModes.Color);

    // ORB keypoint detection
    var orb = ORB.Create(1000);
    KeyPoint[] keypoints = orb.Detect(gray);

    // FREAK descriptors computed at the ORB keypoints
    FREAK freak = FREAK.Create();
    Mat freakDescriptors = new Mat();
    freak.Compute(gray, ref keypoints, freakDescriptors);

    if (keypoints != null)
    {
        var color = new Scalar(0, 255, 0);
        foreach (KeyPoint kpt in keypoints)
        {
            float r = kpt.Size / 2;
            Cv2.Circle(dst, kpt.Pt, (int)r, color);
            Cv2.Line(dst,
                new Point2f(kpt.Pt.X + r, kpt.Pt.Y + r),
                new Point2f(kpt.Pt.X - r, kpt.Pt.Y - r),
                color);
            Cv2.Line(dst,
                new Point2f(kpt.Pt.X - r, kpt.Pt.Y + r),
                new Point2f(kpt.Pt.X + r, kpt.Pt.Y - r),
                color);
        }
    }

    using (new Window("FREAK", dst))
    {
        Cv2.WaitKey();
    }
}
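The manual circle-and-cross drawing above can be replaced by OpenCV's built-in renderer, which also encodes keypoint size and orientation. A minimal sketch (Cv2.DrawKeypoints with DrawMatchesFlags.DrawRichKeypoints):

// Sketch: built-in keypoint rendering with size/orientation markers.
static void DrawKeypointsRich(Mat image, KeyPoint[] keypoints, Mat output)
{
    Cv2.DrawKeypoints(image, keypoints, output, Scalar.Green,
        DrawMatchesFlags.DrawRichKeypoints);
}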
/// <summary>
/// ORB feature extraction
/// </summary>
void OnOrb()
{
    Mat image01 = Cv2.ImRead(Application.streamingAssetsPath + "/Textures/p1.jpg");
    Mat image02 = Cv2.ImRead(Application.streamingAssetsPath + "/Textures/p2.jpg");

    // Convert to grayscale (Cv2.ImRead loads BGR, hence BGR2GRAY)
    Mat image1 = new Mat(), image2 = new Mat();
    Cv2.CvtColor(image01, image1, ColorConversionCodes.BGR2GRAY);
    Cv2.CvtColor(image02, image2, ColorConversionCodes.BGR2GRAY);

    KeyPoint[] keyPoint1 = null;
    KeyPoint[] keyPoint2 = null;
    using (ORB orb = ORB.Create(500))
    using (Mat descriptor1 = new Mat())
    using (Mat descriptor2 = new Mat())
    using (var matcher = new BFMatcher(NormTypes.Hamming)) // Hamming for binary ORB descriptors
    {
        // Detect keypoints and compute descriptors
        orb.DetectAndCompute(image1, null, out keyPoint1, descriptor1);
        orb.DetectAndCompute(image2, null, out keyPoint2, descriptor2);
        Debug.Log($"keyPoints has {keyPoint1.Length},{keyPoint2.Length} items.");
        Debug.Log($"descriptor has {descriptor1.Rows},{descriptor2.Rows} items.");

        // Match feature points
        DMatch[] matchPoints = matcher.Match(descriptor1, descriptor2);

        dstMat = new Mat();
        Cv2.DrawMatches(image01, keyPoint1, image02, keyPoint2, matchPoints, dstMat);
        t2d = Utils.MatToTexture2D(dstMat);
    }
    Sprite dst_sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);
    m_srcImage.sprite = dst_sp;
    m_srcImage.preserveAspect = true;
}
//--------------------------------------------------------------------------------------------------
static void Main()
{
    Mat srcOri = new Mat("C:/Users/Li&Ao/Desktop/Test/5.JPG", ImreadModes.Grayscale);
    Mat dstOri = new Mat("C:/Users/Li&Ao/Desktop/Test/6.JPG", ImreadModes.Grayscale);
    Mat src = new Mat();
    Mat dst = new Mat();
    RotateAndResize(srcOri, out src, true);
    RotateAndResize(dstOri, out dst, true);

    // Step 1: Detect the keypoints and generate their descriptors using ORB
    ORB orb = ORB.Create();
    KeyPoint[] kp1, kp2;
    Mat desc1 = new Mat();
    Mat desc2 = new Mat();
    orb.DetectAndCompute(src, null, out kp1, desc1);
    orb.DetectAndCompute(dst, null, out kp2, desc2);

    // Step 2: Match descriptor vectors with a brute-force matcher
    // (Hamming norm, since ORB descriptors are binary)
    var bfMatcher = new BFMatcher(NormTypes.Hamming);
    var matches = bfMatcher.KnnMatch(desc1, desc2, k: 2);

    // Step 3: Ratio test for outlier removal
    var betterKp1 = new List<Point2f>();
    var betterKp2 = new List<Point2f>();
    var betterMatches = new List<DMatch>();
    foreach (DMatch[] items in matches)
    {
        if (items[0].Distance < 0.8 * items[1].Distance)
        {
            betterKp1.Add(kp1[items[0].QueryIdx].Pt);
            betterKp2.Add(kp2[items[0].TrainIdx].Pt);
            betterMatches.Add(items[0]);
        }
    }

    // Step 4: RANSAC for outlier removal
    Point2d Point2fToPoint2d(Point2f pf) => new Point2d(pf.X, pf.Y);
    var betterKp1_tmp = betterKp1.ConvertAll(Point2fToPoint2d);
    var betterKp2_tmp = betterKp2.ConvertAll(Point2fToPoint2d);
    var bestTuple = RansacMethod(betterKp1_tmp, betterKp2_tmp, src.Cols, src.Rows);
    var bestKp1 = bestTuple.Item1;
    var bestKp2 = bestTuple.Item2;

    // Step 5: Draw the matches that survived RANSAC
    var plotMatches = new List<DMatch>();
    foreach (DMatch[] items in matches)
    {
        var p1 = Point2fToPoint2d(kp1[items[0].QueryIdx].Pt);
        var p2 = Point2fToPoint2d(kp2[items[0].TrainIdx].Pt);
        if (bestKp1.Contains(p1) && bestKp2.Contains(p2))
        {
            plotMatches.Add(items[0]);
        }
    }
    Mat outImg = new Mat();
    Cv2.DrawMatches(src, kp1, dst, kp2, plotMatches, outImg);
    Cv2.ImShow("outImg", outImg);
    // Size takes (width, height), i.e. (Cols, Rows)
    Cv2.Resize(outImg, outImg, new Size(outImg.Cols / 2, outImg.Rows / 2));
    Cv2.ImWrite("C:/Users/Li&Ao/Desktop/Test/output.JPG", outImg);

    // Calculate the rotation matrix R
    Matrix<double> A = Matrix<double>.Build.Dense(3, bestKp1.Count);
    Matrix<double> B = Matrix<double>.Build.Dense(3, bestKp2.Count);
    for (int i = 0; i < bestKp1.Count; i++)
    {
        Vector<double> p1 = From2dTo3d(bestKp1[i], src.Cols, src.Rows);
        Vector<double> p2 = From2dTo3d(bestKp2[i], src.Cols, src.Rows);
        A.SetColumn(i, p1);
        B.SetColumn(i, p2);
    }
    var R = CalcRotation(A, B);
    Console.WriteLine("R matrix is:" + R);
    Cv2.WaitKey();
}
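The RansacMethod above is project code; OpenCV's own RANSAC is also available through Cv2.FindHomography, which returns an inlier mask and avoids the Contains-based lookup in Step 5. A minimal sketch (the point lists are assumed to be index-aligned with the candidate matches, as betterKp1/betterKp2/betterMatches are above):

// Sketch: standard OpenCV RANSAC via FindHomography; mask[i] != 0 marks inliers.
static List<DMatch> RansacFilter(
    IList<Point2d> srcPts, IList<Point2d> dstPts, IList<DMatch> candidateMatches)
{
    var inliers = new List<DMatch>();
    using var mask = new Mat();
    using var h = Cv2.FindHomography(srcPts, dstPts, HomographyMethods.Ransac, 3, mask);
    for (int i = 0; i < candidateMatches.Count; i++)
    {
        if (mask.At<byte>(i) != 0)
            inliers.Add(candidateMatches[i]);
    }
    return inliers;
}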
public void CreateAndDispose()
{
    var orb = ORB.Create(400);
    orb.Dispose();
}
void OnHarris()
{
    Mat image01 = Cv2.ImRead(Application.streamingAssetsPath + "/bryce_01.jpg");
    Mat image02 = Cv2.ImRead(Application.streamingAssetsPath + "/bryce_02.jpg");

    Mat image1 = new Mat(), image2 = new Mat();
    Cv2.CvtColor(image01, image1, ColorConversionCodes.BGR2GRAY);
    Cv2.CvtColor(image02, image2, ColorConversionCodes.BGR2GRAY);

    KeyPoint[] keyPoint1 = null, keyPoint2 = null;
    using (var gFTTDetector = GFTTDetector.Create(500))
    using (var orb = ORB.Create(20))
    using (Mat descriptor1 = new Mat())
    using (Mat descriptor2 = new Mat())
    // ORB descriptors are binary; Hamming is the appropriate norm
    // (L2 would run, but its distances are not meaningful here).
    using (var matcher = new BFMatcher(NormTypes.Hamming))
    {
        // GFTT finds the corners; ORB computes descriptors at those locations
        keyPoint1 = gFTTDetector.Detect(image1);
        keyPoint2 = gFTTDetector.Detect(image2);
        orb.Compute(image1, ref keyPoint1, descriptor1);
        orb.Compute(image2, ref keyPoint2, descriptor2);

        List<DMatch> goodMatchPoints = new List<DMatch>();
        DMatch[][] dm = matcher.KnnMatch(descriptor1, descriptor2, 2);

        #region matched 30
        //for (int i = 0; i < dm.Length; i++)
        //{
        //    if (dm[i][0].Distance < 0.6 * dm[i][1].Distance)
        //    {
        //        goodMatchPoints.Add(dm[i][0]);
        //    }
        //}
        #endregion

        #region matched 48
        float minRatio = 1.0f / 1.5f;
        for (int i = 0; i < dm.Length; i++)
        {
            DMatch bestMatch = dm[i][0];
            DMatch betterMatch = dm[i][1];
            float distanceRatio = bestMatch.Distance / betterMatch.Distance;
            if (distanceRatio < minRatio)
            {
                goodMatchPoints.Add(bestMatch);
            }
        }
        #endregion

        var dstMat = new Mat();
        Debug.Log(string.Format("matchPoints has {0} items", goodMatchPoints.Count));
        Cv2.DrawMatches(image01, keyPoint1, image02, keyPoint2, goodMatchPoints, dstMat);
        t2d = VideoDetectorExample.Utils.MatToTexture2D(dstMat);
    }
    Sprite dst_sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);
    SrcSprite.sprite = dst_sp;
}
/// <summary>
/// ORB feature extraction
/// </summary>
void OnOrb(string path2_)
{
    Debug.Log(path2_);
    Mat image01 = Cv2.ImRead(Application.streamingAssetsPath + "/Textures/p1.jpg");
    Mat image02 = Cv2.ImRead(Application.streamingAssetsPath + path2_);

    // Convert to grayscale (Cv2.ImRead loads BGR, hence BGR2GRAY)
    Mat image1 = new Mat(), image2 = new Mat();
    Cv2.CvtColor(image01, image1, ColorConversionCodes.BGR2GRAY);
    Cv2.CvtColor(image02, image2, ColorConversionCodes.BGR2GRAY);

    KeyPoint[] keyPoint1 = null;
    KeyPoint[] keyPoint2 = null;
    using (ORB orb = ORB.Create(500))
    using (Mat descriptor1 = new Mat())
    using (Mat descriptor2 = new Mat())
    using (var matcher = new BFMatcher(NormTypes.Hamming)) // Hamming for binary ORB descriptors
    {
        // Detect keypoints and compute descriptors
        orb.DetectAndCompute(image1, null, out keyPoint1, descriptor1);
        orb.DetectAndCompute(image2, null, out keyPoint2, descriptor2);
        Debug.Log("image1 keyPoints: " + keyPoint1.Length + " descriptor: " + descriptor1.Rows);
        Debug.Log("image2 keyPoints: " + keyPoint2.Length + " descriptor: " + descriptor2.Rows);

        // Lowe's ratio test to keep only good matches,
        // using k-nearest-neighbour matching with k = 2
        DMatch[][] matchPoints = matcher.KnnMatch(descriptor1, descriptor2, 2);
        List<DMatch> goodMatchPoints = new List<DMatch>();
        for (int i = 0; i < matchPoints.Length; i++)
        {
            float minRatio = 0.9f;
            float disRatio = matchPoints[i][0].Distance / matchPoints[i][1].Distance;
            if (disRatio < minRatio)
            {
                goodMatchPoints.Add(matchPoints[i][0]);
            }
        }
        DMatch[] goodMatches = goodMatchPoints.ToArray();
        Debug.Log("Total feature points in the handwriting image: " + descriptor2.Rows);
        Debug.Log("Matching feature points: " + goodMatches.Length);
        Debug.Log("Similarity ratio: " + ((float)goodMatches.Length / descriptor2.Rows));

        dstMat = new Mat();
        Cv2.DrawMatches(image01, keyPoint1, image02, keyPoint2, goodMatches, dstMat);
        t2d = MatToTexture2D(dstMat);
    }
    Sprite dst_sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);
    m_srcImage.sprite = dst_sp;
    m_srcImage.preserveAspect = true;
}
public Place(string[] images, int nORBFeatures, float scaleFactor)
{
    m_orbDetector = ORB.Create(nORBFeatures);
    GridDescriptor = new Dictionary<Tuple<int, int, int>, List<Mat>>();
    RepImages = new Dictionary<Tuple<int, int, int>, string>();
    m_nORBFeatures = nORBFeatures;

    // Iterate over all image paths in images
    foreach (var imagePath in images)
    {
        Tuple<int, int, int> currentImagePose = null;

        // Read and rescale the current image
        var currentImageMat = Cv2.ImRead(imagePath, ImreadModes.Grayscale);
        Cv2.Resize(currentImageMat, currentImageMat,
            new OpenCvSharp.Size(currentImageMat.Width * scaleFactor, currentImageMat.Height * scaleFactor));

        var temp = imagePath.Split('\\');
        var fileName = temp[temp.Length - 1];

        // Tokenize the file name to extract the pose information
        string[] tokens = fileName.Split('_');
        if (tokens.Length == 5)
        {
            int x, y, rotIdx;
            if (!Int32.TryParse(tokens[1], out x))
            {
                Console.WriteLine("Check names of images");
            }
            if (!Int32.TryParse(tokens[2], out y))
            {
                Console.WriteLine("Check names of images");
            }
            if (!Int32.TryParse(tokens[3], out rotIdx))
            {
                Console.WriteLine("Check names of images");
            }

            // Allocate the coordinate for this image
            PlaceName = tokens[0];
            currentImagePose = new Tuple<int, int, int>(x, y, rotIdx);

            // Increment the count of images for this place
            Size++;

            // If there is no entry for currentImagePose yet, allocate one
            if (GridDescriptor.ContainsKey(currentImagePose) == false)
            {
                GridDescriptor.Add(currentImagePose, new List<Mat>(0));
                RepImages.Add(currentImagePose, imagePath);
            }

            // Detect keypoints and compute descriptors using ORB
            Mat descriptor = new Mat();
            KeyPoint[] keypoints;
            m_orbDetector.DetectAndCompute(currentImageMat, null, out keypoints, descriptor);
            GridDescriptor[currentImagePose].Add(descriptor);

            Console.WriteLine("Storing Images.. Place = {0}, (x={1}, y={2}, thetaIdx={3})",
                PlaceName, currentImagePose.Item1, currentImagePose.Item2, currentImagePose.Item3);
        }
    }
}
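A natural counterpart to this constructor is a lookup that matches a query image against the stored descriptors. The sketch below is hypothetical: the Place fields are assumed from the snippet, and scoring poses by ratio-test match count is an illustrative choice, not the project's actual method (requires System.Linq):

// Hypothetical query: score each stored pose by ratio-test match count
// against the query image, then return the best-scoring pose.
public Tuple<int, int, int> FindBestPose(Mat queryImage)
{
    using var queryDesc = new Mat();
    m_orbDetector.DetectAndCompute(queryImage, null, out _, queryDesc);

    using var matcher = new BFMatcher(NormTypes.Hamming);
    Tuple<int, int, int> bestPose = null;
    int bestScore = -1;

    foreach (var entry in GridDescriptor)
    {
        int score = 0;
        foreach (Mat storedDesc in entry.Value)
        {
            DMatch[][] knn = matcher.KnnMatch(queryDesc, storedDesc, 2);
            score += knn.Count(m => m.Length == 2 && m[0].Distance < 0.75f * m[1].Distance);
        }
        if (score > bestScore)
        {
            bestScore = score;
            bestPose = entry.Key;
        }
    }
    return bestPose;
}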