/// <summary>
/// ORB feature demo: detects up to 500 ORB features in two sample images,
/// brute-force 2-NN matches their descriptors, filters matches with Lowe's
/// ratio test, draws the survivors and shows them via SrcSprite.
/// </summary>
void OnOrb()
{
    using (Mat image01 = Cv2.ImRead(Application.streamingAssetsPath + "/bryce_01.jpg"))
    using (Mat image02 = Cv2.ImRead(Application.streamingAssetsPath + "/bryce_02.jpg"))
    using (Mat image1 = new Mat())
    using (Mat image2 = new Mat())
    {
        // Grayscale conversion: ORB operates on single-channel input.
        Cv2.CvtColor(image01, image1, ColorConversionCodes.RGB2GRAY);
        Cv2.CvtColor(image02, image2, ColorConversionCodes.RGB2GRAY);

        KeyPoint[] keyPoint1 = null;
        KeyPoint[] keyPoint2 = null;
        using (ORB orb = ORB.Create(500))
        using (Mat descriptor1 = new Mat())
        using (Mat descriptor2 = new Mat())
        using (var matcher = new BFMatcher())
        {
            // null mask = detect over the whole image (the original leaked a
            // temporary `new Mat()` mask on every call).
            orb.DetectAndCompute(image1, null, out keyPoint1, descriptor1);
            orb.DetectAndCompute(image2, null, out keyPoint2, descriptor2);
            Debug.Log(string.Format("keyPoints has {0},{1} items.", keyPoint1.Length, keyPoint2.Length));
            Debug.Log(string.Format("descriptor has {0},{1} items.", descriptor1.Rows, descriptor2.Rows));

            List<DMatch> goodMatchePoints = new List<DMatch>();
            var dm = matcher.KnnMatch(descriptor1, descriptor2, 2);

            #region matched 90
            // Lowe's ratio test: keep a match only when it is clearly better
            // than the second-best candidate.
            float minRatio = 1.0f / 1.5f;
            for (int i = 0; i < dm.Length; i++)
            {
                // KnnMatch can return fewer than k neighbours; skip those to
                // avoid an IndexOutOfRangeException.
                if (dm[i].Length < 2)
                {
                    continue;
                }
                DMatch bestMatch = dm[i][0];
                DMatch betterMatch = dm[i][1];
                // Multiplication form avoids dividing by a zero distance.
                if (bestMatch.Distance < minRatio * betterMatch.Distance)
                {
                    goodMatchePoints.Add(bestMatch);
                }
            }
            #endregion

            Debug.Log(string.Format("matchePoints has {0} items", goodMatchePoints.Count));
            using (var dstMat = new Mat())
            {
                Cv2.DrawMatches(image01, keyPoint1, image02, keyPoint2, goodMatchePoints, dstMat);
                t2d = Utils.MatToTexture2D(dstMat);
            }
        }
    }

    Sprite dst_sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);
    SrcSprite.sprite = dst_sp;
    //SrcSprite.preserveAspect = true;
}
/// <summary>
/// Finds the stored pose whose descriptors best match the given image.
/// </summary>
/// <param name="givenImageMat">Query image as a Mat (grayscale expected — confirm against callers).</param>
/// <param name="mostSimilarPose">Pose (x, y, rotIdx) with the highest match count; null when no poses are stored.</param>
/// <param name="maxSimilarity">Best match count normalized by the ORB feature budget.</param>
public void GetNearestOne(Mat givenImageMat, out Tuple<int, int, int> mostSimilarPose, out float maxSimilarity)
{
    KeyPoint[] keypoints = null;
    // Dispose the query descriptor once we are done with it (the original leaked it).
    using (Mat queryDescriptor = new Mat())
    {
        m_orbDetector.DetectAndCompute(givenImageMat, null, out keypoints, queryDescriptor);

        Tuple<int, int, int> currentMaxPose = null;
        float currentMaxVal = 0.0f;

        // For each pose, take the best-matching stored image.
        foreach (var eachPose in this.GridDescriptor.Keys)
        {
            // Track the per-pose maximum directly instead of building a list and
            // calling Max() twice; this also survives a pose with no descriptors
            // (the original threw InvalidOperationException on an empty list).
            float bestForPose = 0.0f;
            foreach (var eachDescriptor in GridDescriptor[eachPose])
            {
                CalculateSimilarity(queryDescriptor, eachDescriptor, out List<float> distances);
                // Similarity = number of accepted match distances.
                bestForPose = Math.Max(bestForPose, distances.Count);
            }
            // `<=` keeps the last pose on ties, matching the original behavior.
            if (currentMaxVal <= bestForPose)
            {
                currentMaxPose = eachPose;
                currentMaxVal = bestForPose;
            }
        }

        mostSimilarPose = currentMaxPose;
        maxSimilarity = currentMaxVal / m_nORBFeatures;
    }
}
/// <summary>
/// ORB feature extraction: detects features in two textures, brute-force
/// matches every descriptor (no ratio filtering), draws the matches and
/// shows them in m_srcImage.
/// </summary>
void OnOrb()
{
    using (Mat image01 = Cv2.ImRead(Application.streamingAssetsPath + "/Textures/p1.jpg"))
    using (Mat image02 = Cv2.ImRead(Application.streamingAssetsPath + "/Textures/p2.jpg"))
    using (Mat image1 = new Mat())
    using (Mat image2 = new Mat())
    {
        // Grayscale conversion (ORB operates on single-channel input).
        Cv2.CvtColor(image01, image1, ColorConversionCodes.RGB2GRAY);
        Cv2.CvtColor(image02, image2, ColorConversionCodes.RGB2GRAY);

        KeyPoint[] keyPoint1 = null;
        KeyPoint[] keyPoint2 = null;
        using (ORB orb = ORB.Create(500))
        using (Mat descriptor1 = new Mat())
        using (Mat descriptor2 = new Mat())
        using (var matcher = new BFMatcher())
        {
            // Detect keypoints and compute descriptors; null mask = whole image
            // (the original leaked a temporary `new Mat()` mask per call).
            orb.DetectAndCompute(image1, null, out keyPoint1, descriptor1);
            orb.DetectAndCompute(image2, null, out keyPoint2, descriptor2);
            Debug.Log($"keyPoints has {keyPoint1.Length},{keyPoint2.Length} items.");
            Debug.Log($"descriptor has {descriptor1.Rows},{descriptor2.Rows} items.");

            // Nearest-neighbour matching of every descriptor.
            DMatch[] matchePoints = matcher.Match(descriptor1, descriptor2);

            // dstMat is a field: it outlives this method, so it is intentionally
            // not disposed here.
            dstMat = new Mat();
            Cv2.DrawMatches(image01, keyPoint1, image02, keyPoint2, matchePoints, dstMat);
            t2d = Utils.MatToTexture2D(dstMat);
        }
    }

    Sprite dst_sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);
    m_srcImage.sprite = dst_sp;
    m_srcImage.preserveAspect = true;
}
//--------------------------------------------------------------------------------------------------
/// <summary>
/// Loads two grayscale images, matches ORB features with Lowe's ratio test
/// and RANSAC, draws the inlier matches, and estimates the rotation between
/// the two views.
/// </summary>
static void Main()
{
    Mat srcOri = new Mat("C:/Users/Li&Ao/Desktop/Test/5.JPG", ImreadModes.Grayscale);
    Mat dstOri = new Mat("C:/Users/Li&Ao/Desktop/Test/6.JPG", ImreadModes.Grayscale);
    Mat src = new Mat();
    Mat dst = new Mat();
    RotateAndResize(srcOri, out src, true);
    RotateAndResize(dstOri, out dst, true);

    // Step 1: detect keypoints and compute their ORB descriptors.
    ORB orb = ORB.Create();
    KeyPoint[] kp1, kp2;
    Mat desc1 = new Mat();
    Mat desc2 = new Mat();
    orb.DetectAndCompute(src, null, out kp1, desc1);
    orb.DetectAndCompute(dst, null, out kp2, desc2);

    // Step 2: brute-force k-NN matching (k = 2 feeds the ratio test below).
    var bfMatcher = new BFMatcher();
    var matches = bfMatcher.KnnMatch(desc1, desc2, k: 2);

    // Step 3: Lowe's ratio test for outlier removal.
    var betterKp1 = new List<Point2f>();
    var betterKp2 = new List<Point2f>();
    var betterMatches = new List<DMatch>();
    foreach (DMatch[] items in matches)
    {
        // KnnMatch may return fewer than 2 neighbours; skip those pairs to
        // avoid an IndexOutOfRangeException.
        if (items.Length < 2)
        {
            continue;
        }
        if (items[0].Distance < 0.8 * items[1].Distance)
        {
            betterKp1.Add(kp1[items[0].QueryIdx].Pt);
            betterKp2.Add(kp2[items[0].TrainIdx].Pt);
            betterMatches.Add(items[0]);
        }
    }

    // Step 4: RANSAC for geometric outlier removal.
    Point2d Point2fToPoint2d(Point2f pf) => new Point2d((double)pf.X, (double)pf.Y);
    var betterKp1_tmp = betterKp1.ConvertAll(Point2fToPoint2d);
    var betterKp2_tmp = betterKp2.ConvertAll(Point2fToPoint2d);
    var bestTuple = RansacMethod(betterKp1_tmp, betterKp2_tmp, src.Cols, src.Rows);
    var bestKp1 = bestTuple.Item1;
    var bestKp2 = bestTuple.Item2;

    // Step 5: draw only the matches that survived RANSAC.
    var plotMatches = new List<DMatch>();
    foreach (DMatch[] items in matches)
    {
        if (items.Length == 0)
        {
            continue;
        }
        var p1 = Point2fToPoint2d(kp1[items[0].QueryIdx].Pt);
        var p2 = Point2fToPoint2d(kp2[items[0].TrainIdx].Pt);
        if (bestKp1.Contains(p1) && bestKp2.Contains(p2))
        {
            plotMatches.Add(items[0]);
        }
    }
    Mat outImg = new Mat();
    Cv2.DrawMatches(src, kp1, dst, kp2, plotMatches, outImg);
    Cv2.ImShow("outImg", outImg);

    // BUG FIX: OpenCV Size is (width, height); the original passed
    // (Rows, Cols) = (height, width), distorting the saved image.
    Cv2.Resize(outImg, outImg, new Size(outImg.Cols / 2, outImg.Rows / 2));
    Cv2.ImWrite("C:/Users/Li&Ao/Desktop/Test/output.JPG", outImg);

    // Estimate the rotation matrix from the inlier correspondences.
    Matrix<double> A = Matrix<double>.Build.Dense(3, bestKp1.Count);
    Matrix<double> B = Matrix<double>.Build.Dense(3, bestKp2.Count);
    for (int i = 0; i < bestKp1.Count; i++)
    {
        Vector<double> p1 = From2dTo3d(bestKp1[i], src.Cols, src.Rows);
        Vector<double> p2 = From2dTo3d(bestKp2[i], src.Cols, src.Rows);
        A.SetColumn(i, p1);
        B.SetColumn(i, p2);
    }
    var R = CalcRotation(A, B);
    Console.WriteLine("R matrix is:" + R);
    Cv2.WaitKey();
}
/// <summary>
/// Builds a per-pose ORB descriptor database from images whose file names
/// encode a pose as 5 underscore-separated tokens: "place_x_y_rotIdx_*".
/// </summary>
/// <param name="images">Full paths of the images to index.</param>
/// <param name="nORBFeatures">ORB feature budget per image.</param>
/// <param name="scaleFactor">Uniform scale applied to each image before detection.</param>
public Place(string[] images, int nORBFeatures, float scaleFactor)
{
    m_orbDetector = ORB.Create(nORBFeatures);
    GridDescriptor = new Dictionary<Tuple<int, int, int>, List<Mat>>();
    RepImages = new Dictionary<Tuple<int, int, int>, string>();
    m_nORBFeatures = nORBFeatures;

    // Iterate over all image paths in images.
    foreach (var imagePath in images)
    {
        // Extract the file name (paths here use Windows '\\' separators).
        var temp = imagePath.Split('\\');
        var fileName = temp[temp.Length - 1];

        // Tokenize to extract the pose information from the file name.
        string[] tokens = fileName.Split('_');
        if (tokens.Length != 5)
        {
            continue;
        }

        // BUG FIX: the original logged parse failures but still indexed the
        // image under a garbage pose (TryParse leaves 0 on failure); malformed
        // names are now skipped entirely.
        if (!Int32.TryParse(tokens[1], out int x) ||
            !Int32.TryParse(tokens[2], out int y) ||
            !Int32.TryParse(tokens[3], out int rotIdx))
        {
            Console.WriteLine("Check names of images");
            continue;
        }

        // Allocate the coordinate for this image.
        PlaceName = tokens[0];
        var currentImagePose = new Tuple<int, int, int>(x, y, rotIdx);

        // Increment the count of images for this place.
        Size++;

        // First image seen for a pose becomes its representative.
        if (GridDescriptor.ContainsKey(currentImagePose) == false)
        {
            GridDescriptor.Add(currentImagePose, new List<Mat>(0));
            RepImages.Add(currentImagePose, imagePath);
        }

        // Read, rescale, then detect-and-compute ORB descriptors. The image
        // Mat is disposed; only the descriptor Mat is retained.
        using (var currentImageMat = Cv2.ImRead(imagePath, ImreadModes.Grayscale))
        {
            Cv2.Resize(currentImageMat, currentImageMat, new OpenCvSharp.Size(currentImageMat.Width * scaleFactor, currentImageMat.Height * scaleFactor));
            Mat descriptor = new Mat();
            KeyPoint[] keypoints;
            m_orbDetector.DetectAndCompute(currentImageMat, null, out keypoints, descriptor);
            GridDescriptor[currentImagePose].Add(descriptor);
        }

        Console.WriteLine("Storing Images.. Place = {0}, (x={1}, y={2}, thetaIdx={3})", PlaceName, currentImagePose.Item1, currentImagePose.Item2, currentImagePose.Item3);
    }
}
/// <summary>
/// ORB feature extraction: compares a fixed template (p1.jpg) against the
/// image at the given StreamingAssets-relative path, keeps matches that pass
/// Lowe's ratio test, and displays the result in m_srcImage.
/// </summary>
/// <param name="path2_">Path of the second image, relative to StreamingAssets.</param>
void OnOrb(string path2_)
{
    Debug.Log(path2_);
    using (Mat image01 = Cv2.ImRead(Application.streamingAssetsPath + "/Textures/p1.jpg"))
    using (Mat image02 = Cv2.ImRead(Application.streamingAssetsPath + path2_))
    using (Mat image1 = new Mat())
    using (Mat image2 = new Mat())
    {
        // Grayscale conversion (ORB operates on single-channel input).
        Cv2.CvtColor(image01, image1, ColorConversionCodes.RGB2GRAY);
        Cv2.CvtColor(image02, image2, ColorConversionCodes.RGB2GRAY);

        KeyPoint[] keyPoint1 = null;
        KeyPoint[] keyPoint2 = null;
        using (ORB orb = ORB.Create(500))
        using (Mat descriptor1 = new Mat())
        using (Mat descriptor2 = new Mat())
        using (var matcher = new BFMatcher())
        {
            // Detect keypoints and compute descriptors; null mask = whole image
            // (the original leaked a temporary `new Mat()` mask per call).
            orb.DetectAndCompute(image1, null, out keyPoint1, descriptor1);
            orb.DetectAndCompute(image2, null, out keyPoint2, descriptor2);
            Debug.Log("image1 keyPoints: " + keyPoint1.Length + " descriptor: " + descriptor1.Rows);
            Debug.Log("image2 keyPoints: " + keyPoint2.Length + " descriptor: " + descriptor2.Rows);

            // Lowe's ratio test over 2-NN matches to keep only good matches.
            DMatch[][] matchePoints = matcher.KnnMatch(descriptor1, descriptor2, 2);
            List<DMatch> GoodMatchePoints = new List<DMatch>();
            const float minRatio = 0.9f;
            for (int i = 0; i < matchePoints.Length; i++)
            {
                // KnnMatch may return fewer than 2 neighbours; skip those to
                // avoid an IndexOutOfRangeException.
                if (matchePoints[i].Length < 2)
                {
                    continue;
                }
                // Multiplication form avoids dividing by a zero distance.
                if (matchePoints[i][0].Distance < minRatio * matchePoints[i][1].Distance)
                {
                    GoodMatchePoints.Add(matchePoints[i][0]);
                }
            }
            DMatch[] matchePointss = GoodMatchePoints.ToArray();
            Debug.Log("手写文字图总特征点: " + descriptor2.Rows);
            Debug.Log("匹配符合的文字特征点: " + matchePointss.Length);
            Debug.Log("相识度比例: " + ((float)matchePointss.Length / descriptor2.Rows));

            // dstMat is a field: it outlives this method, so it is intentionally
            // not disposed here.
            dstMat = new Mat();
            Cv2.DrawMatches(image01, keyPoint1, image02, keyPoint2, matchePointss, dstMat);
            t2d = MatToTexture2D(dstMat);
        }
    }

    Sprite dst_sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);
    m_srcImage.sprite = dst_sp;
    m_srcImage.preserveAspect = true;
}