private void Start()
{
    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
    webCamTextureToMatHelper.Initialize();

    grayMat = new Mat();
    makerGrayMat = new Mat(originMakerTexture.height, originMakerTexture.width, CvType.CV_8UC1);
    makerTexture = new Texture2D(originMakerTexture.width, originMakerTexture.height);
    Graphics.CopyTexture(originMakerTexture, makerTexture);

    detector = ORB.create();
    extractor = ORB.create();

    // Get key points of the maker image
    makerMat = new Mat(originMakerTexture.height, originMakerTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(makerTexture, makerMat, false);
    makerKeyPoints = new MatOfKeyPoint();
    makerDescriptors = new Mat();
    Imgproc.cvtColor(makerMat, makerGrayMat, Imgproc.COLOR_BGR2GRAY);
    detector.detect(makerGrayMat, makerKeyPoints);
    extractor.compute(makerGrayMat, makerKeyPoints, makerDescriptors);

    matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
}
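The Start() above only precomputes the marker's keypoints and descriptors. A minimal per-frame sketch of the other half of the pipeline is shown below; it is not part of the original snippet and assumes the usual WebCamTextureToMatHelper methods (IsPlaying, DidUpdateThisFrame, GetMat) plus the fields initialized above.

// A hedged sketch of a matching Update() loop (assumed, not from the original code).
private void Update()
{
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();
        Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

        // Detect and describe features in the current camera frame.
        MatOfKeyPoint frameKeyPoints = new MatOfKeyPoint();
        Mat frameDescriptors = new Mat();
        detector.detect(grayMat, frameKeyPoints);
        extractor.compute(grayMat, frameKeyPoints, frameDescriptors);

        // Match the frame against the precomputed marker descriptors.
        MatOfDMatch matches = new MatOfDMatch();
        if (!frameDescriptors.empty())
            matcher.match(makerDescriptors, frameDescriptors, matches);
        // ...filter the matches and estimate a homography, as in the later examples.
    }
}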
/// <summary>
/// Initializes a new instance of the <see cref="PatternDetector"/> class.
/// </summary>
/// <param name="detector">Detector.</param>
/// <param name="extractor">Extractor.</param>
/// <param name="matcher">Matcher.</param>
/// <param name="ratioTest">If set to <c>true</c>, use the ratio test.</param>
public PatternDetector(ORB detector, ORB extractor, DescriptorMatcher matcher, bool ratioTest = false)
{
    if (detector == null)
    {
        detector = ORB.create();
        detector.setMaxFeatures(1000);
    }
    if (extractor == null)
    {
        extractor = ORB.create();
        extractor.setMaxFeatures(1000);
    }
    if (matcher == null)
    {
        matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
    }

    m_detector = detector;
    m_extractor = extractor;
    m_matcher = matcher;

    enableRatioTest = ratioTest;
    enableHomographyRefinement = true;
    homographyReprojectionThreshold = 3;

    m_queryKeypoints = new MatOfKeyPoint();
    m_queryDescriptors = new Mat();
    m_matches = new MatOfDMatch();
    m_knnMatches = new List<MatOfDMatch>();
    m_grayImg = new Mat();
    m_warpedImg = new Mat();
    m_roughHomography = new Mat();
    m_refinedHomography = new Mat();
}
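A minimal usage sketch for this constructor, assuming caller code outside the class shown here: passing nulls falls back to the ORB / BRUTEFORCE_HAMMING defaults set up above.

// Hedged caller-side sketch (not part of the original class).
PatternDetector patternDetector = new PatternDetector(null, null, null, true);

// Or with explicitly tuned components:
ORB orb = ORB.create();
orb.setMaxFeatures(500);
PatternDetector tunedDetector = new PatternDetector(
    orb, orb, DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING), false);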
// Use this for initialization
void Start()
{
    Texture2D imgTexture = Resources.Load("lena") as Texture2D;

    Mat img1Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, img1Mat);
    Debug.Log("img1Mat dst ToString " + img1Mat.ToString());

    Mat img2Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, img2Mat);
    Debug.Log("img2Mat dst ToString " + img2Mat.ToString());

    // Rotate the second copy by a random angle so there is something to match.
    float angle = UnityEngine.Random.Range(0, 360), scale = 1.0f;
    Point center = new Point(img2Mat.cols() * 0.5f, img2Mat.rows() * 0.5f);
    Mat affine_matrix = Imgproc.getRotationMatrix2D(center, angle, scale);
    Imgproc.warpAffine(img1Mat, img2Mat, affine_matrix, img2Mat.size());

    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
    Mat descriptors1 = new Mat();
    detector.detect(img1Mat, keypoints1);
    extractor.compute(img1Mat, keypoints1, descriptors1);

    MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
    Mat descriptors2 = new Mat();
    detector.detect(img2Mat, keypoints2);
    extractor.compute(img2Mat, keypoints2, descriptors2);

    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(descriptors1, descriptors2, matches);

    Mat resultImg = new Mat();
    Features2d.drawMatches(img1Mat, keypoints1, img2Mat, keypoints2, matches, resultImg);

    Texture2D texture = new Texture2D(resultImg.cols(), resultImg.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(resultImg, texture);
    gameObject.GetComponent<Renderer>().material.mainTexture = texture;
}
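The example above draws every brute-force match, which is usually very noisy. A hedged alternative, reusing the same descriptors1/descriptors2/keypoints/resultImg variables, is to thin the matches with Lowe's ratio test via knnMatch before drawing; the 0.75 threshold is the conventional value and is also what the later snippets on this page use.

// Ratio-test filtering sketch (assumes the variables declared in the Start() above).
List<MatOfDMatch> knnMatches = new List<MatOfDMatch>();
matcher.knnMatch(descriptors1, descriptors2, knnMatches, 2);

List<DMatch> goodMatches = new List<DMatch>();
foreach (MatOfDMatch knn in knnMatches)
{
    DMatch[] pair = knn.toArray();
    // Keep a match only if its best neighbor is clearly closer than the second best.
    if (pair.Length >= 2 && pair[0].distance < 0.75f * pair[1].distance)
        goodMatches.Add(pair[0]);
}

MatOfDMatch goodMatOfDMatch = new MatOfDMatch();
goodMatOfDMatch.fromList(goodMatches);
Features2d.drawMatches(img1Mat, keypoints1, img2Mat, keypoints2, goodMatOfDMatch, resultImg);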
// Feature-matching method #3 (it runs, but performance was poor, so it was abandoned)
public bool descriptorsORB_Old(Mat RGB, Mat cameraFeed, string targetName)
{
    if (RGB == null)
    {
        Debug.Log("RGB Mat is Null");
        return false;
    }

    // Copy the incoming RGB into Src
    Mat SrcMat = new Mat();
    RGB.copyTo(SrcMat);

    // Template image to match against
    Texture2D imgTexture = Resources.Load(targetName) as Texture2D;
    // Texture2D imgTexture2 = Resources.Load("lenaK") as Texture2D;

    // Convert the Texture2D to a Mat
    Mat img1Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, img1Mat);

    // Create the ORB feature detector and descriptor extractor
    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    // Mats that hold the keypoints and descriptors
    MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
    Mat descriptors1 = new Mat();
    MatOfKeyPoint keypointsSrc = new MatOfKeyPoint();
    Mat descriptorsSrc = new Mat();

    // Detect features in image 1 (the template)
    detector.detect(img1Mat, keypoints1);
    extractor.compute(img1Mat, keypoints1, descriptors1);

    // Detect features in the Src image
    detector.detect(SrcMat, keypointsSrc);
    extractor.compute(SrcMat, keypointsSrc, descriptorsSrc);

    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(descriptors1, descriptorsSrc, matches);

    DMatch[] arrayDmatch = matches.toArray();
    for (int i = arrayDmatch.Length - 1; i >= 0; i--)
    {
        // Debug.Log("match " + i + ": " + arrayDmatch[i].distance);
    }

    // Filter the matches
    double max_dist = 0;
    double min_dist = 100;

    //-- Quick calculation of max and min distances between keypoints
    double dist = 0;
    for (int i = 0; i < matches.rows(); i++)
    {
        dist = arrayDmatch[i].distance;
        if (dist < min_dist)
        {
            min_dist = dist;
        }
        if (dist > max_dist)
        {
            max_dist = dist;
        }
    }
    Debug.Log("Max dist :" + max_dist);
    Debug.Log("Min dist :" + min_dist);

    // Keep only the good matches
    List<DMatch> matchesGoodList = new List<DMatch>();
    for (int i = 0; i < matches.rows(); i++)
    {
        //if (arrayDmatch[i].distance < RateDist.value * min_dist)
        //{
        //    //Debug.Log("match " + i + ": " + arrayDmatch[i].distance);
        //    matchesGoodList.Add(arrayDmatch[i]);
        //}
    }
    MatOfDMatch matchesGood = new MatOfDMatch();
    matchesGood.fromList(matchesGoodList);

    // Draw the keypoints
    Features2d.drawKeypoints(SrcMat, keypointsSrc, SrcMat);

    // Declarations for the output conversion
    Mat resultImg = new Mat();
    // Features2d.drawMatches(img1Mat, keypoints1, SrcMat, keypointsSrc, matchesGood, resultImg);

    List<Point> P1 = new List<Point>();
    // List<Point> P2 = new List<Point>();
    List<Point> pSrc = new List<Point>();
    Debug.Log("MatchCount" + matchesGoodList.Count);
    for (int i = 0; i < matchesGoodList.Count; i++)
    {
        P1.Add(new Point(keypoints1.toArray()[matchesGoodList[i].queryIdx].pt.x,
                         keypoints1.toArray()[matchesGoodList[i].queryIdx].pt.y));
        pSrc.Add(new Point(keypointsSrc.toArray()[matchesGoodList[i].trainIdx].pt.x,
                           keypointsSrc.toArray()[matchesGoodList[i].trainIdx].pt.y));
        //Debug.Log("ID = " + matchesGoodList[i].queryIdx);
        //Debug.Log("x,y =" + (int)keypoints1.toArray()[matchesGoodList[i].queryIdx].pt.x + "," + (int)keypoints1.toArray()[matchesGoodList[i].queryIdx].pt.y);
        //Debug.Log("x,y =" + (int)keypoints2.toArray()[matchesGoodList[i].trainIdx].pt.x + "," + (int)keypoints2.toArray()[matchesGoodList[i].trainIdx].pt.y);
    }
    MatOfPoint2f p2fTarget = new MatOfPoint2f(P1.ToArray());
    MatOfPoint2f p2fSrc = new MatOfPoint2f(pSrc.ToArray());
    Mat matrixH = Calib3d.findHomography(p2fTarget, p2fSrc, Calib3d.RANSAC, 3);

    List<Point> srcPointCorners = new List<Point>();
    srcPointCorners.Add(new Point(0, 0));
    srcPointCorners.Add(new Point(img1Mat.width(), 0));
    srcPointCorners.Add(new Point(img1Mat.width(), img1Mat.height()));
    srcPointCorners.Add(new Point(0, img1Mat.height()));
    Mat originalRect = Converters.vector_Point2f_to_Mat(srcPointCorners);

    List<Point> srcPointCornersEnd = new List<Point>();
    srcPointCornersEnd.Add(new Point(0, img1Mat.height()));
    srcPointCornersEnd.Add(new Point(0, 0));
    srcPointCornersEnd.Add(new Point(img1Mat.width(), 0));
    srcPointCornersEnd.Add(new Point(img1Mat.width(), img1Mat.height()));
    Mat changeRect = Converters.vector_Point2f_to_Mat(srcPointCornersEnd);

    Core.perspectiveTransform(originalRect, changeRect, matrixH);
    List<Point> srcPointCornersSave = new List<Point>();
    Converters.Mat_to_vector_Point(changeRect, srcPointCornersSave);

    if ((srcPointCornersSave[2].x - srcPointCornersSave[0].x) < 5 || (srcPointCornersSave[2].y - srcPointCornersSave[0].y) < 5)
    {
        Debug.Log("Match output image is too small");
        SrcMat.copyTo(cameraFeed);
        SrcMat.release();
        Imgproc.putText(cameraFeed, "X-S", new Point(10, 50), 0, 1, new Scalar(255, 255, 255), 2);
        return false;
    }

    // Features2d.drawMatches(img1Mat, keypoints1, SrcMat, keypointsSrc, matchesGood, resultImg);
    Imgproc.line(SrcMat, srcPointCornersSave[0], srcPointCornersSave[1], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[1], srcPointCornersSave[2], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[2], srcPointCornersSave[3], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[3], srcPointCornersSave[0], new Scalar(255, 0, 0), 3);
    SrcMat.copyTo(cameraFeed);

    keypoints1.release();
    img1Mat.release();
    SrcMat.release();
    return true;
}
//============================================================
//========== The functions below are no longer used ==========
//============================================================

// Feature-matching method #3 (ORB feature matching)
public bool descriptorsORB(Mat RGB, Mat cameraFeed, string targetName)
{
    if (RGB == null)
    {
        Debug.Log("RGB Mat is Null");
        return false;
    }

    // Copy the incoming RGB into Src
    Mat SrcMat = new Mat();
    RGB.copyTo(SrcMat);

    // Load the template to match against
    Texture2D imgTexture = Resources.Load(targetName) as Texture2D;

    // Convert the Texture2D to a Mat
    Mat targetMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, targetMat);

    // Create the ORB feature detector and descriptor extractor
    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    // Mats that hold the keypoints and descriptors
    MatOfKeyPoint keypointsTarget = new MatOfKeyPoint();
    Mat descriptorsTarget = new Mat();
    MatOfKeyPoint keypointsSrc = new MatOfKeyPoint();
    Mat descriptorsSrc = new Mat();

    // Detect features in the target image
    detector.detect(targetMat, keypointsTarget);
    extractor.compute(targetMat, keypointsTarget, descriptorsTarget);

    // Detect features in the Src image
    detector.detect(SrcMat, keypointsSrc);
    extractor.compute(SrcMat, keypointsSrc, descriptorsSrc);

    // Create the descriptor matcher
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    MatOfDMatch matches = new MatOfDMatch();

    // Match the descriptors of the two images
    matcher.match(descriptorsTarget, descriptorsSrc, matches);
    DMatch[] arrayDmatch = matches.toArray();

    // Filter the matches
    double max_dist = 0;
    double min_dist = 100;

    //-- Quick calculation of max and min distances between keypoints
    double dist = 0;
    for (int i = 0; i < matches.rows(); i++)
    {
        dist = arrayDmatch[i].distance;
        if (dist < min_dist)
        {
            min_dist = dist;
        }
        if (dist > max_dist)
        {
            max_dist = dist;
        }
    }
    Debug.Log("Max dist :" + max_dist);
    Debug.Log("Min dist :" + min_dist);

    // (The good-match list is left empty in this unused version.)
    List<DMatch> matchesGoodList = new List<DMatch>();
    MatOfDMatch matchesGood = new MatOfDMatch();
    matchesGood.fromList(matchesGoodList);

    // Draw the keypoints
    Features2d.drawKeypoints(SrcMat, keypointsSrc, SrcMat);

    List<Point> pTarget = new List<Point>();
    List<Point> pSrc = new List<Point>();
    Debug.Log("MatchCount" + matchesGoodList.Count);
    for (int i = 0; i < matchesGoodList.Count; i++)
    {
        pTarget.Add(new Point(keypointsTarget.toArray()[matchesGoodList[i].queryIdx].pt.x,
                              keypointsTarget.toArray()[matchesGoodList[i].queryIdx].pt.y));
        pSrc.Add(new Point(keypointsSrc.toArray()[matchesGoodList[i].trainIdx].pt.x,
                           keypointsSrc.toArray()[matchesGoodList[i].trainIdx].pt.y));
    }
    MatOfPoint2f p2fTarget = new MatOfPoint2f(pTarget.ToArray());
    MatOfPoint2f p2fSrc = new MatOfPoint2f(pSrc.ToArray());
    Mat matrixH = Calib3d.findHomography(p2fTarget, p2fSrc, Calib3d.RANSAC, 3);

    List<Point> srcPointCorners = new List<Point>();
    srcPointCorners.Add(new Point(0, 0));
    srcPointCorners.Add(new Point(targetMat.width(), 0));
    srcPointCorners.Add(new Point(targetMat.width(), targetMat.height()));
    srcPointCorners.Add(new Point(0, targetMat.height()));
    Mat originalRect = Converters.vector_Point2f_to_Mat(srcPointCorners);

    List<Point> srcPointCornersEnd = new List<Point>();
    srcPointCornersEnd.Add(new Point(0, targetMat.height()));
    srcPointCornersEnd.Add(new Point(0, 0));
    srcPointCornersEnd.Add(new Point(targetMat.width(), 0));
    srcPointCornersEnd.Add(new Point(targetMat.width(), targetMat.height()));
    Mat changeRect = Converters.vector_Point2f_to_Mat(srcPointCornersEnd);

    Core.perspectiveTransform(originalRect, changeRect, matrixH);
    List<Point> srcPointCornersSave = new List<Point>();
    Converters.Mat_to_vector_Point(changeRect, srcPointCornersSave);

    if ((srcPointCornersSave[2].x - srcPointCornersSave[0].x) < 5 || (srcPointCornersSave[2].y - srcPointCornersSave[0].y) < 5)
    {
        Debug.Log("Match output image is too small");
        SrcMat.copyTo(cameraFeed);
        SrcMat.release();
        Imgproc.putText(cameraFeed, targetName, srcPointCornersSave[0], 0, 1, new Scalar(255, 255, 255), 2);
        return false;
    }

    // Draw the bounding box
    Imgproc.line(SrcMat, srcPointCornersSave[0], srcPointCornersSave[1], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[1], srcPointCornersSave[2], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[2], srcPointCornersSave[3], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[3], srcPointCornersSave[0], new Scalar(255, 0, 0), 3);

    // Draw the center point
    Point middlePoint = new Point((srcPointCornersSave[0].x + srcPointCornersSave[2].x) / 2,
                                  (srcPointCornersSave[0].y + srcPointCornersSave[2].y) / 2);
    Imgproc.line(SrcMat, middlePoint, middlePoint, new Scalar(0, 0, 255), 10);

    SrcMat.copyTo(cameraFeed);
    keypointsTarget.release();
    targetMat.release();
    SrcMat.release();
    return true;
}
//public bool verificaImagem(Texture2D texture, Texture2D texture2)
//{
//    Texture2D camFoto = texture;
//    Texture2D printTela = texture2;

//    // Grayscale. CV_8UC1
//    Mat img1Mat = new Mat(camFoto.height, camFoto.width, CvType.CV_8UC1);
//    Utils.texture2DToMat(camFoto, img1Mat);

//    // Grayscale. CV_8UC1
//    Mat img2Mat = new Mat(printTela.height, printTela.width, CvType.CV_8UC1);
//    Utils.texture2DToMat(printTela, img2Mat);

//    Imgproc.GaussianBlur(img1Mat, img1Mat, new Size(5, 5), 0);
//    Imgproc.threshold(img1Mat, img1Mat, 100, 255, Imgproc.THRESH_BINARY);
//    Imgproc.GaussianBlur(img2Mat, img2Mat, new Size(5, 5), 0);
//    Imgproc.threshold(img2Mat, img2Mat, 240, 255, Imgproc.THRESH_BINARY);

//    //Create the result mat
//    int result_cols = img1Mat.cols() - img2Mat.cols() + 1;
//    int result_rows = img1Mat.rows() - img2Mat.rows() + 1;
//    Mat result = new Mat(result_rows, result_cols, CvType.CV_32FC1);

//    int match_method = Imgproc.TM_CCOEFF_NORMED;
//    Imgproc.matchTemplate(img1Mat, img2Mat, result, match_method);
//    Debug.Log(match_method);
//    return match_method <= 1;
//}

public bool verificaImagem(Texture2D textParam, Texture2D textParam2)
{
    var bytes = textParam.EncodeToJPG();
    //File.WriteAllBytes("imagem1_tratamento.png", bytes);
    //bytes = textParam2.EncodeToJPG();
    //File.WriteAllBytes("imagem2_tratamento.png", bytes);
    //Texture2D imgTexture = Resources.Load("circulo") as Texture2D;

    Texture2D camFoto = textParam;
    Texture2D printTela = textParam2;

    // Grayscale. CV_8UC1
    Mat img1Mat = new Mat(camFoto.height, camFoto.width, CvType.CV_8UC1);
    Utils.texture2DToMat(camFoto, img1Mat);

    // Grayscale. CV_8UC1
    Mat img2Mat = new Mat(printTela.height, printTela.width, CvType.CV_8UC1);
    Utils.texture2DToMat(printTela, img2Mat);

    Imgproc.GaussianBlur(img1Mat, img1Mat, new Size(5, 5), 0);
    Texture2D tex3 = new Texture2D(img1Mat.cols(), img1Mat.rows(), TextureFormat.RGBA32, false);
    //Utils.matToTexture2D(img1Mat, tex3);
    //bytes = tex3.EncodeToJPG();
    //File.WriteAllBytes("imagem1_tratamento_gaussian.png", bytes);

    Imgproc.threshold(img1Mat, img1Mat, 100, 255, Imgproc.THRESH_BINARY);
    tex3 = new Texture2D(img1Mat.cols(), img1Mat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(img1Mat, tex3);
    bytes = tex3.EncodeToJPG();
    File.WriteAllBytes("imagem1_tratamento_threshold.png", bytes);

    Imgproc.GaussianBlur(img2Mat, img2Mat, new Size(5, 5), 0);
    Texture2D tex4 = new Texture2D(img2Mat.cols(), img2Mat.rows(), TextureFormat.RGBA32, false);
    //Utils.matToTexture2D(img2Mat, tex4);
    //bytes = tex4.EncodeToJPG();
    //File.WriteAllBytes("imagem2_tratamento_gaussian.png", bytes);

    Imgproc.threshold(img2Mat, img2Mat, 240, 255, Imgproc.THRESH_BINARY);
    tex4 = new Texture2D(img2Mat.cols(), img2Mat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(img2Mat, tex4);
    bytes = tex4.EncodeToJPG();
    File.WriteAllBytes("imagem2_tratamento_threshold.png", bytes);

    ORB detector = ORB.create();
    ORB extractor = ORB.create();

    MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
    Mat descriptors1 = new Mat();
    detector.detect(img1Mat, keypoints1);
    extractor.compute(img1Mat, keypoints1, descriptors1);

    MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
    Mat descriptors2 = new Mat();
    detector.detect(img2Mat, keypoints2);
    extractor.compute(img2Mat, keypoints2, descriptors2);

    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(descriptors1, descriptors2, matches);

    List<MatOfDMatch> lista = new List<MatOfDMatch>();
    lista.Add(matches);
    matcher.knnMatch(descriptors1, descriptors2, lista, 2);

    // Count matches that pass Lowe's ratio test
    long total = 0;
    foreach (MatOfDMatch item in lista)
    {
        List<DMatch> pair = item.toList();
        if (pair.Count >= 2 && pair[0].distance < 0.75 * pair[1].distance)
        {
            total++;
        }
    }

    // Use the smaller keypoint count as the reference
    // (rows() is the number of keypoints; elemSize() would only give the byte size of one element)
    long number_keypoints = 0;
    if (keypoints1.rows() <= keypoints2.rows())
    {
        number_keypoints = keypoints1.rows();
    }
    else
    {
        number_keypoints = keypoints2.rows();
    }

    // Compute the percentage in floating point to avoid integer truncation
    double percent = number_keypoints > 0 ? 100.0 * total / number_keypoints : 0;
    Debug.Log(percent);
    return percent >= 70;
}
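The similarity score used above can be factored into a small reusable helper. This is a hedged sketch only, assuming the OpenCV for Unity types used throughout this page; the method name MatchPercentage is made up for illustration.

// Hypothetical helper (name and structure assumed, not from the original code):
// returns the share of knn matches that pass Lowe's ratio test, in percent.
static double MatchPercentage(Mat descriptors1, Mat descriptors2, MatOfKeyPoint kp1, MatOfKeyPoint kp2)
{
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    List<MatOfDMatch> knn = new List<MatOfDMatch>();
    matcher.knnMatch(descriptors1, descriptors2, knn, 2);

    long good = 0;
    foreach (MatOfDMatch m in knn)
    {
        DMatch[] pair = m.toArray();
        if (pair.Length >= 2 && pair[0].distance < 0.75f * pair[1].distance)
            good++;
    }

    // Normalize by the smaller keypoint count to get a percentage.
    long reference = System.Math.Min(kp1.rows(), kp2.rows());
    return reference > 0 ? 100.0 * good / reference : 0.0;
}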
void Orb()
{
    p1Mat = Imgcodecs.imread(Application.dataPath + "/Textures/1.jpg", 1);
    p2Mat = Imgcodecs.imread(Application.dataPath + "/Textures/3.jpg", 1);
    Imgproc.cvtColor(p1Mat, p1Mat, Imgproc.COLOR_BGR2RGB);
    Imgproc.cvtColor(p2Mat, p2Mat, Imgproc.COLOR_BGR2RGB);
    Imgproc.resize(p2Mat, p2Mat, new Size(p1Mat.width(), p1Mat.height()));
    Debug.Log(p2Mat);

    /*
     * // Affine transform (matrix rotation)
     * float angle = UnityEngine.Random.Range(0, 360), scale = 1.0f;
     * Point center = new Point(img2Mat.cols() * 0.5f, img2Mat.rows() * 0.5f);
     *
     * Mat affine_matrix = Imgproc.getRotationMatrix2D(center, angle, scale);
     * Imgproc.warpAffine(img1Mat, img2Mat, affine_matrix, img2Mat.size());
     *
     * Texture2D texture = new Texture2D(img2Mat.cols(), img2Mat.rows());
     * Utils.matToTexture2D(img2Mat, texture);
     * outputRawImage.texture = texture;
     */

    ORB detector = ORB.create();
    ORB extractor = ORB.create();

    // Extract keypoints from image 1
    MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
    Mat descriptors1 = new Mat();
    detector.detect(p1Mat, keypoints1);
    extractor.compute(p1Mat, keypoints1, descriptors1);

    // Extract keypoints from image 2
    MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
    Mat descriptors2 = new Mat();
    detector.detect(p2Mat, keypoints2);
    extractor.compute(p2Mat, keypoints2, descriptors2);

    // First matching pass (very dense)
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(descriptors1, descriptors2, matches);

    // Filtering (unofficial)
    // Compute the max/min descriptor distances
    double max_dist = 0;
    double min_dist = 15;
    // The distance threshold controls which features are kept.
    // (Set to 10, only the two ears ended up matching...)
    // (Set to 15, the tail started matching as well...)

    // Two containers for the filtered samples
    List<DMatch> matchesArray = matches.toList(); // one extra conversion step with the Unity version of the API
    //Debug.Log(matchesArray.Count); //500
    List<DMatch> goodmatchesArray = new List<DMatch>();
    //Debug.Log(img1Mat.rows()); //512

    for (int i = 0; i < matchesArray.Count; i++)
    {
        Debug.Log("[" + i + "]" + matchesArray[i].distance);
        if (matchesArray[i].distance > max_dist)
        {
            //max_dist = matchesArray[i].distance;
        }
        if (matchesArray[i].distance < min_dist)
        {
            min_dist = matchesArray[i].distance;
        }
    }
    //Debug.Log("The max distance is: " + max_dist);
    Debug.Log("The min distance is: " + min_dist);

    for (int i = 0; i < matchesArray.Count; i++)
    {
        if (matchesArray[i].distance < 2 * min_dist)
        {
            goodmatchesArray.Add(matchesArray[i]);
        }
    }
    MatOfDMatch newMatches = new MatOfDMatch();
    newMatches.fromList(goodmatchesArray);
    Debug.Log(newMatches.toList().Count); // matches that survive the second filtering

    // Draw the result of the second filtering
    dstMat = new Mat();
    Features2d.drawMatches(p1Mat, keypoints1, p2Mat, keypoints2, newMatches, dstMat);

    Texture2D t2d = new Texture2D(dstMat.width(), dstMat.height());
    Utils.matToTexture2D(dstMat, t2d);
    Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);
    m_dstImage.sprite = sp;
    m_dstImage.preserveAspect = true;
}
public ImageString MatchFeatures(string base64image, List<string> base64imageList)
{
    List<MatOfDMatch> winnerMatches = new List<MatOfDMatch>();
    MatOfKeyPoint winnerKeyPoints = new MatOfKeyPoint();
    Mat winnerImage = new Mat();
    int winnerIndex = -1;
    int winnerValue = 0;

    Texture2D imgTexture = base64ImageToTexture(base64image);
    List<Texture2D> imgTextures = new List<Texture2D>();
    for (int i = 0; i < base64imageList.Count; i++)
    {
        imgTextures.Add(base64ImageToTexture(base64imageList[i]));
    }

    //Create Mat from texture
    Mat img1Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, img1Mat);

    MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
    Mat descriptors1 = new Mat();

    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    //Detect keypoints and compute descriptors from photo.
    detector.detect(img1Mat, keypoints1);
    extractor.compute(img1Mat, keypoints1, descriptors1);
    Debug.Log("Image features: " + descriptors1.rows());

    if (descriptors1.rows() < 10)
    {
        Debug.Log("ARRRRRRGH there are not many descriptors in my original image");
        return new ImageString(base64image, winnerIndex);
    }

    //Run through each image in list
    for (int i = 0; i < imgTextures.Count; i++)
    {
        Texture2D imgTexture2 = imgTextures[i];

        //Create Mat from texture
        Mat img2Mat = new Mat(imgTexture2.height, imgTexture2.width, CvType.CV_8UC3);
        Utils.texture2DToMat(imgTexture2, img2Mat);

        //Find keypoints and descriptors from image in list
        MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
        Mat descriptors2 = new Mat();
        detector.detect(img2Mat, keypoints2);
        extractor.compute(img2Mat, keypoints2, descriptors2);

        //Match photo with image from list
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
        Debug.Log("Image2 features: " + descriptors2.rows());
        if (descriptors2.rows() < 10)
        {
            Debug.Log("ARRRRRRGH there are not many descriptors in my test image: " + i);
            continue;
        }

        List<MatOfDMatch> matchList = new List<MatOfDMatch>();
        matcher.knnMatch(descriptors1, descriptors2, matchList, 2);

        //Find the good matches and put them in a list
        List<MatOfDMatch> good = new List<MatOfDMatch>();
        foreach (MatOfDMatch match in matchList)
        {
            DMatch[] arrayDmatch = match.toArray();
            if (arrayDmatch[0].distance < 0.7f * arrayDmatch[1].distance)
            {
                good.Add(match);
            }
        }

        //Find the best match image based on the good lists
        if (good.Count > winnerThreshold && good.Count > winnerValue)
        {
            winnerImage = img2Mat;
            winnerMatches = good;
            winnerKeyPoints = keypoints2;
            winnerIndex = i;
            winnerValue = good.Count;
        }
    }

    Debug.Log("The winner is image: " + winnerIndex + " with a value of: " + winnerValue);

    //If no winner just return the original image
    if (winnerIndex == -1)
    {
        Debug.Log("No winner");
        return new ImageString(base64image, winnerIndex);
    }

    //Find the matching keypoints from the winner list.
    MatOfPoint2f queryPoints = new MatOfPoint2f();
    MatOfPoint2f matchPoints = new MatOfPoint2f();
    List<Point> queryPointsList = new List<Point>();
    List<Point> matchPointsList = new List<Point>();
    foreach (MatOfDMatch match in winnerMatches)
    {
        DMatch[] arrayDmatch = match.toArray();
        queryPointsList.Add(keypoints1.toList()[arrayDmatch[0].queryIdx].pt);
        matchPointsList.Add(winnerKeyPoints.toList()[arrayDmatch[0].trainIdx].pt);
    }
    queryPoints.fromList(queryPointsList);
    matchPoints.fromList(matchPointsList);

    //Calculate the homography of the best matching image
    Mat homography = Calib3d.findHomography(queryPoints, matchPoints, Calib3d.RANSAC, 5.0);
    Mat resultImg = new Mat();
    Imgproc.warpPerspective(img1Mat, resultImg, homography, winnerImage.size());

    //Show image
    Texture2D texture = new Texture2D(winnerImage.cols(), winnerImage.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(resultImg, texture);

    return new ImageString(Convert.ToBase64String(texture.EncodeToPNG()), winnerIndex);
}
public List<ImageObject> MatchFeatures(string base64image, List<string> base64imageList)
{
    ImageObject myImage = new ImageObject();
    ImageObject winnerImage = new ImageObject();
    List<ImageObject> returnImageList = new List<ImageObject>();

    Texture2D imgTexture = base64ImageToTexture(base64image);
    List<Texture2D> imgTextures = new List<Texture2D>();
    for (int i = 0; i < base64imageList.Count; i++)
    {
        imgTextures.Add(base64ImageToTexture(base64imageList[i]));
    }

    //Create Mat from texture
    Mat img1Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, img1Mat);

    MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
    Mat descriptors1 = new Mat();

    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    //Detect keypoints and compute descriptors from photo.
    detector.detect(img1Mat, keypoints1);
    extractor.compute(img1Mat, keypoints1, descriptors1);
    //Debug.Log("Image features: " + descriptors1.rows());

    myImage.image = base64image;
    myImage.keyPoints = keypoints1;
    myImage.imageMat = img1Mat;

    if (descriptors1.rows() < 10)
    {
        Debug.Log("ARRRRRRGH there are not many descriptors in my original image");
        //No winner as there are too few descriptors.
        return returnImageList;
    }

    //Run through each image in list
    //-------------------------------------------------------------
    for (int i = 0; i < imgTextures.Count; i++)
    {
        Texture2D imgTexture2 = imgTextures[i];

        //Create Mat from texture
        Mat img2Mat = new Mat(imgTexture2.height, imgTexture2.width, CvType.CV_8UC3);
        Utils.texture2DToMat(imgTexture2, img2Mat);

        //Find keypoints and descriptors from image in list
        MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
        Mat descriptors2 = new Mat();
        detector.detect(img2Mat, keypoints2);
        extractor.compute(img2Mat, keypoints2, descriptors2);

        //Match photo with image from list
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
        //Debug.Log("Image2 features: " + descriptors2.rows());
        if (descriptors2.rows() < 10)
        {
            Debug.Log("ARRRRRRGH there are not many descriptors in my test image: " + i);
            continue;
        }

        List<MatOfDMatch> matchList = new List<MatOfDMatch>();
        matcher.knnMatch(descriptors1, descriptors2, matchList, 2);

        //Find the good matches and put them in a list
        List<MatOfDMatch> good = new List<MatOfDMatch>();
        foreach (MatOfDMatch match in matchList)
        {
            DMatch[] arrayDmatch = match.toArray();
            if (arrayDmatch[0].distance < 0.7f * arrayDmatch[1].distance)
            {
                good.Add(match);
            }
        }

        //Find the best match image based on the good lists
        if (good.Count > winnerThreshold && good.Count > winnerImage.value)
        {
            winnerImage.index = i;
            winnerImage.imageMat = img2Mat;
            winnerImage.keyPoints = keypoints2;
            winnerImage.value = good.Count;
            winnerImage.matches = good;
        }
    }
    // Run through done
    //-------------------------------------------------------------

    Debug.Log("The winner is image: " + winnerImage.index + " with a value of: " + winnerImage.value);

    //If no winner just return the original image
    if (winnerImage.index == -1)
    {
        Debug.Log("No winner");
        return returnImageList;
    }

    //Copy the winning Mat into a texture before encoding it to base64
    Texture2D imageTexture = new Texture2D(winnerImage.imageMat.cols(), winnerImage.imageMat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(winnerImage.imageMat, imageTexture);
    winnerImage.image = Convert.ToBase64String(imageTexture.EncodeToPNG());

    returnImageList.Add(myImage);
    returnImageList.Add(winnerImage);
    return returnImageList;
}