// Use this for initialization.
// Loads the "lena" sample, builds a rotated copy, matches ORB features
// between the two images and shows the match visualisation on this object.
void Start()
{
    // Load the demo image once; it is the reference and the rotation source.
    Texture2D sourceTexture = Resources.Load("lena") as Texture2D;

    Mat referenceMat = new Mat(sourceTexture.height, sourceTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(sourceTexture, referenceMat);
    Debug.Log("img1Mat dst ToString " + referenceMat.ToString());

    Mat rotatedMat = new Mat(sourceTexture.height, sourceTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(sourceTexture, rotatedMat);
    Debug.Log("img2Mat dst ToString " + rotatedMat.ToString());

    // Warp the second copy by a random rotation about its centre.
    float rotationDegrees = UnityEngine.Random.Range(0, 360), uniformScale = 1.0f;
    Point pivot = new Point(rotatedMat.cols() * 0.5f, rotatedMat.rows() * 0.5f);
    Mat rotation = Imgproc.getRotationMatrix2D(pivot, rotationDegrees, uniformScale);
    Imgproc.warpAffine(referenceMat, rotatedMat, rotation, rotatedMat.size());

    // Detect ORB keypoints and compute descriptors for both images.
    FeatureDetector orbDetector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor orbExtractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    MatOfKeyPoint referenceKeypoints = new MatOfKeyPoint();
    Mat referenceDescriptors = new Mat();
    orbDetector.detect(referenceMat, referenceKeypoints);
    orbExtractor.compute(referenceMat, referenceKeypoints, referenceDescriptors);

    MatOfKeyPoint rotatedKeypoints = new MatOfKeyPoint();
    Mat rotatedDescriptors = new Mat();
    orbDetector.detect(rotatedMat, rotatedKeypoints);
    orbExtractor.compute(rotatedMat, rotatedKeypoints, rotatedDescriptors);

    // Brute-force Hamming matching between the two descriptor sets.
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(referenceDescriptors, rotatedDescriptors, matches);

    // Render the side-by-side match visualisation to this object's material.
    Mat resultMat = new Mat();
    Features2d.drawMatches(referenceMat, referenceKeypoints, rotatedMat, rotatedKeypoints, matches, resultMat);

    Texture2D outputTexture = new Texture2D(resultMat.cols(), resultMat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(resultMat, outputTexture);
    gameObject.GetComponent<Renderer>().material.mainTexture = outputTexture;
}
// Use this for initialization.
// Runs SimpleBlob detection on the "detect_blob" sample image and displays
// the annotated result on this object's material.
void Start()
{
    // Load the sample image into a single-channel Mat.
    Texture2D sourceTexture = Resources.Load("detect_blob") as Texture2D;
    Mat grayMat = new Mat(sourceTexture.height, sourceTexture.width, CvType.CV_8UC1);
    Utils.texture2DToMat(sourceTexture, grayMat);
    Debug.Log("imgMat dst ToString " + grayMat.ToString());

    // Detect blobs using parameters loaded from blobparams.yml.
    Mat annotatedMat = new Mat();
    FeatureDetector simpleBlobDetector = FeatureDetector.create(FeatureDetector.SIMPLEBLOB);
    simpleBlobDetector.read(Utils.getFilePath("blobparams.yml"));

    MatOfKeyPoint detectedBlobs = new MatOfKeyPoint();
    simpleBlobDetector.detect(grayMat, detectedBlobs);
    Features2d.drawKeypoints(grayMat, detectedBlobs, annotatedMat);

    // Push the annotated result to this object's material.
    Texture2D outputTexture = new Texture2D(annotatedMat.cols(), annotatedMat.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(annotatedMat, outputTexture);
    gameObject.GetComponent<Renderer>().material.mainTexture = outputTexture;
}
// Detects blobs in the current gray frame and rebuilds the circle list,
// converting each blob's image coordinates to screen and world coordinates.
void UpdateCircles()
{
    // Detect blobs.
    blobDetector.detect(grayMat, blobs);

    // Previous frame's circles are discarded and recomputed from scratch.
    circles.Clear();

    foreach (KeyPoint blob in blobs.toArray())
    {
        // Image coordinates -> screen coordinates.
        Point imagePoint = blob.pt;
        Vector2 screenPosition = ConvertToScreenPosition(
            (float)imagePoint.x, (float)imagePoint.y);
        float screenDiameter = blob.size * screenPixelsPerImagePixel;

        // Screen coordinates -> world coordinates based on raycasting.
        Vector3 worldPosition = ConvertToWorldPosition(screenPosition);

        circles.Add(new Circle(screenPosition, screenDiameter, worldPosition));
    }
}
// Update is called once per frame.
// Detects ORB keypoints in the given frame, draws a randomly coloured dot at
// each one, and pushes the annotated frame to the AR camera screen texture.
public void UpdateScreen(Mat mat)
{
    using (MatOfKeyPoint keypoints = new MatOfKeyPoint())
    using (Mat descriptors = new Mat())
    {
        detector.detect(mat, keypoints);
        //extractor.compute(mat, keypoints, descriptors);
        var Points = keypoints.toArray();
        foreach (KeyPoint kp in Points)
        {
            // BUGFIX: the original computed "(int)kp.pt.x / mat.width() * 250"
            // — integer division that is always 0 because x < width. Cast the
            // floating-point result instead so the colour components actually
            // vary with the keypoint's position in the frame.
            int a = (int)(kp.pt.x / mat.width() * 250);
            int b = (int)(kp.pt.y / mat.height() * 250);

            // Pick one of three random colour layouts for this keypoint.
            Scalar color = new Scalar(255, b, a, 100);
            switch (Random.Range(0, 3))
            {
                case 0: color = new Scalar(255, a, b, 100); break;
                case 1: color = new Scalar(a, 255, b, 100); break;
                case 2: color = new Scalar(a, b, 255, 100); break;
            }
            Imgproc.circle(mat, kp.pt, 4, color, -1);
        }

        text = string.Format("PointFeature Count : {0}.", Points.Length);

        // Replace the cached screen texture with the annotated frame.
        DestroyImmediate(tex);
        tex = new Texture2D(ARCameraManager.Instance.Width, ARCameraManager.Instance.Height);
        OpenCVForUnity.Utils.matToTexture2D(mat, tex);
        ARCameraManager.Instance.UpdateScreenTexture(tex);
    }
}
// Feature matching method #3 via ORB (works but inefficient — abandoned).
// Matches ORB features between the resource image `targetName` and the live
// frame `RGB`, estimates a homography, and draws the projected target outline
// onto `cameraFeed`. Returns false when the input is null, there are too few
// good matches for a homography, or the projected region is degenerate.
public bool descriptorsORB_Old(Mat RGB, Mat cameraFeed, string targetName)
{
    if (RGB == null)
    {
        Debug.Log("RGB Mat is Null");
        return (false);
    }

    // Copy the incoming RGB frame so we can draw on it freely.
    Mat SrcMat = new Mat();
    RGB.copyTo(SrcMat);

    // Load the match template from Resources and convert it to a Mat.
    Texture2D imgTexture = Resources.Load(targetName) as Texture2D;
    Mat img1Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, img1Mat);

    // Create the ORB detector / extractor pair.
    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
    Mat descriptors1 = new Mat();
    MatOfKeyPoint keypointsSrc = new MatOfKeyPoint();
    Mat descriptorsSrc = new Mat();

    // Features of the template image.
    detector.detect(img1Mat, keypoints1);
    extractor.compute(img1Mat, keypoints1, descriptors1);
    // Features of the live frame.
    detector.detect(SrcMat, keypointsSrc);
    extractor.compute(SrcMat, keypointsSrc, descriptorsSrc);

    // Brute-force Hamming matching of both descriptor sets.
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(descriptors1, descriptorsSrc, matches);
    DMatch[] arrayDmatch = matches.toArray();

    //-- Quick calculation of max and min distances between keypoints
    double max_dist = 0;
    double min_dist = 100;
    for (int i = 0; i < matches.rows(); i++)
    {
        double dist = arrayDmatch[i].distance;
        if (dist < min_dist) { min_dist = dist; }
        if (dist > max_dist) { max_dist = dist; }
    }
    Debug.Log("Max dist :" + max_dist);
    Debug.Log("Min dist :" + min_dist);

    // Keep only the good matches. NOTE(review): the distance filter below was
    // commented out in the original, so this list stays empty and the
    // homography guard further down always fires.
    List<DMatch> matchesGoodList = new List<DMatch>();
    for (int i = 0; i < matches.rows(); i++)
    {
        //if (arrayDmatch[i].distance < RateDist.value * min_dist)
        //{
        //    matchesGoodList.Add(arrayDmatch[i]);
        //}
    }
    MatOfDMatch matchesGood = new MatOfDMatch();
    matchesGood.fromList(matchesGoodList);

    //Draw Keypoints
    Features2d.drawKeypoints(SrcMat, keypointsSrc, SrcMat);

    // BUGFIX: findHomography with RANSAC needs at least 4 point pairs; the
    // original passed an empty/undersized point set straight into OpenCV,
    // which aborts. Bail out gracefully instead.
    if (matchesGoodList.Count < 4)
    {
        Debug.Log("Not enough good matches for homography");
        SrcMat.copyTo(cameraFeed);
        keypoints1.release();
        img1Mat.release();
        SrcMat.release();
        return (false);
    }

    // Collect the matched point pairs. PERF: toArray() is hoisted out of the
    // loop — the original converted the whole keypoint Mat on every
    // iteration, making this O(n^2).
    KeyPoint[] keypoints1Array = keypoints1.toArray();
    KeyPoint[] keypointsSrcArray = keypointsSrc.toArray();
    List<Point> P1 = new List<Point>();
    List<Point> pSrc = new List<Point>();
    Debug.Log("MatchCount" + matchesGoodList.Count);
    for (int i = 0; i < matchesGoodList.Count; i++)
    {
        P1.Add(new Point(keypoints1Array[matchesGoodList[i].queryIdx].pt.x,
                         keypoints1Array[matchesGoodList[i].queryIdx].pt.y));
        pSrc.Add(new Point(keypointsSrcArray[matchesGoodList[i].trainIdx].pt.x,
                           keypointsSrcArray[matchesGoodList[i].trainIdx].pt.y));
    }

    MatOfPoint2f p2fTarget = new MatOfPoint2f(P1.ToArray());
    MatOfPoint2f p2fSrc = new MatOfPoint2f(pSrc.ToArray());
    Mat matrixH = Calib3d.findHomography(p2fTarget, p2fSrc, Calib3d.RANSAC, 3);

    // Project the template's corner rectangle into the frame.
    List<Point> srcPointCorners = new List<Point>();
    srcPointCorners.Add(new Point(0, 0));
    srcPointCorners.Add(new Point(img1Mat.width(), 0));
    srcPointCorners.Add(new Point(img1Mat.width(), img1Mat.height()));
    srcPointCorners.Add(new Point(0, img1Mat.height()));
    Mat originalRect = Converters.vector_Point2f_to_Mat(srcPointCorners);

    List<Point> srcPointCornersEnd = new List<Point>();
    srcPointCornersEnd.Add(new Point(0, img1Mat.height()));
    srcPointCornersEnd.Add(new Point(0, 0));
    srcPointCornersEnd.Add(new Point(img1Mat.width(), 0));
    srcPointCornersEnd.Add(new Point(img1Mat.width(), img1Mat.height()));
    Mat changeRect = Converters.vector_Point2f_to_Mat(srcPointCornersEnd);

    Core.perspectiveTransform(originalRect, changeRect, matrixH);
    List<Point> srcPointCornersSave = new List<Point>();
    Converters.Mat_to_vector_Point(changeRect, srcPointCornersSave);

    // Reject degenerate (too small) projections.
    if ((srcPointCornersSave[2].x - srcPointCornersSave[0].x) < 5 || (srcPointCornersSave[2].y - srcPointCornersSave[0].y) < 5)
    {
        Debug.Log("Match Out Put image is to small");
        SrcMat.copyTo(cameraFeed);
        SrcMat.release();
        Imgproc.putText(cameraFeed, "X-S", new Point(10, 50), 0, 1, new Scalar(255, 255, 255), 2);
        return (false);
    }

    // Draw the projected outline onto the frame.
    Imgproc.line(SrcMat, srcPointCornersSave[0], srcPointCornersSave[1], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[1], srcPointCornersSave[2], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[2], srcPointCornersSave[3], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[3], srcPointCornersSave[0], new Scalar(255, 0, 0), 3);

    SrcMat.copyTo(cameraFeed);
    keypoints1.release();
    img1Mat.release();
    SrcMat.release();
    return (true);
}
//============================================================
//============ Functions below are no longer in use ==========
//============================================================
// Feature matching method #3 (ORB feature comparison).
// Matches ORB features between the resource image `targetName` and the live
// frame `RGB`, estimates a homography, and draws the projected target outline
// plus its centre point onto `cameraFeed`.
public bool descriptorsORB(Mat RGB, Mat cameraFeed, string targetName)
{
    if (RGB == null)
    {
        Debug.Log("RGB Mat is Null");
        return (false);
    }

    // Copy the incoming RGB frame so we can draw on it freely.
    Mat SrcMat = new Mat();
    RGB.copyTo(SrcMat);

    // Load the match template from Resources and convert it to a Mat.
    Texture2D imgTexture = Resources.Load(targetName) as Texture2D;
    Mat targetMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, targetMat);

    // Create the ORB detector / extractor pair.
    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    MatOfKeyPoint keypointsTarget = new MatOfKeyPoint();
    Mat descriptorsTarget = new Mat();
    MatOfKeyPoint keypointsSrc = new MatOfKeyPoint();
    Mat descriptorsSrc = new Mat();

    // Features of the template image.
    detector.detect(targetMat, keypointsTarget);
    extractor.compute(targetMat, keypointsTarget, descriptorsTarget);
    // Features of the live frame.
    detector.detect(SrcMat, keypointsSrc);
    extractor.compute(SrcMat, keypointsSrc, descriptorsSrc);

    // Brute-force Hamming matching of both descriptor sets.
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(descriptorsTarget, descriptorsSrc, matches);
    DMatch[] arrayDmatch = matches.toArray();

    //-- Quick calculation of max and min distances between keypoints
    double max_dist = 0;
    double min_dist = 100;
    for (int i = 0; i < matches.rows(); i++)
    {
        double dist = arrayDmatch[i].distance;
        if (dist < min_dist) { min_dist = dist; }
        if (dist > max_dist) { max_dist = dist; }
    }
    Debug.Log("Max dist :" + max_dist);
    Debug.Log("Min dist :" + min_dist);

    // NOTE(review): nothing ever populates this list in the original — it is
    // always empty, which drove findHomography into an OpenCV abort below.
    List<DMatch> matchesGoodList = new List<DMatch>();
    MatOfDMatch matchesGood = new MatOfDMatch();
    matchesGood.fromList(matchesGoodList);

    //Draw Keypoints
    Features2d.drawKeypoints(SrcMat, keypointsSrc, SrcMat);

    // BUGFIX: findHomography with RANSAC needs at least 4 point pairs; bail
    // out gracefully instead of passing an undersized point set to OpenCV.
    if (matchesGoodList.Count < 4)
    {
        Debug.Log("Not enough good matches for homography");
        SrcMat.copyTo(cameraFeed);
        keypointsTarget.release();
        targetMat.release();
        SrcMat.release();
        return (false);
    }

    // Collect the matched point pairs. PERF: toArray() hoisted out of the
    // loop — the original re-converted the keypoint Mats on every iteration.
    KeyPoint[] keypointsTargetArray = keypointsTarget.toArray();
    KeyPoint[] keypointsSrcArray = keypointsSrc.toArray();
    List<Point> pTarget = new List<Point>();
    List<Point> pSrc = new List<Point>();
    Debug.Log("MatchCount" + matchesGoodList.Count);
    for (int i = 0; i < matchesGoodList.Count; i++)
    {
        pTarget.Add(new Point(keypointsTargetArray[matchesGoodList[i].queryIdx].pt.x,
                              keypointsTargetArray[matchesGoodList[i].queryIdx].pt.y));
        pSrc.Add(new Point(keypointsSrcArray[matchesGoodList[i].trainIdx].pt.x,
                           keypointsSrcArray[matchesGoodList[i].trainIdx].pt.y));
    }

    MatOfPoint2f p2fTarget = new MatOfPoint2f(pTarget.ToArray());
    MatOfPoint2f p2fSrc = new MatOfPoint2f(pSrc.ToArray());
    Mat matrixH = Calib3d.findHomography(p2fTarget, p2fSrc, Calib3d.RANSAC, 3);

    // Project the template's corner rectangle into the frame.
    List<Point> srcPointCorners = new List<Point>();
    srcPointCorners.Add(new Point(0, 0));
    srcPointCorners.Add(new Point(targetMat.width(), 0));
    srcPointCorners.Add(new Point(targetMat.width(), targetMat.height()));
    srcPointCorners.Add(new Point(0, targetMat.height()));
    Mat originalRect = Converters.vector_Point2f_to_Mat(srcPointCorners);

    List<Point> srcPointCornersEnd = new List<Point>();
    srcPointCornersEnd.Add(new Point(0, targetMat.height()));
    srcPointCornersEnd.Add(new Point(0, 0));
    srcPointCornersEnd.Add(new Point(targetMat.width(), 0));
    srcPointCornersEnd.Add(new Point(targetMat.width(), targetMat.height()));
    Mat changeRect = Converters.vector_Point2f_to_Mat(srcPointCornersEnd);

    Core.perspectiveTransform(originalRect, changeRect, matrixH);
    List<Point> srcPointCornersSave = new List<Point>();
    Converters.Mat_to_vector_Point(changeRect, srcPointCornersSave);

    // Reject degenerate (too small) projections.
    if ((srcPointCornersSave[2].x - srcPointCornersSave[0].x) < 5 || (srcPointCornersSave[2].y - srcPointCornersSave[0].y) < 5)
    {
        Debug.Log("Match Out Put image is to small");
        SrcMat.copyTo(cameraFeed);
        SrcMat.release();
        Imgproc.putText(cameraFeed, targetName, srcPointCornersSave[0], 0, 1, new Scalar(255, 255, 255), 2);
        return (false);
    }

    // Draw the projected outline onto the frame.
    Imgproc.line(SrcMat, srcPointCornersSave[0], srcPointCornersSave[1], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[1], srcPointCornersSave[2], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[2], srcPointCornersSave[3], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[3], srcPointCornersSave[0], new Scalar(255, 0, 0), 3);

    // Mark the centre of the projected region.
    Point middlePoint = new Point((srcPointCornersSave[0].x + srcPointCornersSave[2].x) / 2,
                                  (srcPointCornersSave[0].y + srcPointCornersSave[2].y) / 2);
    Imgproc.line(SrcMat, middlePoint, middlePoint, new Scalar(0, 0, 255), 10);

    SrcMat.copyTo(cameraFeed);
    keypointsTarget.release();
    targetMat.release();
    SrcMat.release();
    return (true);
}
/// <summary>
/// ORB the point feature.
/// Detects ORB keypoints in <paramref name="srcMat"/> and computes their
/// descriptors. (Original note, translated: "Get feature points with ORB".)
/// </summary>
/// <param name="srcMat">Source mat.</param>
/// <param name="dstKeyPoints">Dst keypoints — filled by the detector.</param>
/// <param name="dstDescriptors">Dst descriptors — filled by the extractor.</param>
// NOTE(review): relies on static ORBDetector / ORBExtractor declared elsewhere
// in this file; thread-safety of sharing them is not established here — confirm.
public static void ORBPointFeature(this Mat srcMat, MatOfKeyPoint dstKeyPoints, Mat dstDescriptors)
{
    ORBDetector.detect(srcMat, dstKeyPoints);
    ORBExtractor.compute(srcMat, dstKeyPoints, dstDescriptors);
}
// Update is called once per frame.
// Detects ORB features in the frame, cross-matches them against the previous
// frame's descriptors (mutual best match in both directions), and draws a
// coloured trail through each feature's historical positions.
public void UpdateScreen(Mat mat)
{
    using (MatOfKeyPoint keypoints = new MatOfKeyPoint())
    using (Mat descriptors = new Mat())
    {
        detector.detect(mat, keypoints);
        extractor.compute(mat, keypoints, descriptors);

        int matchCount = 0;
        var trainPoints = keypoints.toArray();
        var newBuffPointL = new List<List<Point>>();
        var newBuffColorL = new List<Scalar>();

        // Seed this frame's buffers: one single-point trail per keypoint,
        // each with a freshly randomised colour.
        foreach (var keyPoint in trainPoints)
        {
            var points = new List<Point>();
            points.Add(keyPoint.pt);
            newBuffPointL.Add(points);

            Scalar color = new Scalar(255, 225, 225, 100);
            var x = Random.Range(0, 256);
            switch (Random.Range(0, 6))
            {
                case 0: color = new Scalar(255, 0, x, 100); break;
                case 1: color = new Scalar(0, 255, x, 100); break;
                case 2: color = new Scalar(0, x, 255, 100); break;
                case 3: color = new Scalar(225, x, 0, 100); break;
                case 4: color = new Scalar(x, 0, 255, 100); break;
                case 5: color = new Scalar(x, 255, 0, 100); break;
            }
            newBuffColorL.Add(color);
        }

        if (buffPointL.Count > 0)
        {
            using (MatOfDMatch matches = new MatOfDMatch())
            using (MatOfDMatch crossMatches = new MatOfDMatch())
            {
                // Match previous->current and current->previous; keep only
                // mutual (cross-checked) pairs under the distance filter.
                matcher.match(buffDescriptors, descriptors, matches);
                matcher.match(descriptors, buffDescriptors, crossMatches);
                var matchL = matches.toArray();
                var crossMatchL = crossMatches.toArray();

                foreach (DMatch match in matchL)
                {
                    bool mutual = false;
                    foreach (DMatch crossMatch in crossMatchL)
                    {
                        if (match.trainIdx == crossMatch.queryIdx && match.queryIdx == crossMatch.trainIdx)
                        {
                            mutual = true;
                            // PERF: the original kept scanning the entire
                            // cross-match list after a hit (O(n^2) worst case).
                            break;
                        }
                    }
                    if (match.distance > filter)
                    {
                        mutual = false;
                    }
                    if (mutual)
                    {
                        // Extend the matched feature's trail and carry its
                        // colour forward. (Removed from the original: dead
                        // locals `a`/`b` — computed with an always-zero
                        // integer division and never used — and an unused
                        // loop counter `i`.)
                        var trainPoint = trainPoints[match.trainIdx];
                        var queryPoints = buffPointL[match.queryIdx];
                        Scalar color = buffColorL[match.queryIdx];

                        Imgproc.circle(mat, trainPoint.pt, 4, color, -1);
                        Point startPoint = trainPoint.pt;
                        foreach (Point queryPoint in queryPoints)
                        {
                            Imgproc.line(mat, startPoint, queryPoint, color, 2);
                            Imgproc.circle(mat, queryPoint, 4, color, -1);
                            startPoint = queryPoint;
                            newBuffPointL[match.trainIdx].Add(queryPoint);
                        }
                        newBuffColorL[match.trainIdx] = buffColorL[match.queryIdx];
                        matchCount++;
                    }
                }
            }
        }

        // Swap this frame's data into the persistent buffers.
        buffDescriptors.Dispose();
        buffPointL = newBuffPointL;
        buffColorL = newBuffColorL;
        buffDescriptors = descriptors.clone();

        text = string.Format("Matching Count : {0}.", matchCount);
        DestroyImmediate(tex);
        tex = new Texture2D(ARCameraManager.Instance.Width, ARCameraManager.Instance.Height);
        OpenCVForUnity.Utils.matToTexture2D(mat, tex);
        ARCameraManager.Instance.UpdateScreenTexture(tex);
    }
}
// Runs the configured image-processing pipeline on toneMat in place:
// optional inversion, resize, threshold, blob drawing, blur, centroid
// computation, and Canny edge detection; then picks which Mat to display
// and clears the per-frame state.
// NOTE(review): the `moments` / `WeightedCentroid` lists are index-coupled
// to the flag combination (centerPoint fills slot 0, edge fills slot 1, with
// a backfill when centerPoint is off) — fragile; confirm before reordering
// any branch.
public void processFrame()
{
    if (inversion)
    {
        //flip
        Core.bitwise_not(toneMat, toneMat);
    }
    if (resize)
    {
        // Downscale by resizeRatio; undone by the inverse resize at the end.
        Imgproc.resize(toneMat, toneMat, new Size((int)Math.Round(resizeRatio * toneMat.width()), (int)Math.Round(resizeRatio * toneMat.height())));
    }
    //
    if (toneThreshold)
    {
        Imgproc.threshold(toneMat, toneMat, thresholdValue, 255, Imgproc.THRESH_BINARY);
    }
    if (blobs)
    {
        // Detect blobs and draw them directly into the working Mat.
        blobDetector.detect(toneMat, keypoints);
        Features2d.drawKeypoints(toneMat, keypoints, toneMat);
    }
    if (blur)
    {
        Imgproc.blur(toneMat, toneMat, new Size(blurSize, blurSize));
    }
    if (centerPoint)
    {
        // Slot 0: intensity-weighted centroid of the (processed) tone image.
        moments.Add(Imgproc.moments(toneMat, true));
        WeightedCentroid.Add(new Point((int)Math.Round(moments[0].m10 / moments[0].m00), (int)Math.Round(moments[0].m01 / moments[0].m00)));
        Debug.Log("center: " + WeightedCentroid[0].x + ", " + WeightedCentroid[0].y);
    }
    if (edge)
    {
        Imgproc.Canny(toneMat, toneMat, thresholdValue * 0.5, thresholdValue);
        //Imgproc.findContours (toneMat, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE );
        //
        // foreach(MatOfPoint i in contours){
        // Debug.Log ("contour " + i + ": " + i.ToString());
        // }
        //Debug.Log ("contours count: " + contours.Count);
        moments.Add(Imgproc.moments(toneMat, true));
        if (WeightedCentroid.Count == 0)
        {
            // Backfill slot 0 when centerPoint was off, so that the edge
            // centroid always lands in slot 1 (indexed as [1] below).
            moments.Add(Imgproc.moments(toneMat, true));
            WeightedCentroid.Add(new Point(0, 0));
        }
        // Slot 1: centroid of the Canny edge image.
        WeightedCentroid.Add(new Point((int)Math.Round(moments[1].m10 / moments[1].m00), (int)Math.Round(moments[1].m01 / moments[1].m00)));
        if (thresholdValue >= thresholdValueCap && edgeCenterPoint == true)
        {
            Imgproc.ellipse(toneMat, WeightedCentroid [1], new Size(20, 20), 1, 0.1, 360, new Scalar(180), 10);
            Imgproc.putText(toneMat, " Edge center point", WeightedCentroid [1], 0, 1.5, new Scalar(180), 5);
        }
    }
    //draw center
    if (centerPoint)
    {
        Imgproc.ellipse(toneMat, WeightedCentroid [0], new Size(20, 20), 1, 0.1, 360, new Scalar(180), 10);
        Imgproc.putText(toneMat, " Tone center point", WeightedCentroid [0], 0, 1.5, new Scalar(180), 5);
    }
    if (resize)
    {
        // Restore the original size (inverse of the resize at the top).
        Imgproc.resize(toneMat, toneMat, new Size((int)Math.Round((1 / resizeRatio) * toneMat.width()), (int)Math.Round((1 / resizeRatio) * toneMat.height())));
    }
    //assign to display
    if (showProcessing)
    {
        rgbaMat = toneMat;
    }
    else
    {
        rgbaMat = cloneMat;
    }
    // Reset per-frame state for the next call.
    WeightedCentroid.Clear();
    moments.Clear();
    contours.Clear();
    framesDropCount = 0;
}
// Matches the photo (base64image) against each candidate image, picks the
// best-matching candidate via ORB + Lowe's ratio test, and returns the photo
// warped into the winner's frame. Returns the original photo with index -1
// when no candidate matches well enough.
public ImageString MatchFeatures(string base64image, List<string> base64imageList)
{
    List<MatOfDMatch> winnerMatches = new List<MatOfDMatch>();
    MatOfKeyPoint winnerKeyPoints = new MatOfKeyPoint();
    Mat winnerImage = new Mat();
    int winnerIndex = -1;
    int winnerValue = 0;

    Texture2D imgTexture = base64ImageToTexture(base64image);
    List<Texture2D> imgTextures = new List<Texture2D>();
    for (int i = 0; i < base64imageList.Count; i++)
    {
        imgTextures.Add(base64ImageToTexture(base64imageList[i]));
    }

    //Create Mat from texture
    Mat img1Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, img1Mat);

    MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
    Mat descriptors1 = new Mat();
    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    //Detect keypoints and compute descriptors from photo.
    detector.detect(img1Mat, keypoints1);
    extractor.compute(img1Mat, keypoints1, descriptors1);
    Debug.Log("Billede features: " + descriptors1.rows());
    if (descriptors1.rows() < 10)
    {
        Debug.Log("ARRRRRRGH der er ikke mange descripters i mit original-billede");
        return (new ImageString(base64image, winnerIndex));
    }

    // PERF: create the matcher once instead of once per candidate image.
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);

    //Run through each image in list
    for (int i = 0; i < imgTextures.Count; i++)
    {
        Texture2D imgTexture2 = imgTextures[i];

        //Create Mat from texture
        Mat img2Mat = new Mat(imgTexture2.height, imgTexture2.width, CvType.CV_8UC3);
        Utils.texture2DToMat(imgTexture2, img2Mat);

        //Find keypoints and descriptors from image in list
        MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
        Mat descriptors2 = new Mat();
        detector.detect(img2Mat, keypoints2);
        extractor.compute(img2Mat, keypoints2, descriptors2);

        Debug.Log("Billede2 features: " + descriptors2.rows());
        if (descriptors2.rows() < 10)
        {
            Debug.Log("ARRRRRRGH der er ikke mange descripters i mit test billede: " + i);
            continue;
        }

        //Match photo with image from list
        List<MatOfDMatch> matchList = new List<MatOfDMatch>();
        matcher.knnMatch(descriptors1, descriptors2, matchList, 2);

        //Find the good matches and put them ind a list
        List<MatOfDMatch> good = new List<MatOfDMatch>();
        foreach (MatOfDMatch match in matchList)
        {
            DMatch[] arrayDmatch = match.toArray();
            // BUGFIX: knnMatch can return fewer than 2 neighbours per query;
            // guard the Lowe ratio test against an IndexOutOfRangeException.
            if (arrayDmatch.Length >= 2 && arrayDmatch[0].distance < 0.7f * arrayDmatch[1].distance)
            {
                good.Add(match);
            }
        }

        //Find the best match image based on the good lists
        if (good.Count > winnerThreshold && good.Count > winnerValue)
        {
            winnerImage = img2Mat;
            winnerMatches = good;
            winnerKeyPoints = keypoints2;
            winnerIndex = i;
            winnerValue = good.Count;
        }
    }

    Debug.Log("The winner is image: " + winnerIndex + " with a value of: " + winnerValue);

    //If no winner just return the original image
    if (winnerIndex == -1)
    {
        Debug.Log("No winner");
        return (new ImageString(base64image, winnerIndex));
    }
    // BUGFIX: removed a stray Debug.Log("No winner") that executed here
    // unconditionally, even when a winner was found.

    //Find the matching keypoints from the winner list.
    MatOfPoint2f queryPoints = new MatOfPoint2f();
    MatOfPoint2f matchPoints = new MatOfPoint2f();
    List<Point> queryPointsList = new List<Point>();
    List<Point> matchPointsList = new List<Point>();
    // PERF: toList() hoisted out of the loop — the original converted both
    // keypoint Mats on every iteration.
    List<KeyPoint> photoKeyPoints = keypoints1.toList();
    List<KeyPoint> winnerKeyPointList = winnerKeyPoints.toList();
    foreach (MatOfDMatch match in winnerMatches)
    {
        DMatch[] arrayDmatch = match.toArray();
        queryPointsList.Add(photoKeyPoints[arrayDmatch[0].queryIdx].pt);
        matchPointsList.Add(winnerKeyPointList[arrayDmatch[0].trainIdx].pt);
    }
    queryPoints.fromList(queryPointsList);
    matchPoints.fromList(matchPointsList);

    //Calculate the homography of the best matching image
    Mat homography = Calib3d.findHomography(queryPoints, matchPoints, Calib3d.RANSAC, 5.0);
    Mat resultImg = new Mat();
    Imgproc.warpPerspective(img1Mat, resultImg, homography, winnerImage.size());

    //Show image
    Texture2D texture = new Texture2D(winnerImage.cols(), winnerImage.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(resultImg, texture);
    return (new ImageString(Convert.ToBase64String(texture.EncodeToPNG()), winnerIndex));
}
// Matches the photo (base64image) against each candidate image and returns
// [photo, winner] as ImageObjects. Returns an empty list when the photo has
// too few descriptors or no candidate beats the winner threshold.
public List<ImageObject> MatchFeatures(string base64image, List<string> base64imageList)
{
    ImageObject myImage = new ImageObject();
    ImageObject winnerImage = new ImageObject();
    List<ImageObject> returnImageList = new List<ImageObject>();

    Texture2D imgTexture = base64ImageToTexture(base64image);
    List<Texture2D> imgTextures = new List<Texture2D>();
    for (int i = 0; i < base64imageList.Count; i++)
    {
        imgTextures.Add(base64ImageToTexture(base64imageList[i]));
    }

    //Create Mat from texture
    Mat img1Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, img1Mat);

    MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
    Mat descriptors1 = new Mat();
    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    //Detect keypoints and compute descriptors from photo.
    detector.detect(img1Mat, keypoints1);
    extractor.compute(img1Mat, keypoints1, descriptors1);
    //Debug.Log("Billede features: " + descriptors1.rows());

    myImage.image = base64image;
    myImage.keyPoints = keypoints1;
    myImage.imageMat = img1Mat;

    if (descriptors1.rows() < 10)
    {
        Debug.Log("ARRRRRRGH der er ikke mange descripters i mit original-billede");
        //No winner as there is to few descriptors.
        return (returnImageList);
    }

    // PERF: create the matcher once instead of once per candidate image.
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);

    //Run through each image in list
    //-------------------------------------------------------------
    for (int i = 0; i < imgTextures.Count; i++)
    {
        Texture2D imgTexture2 = imgTextures[i];

        //Create Mat from texture
        Mat img2Mat = new Mat(imgTexture2.height, imgTexture2.width, CvType.CV_8UC3);
        Utils.texture2DToMat(imgTexture2, img2Mat);

        //Find keypoints and descriptors from image in list
        MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
        Mat descriptors2 = new Mat();
        detector.detect(img2Mat, keypoints2);
        extractor.compute(img2Mat, keypoints2, descriptors2);

        //Debug.Log("Billede2 features: " + descriptors2.rows());
        if (descriptors2.rows() < 10)
        {
            Debug.Log("ARRRRRRGH der er ikke mange descripters i mit test billede: " + i);
            continue;
        }

        //Match photo with image from list
        List<MatOfDMatch> matchList = new List<MatOfDMatch>();
        matcher.knnMatch(descriptors1, descriptors2, matchList, 2);

        //Find the good matches and put them ind a list
        List<MatOfDMatch> good = new List<MatOfDMatch>();
        foreach (MatOfDMatch match in matchList)
        {
            DMatch[] arrayDmatch = match.toArray();
            // BUGFIX: knnMatch can return fewer than 2 neighbours per query;
            // guard the Lowe ratio test against an IndexOutOfRangeException.
            if (arrayDmatch.Length >= 2 && arrayDmatch[0].distance < 0.7f * arrayDmatch[1].distance)
            {
                good.Add(match);
            }
        }

        //Find the best match image based on the good lists
        if (good.Count > winnerThreshold && good.Count > winnerImage.value)
        {
            winnerImage.index = i;
            winnerImage.imageMat = img2Mat;
            winnerImage.keyPoints = keypoints2;
            winnerImage.value = good.Count;
            winnerImage.matches = good;
        }
    }
    // Run through done
    //-------------------------------------------------------------

    Debug.Log("The winner is image: " + winnerImage.index + " with a value of: " + winnerImage.value);

    //If no winner just return the original image
    if (winnerImage.index == -1)
    {
        Debug.Log("No winner");
        return (returnImageList);
    }

    Texture2D imageTexture = new Texture2D(winnerImage.imageMat.cols(), winnerImage.imageMat.rows(), TextureFormat.RGBA32, false);
    // BUGFIX: the original encoded the freshly created (blank) texture to PNG.
    // Copy the winner's Mat into the texture before encoding, matching the
    // sibling MatchFeatures overload.
    Utils.matToTexture2D(winnerImage.imageMat, imageTexture);
    winnerImage.image = Convert.ToBase64String(imageTexture.EncodeToPNG());

    returnImageList.Add(myImage);
    returnImageList.Add(winnerImage);
    return (returnImageList);
}
// Runs the blob-detection pipeline on the selected source texture (or the
// webcam feed), draws the detected keypoints, and pushes the result to this
// object's material.
private void TryProcessImage(int index)
{
    // Pick the input: a static source texture or the live webcam feed.
    if (UseWebCam == false)
    {
        CurrentTexture = Sources[index];
    }
    else
    {
        CurrentTexture = webCamTexture;
    }

    using (Mat binaryMat = new Mat(CurrentTexture.height, CurrentTexture.width, CvType.CV_8UC1))
    using (FeatureDetector simpleBlobDetector = FeatureDetector.create(FeatureDetector.SIMPLEBLOB))
    using (Mat annotatedMat = new Mat())
    using (MatOfKeyPoint detectedKeypoints = new MatOfKeyPoint())
    {
        // Copy the input texture into a single-channel Mat, using the
        // converter that matches the texture's concrete type.
        if (CurrentTexture is Texture2D)
        {
            Utils.texture2DToMat(CurrentTexture as Texture2D, binaryMat);
        }
        else if (CurrentTexture is WebCamTexture)
        {
            Utils.webCamTextureToMat(CurrentTexture as WebCamTexture, binaryMat);
        }
        else
        {
            Utils.textureToMat(CurrentTexture, binaryMat);
        }
        Debug.Log("imgMat dst ToString " + binaryMat.ToString());

        // Binarise (Otsu) and erode before detecting blobs.
        Imgproc.threshold(binaryMat, binaryMat, 0, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);
        Imgproc.erode(binaryMat, binaryMat, erodeMat, new Point(1, 1), 5);

        simpleBlobDetector.read(Utils.getFilePath("blobparams.yml"));
        simpleBlobDetector.detect(binaryMat, detectedKeypoints);
        Features2d.drawKeypoints(binaryMat, detectedKeypoints, annotatedMat);

        KeyPoint[] keyPointArray = detectedKeypoints.toArray();
        ProcessKeyPoints(keyPointArray, annotatedMat);

        Mat displayMat = annotatedMat;

        // Recreate the cached texture only when its size no longer matches.
        if (texture != null && (texture.width != displayMat.cols() || texture.height != displayMat.rows()))
        {
            DestroyImmediate(texture);
            texture = null;
        }
        if (texture == null)
        {
            texture = new Texture2D(displayMat.cols(), displayMat.rows(), TextureFormat.RGBA32, false);
        }

        Utils.matToTexture2D(displayMat, texture);
        gameObject.GetComponent<Renderer>().material.mainTexture = texture;
    }
}