// Use this for initialization void Start() { Texture2D imgTexture = Resources.Load("detect_blob") as Texture2D; Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC1); Utils.texture2DToMat(imgTexture, imgMat); Debug.Log("imgMat dst ToString " + imgMat.ToString()); Mat outImgMat = new Mat(); FeatureDetector blobDetector = FeatureDetector.create(FeatureDetector.SIMPLEBLOB); blobDetector.read(Utils.getFilePath("blobparams.yml")); MatOfKeyPoint keypoints = new MatOfKeyPoint(); blobDetector.detect(imgMat, keypoints); Features2d.drawKeypoints(imgMat, keypoints, outImgMat); Texture2D texture = new Texture2D(outImgMat.cols(), outImgMat.rows(), TextureFormat.RGBA32, false); Utils.matToTexture2D(outImgMat, texture); gameObject.GetComponent <Renderer> ().material.mainTexture = texture; }
// Use this for initialization void Start() { Texture2D imgTexture = Resources.Load("lena") as Texture2D; Mat img1Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3); Utils.texture2DToMat(imgTexture, img1Mat); Debug.Log("img1Mat dst ToString " + img1Mat.ToString()); Mat img2Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3); Utils.texture2DToMat(imgTexture, img2Mat); Debug.Log("img2Mat dst ToString " + img2Mat.ToString()); float angle = UnityEngine.Random.Range(0, 360), scale = 1.0f; Point center = new Point(img2Mat.cols() * 0.5f, img2Mat.rows() * 0.5f); Mat affine_matrix = Imgproc.getRotationMatrix2D(center, angle, scale); Imgproc.warpAffine(img1Mat, img2Mat, affine_matrix, img2Mat.size()); FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB); DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB); MatOfKeyPoint keypoints1 = new MatOfKeyPoint(); Mat descriptors1 = new Mat(); detector.detect(img1Mat, keypoints1); extractor.compute(img1Mat, keypoints1, descriptors1); MatOfKeyPoint keypoints2 = new MatOfKeyPoint(); Mat descriptors2 = new Mat(); detector.detect(img2Mat, keypoints2); extractor.compute(img2Mat, keypoints2, descriptors2); DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT); MatOfDMatch matches = new MatOfDMatch(); matcher.match(descriptors1, descriptors2, matches); Mat resultImg = new Mat(); Features2d.drawMatches(img1Mat, keypoints1, img2Mat, keypoints2, matches, resultImg); Texture2D texture = new Texture2D(resultImg.cols(), resultImg.rows(), TextureFormat.RGBA32, false); Utils.matToTexture2D(resultImg, texture); gameObject.GetComponent <Renderer> ().material.mainTexture = texture; }
void InitBlobDetector()
{
    // Try to create the blob detector.
    blobDetector = FeatureDetector.create(FeatureDetector.SIMPLEBLOB);
    if (blobDetector == null)
    {
        Debug.LogError("Unable to create blob detector");
        Destroy(this);
        return;
    }

    // The blob detector parameters must be put into a YAML file for Unity.
    string blobDetectorParams = @"%YAML:1.0
thresholdStep: 10.0
minThreshold: 50.0
maxThreshold: 220.0
minRepeatability: 2
minDistBetweenBlobs: 10.0
filterByColor: False
blobColor: 0
filterByArea: True
minArea: 50.0
maxArea: 5000.0
filterByCircularity: True
minCircularity: 0.8
maxCircularity: 3.4028234663852886e+38
filterByInertia: False
minInertiaRatio: 0.1
maxInertiaRatio: 3.4028234663852886e+38
filterByConvexity: False
minConvexity: 0.95
maxConvexity: 3.4028234663852886e+38
";

    // Try to write the blob detector's parameters to a temporary file.
    string path = Application.persistentDataPath + "/blobDetectorParams.yaml";
    File.WriteAllText(path, blobDetectorParams);
    if (!File.Exists(path))
    {
        Debug.LogError("Unable to write blob detector's parameters to " + path);
        Destroy(this);
        return;
    }

    // Read the blob detector's parameters from the temporary file.
    blobDetector.read(path);

    // Delete the temporary file.
    File.Delete(path);
}
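A minimal usage sketch (not part of the original listing) of the detector initialized above; grayMat is an assumed single-channel input frame.

// Hypothetical usage: after InitBlobDetector() has loaded the parameters,
// run the detector on an assumed CV_8UC1 Mat called grayMat.
MatOfKeyPoint blobKeypoints = new MatOfKeyPoint();
blobDetector.detect(grayMat, blobKeypoints);
Debug.Log("Blobs detected: " + blobKeypoints.toArray().Length);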
// Feature colour method three (works, but abandoned because of poor efficiency).
public bool descriptorsORB_Old(Mat RGB, Mat cameraFeed, string targetName)
{
    if (RGB == null)
    {
        Debug.Log("RGB Mat is Null");
        return false;
    }

    // Copy the incoming RGB frame into SrcMat.
    Mat SrcMat = new Mat();
    RGB.copyTo(SrcMat);

    // Load the template image to match against.
    Texture2D imgTexture = Resources.Load(targetName) as Texture2D;
    // Texture2D imgTexture2 = Resources.Load("lenaK") as Texture2D;

    // Convert the Texture2D to a Mat.
    Mat img1Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, img1Mat);

    // Create the ORB feature detector and descriptor extractor.
    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    // Mats that will hold the keypoints and descriptors.
    MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
    Mat descriptors1 = new Mat();
    MatOfKeyPoint keypointsSrc = new MatOfKeyPoint();
    Mat descriptorsSrc = new Mat();

    // Detect features in the template image.
    detector.detect(img1Mat, keypoints1);
    extractor.compute(img1Mat, keypoints1, descriptors1);

    // Detect features in the source frame.
    detector.detect(SrcMat, keypointsSrc);
    extractor.compute(SrcMat, keypointsSrc, descriptorsSrc);

    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    MatOfDMatch matches = new MatOfDMatch();
    matcher.match(descriptors1, descriptorsSrc, matches);

    DMatch[] arrayDmatch = matches.toArray();
    for (int i = arrayDmatch.Length - 1; i >= 0; i--)
    {
        // Debug.Log("match " + i + ": " + arrayDmatch[i].distance);
    }

    // Filter the matches.
    double max_dist = 0;
    double min_dist = 100;

    //-- Quick calculation of max and min distances between keypoints
    double dist;
    for (int i = 0; i < matches.rows(); i++)
    {
        dist = arrayDmatch[i].distance;
        if (dist < min_dist)
        {
            min_dist = dist;
        }
        if (dist > max_dist)
        {
            max_dist = dist;
        }
    }
    Debug.Log("Max dist :" + max_dist);
    Debug.Log("Min dist :" + min_dist);

    // Keep only the good matches.
    // NOTE: the filter below is commented out, so matchesGoodList stays empty;
    // see the sketch after this method for one way to re-enable it.
    List<DMatch> matchesGoodList = new List<DMatch>();
    for (int i = 0; i < matches.rows(); i++)
    {
        //if (arrayDmatch[i].distance < RateDist.value * min_dist)
        //{
        //    //Debug.Log("match " + i + ": " + arrayDmatch[i].distance);
        //    matchesGoodList.Add(arrayDmatch[i]);
        //}
    }
    MatOfDMatch matchesGood = new MatOfDMatch();
    matchesGood.fromList(matchesGoodList);

    // Draw keypoints.
    Features2d.drawKeypoints(SrcMat, keypointsSrc, SrcMat);

    // Mat for the output conversion.
    Mat resultImg = new Mat();
    // Features2d.drawMatches(img1Mat, keypoints1, SrcMat, keypointsSrc, matchesGood, resultImg);

    List<Point> P1 = new List<Point>();
    // List<Point> P2 = new List<Point>();
    List<Point> pSrc = new List<Point>();
    Debug.Log("MatchCount " + matchesGoodList.Count);
    for (int i = 0; i < matchesGoodList.Count; i++)
    {
        P1.Add(new Point(keypoints1.toArray()[matchesGoodList[i].queryIdx].pt.x,
                         keypoints1.toArray()[matchesGoodList[i].queryIdx].pt.y));
        pSrc.Add(new Point(keypointsSrc.toArray()[matchesGoodList[i].trainIdx].pt.x,
                           keypointsSrc.toArray()[matchesGoodList[i].trainIdx].pt.y));
        //Debug.Log("ID = " + matchesGoodList[i].queryIdx);
        //Debug.Log("x,y =" + (int)keypoints1.toArray()[matchesGoodList[i].queryIdx].pt.x + "," + (int)keypoints1.toArray()[matchesGoodList[i].queryIdx].pt.y);
        //Debug.Log("x,y =" + (int)keypoints2.toArray()[matchesGoodList[i].trainIdx].pt.x + "," + (int)keypoints2.toArray()[matchesGoodList[i].trainIdx].pt.y);
    }

    MatOfPoint2f p2fTarget = new MatOfPoint2f(P1.ToArray());
    MatOfPoint2f p2fSrc = new MatOfPoint2f(pSrc.ToArray());
    Mat matrixH = Calib3d.findHomography(p2fTarget, p2fSrc, Calib3d.RANSAC, 3);

    List<Point> srcPointCorners = new List<Point>();
    srcPointCorners.Add(new Point(0, 0));
    srcPointCorners.Add(new Point(img1Mat.width(), 0));
    srcPointCorners.Add(new Point(img1Mat.width(), img1Mat.height()));
    srcPointCorners.Add(new Point(0, img1Mat.height()));
    Mat originalRect = Converters.vector_Point2f_to_Mat(srcPointCorners);

    List<Point> srcPointCornersEnd = new List<Point>();
    srcPointCornersEnd.Add(new Point(0, img1Mat.height()));
    srcPointCornersEnd.Add(new Point(0, 0));
    srcPointCornersEnd.Add(new Point(img1Mat.width(), 0));
    srcPointCornersEnd.Add(new Point(img1Mat.width(), img1Mat.height()));
    Mat changeRect = Converters.vector_Point2f_to_Mat(srcPointCornersEnd);

    // Project the template corners into the source frame.
    Core.perspectiveTransform(originalRect, changeRect, matrixH);
    List<Point> srcPointCornersSave = new List<Point>();
    Converters.Mat_to_vector_Point(changeRect, srcPointCornersSave);

    if ((srcPointCornersSave[2].x - srcPointCornersSave[0].x) < 5 || (srcPointCornersSave[2].y - srcPointCornersSave[0].y) < 5)
    {
        Debug.Log("Match output image is too small");
        SrcMat.copyTo(cameraFeed);
        SrcMat.release();
        Imgproc.putText(cameraFeed, "X-S", new Point(10, 50), 0, 1, new Scalar(255, 255, 255), 2);
        return false;
    }

    // Features2d.drawMatches(img1Mat, keypoints1, SrcMat, keypointsSrc, matchesGood, resultImg);

    // Draw the projected bounding box.
    Imgproc.line(SrcMat, srcPointCornersSave[0], srcPointCornersSave[1], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[1], srcPointCornersSave[2], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[2], srcPointCornersSave[3], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[3], srcPointCornersSave[0], new Scalar(255, 0, 0), 3);

    SrcMat.copyTo(cameraFeed);
    keypoints1.release();
    img1Mat.release();
    SrcMat.release();
    return true;
}
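A hypothetical sketch of re-enabling the commented-out distance filter in descriptorsORB_Old above. RateDist is not defined anywhere in this listing, so a plain constant stands in for RateDist.value, and the factor 3.0 is illustrative; without some such filter, matchesGoodList stays empty and Calib3d.findHomography receives no points. The loop would slot in where the commented-out filter sits.

// Hypothetical replacement for the commented-out filter in descriptorsORB_Old.
// rateDist stands in for the undefined RateDist.value; 3.0 is illustrative.
const double rateDist = 3.0;
for (int i = 0; i < arrayDmatch.Length; i++)
{
    if (arrayDmatch[i].distance < rateDist * min_dist)
    {
        matchesGoodList.Add(arrayDmatch[i]);
    }
}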
//============================================================
//=========== Functions below are no longer used =============
//============================================================

// Feature colour method three (ORB feature matching).
public bool descriptorsORB(Mat RGB, Mat cameraFeed, string targetName)
{
    if (RGB == null)
    {
        Debug.Log("RGB Mat is Null");
        return false;
    }

    // Copy the incoming RGB frame into SrcMat.
    Mat SrcMat = new Mat();
    RGB.copyTo(SrcMat);

    // Load the template image to match against.
    Texture2D imgTexture = Resources.Load(targetName) as Texture2D;

    // Convert the Texture2D to a Mat.
    Mat targetMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, targetMat);

    // Create the ORB feature detector and descriptor extractor.
    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    // Mats that will hold the keypoints and descriptors.
    MatOfKeyPoint keypointsTarget = new MatOfKeyPoint();
    Mat descriptorsTarget = new Mat();
    MatOfKeyPoint keypointsSrc = new MatOfKeyPoint();
    Mat descriptorsSrc = new Mat();

    // Detect features in the target image.
    detector.detect(targetMat, keypointsTarget);
    extractor.compute(targetMat, keypointsTarget, descriptorsTarget);

    // Detect features in the source frame.
    detector.detect(SrcMat, keypointsSrc);
    extractor.compute(SrcMat, keypointsSrc, descriptorsSrc);

    // Create the descriptor matcher.
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    MatOfDMatch matches = new MatOfDMatch();

    // Match the descriptors of the two images.
    matcher.match(descriptorsTarget, descriptorsSrc, matches);
    DMatch[] arrayDmatch = matches.toArray();

    // Filter the matches.
    double max_dist = 0;
    double min_dist = 100;

    //-- Quick calculation of max and min distances between keypoints
    double dist;
    for (int i = 0; i < matches.rows(); i++)
    {
        dist = arrayDmatch[i].distance;
        if (dist < min_dist)
        {
            min_dist = dist;
        }
        if (dist > max_dist)
        {
            max_dist = dist;
        }
    }
    Debug.Log("Max dist :" + max_dist);
    Debug.Log("Min dist :" + min_dist);

    // NOTE: as in descriptorsORB_Old, nothing is ever added to matchesGoodList here.
    List<DMatch> matchesGoodList = new List<DMatch>();
    MatOfDMatch matchesGood = new MatOfDMatch();
    matchesGood.fromList(matchesGoodList);

    // Draw keypoints.
    Features2d.drawKeypoints(SrcMat, keypointsSrc, SrcMat);

    List<Point> pTarget = new List<Point>();
    List<Point> pSrc = new List<Point>();
    Debug.Log("MatchCount " + matchesGoodList.Count);
    for (int i = 0; i < matchesGoodList.Count; i++)
    {
        pTarget.Add(new Point(keypointsTarget.toArray()[matchesGoodList[i].queryIdx].pt.x,
                              keypointsTarget.toArray()[matchesGoodList[i].queryIdx].pt.y));
        pSrc.Add(new Point(keypointsSrc.toArray()[matchesGoodList[i].trainIdx].pt.x,
                           keypointsSrc.toArray()[matchesGoodList[i].trainIdx].pt.y));
    }

    MatOfPoint2f p2fTarget = new MatOfPoint2f(pTarget.ToArray());
    MatOfPoint2f p2fSrc = new MatOfPoint2f(pSrc.ToArray());
    Mat matrixH = Calib3d.findHomography(p2fTarget, p2fSrc, Calib3d.RANSAC, 3);

    List<Point> srcPointCorners = new List<Point>();
    srcPointCorners.Add(new Point(0, 0));
    srcPointCorners.Add(new Point(targetMat.width(), 0));
    srcPointCorners.Add(new Point(targetMat.width(), targetMat.height()));
    srcPointCorners.Add(new Point(0, targetMat.height()));
    Mat originalRect = Converters.vector_Point2f_to_Mat(srcPointCorners);

    List<Point> srcPointCornersEnd = new List<Point>();
    srcPointCornersEnd.Add(new Point(0, targetMat.height()));
    srcPointCornersEnd.Add(new Point(0, 0));
    srcPointCornersEnd.Add(new Point(targetMat.width(), 0));
    srcPointCornersEnd.Add(new Point(targetMat.width(), targetMat.height()));
    Mat changeRect = Converters.vector_Point2f_to_Mat(srcPointCornersEnd);

    // Project the target corners into the source frame.
    Core.perspectiveTransform(originalRect, changeRect, matrixH);
    List<Point> srcPointCornersSave = new List<Point>();
    Converters.Mat_to_vector_Point(changeRect, srcPointCornersSave);

    if ((srcPointCornersSave[2].x - srcPointCornersSave[0].x) < 5 || (srcPointCornersSave[2].y - srcPointCornersSave[0].y) < 5)
    {
        Debug.Log("Match output image is too small");
        SrcMat.copyTo(cameraFeed);
        SrcMat.release();
        Imgproc.putText(cameraFeed, targetName, srcPointCornersSave[0], 0, 1, new Scalar(255, 255, 255), 2);
        return false;
    }

    // Draw the bounding box.
    Imgproc.line(SrcMat, srcPointCornersSave[0], srcPointCornersSave[1], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[1], srcPointCornersSave[2], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[2], srcPointCornersSave[3], new Scalar(255, 0, 0), 3);
    Imgproc.line(SrcMat, srcPointCornersSave[3], srcPointCornersSave[0], new Scalar(255, 0, 0), 3);

    // Draw the centre point.
    Point middlePoint = new Point((srcPointCornersSave[0].x + srcPointCornersSave[2].x) / 2,
                                  (srcPointCornersSave[0].y + srcPointCornersSave[2].y) / 2);
    Imgproc.line(SrcMat, middlePoint, middlePoint, new Scalar(0, 0, 255), 10);

    SrcMat.copyTo(cameraFeed);
    keypointsTarget.release();
    targetMat.release();
    SrcMat.release();
    return true;
}
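Drawing a zero-length, thick line above is a workaround for plotting a single point; a hypothetical one-line equivalent using a filled circle (radius 5 is an illustrative choice) would be:

// Sketch: draw the centre marker as a filled circle instead of a zero-length line.
Imgproc.circle(SrcMat, middlePoint, 5, new Scalar(0, 0, 255), -1);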
public ImageString MatchFeatures(string base64image, List<string> base64imageList)
{
    List<MatOfDMatch> winnerMatches = new List<MatOfDMatch>();
    MatOfKeyPoint winnerKeyPoints = new MatOfKeyPoint();
    Mat winnerImage = new Mat();
    int winnerIndex = -1;
    int winnerValue = 0;

    Texture2D imgTexture = base64ImageToTexture(base64image);
    List<Texture2D> imgTextures = new List<Texture2D>();
    for (int i = 0; i < base64imageList.Count; i++)
    {
        imgTextures.Add(base64ImageToTexture(base64imageList[i]));
    }

    //Create Mat from texture
    Mat img1Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, img1Mat);

    MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
    Mat descriptors1 = new Mat();

    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    //Detect keypoints and compute descriptors from photo.
    detector.detect(img1Mat, keypoints1);
    extractor.compute(img1Mat, keypoints1, descriptors1);

    Debug.Log("Photo features: " + descriptors1.rows());
    if (descriptors1.rows() < 10)
    {
        Debug.Log("Too few descriptors in the original image");
        return new ImageString(base64image, winnerIndex);
    }

    //Run through each image in list
    for (int i = 0; i < imgTextures.Count; i++)
    {
        Texture2D imgTexture2 = imgTextures[i];

        //Create Mat from texture
        Mat img2Mat = new Mat(imgTexture2.height, imgTexture2.width, CvType.CV_8UC3);
        Utils.texture2DToMat(imgTexture2, img2Mat);

        //Find keypoints and descriptors from image in list
        MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
        Mat descriptors2 = new Mat();
        detector.detect(img2Mat, keypoints2);
        extractor.compute(img2Mat, keypoints2, descriptors2);

        //Match photo with image from list
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);

        Debug.Log("List image features: " + descriptors2.rows());
        if (descriptors2.rows() < 10)
        {
            Debug.Log("Too few descriptors in list image: " + i);
            continue;
        }

        List<MatOfDMatch> matchList = new List<MatOfDMatch>();
        matcher.knnMatch(descriptors1, descriptors2, matchList, 2);

        //Find the good matches and put them in a list
        List<MatOfDMatch> good = new List<MatOfDMatch>();
        foreach (MatOfDMatch match in matchList)
        {
            DMatch[] arrayDmatch = match.toArray();
            if (arrayDmatch[0].distance < 0.7f * arrayDmatch[1].distance)
            {
                good.Add(match);
            }
        }

        //Find the best match image based on the good lists
        if (good.Count > winnerThreshold && good.Count > winnerValue)
        {
            winnerImage = img2Mat;
            winnerMatches = good;
            winnerKeyPoints = keypoints2;
            winnerIndex = i;
            winnerValue = good.Count;
        }
    }

    Debug.Log("The winner is image: " + winnerIndex + " with a value of: " + winnerValue);

    //If no winner just return the original image
    if (winnerIndex == -1)
    {
        Debug.Log("No winner");
        return new ImageString(base64image, winnerIndex);
    }

    //Find the matching keypoints from the winner list.
    MatOfPoint2f queryPoints = new MatOfPoint2f();
    MatOfPoint2f matchPoints = new MatOfPoint2f();
    List<Point> queryPointsList = new List<Point>();
    List<Point> matchPointsList = new List<Point>();
    foreach (MatOfDMatch match in winnerMatches)
    {
        DMatch[] arrayDmatch = match.toArray();
        queryPointsList.Add(keypoints1.toList()[arrayDmatch[0].queryIdx].pt);
        matchPointsList.Add(winnerKeyPoints.toList()[arrayDmatch[0].trainIdx].pt);
    }
    queryPoints.fromList(queryPointsList);
    matchPoints.fromList(matchPointsList);

    //Calculate the homography of the best matching image
    Mat homography = Calib3d.findHomography(queryPoints, matchPoints, Calib3d.RANSAC, 5.0);
    Mat resultImg = new Mat();
    Imgproc.warpPerspective(img1Mat, resultImg, homography, winnerImage.size());

    //Show image
    Texture2D texture = new Texture2D(winnerImage.cols(), winnerImage.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(resultImg, texture);

    return new ImageString(Convert.ToBase64String(texture.EncodeToPNG()), winnerIndex);
}
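Both MatchFeatures overloads rely on a base64ImageToTexture helper that is not included in this listing; the following is a hypothetical minimal implementation (Unity's Texture2D.LoadImage resizes the texture to match the decoded image).

// Hypothetical helper: decode a base64-encoded PNG/JPG into a Texture2D.
private Texture2D base64ImageToTexture(string base64image)
{
    byte[] imageBytes = Convert.FromBase64String(base64image);
    Texture2D tex = new Texture2D(2, 2, TextureFormat.RGBA32, false);
    tex.LoadImage(imageBytes);  // resizes the texture to the decoded dimensions
    return tex;
}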
public List<ImageObject> MatchFeatures(string base64image, List<string> base64imageList)
{
    ImageObject myImage = new ImageObject();
    ImageObject winnerImage = new ImageObject();
    List<ImageObject> returnImageList = new List<ImageObject>();

    Texture2D imgTexture = base64ImageToTexture(base64image);
    List<Texture2D> imgTextures = new List<Texture2D>();
    for (int i = 0; i < base64imageList.Count; i++)
    {
        imgTextures.Add(base64ImageToTexture(base64imageList[i]));
    }

    //Create Mat from texture
    Mat img1Mat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imgTexture, img1Mat);

    MatOfKeyPoint keypoints1 = new MatOfKeyPoint();
    Mat descriptors1 = new Mat();

    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    //Detect keypoints and compute descriptors from photo.
    detector.detect(img1Mat, keypoints1);
    extractor.compute(img1Mat, keypoints1, descriptors1);
    //Debug.Log("Photo features: " + descriptors1.rows());

    myImage.image = base64image;
    myImage.keyPoints = keypoints1;
    myImage.imageMat = img1Mat;

    if (descriptors1.rows() < 10)
    {
        Debug.Log("Too few descriptors in the original image");
        //No winner as there are too few descriptors.
        return returnImageList;
    }

    //Run through each image in list
    //-------------------------------------------------------------
    for (int i = 0; i < imgTextures.Count; i++)
    {
        Texture2D imgTexture2 = imgTextures[i];

        //Create Mat from texture
        Mat img2Mat = new Mat(imgTexture2.height, imgTexture2.width, CvType.CV_8UC3);
        Utils.texture2DToMat(imgTexture2, img2Mat);

        //Find keypoints and descriptors from image in list
        MatOfKeyPoint keypoints2 = new MatOfKeyPoint();
        Mat descriptors2 = new Mat();
        detector.detect(img2Mat, keypoints2);
        extractor.compute(img2Mat, keypoints2, descriptors2);

        //Match photo with image from list
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
        //Debug.Log("List image features: " + descriptors2.rows());
        if (descriptors2.rows() < 10)
        {
            Debug.Log("Too few descriptors in list image: " + i);
            continue;
        }

        List<MatOfDMatch> matchList = new List<MatOfDMatch>();
        matcher.knnMatch(descriptors1, descriptors2, matchList, 2);

        //Find the good matches and put them in a list
        List<MatOfDMatch> good = new List<MatOfDMatch>();
        foreach (MatOfDMatch match in matchList)
        {
            DMatch[] arrayDmatch = match.toArray();
            if (arrayDmatch[0].distance < 0.7f * arrayDmatch[1].distance)
            {
                good.Add(match);
            }
        }

        //Find the best match image based on the good lists
        if (good.Count > winnerThreshold && good.Count > winnerImage.value)
        {
            winnerImage.index = i;
            winnerImage.imageMat = img2Mat;
            winnerImage.keyPoints = keypoints2;
            winnerImage.value = good.Count;
            winnerImage.matches = good;
        }
    }
    // Run through done
    //-------------------------------------------------------------

    Debug.Log("The winner is image: " + winnerImage.index + " with a value of: " + winnerImage.value);

    //If no winner just return the original image
    if (winnerImage.index == -1)
    {
        Debug.Log("No winner");
        return returnImageList;
    }

    Texture2D imageTexture = new Texture2D(winnerImage.imageMat.cols(), winnerImage.imageMat.rows(), TextureFormat.RGBA32, false);
    // Copy the winning Mat into the texture before encoding it to PNG.
    Utils.matToTexture2D(winnerImage.imageMat, imageTexture);
    winnerImage.image = Convert.ToBase64String(imageTexture.EncodeToPNG());

    returnImageList.Add(myImage);
    returnImageList.Add(winnerImage);

    return returnImageList;
}
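The ImageObject container used by this overload of MatchFeatures is not part of the listing; the sketch below is hypothetical, with field names and types inferred from how they are used above.

// Hypothetical sketch of the ImageObject container assumed by MatchFeatures.
public class ImageObject
{
    public int index = -1;              // index of the matched image in the input list
    public int value = 0;               // number of "good" matches
    public string image;                // base64-encoded image data
    public Mat imageMat;                // decoded image as an OpenCV Mat
    public MatOfKeyPoint keyPoints;     // ORB keypoints of the image
    public List<MatOfDMatch> matches;   // good matches against the query image
}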
private void TryProcessImage(int index)
{
    if (UseWebCam == false)
    {
        CurrentTexture = Sources[index];
    }
    else
    {
        CurrentTexture = webCamTexture;
    }

    using (Mat imgMat = new Mat(CurrentTexture.height, CurrentTexture.width, CvType.CV_8UC1))
    using (FeatureDetector blobDetector = FeatureDetector.create(FeatureDetector.SIMPLEBLOB))
    using (Mat outImgMat = new Mat())
    using (MatOfKeyPoint keypoints = new MatOfKeyPoint())
    {
        if (CurrentTexture is Texture2D)
        {
            Utils.texture2DToMat(CurrentTexture as Texture2D, imgMat);
        }
        else if (CurrentTexture is WebCamTexture)
        {
            Utils.webCamTextureToMat(CurrentTexture as WebCamTexture, imgMat);
        }
        else
        {
            Utils.textureToMat(CurrentTexture, imgMat);
        }

        Debug.Log("imgMat dst ToString " + imgMat.ToString());

        Imgproc.threshold(imgMat, imgMat, 0, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);
        Imgproc.erode(imgMat, imgMat, erodeMat, new Point(1, 1), 5);

        blobDetector.read(Utils.getFilePath("blobparams.yml"));
        blobDetector.detect(imgMat, keypoints);

        Features2d.drawKeypoints(imgMat, keypoints, outImgMat);

        KeyPoint[] points = keypoints.toArray();
        ProcessKeyPoints(points, outImgMat);

        Mat finalMat = outImgMat;

        if (texture != null && (texture.width != finalMat.cols() || texture.height != finalMat.rows()))
        {
            DestroyImmediate(texture);
            texture = null;
        }
        if (texture == null)
        {
            texture = new Texture2D(finalMat.cols(), finalMat.rows(), TextureFormat.RGBA32, false);
        }

        Utils.matToTexture2D(finalMat, texture);
        gameObject.GetComponent<Renderer>().material.mainTexture = texture;
    }
}
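TryProcessImage uses an erodeMat structuring element that is not defined in this listing; a hypothetical field and initialization (the 3x3 rectangular kernel is an illustrative choice) might look like:

// Hypothetical structuring element for the Imgproc.erode call above.
Mat erodeMat;

void Start()
{
    erodeMat = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(3, 3));
}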