/** Background subtraction; also checks whether the mouse selected a target box. */
private OpenCVForUnity.Rect BgSub()
{
    Mat fgmaskMat = new Mat();
    roiRect = null;
    OpenCVForUnity.Rect output;

    //Background subtraction
    backgroundSubstractorMOG2.apply(frame, fgmaskMat);

    //Morphological closing: removes noise and closes unwanted gaps left by the background subtraction
    Mat structuringElement = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(closingSize, closingSize));
    Imgproc.dilate(fgmaskMat, fgmaskMat, structuringElement);
    Imgproc.erode(fgmaskMat, fgmaskMat, structuringElement);

    //Binarize the mask
    Mat maskBinary = new Mat();
    Imgproc.threshold(fgmaskMat, maskBinary, 123, 255, Imgproc.THRESH_BINARY);

    //Get contours
    List<MatOfPoint> contours = new List<MatOfPoint>();
    OpenCVForUnity.Mat hierarchy = new OpenCVForUnity.Mat();
    Imgproc.findContours(maskBinary, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE);

    foreach (MatOfPoint contour in contours)
    {
        output = Imgproc.boundingRect(new MatOfPoint(contour.toArray()));
        Imgproc.rectangle(frame, output.tl(), output.br(), new Scalar(255, 0, 0), 2);
        rectanglesToPrint.Add(new ColoredRect(output, Color.white));
        UnityEngine.Rect check_pos = CVtoUnityRect(output);
        //mouse position is converted to a top-left origin before the hit test
        if (Input.GetMouseButton(0) && check_pos.Contains(new Vector2(Input.mousePosition.x, Screen.height - Input.mousePosition.y)))
        {
            Debug.Log("Selected a target box");
            Debug.Log(output);
            return output;
        }
    }
    return null;
}
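BgSub relies on a CVtoUnityRect helper that is not part of this listing. A minimal sketch of what it might look like, assuming the processed frame is stretched across the whole screen; the scale factors and the use of the frame field are assumptions, not the original implementation:

// Hypothetical helper (not in the original listing): maps a bounding box given
// in frame pixels (top-left origin) to a UnityEngine.Rect in screen pixels,
// assuming the frame fills the entire screen.
private UnityEngine.Rect CVtoUnityRect(OpenCVForUnity.Rect r)
{
    float scaleX = (float)Screen.width / frame.cols();
    float scaleY = (float)Screen.height / frame.rows();
    return new UnityEngine.Rect(r.x * scaleX, r.y * scaleY, r.width * scaleX, r.height * scaleY);
}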
IEnumerator GetPicture()
{
    yield return new WaitForEndOfFrame(); //take the screenshot only after the camera frame has finished rendering

    //build the point mats for the selected region and the whole RawImage
    OpenCVForUnity.Mat srcMat = new OpenCVForUnity.Mat(4, 1, CvType.CV_32FC2);
    OpenCVForUnity.Mat dstMat = new OpenCVForUnity.Mat(4, 1, CvType.CV_32FC2);
    //NOTE: srcMat is never populated here; getPerspectiveTransform needs four source
    //points (in the same order as dstMat) or it will fail at runtime
    dstMat.put(0, 0,
        rectTopLeft.x - rectBotLeft.x, rectTopLeft.y - rectBotLeft.y,
        rectTopRight.x - rectBotLeft.x, rectTopRight.y - rectBotLeft.y,
        rectBotLeft.x - rectBotLeft.x, rectBotLeft.y - rectBotLeft.y,
        rectBotRight.x - rectBotLeft.x, rectBotRight.y - rectBotLeft.y);

    //calculate the transform matrix
    transformMat = Imgproc.getPerspectiveTransform(srcMat, dstMat);

    //grab the screen and crop out the selected region
    Texture2D sourceTex = ScreenCapture.CaptureScreenshotAsTexture();
    Color[] pix = sourceTex.GetPixels((int)rectBotLeft.x, (int)rectBotLeft.y, width, height);
    Texture2D destTex = new Texture2D(width, height);
    destTex.SetPixels(pix);
    destTex.Apply();

    //convert to a Mat and flip vertically (Unity textures are bottom-up, OpenCV mats are top-down)
    OpenCVForUnity.Size textureSize = new OpenCVForUnity.Size(width, height);
    OpenCVForUnity.Mat rawImageSrcMat = new OpenCVForUnity.Mat(textureSize, CvType.CV_8UC4);
    OpenCVForUnity.Mat rawImageSrcMatFlip = new OpenCVForUnity.Mat(textureSize, CvType.CV_8UC4);
    Utils.texture2DToMat(destTex, rawImageSrcMat);
    Core.flip(rawImageSrcMat, rawImageSrcMatFlip, 0);

    //warp the cropped image and show it on the RawImage
    OpenCVForUnity.Mat rawImageDstMat = new OpenCVForUnity.Mat(textureSize, CvType.CV_8UC4);
    //Mat rawImageDstMatFlip = new Mat(textureSize, CvType.CV_8UC4);
    Imgproc.warpPerspective(rawImageSrcMatFlip, rawImageDstMat, transformMat, textureSize);

    texture = new Texture2D(width, height, TextureFormat.RGB24, false);
    Utils.matToTexture2D(rawImageDstMat, texture);
    rawImageRI.texture = texture;
}
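Since srcMat is left empty above, here is a sketch of how the missing fill might look. It is an assumption, not the original code: it guesses that the four selected corner fields are the intended source quad, and the hypothetical helper name FillSrcMat is mine; whatever corners are actually intended, their order must match dstMat.

// Hypothetical sketch of the missing source-quad setup (assumption: rectTopLeft,
// rectTopRight, rectBotLeft and rectBotRight are the source corners in capture
// pixels; the point order must mirror dstMat in GetPicture above).
private void FillSrcMat(OpenCVForUnity.Mat srcMat)
{
    srcMat.put(0, 0,
        rectTopLeft.x, rectTopLeft.y,
        rectTopRight.x, rectTopRight.y,
        rectBotLeft.x, rectBotLeft.y,
        rectBotRight.x, rectBotRight.y);
}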
IEnumerator CalculateHomography() //void CalculateHomography()
{
    //Debug.Log("CalculateHomography1");
    //myCam.Pause();
    yield return new WaitForEndOfFrame();
    //yield return new WaitForSeconds((float)0.5); //the camera image only appears ~0.3 s after startup; running SIFT before that blows up memory

    //input camera image
    /*Texture2D sourceTex = ScreenCapture.CaptureScreenshotAsTexture();
     * Color[] pix = sourceTex.GetPixels((int)rectBotLeft.x, (int)rectBotLeft.y, width, height);
     * Texture2D tex = new Texture2D(width, height);
     * tex.SetPixels(pix);
     * tex.Apply();*/
    //Debug.Log("CalculateHomography2");

    //RawImage is positioned at (0,0); coordinates start from the bottom left
    int xStart = (int)(Screen.width - rawImageRT.rect.width) / 2;
    int yStart = (int)(Screen.height - rawImageRT.rect.height) / 2;
    /*Debug.Log("xStart: " + xStart);
     * Debug.Log("yStart: " + yStart);
     * Debug.Log("Screen.width: " + Screen.width);
     * Debug.Log("Screen.height: " + Screen.height);
     * Debug.Log("rawImageRT.rect.width: " + rawImageRT.rect.width);
     * Debug.Log("rawImageRT.rect.height: " + rawImageRT.rect.height);*/

    //get the sign image with text
    Texture2D sourceTex = ScreenCapture.CaptureScreenshotAsTexture();
    //rawImageRI.texture = sourceTex;
    //Color[] pix = sourceTex.GetPixels((int)rectBotLeft.x, (int)rectBotLeft.y, width, height);
    Color[] pix = sourceTex.GetPixels(xStart, yStart, (int)rawImageRT.rect.width, (int)rawImageRT.rect.height);
    tex = new Texture2D((int)rawImageRT.rect.width, (int)rawImageRT.rect.height);
    tex.SetPixels(pix);
    tex.Apply();
    //Debug.Log("tex.width: " + tex.width);
    //Debug.Log("tex.height: " + tex.height);

    //input fixed image
    /*Texture2D tex = new Texture2D(2, 2);
     * string imgPath = "../signboard-rectangle/test-199-fast-628.jpg";
     * byte[] binaryImageData = File.ReadAllBytes(imgPath);
     * tex.LoadImage(binaryImageData);*/

    //scale the texture down to make it smaller; required, otherwise memory usage explodes
    TextureScale.Bilinear(tex, tex.width / 2, tex.height / 2);
    tex = TextureGray.ToGray(tex);
    //rawImageRI.texture = tex;
    mat = Unity.TextureToMat(tex);
    Destroy(sourceTex);
    Destroy(tex);
    //Cv2.ImShow("img", mat); ok
    //OpenCvSharp.Mat mat = Cv2.ImRead(imgPath, ImreadModes.Unchanged);
    //Debug.Log("mat: " + mat.ToString());
    //string imgPath = "../signboard-rectangle/test-199-fast-628.jpg";
    //OpenCvSharp.Mat mat = Cv2.ImRead(imgPath);

    InputArray imgCam = InputArray.Create(mat);
    desCam = OutputArray.Create(mat);
    //Cv2.ImShow("img", mat); ok
    //OpenCvSharp.Mat mat2 = mat;
    //sift = SIFT.Create();

    //detect keypoints in the camera image (this is the slow part)
    //System.Diagnostics.Stopwatch time = new System.Diagnostics.Stopwatch();
    //time.Start(); //laggy
    OpenCvSharp.KeyPoint[] kpCam = sift.Detect(mat);
    //OpenCvSharp.KeyPoint[] kpCam = surf.Detect(mat);
    //OpenCvSharp.KeyPoint[] kpCam = orb.Detect(mat);
    //OpenCvSharp.KeyPoint[] kpCam = brief.Detect(mat);
    //time.Stop();
    //Debug.Log("took " + time.Elapsed.TotalSeconds + " seconds");
    //myCam.Pause();
    //rawImageRI.texture = tex;
    //Cv2.ImShow("img", mat); ok
    //Cv2.ImShow("img", mat2); ok

    sift.Compute(imgCam, ref kpCam, desCam);
    //surf.Compute(imgCam, ref kpCam, desCam);
    //orb.Compute(imgCam, ref kpCam, desCam);
    //brief.Compute(imgCam, ref kpCam, desCam);
    //Cv2.ImShow("img", mat);
    //Cv2.ImShow("img", mat2); crashes

    OpenCvSharp.Mat desCammat = desCam.GetMat();
    //Debug.Log("desCammat: " + desCammat);

    //match both reference descriptor sets against the camera descriptors
    //if (!M) if homography M has not been computed yet {
    //desFirstCatch = desCam;
    //OutputArray descriptors_object = des1;
    OpenCvSharp.Mat des1mat = des1.GetMat();
    OpenCvSharp.Mat des2mat = des2.GetMat();
    //OpenCvSharp.Mat des3mat = des3.GetMat();
    //Debug.Log("des1mat: " + des1mat);
    OpenCvSharp.DMatch[] dmatch1 = descriptorMatcher.Match(des1mat, desCammat);
    OpenCvSharp.DMatch[] dmatch2 = descriptorMatcher.Match(des2mat, desCammat);
    //OpenCvSharp.DMatch[] dmatch3 = descriptorMatcher.Match(des3mat, desCammat);
    //Debug.Log("dmatch1[0]: " + dmatch1[0].ToString());
    //}
    //else {
    //OpenCvSharp.Mat desFirstCatchmat = desFirstCatch.GetMat();
    //OpenCvSharp.DMatch[] dmatch = descriptorMatcher.Match(desFirstCatchmat, desCammat);
    //OutputArray descriptors_object = desFirstCatch;
    //}

    double max_dist1 = 0;
    double min_dist1 = 100;
    double max_dist2 = 0;
    double min_dist2 = 100;
    //double max_dist3 = 0;
    //double min_dist3 = 100;
    //Cv2.ImShow("img", mat); crashes

    //Quick calculation of max and min distances between keypoints
    foreach (OpenCvSharp.DMatch d in dmatch1)
    {
        double dist = d.Distance;
        if (dist < min_dist1) { min_dist1 = dist; }
        if (dist > max_dist1) { max_dist1 = dist; }
    }
    foreach (OpenCvSharp.DMatch d in dmatch2)
    {
        double dist = d.Distance;
        if (dist < min_dist2) { min_dist2 = dist; }
        if (dist > max_dist2) { max_dist2 = dist; }
    }
    /*foreach (OpenCvSharp.DMatch d in dmatch3) {
     *     double dist = d.Distance;
     *     if (dist < min_dist3) min_dist3 = dist;
     *     if (dist > max_dist3) max_dist3 = dist;
     * }*/

    //Keep only "good" matches (i.e. whose distance is less than 3*min_dist)
    List<OpenCvSharp.DMatch> goodMatch1 = new List<OpenCvSharp.DMatch>();
    foreach (OpenCvSharp.DMatch d in dmatch1)
    {
        if (d.Distance < 3 * min_dist1) { goodMatch1.Add(d); }
    }
    List<OpenCvSharp.DMatch> goodMatch2 = new List<OpenCvSharp.DMatch>();
    foreach (OpenCvSharp.DMatch d in dmatch2)
    {
        if (d.Distance < 3 * min_dist2) { goodMatch2.Add(d); }
    }
    /*List<OpenCvSharp.DMatch> goodMatch3 = new List<OpenCvSharp.DMatch>();
     * foreach (OpenCvSharp.DMatch d in dmatch3) {
     *     if (d.Distance < 3 * min_dist3)
     *         goodMatch3.Add(d);
     * }*/

    //-- Get the keypoints from the good matches
    List<OpenCvSharp.Point2f> srcPts1 = new List<OpenCvSharp.Point2f>();
    List<OpenCvSharp.Point2f> dstPts1 = new List<OpenCvSharp.Point2f>();
    foreach (OpenCvSharp.DMatch d in goodMatch1)
    {
        srcPts1.Add(kp1[d.QueryIdx].Pt);
        dstPts1.Add(kpCam[d.TrainIdx].Pt);
        //Debug.Log("kp1[d.QueryIdx].Pt: " + kp1[d.QueryIdx].Pt);
    }
    List<OpenCvSharp.Point2f> srcPts2 = new List<OpenCvSharp.Point2f>();
    List<OpenCvSharp.Point2f> dstPts2 = new List<OpenCvSharp.Point2f>();
    foreach (OpenCvSharp.DMatch d in goodMatch2)
    {
        srcPts2.Add(kp2[d.QueryIdx].Pt);
        dstPts2.Add(kpCam[d.TrainIdx].Pt);
    }
    /*List<OpenCvSharp.Point2f> srcPts3 = new List<OpenCvSharp.Point2f>();
     * List<OpenCvSharp.Point2f> dstPts3 = new List<OpenCvSharp.Point2f>();
     * foreach (OpenCvSharp.DMatch d in goodMatch3) {
     *     srcPts3.Add(kp3[d.QueryIdx].Pt);
     *     dstPts3.Add(kpCam[d.TrainIdx].Pt);
     * }*/

    //skip this frame if fewer than a certain number of keypoints matched
    if (srcPts1.Count < 200 && srcPts2.Count < 200)
    {
        yield break;
    }

    //pick whichever reference image matched better
    if (srcPts1.Count >= srcPts2.Count)
    {
        srcPts = new List<OpenCvSharp.Point2f>(srcPts1);
        dstPts = new List<OpenCvSharp.Point2f>(dstPts1);
        text1.enabled = true;
        text2.enabled = false;
        num1++;
        //text3.enabled = false;
    }
    /*else if (srcPts2.Count >= srcPts1.Count && srcPts2.Count >= srcPts3.Count) {
     *     srcPts = new List<OpenCvSharp.Point2f>(srcPts2);
     *     dstPts = new List<OpenCvSharp.Point2f>(dstPts2);
     *     text2.enabled = true;
     *     text1.enabled = false;
     *     text3.enabled = false;
     * }*/
    else
    {
        srcPts = new List<OpenCvSharp.Point2f>(srcPts2);
        dstPts = new List<OpenCvSharp.Point2f>(dstPts2);
        text2.enabled = true;
        text1.enabled = false;
        num2++;
        //text2.enabled = false;
    }

    //hysteresis: keep showing the label that has been winning recently
    if (num1 > num2 + 10)
    {
        text1.enabled = true;
        text2.enabled = false;
    }
    if (num2 > num1 + 10)
    {
        text2.enabled = true;
        text1.enabled = false;
    }
    if (num1 > 60 || num2 > 60)
    {
        num1 = 0;
        num2 = 0;
    }

    //OpenCvSharp.Mat mat2 = mat;
    //Cv2.DrawKeypoints(mat, kpCam, mat2);
    //Cv2.ImShow("img", mat); garbled image
    //Texture2D tex2 = new Texture2D(8, 8);
    //tex2 = Unity.MatToTexture(mat);
    //rawImageRI.texture = tex2;
    //myCam.Pause();
    //Cv2.ImShow("img", mat2); garbled image

    Texture2D emptyTex = new Texture2D(8, 8);
    OpenCvSharp.Mat outputImg = Unity.TextureToMat(emptyTex);
    //Debug.Log("outputImg: " + outputImg.ToString());

    //estimate the homography between the reference points and the camera points
    InputArray srcArr = InputArray.Create<OpenCvSharp.Point2f>(srcPts);
    InputArray dstArr = InputArray.Create<OpenCvSharp.Point2f>(dstPts);
    OutputArray mask = OutputArray.Create(outputImg);
    OpenCvSharp.Mat M = Cv2.FindHomography(srcArr, dstArr, HomographyMethods.Ransac, 5, mask);

    //copy the 3x3 homography into an OpenCVForUnity mat
    OpenCVForUnity.Mat transMat = new OpenCVForUnity.Mat(3, 3, CvType.CV_32FC1);
    transMat.put(0, 0,
        M.Get<double>(0, 0), M.Get<double>(0, 1), M.Get<double>(0, 2),
        M.Get<double>(1, 0), M.Get<double>(1, 1), M.Get<double>(1, 2),
        M.Get<double>(2, 0), M.Get<double>(2, 1), M.Get<double>(2, 2));
    //Debug.Log("transMat: " + transMat.dump());
    //Debug.Log("mask: " + mask);
    //OpenCvSharp.Mat maskMat = mask.GetMat();
    //Debug.Log("maskMat: " + maskMat.ToString());
    //maskMoB = new OpenCvSharp.MatOfByte(maskMat);

    //-- Get the corners from the image_1 (the object to be "detected")
    /*OpenCvSharp.Point2f[] obj_corners = new OpenCvSharp.Point2f[4];
     * obj_corners[0] = new OpenCvSharp.Point2f(0, 0);
     * obj_corners[1] = new OpenCvSharp.Point2f(inputTex.width, 0);
     * obj_corners[2] = new OpenCvSharp.Point2f(inputTex.width, inputTex.height);
     * obj_corners[3] = new OpenCvSharp.Point2f(0, inputTex.height);
     *
     * //OpenCvSharp.Point2f[] scene_corners = new OpenCvSharp.Point2f[4];
     * //scene_corners = Cv2.PerspectiveTransform(obj_corners, M);
     *
     * //if (!M) if homography M has not been computed yet {
     * //Cv2.DrawMatches(inputImg, kp1, mat, kpCam, goodMatch, outputImg, OpenCvSharp.Scalar.All(-1),
     * //OpenCvSharp.Scalar.All(-1), maskMoB.ToArray(), DrawMatchesFlags.NotDrawSinglePoints);
     * //else {
     *
     * //Texture2D outputTex = Unity.MatToTexture(outputImg);
     * //rawImageRI.texture = outputTex;
     *
     * //-- Draw lines between the corners (the mapped object in the scene - image_2)
     * //Cv2.Line(outputImg, scene_corners[0] + obj_corners[1], scene_corners[1] + obj_corners[1], OpenCvSharp.Scalar.LightBlue, 4);
     * //Cv2.Line(outputImg, scene_corners[1] + obj_corners[1], scene_corners[2] + obj_corners[1], OpenCvSharp.Scalar.LightBlue, 4);
     * //Cv2.Line(outputImg, scene_corners[2] + obj_corners[1], scene_corners[3] + obj_corners[1], OpenCvSharp.Scalar.LightBlue, 4);
     * //Cv2.Line(outputImg, scene_corners[3] + obj_corners[1], scene_corners[0] + obj_corners[1], OpenCvSharp.Scalar.LightBlue, 4);
     *
     * //OpenCvSharp.Mat outimg = Unity.TextureToMat(emptyTex);
     * //inputImg = Unity.TextureToMat(emptyTex);
     * //Cv2.DrawKeypoints(mat, kpCam, outimg, OpenCvSharp.Scalar.LightBlue);
     */

    //show image with text after homography
    /*string imgPath2 = "../signboard-rectangle/test-IMG_0204-text.PNG";
     * textTex = new Texture2D(2, 2);
     * byte[] binaryImageData2 = File.ReadAllBytes(imgPath2);
     * textTex.LoadImage(binaryImageData2);
     * rawImageRI.texture = textTex;*/

    /*OpenCVForUnity.Mat inputTextImg = new OpenCVForUnity.Mat(new OpenCVForUnity.Size(textTex.width, textTex.height), CvType.CV_8UC4);
     * Utils.texture2DToMat(textTex, inputTextImg);
     * OpenCVForUnity.Mat outputTextImg = new OpenCVForUnity.Mat(new OpenCVForUnity.Size(textTex.width, textTex.height), CvType.CV_8UC4);
     *
     * Imgproc.warpPerspective(inputTextImg, outputTextImg, transMat, new OpenCVForUnity.Size(textTex.width, textTex.height));
     *
     * Texture2D outputTex = new Texture2D((int)textTex.width, (int)textTex.height, TextureFormat.RGB24, false);
     * Utils.matToTexture2D(outputTextImg, outputTex);*/
    //TextureScale.Bilinear(outputTex, outputTex.width / 5, outputTex.height / 5);
    //rawImageRI.texture = outputTex;
    //text.enabled = true;

    /*Vector3 scale;
     * scale.x = new Vector4((float)M.Get<double>(0, 0), (float)M.Get<double>(1, 0), (float)M.Get<double>(2, 0), 0).magnitude;
     * scale.y = new Vector4((float)M.Get<double>(0, 1), (float)M.Get<double>(1, 1), (float)M.Get<double>(2, 1), 0).magnitude;
     * scale.z = new Vector4((float)M.Get<double>(0, 2), (float)M.Get<double>(1, 2), (float)M.Get<double>(2, 2), 0).magnitude;
     *
     * Vector3 forward;
     * forward.x = (float)M.Get<double>(0, 2);
     * forward.y = (float)M.Get<double>(1, 2);
     * forward.z = (float)M.Get<double>(2, 2);
     *
     * Vector3 upwards;
     * upwards.x = (float)M.Get<double>(0, 1);
     * upwards.y = (float)M.Get<double>(1, 1);
     * upwards.z = (float)M.Get<double>(2, 1);
     *
     * //textRT.localScale = scale;
     * //textRT.rotation = Quaternion.LookRotation(forward, upwards);*/

    //lift the homography into a 4x4 matrix
    Matrix4x4 matrix = new Matrix4x4();
    /*matrix.SetRow(0, new Vector4((float)M.Get<double>(0, 0), (float)M.Get<double>(0, 1), (float)M.Get<double>(0, 2), 0));
     * matrix.SetRow(1, new Vector4((float)M.Get<double>(1, 0), (float)M.Get<double>(1, 1), (float)M.Get<double>(1, 2), 0));
     * matrix.SetRow(2, new Vector4(0, 0, 1, 0));
     * matrix.SetRow(3, new Vector4(0, 0, 0, 1));*/
    //using the inverse gives acceptable results
    matrix.SetRow(0, new Vector4((float)M.Get<double>(0, 0), (float)M.Get<double>(0, 1), 0, (float)M.Get<double>(0, 2)));
    matrix.SetRow(1, new Vector4((float)M.Get<double>(1, 0), (float)M.Get<double>(1, 1), 0, (float)M.Get<double>(1, 2)));
    matrix.SetRow(2, new Vector4(0, 0, 1, 0));
    matrix.SetRow(3, new Vector4(0, 0, 0, 1));
    Matrix4x4 inverse = matrix.inverse;

    //textRT.localScale = matrix.lossyScale;
    //textRT.rotation = matrix.rotation;
    //rotation and eulerAngles give the same result
    textRT1.rotation = inverse.rotation;
    textRT2.rotation = inverse.rotation;
    //textRT3.rotation = inverse.rotation;
    Destroy(emptyTex);

    //calculate euler angles
    /*double angleX = Math.Asin(-M.Get<double>(2, 1));
     * double angleY = Math.Atan2(M.Get<double>(2, 0), M.Get<double>(2, 2));
     * double angleZ = Math.Atan2(M.Get<double>(0, 1), M.Get<double>(1, 1));
     * //textRT.eulerAngles = new Vector3((float)angleX, (float)angleY, (float)angleZ);
     * //Debug.Log("textRT.eulerAngles: " + textRT.eulerAngles.ToString());
     *
     * //calculate quaternion
     * double w = Math.Sqrt(1 + M.Get<double>(0, 0) + M.Get<double>(1, 1) + M.Get<double>(2, 2)) / 2;
     * double w4 = w * 4;
     * double qx = (M.Get<double>(2, 1) - M.Get<double>(1, 2)) / w4;
     * double qy = (M.Get<double>(0, 2) - M.Get<double>(2, 0)) / w4;
     * double qz = (M.Get<double>(1, 0) - M.Get<double>(0, 1)) / w4;
     * //textRT.rotation = new Quaternion((float)qx, (float)qy, (float)qz, 1);
     *
     * double tr = M.Get<double>(0, 0) + M.Get<double>(1, 1) + M.Get<double>(2, 2);
     * Debug.Log("tr: " + tr);*/

    //Cv2.ImShow("img", mat);
    //myCam.Pause();
}
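The "distance < 3 * min_dist" filter above keeps everything close to the single best match, which degrades when min_dist is near zero. A common alternative, not used in the original code, is Lowe's ratio test over k-nearest-neighbour matches. A minimal sketch with OpenCvSharp, assuming descriptorMatcher is a BFMatcher built for SIFT's L2 norm (e.g. new BFMatcher(NormTypes.L2)):

// Sketch of Lowe's ratio test as an alternative match filter (assumption:
// descriptorMatcher supports KnnMatch, as BFMatcher and FlannBasedMatcher do).
OpenCvSharp.DMatch[][] knn = descriptorMatcher.KnnMatch(des1mat, desCammat, 2);
List<OpenCvSharp.DMatch> ratioGood = new List<OpenCvSharp.DMatch>();
foreach (OpenCvSharp.DMatch[] pair in knn)
{
    // keep a match only if it is clearly better than the runner-up
    if (pair.Length == 2 && pair[0].Distance < 0.75f * pair[1].Distance)
    {
        ratioGood.Add(pair[0]);
    }
}

The 0.75 ratio is the usual starting point from Lowe's SIFT paper; unlike the 3*min_dist rule, it judges each match against its own second-best candidate rather than against the global minimum.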
private OpenCVForUnity.Rect BgSub()
{
    backgroundSubstractorMOG2.apply(rgbMat, fgmaskMat);
    roiRect = null;
    fgmaskMatRoi = fgmaskMat;

    //structuring elements: large kernels for the display mask, 1x1 (effectively a no-op) for the ROI mask
    Mat kernelD = new Mat(40, 40, CvType.CV_8UC1, new Scalar(255, 255, 255));
    Mat kernelE = new Mat(20, 20, CvType.CV_8UC1, new Scalar(255, 255, 255));
    Mat kernelDRoi = new Mat(1, 1, CvType.CV_8UC1, new Scalar(255, 255, 255));
    Mat kernelERoi = new Mat(1, 1, CvType.CV_8UC1, new Scalar(255, 255, 255));
    Imgproc.dilate(fgmaskMat, fgmaskMatDilate, kernelD);
    Imgproc.erode(fgmaskMatDilate, fgmaskMatDilate, kernelE);
    Imgproc.dilate(fgmaskMatRoi, fgmaskMatDilateRoi, kernelDRoi);
    Imgproc.erode(fgmaskMatDilateRoi, fgmaskMatDilateRoi, kernelERoi);

    //binarize both masks
    mask_binary = new OpenCVForUnity.Mat();
    mask_binaryRoi = new OpenCVForUnity.Mat();
    Imgproc.threshold(fgmaskMatDilate, mask_binary, 123, 255, Imgproc.THRESH_BINARY);
    Imgproc.threshold(fgmaskMatDilateRoi, mask_binaryRoi, 123, 255, Imgproc.THRESH_BINARY);

    List<MatOfPoint> contours = new List<MatOfPoint>();
    OpenCVForUnity.Mat hierarchy = new OpenCVForUnity.Mat();
    Imgproc.findContours(mask_binary, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE);

    if (contours.Count == 0)
    {
        return null;
    }

    int i = 0;
    color1 = new Color(0.8f, 0.8f, 0.95f, 0.25f);
    color2 = new Color(0.8f, 0.8f, 0.95f);
    foreach (MatOfPoint contour in contours)
    {
        //Debug.Log("number of target: " + i);
        MatOfPoint new_mat1 = new MatOfPoint(contour.toArray());
        output = Imgproc.boundingRect(new_mat1);
        rgbMat.copyTo(dest, mask_binaryRoi);
        //SaveMatToFile("mask_binary" + ss, mask_binary);
        //SaveMatToFile("mask_binaryRoi" + ss, mask_binaryRoi);
        Imgproc.rectangle(rgbMat, output.tl(), output.br(), new Scalar(255, 0, 0), 2);
        output_ar.Add(output);

        Vector3 top_left_pos = new Vector3(output.x, Screen.height - output.y);
        Vector3 bottom_right_pos = new Vector3(output.x + output.width, Screen.height - (output.y + output.height));
        UnityEngine.Rect check_pos = GetScreenRect(top_left_pos, bottom_right_pos);
        i++;
        if (Input.GetMouseButton(0) && check_pos.Contains(new Vector2(Input.mousePosition.x, Screen.height - Input.mousePosition.y)))
        {
            Debug.Log("Target box selected");
            //skipFrame = 50;
            //shouldStartCamShift = true;
            Debug.Log(output);
            return output;
        }
        /*else
         * {
         *     //take the largest blob; it is the first one because findContours orders them from largest to smallest
         *     MatOfPoint new_mat2 = new MatOfPoint(contours[0].toArray());
         *     output = Imgproc.boundingRect(new_mat2);
         * }*/
    }
    //OnGUI();
    return null;
}
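This version of BgSub calls a GetScreenRect helper that is not shown. A minimal sketch consistent with how it is used above (the corners are passed with a bottom-left origin, and the resulting rect must use a top-left origin to match the mouse hit test); the implementation is an assumption, not the original:

// Hypothetical helper (not in the original listing): builds a UnityEngine.Rect
// from a top-left and a bottom-right corner given in bottom-left-origin screen
// coordinates, returning a rect in top-left-origin coordinates.
private UnityEngine.Rect GetScreenRect(Vector3 topLeft, Vector3 bottomRight)
{
    return new UnityEngine.Rect(
        topLeft.x,
        Screen.height - topLeft.y,        // convert y back to a top-left origin
        bottomRight.x - topLeft.x,        // width
        topLeft.y - bottomRight.y);       // height
}

With output = (x, y, w, h), this returns Rect(x, y, w, h) in top-left screen space, which is exactly what check_pos.Contains expects given the Screen.height - Input.mousePosition.y conversion.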