public int matchDice(Mat src, OpenCVForUnity.Rect rect, Mat temp)
{
    Mat subRGB = new Mat(src, rect);

    // Convert to grayscale
    Mat grayMat = new Mat();
    Imgproc.cvtColor(subRGB, grayMat, Imgproc.COLOR_RGB2GRAY);

    Mat hierarchy = new Mat();
    List<MatOfPoint> contours = new List<MatOfPoint>();

    // Blur, Canny edge detection, then erosion/dilation
    Imgproc.blur(grayMat, grayMat, new Size(3, 3));
    Imgproc.Canny(grayMat, grayMat, 50, 150);
    morphOps(grayMat);

    // Find contours
    Imgproc.findContours(grayMat, contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
    for (int i = 0; i < contours.Count; i++)
    {
        Imgproc.drawContours(temp, contours, i, new Scalar(255, 255, 255), 2);
    }

    // Return the number of contours found
    return contours.Count;
}
private void GetCubies(List<MatOfPoint> contours, Mat imgMat, int index, List<Cubies> cubies)
{
    MatOfPoint2f matOfPoint2f = new MatOfPoint2f();
    MatOfPoint2f approxCurve = new MatOfPoint2f();
    MatOfPoint approx = new MatOfPoint();

    foreach (var contour in contours)
    {
        matOfPoint2f.fromList(contour.toList());
        Imgproc.approxPolyDP(matOfPoint2f, approxCurve, 0.1 * Imgproc.arcLength(matOfPoint2f, true), true);
        try
        {
            approxCurve.convertTo(approx, CvType.CV_32S);
            OpenCVForUnity.Rect rect = Imgproc.boundingRect(approx);
            // A quadrilateral approximation is treated as a cubie face.
            if (approx.total() == 4)
            {
                cubies.Add(new Cubies(rect.x, rect.y, colorsList[index]));
                Imgproc.rectangle(imgMat, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(255, 40, 150), 2);
            }
        }
        catch (ArgumentOutOfRangeException)
        {
            // Malformed approximations are skipped intentionally.
        }
    }
    print("Number of cubies: " + cubies.Count);
}
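The Cubies type referenced above is not shown in this snippet. A minimal sketch of what it might look like, assuming it only records the detected position and the color taken from colorsList[index] (the field names and the Color type are assumptions):

public class Cubies
{
    public int x;       // top-left x of the detected face in image coordinates
    public int y;       // top-left y of the detected face in image coordinates
    public Color color; // color assigned from colorsList[index]

    public Cubies(int x, int y, Color color)
    {
        this.x = x;
        this.y = y;
        this.color = color;
    }
}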
void Start()
{
    int width = source.width;
    int height = source.height;
    //Debug.Log(width + "*" + height); // 256*256; log to check the image size

    // rows is the row count, corresponding to height
    // cols is the column count, corresponding to width
    mat = new Mat(height, width, CvType.CV_8UC3); // The Mat must be the same size as the Texture2D
    Utils.texture2DToMat(source, mat);

    for (int i = 0; i < 2; i++)
    {
        for (int t = 0; t < 2; t++)
        {
            int id = i * 2 + t;
            Debug.Log("[order]" + (id)); // 0->1->2->3

            // Crop in the order of the output array and assign each piece to a new image
            OpenCVForUnity.Rect rectCrop = new OpenCVForUnity.Rect(width / 2 * t, height / 2 * i, width / 2, height / 2); // Be careful when debugging: going out of range will crash Unity
            Mat croppedImage = new Mat(mat, rectCrop);

            result[id] = new Texture2D(croppedImage.width(), croppedImage.height()); // The Texture2D must also be the same size as the Mat
            Utils.matToTexture2D(croppedImage, result[id]);
            output[id].texture = result[id];
        }
    }
}
Mat createForOne(Mat cameraMat, Region region)
{
    Mat texture = Mat.zeros(region.parentSize, cameraMat.type());
    Mat originalTexture = selectOriginalTextureImage();

    if (region.rect.tl().x == 0)
    {
        // Region touches the left edge: keep the texture's aspect ratio and anchor it to the right side of the rect.
        var aspect = originalTexture.size().height / originalTexture.size().width;
        var height = (int)region.rect.size().height;
        var width = (int)(height / aspect);
        var y = (int)region.rect.tl().y;
        var x = (int)region.rect.br().x - width;
        var rect = new OpenCVForUnity.Rect(x, y, width, height);
        ARUtil.affineTransform(originalTexture, texture, rect);
    }
    else if (region.rect.br().x >= region.parentMat.size().width - 1)
    {
        // Region touches the right edge: anchor the texture to the left side of the rect.
        var aspect = originalTexture.size().height / originalTexture.size().width;
        var height = (int)region.rect.size().height;
        var width = (int)(height / aspect);
        var y = (int)region.rect.tl().y;
        var x = (int)region.rect.tl().x;
        var rect = new OpenCVForUnity.Rect(x, y, width, height);
        ARUtil.affineTransform(originalTexture, texture, rect);
    }
    else
    {
        // Affine-transform the original texture image into the rotated rect
        ARUtil.affineTransform(originalTexture, texture, region.rotatedRect);
    }
    return texture;
}
void Update()
{
    if (rawImage)
    {
        // didUpdateThisFrame() only needs to be checked once per frame.
        if (startCVCam && cvCameraMat && cvCameraMat.isPlaying() && cvCameraMat.didUpdateThisFrame() && texture != null)
        {
            Mat cvCamMat = cvCameraMat.GetMat();
            if (cvCamMat != null && !cvCamMat.empty() && !faceCascade.empty())
            {
                Mat grayMat = new Mat();
                Imgproc.cvtColor(cvCamMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
                Mat equalizeHistMat = new Mat();
                Imgproc.equalizeHist(grayMat, equalizeHistMat);

                MatOfRect faces = new MatOfRect();
                faceCascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.13, equalizeHistMat.cols() * 0.13), new Size());

                if (faces.rows() > 0)
                {
                    List<OpenCVForUnity.Rect> rectsList = faces.toList();
                    for (int i = 0; i < rectsList.Count; i++)
                    {
                        OpenCVForUnity.Rect faceRect = rectsList[i];
                        x = faceRect.x;
                        y = faceRect.y;
                        if (i > 0)
                        {
                            // Top-left is (0, 0), bottom-right is (100, 100)
                            OpenCVForUnity.Rect beforeFaceRect = rectsList[i - 1];
                            formerX = beforeFaceRect.x;
                            formerY = beforeFaceRect.y;
                            dx = x - formerX;
                            dy = y - formerY;
                            _gameManager.setDxDy(dx, dy);
                            Debug.Log(x + ":" + y);
                        }
                    }
                }
            }
            if (cvCamMat != null && !cvCamMat.empty())
            {
                try
                {
                    cvCameraMat.matToTexture2D(cvCamMat, texture);
                }
                catch (System.ArgumentException e)
                {
                    Debug.Log(e.Message);
                }
                catch (System.Exception e)
                {
                    Debug.Log("OtherError: " + e.Message);
                }
                cvCamMat = null;
            }
        }
    }
    else
    {
        Debug.LogError("NotFound:rawImage");
    }
}
public void perspectiveAlign()
{
    if (nowDetected)
    {
        Mat align = new Mat();
        orderRectCorners(nowRectPoints);

        Mat srcQuad = Converters.vector_Point_to_Mat(nowRectPoints, CvType.CV_32F);
        Mat dstQuad = Converters.vector_Point_to_Mat(dstRectPoints, CvType.CV_32F);
        Mat M = Imgproc.getPerspectiveTransform(srcQuad, dstQuad);
        Imgproc.warpPerspective(img_orig, align, M, new Size(1120, 860));

        int diffX = 60;
        int diffY = 60;
        Rect lt = new Rect(new Point(0, 0), new Point(diffX, diffY));
        Rect rt = new Rect(new Point(align.width() - 1 - diffX, 0), new Point(align.width() - 1, diffY));
        Rect lb = new Rect(new Point(0, align.height() - 1 - diffY), new Point(diffX, align.height() - 1));
        Rect rb = new Rect(new Point(align.width() - 1 - diffX, align.height() - 1 - diffY), new Point(align.width() - 1, align.height() - 1));

        // left-top
        Imgproc.rectangle(align, lt.tl(), lt.br(), new Scalar(0, 255, 0, 255), 1);
        // right-top
        Imgproc.rectangle(align, rt.tl(), rt.br(), new Scalar(0, 255, 0, 255), 1);
        // left-bottom
        Imgproc.rectangle(align, lb.tl(), lb.br(), new Scalar(0, 255, 0, 255), 1);
        // right-bottom
        Imgproc.rectangle(align, rb.tl(), rb.br(), new Scalar(0, 255, 0, 255), 1);

        //for (int i = 0; i < 20; i++)
        //{
        //    Rect r = new Rect(new Point(435, 137.5 + 32.5 * i), new Point(435 + 110, 170 + 32.5 * i));
        //    int num = getAnswerNumber(align, r);
        //    Imgproc.putText(align, " " + num, new Point(r.x - 40, r.y + 25), 1, 2, new Scalar(255, 0, 0, 255), 3, Core.LINE_AA, false);
        //    Imgproc.rectangle(align, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 2);
        //}
        //
        //for (int i = 0; i < 20; i++)
        //{
        //    Rect r = new Rect(new Point(590, 137.5 + 32.5 * i), new Point(590 + 110, 170 + 32.5 * i));
        //    int num = getAnswerNumber(align, r);
        //    Imgproc.putText(align, " " + num, new Point(r.x - 40, r.y + 25), 1, 2, new Scalar(255, 0, 0, 255), 3, Core.LINE_AA, false);
        //    Imgproc.rectangle(align, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 2);
        //}
        //
        //for (int i = 0; i < 5; i++)
        //{
        //    Rect r = new Rect(new Point(750, 137.5 + 32.5 * i), new Point(750 + 110, 170 + 32.5 * i));
        //    int num = getAnswerNumber(align, r);
        //    Imgproc.putText(align, " " + num, new Point(r.x - 40, r.y + 25), 1, 2, new Scalar(255, 0, 0, 255), 3, Core.LINE_AA, false);
        //    Imgproc.rectangle(align, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 2);
        //}

        getAnswerNumber(align);

        result.GetComponent<Renderer>().material.mainTexture = result_texture;
        result_texture.Resize(align.width(), align.height());
        result.gameObject.transform.localScale = new Vector3(align.width() / 2.5f, align.height() / 2.5f, 3);
        Utils.matToTexture2D(align, result_texture);
    }
}
// Smooth the image (if it differs too little from the previous frame, the display is not updated)
private Mat SmoothesImage(Mat currentImage)
{
    Mat hierarchy = new Mat();
    List<MatOfPoint> contours = new List<MatOfPoint>();
    Mat diffImage = new Mat();

    if (_smoothesImage == null)
    {
        _smoothesImage = new Mat(currentImage.height(), currentImage.width(), CvType.CV_8UC1);
        currentImage.copyTo(_smoothesImage);
    }

    Core.absdiff(currentImage, _smoothesImage, diffImage);
    Imgproc.findContours(diffImage, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    for (int index = 0; index < contours.Count; index++)
    {
        OpenCVForUnity.Rect tempRect = Imgproc.boundingRect(contours[index]);
        // Area of the difference region
        if (tempRect.area() > (MatchWidth * MatchHeight * _smoothesImagePer))
        {
            currentImage.copyTo(_smoothesImage);
            _DepthImageChangeFlag = true;
            return currentImage;
        }
    }
    return _smoothesImage;
}
/// <summary>
/// Raises the scan face mask button click event.
/// </summary>
public void OnScanFaceMaskButtonClick()
{
    RemoveFaceMask();

    // Capture webcam frame.
    if (webCamTextureToMatHelper.IsPlaying())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        faceRectInMask = DetectFace(rgbaMat);
        if (faceRectInMask.width == 0 && faceRectInMask.height == 0)
        {
            Debug.Log("A face could not be detected from the input image.");
            return;
        }

        OpenCVForUnity.Rect rect = new OpenCVForUnity.Rect((int)faceRectInMask.x, (int)faceRectInMask.y, (int)faceRectInMask.width, (int)faceRectInMask.height);
        rect.inflate(rect.x / 5, rect.y / 5);
        rect = rect.intersect(new OpenCVForUnity.Rect(0, 0, rgbaMat.width(), rgbaMat.height()));

        faceMaskTexture = new Texture2D(rect.width, rect.height, TextureFormat.RGBA32, false);
        faceMaskMat = new Mat(rgbaMat, rect).clone();
        OpenCVForUnity.Utils.matToTexture2D(faceMaskMat, faceMaskTexture);
        Debug.Log("faceMaskMat ToString " + faceMaskMat.ToString());

        faceRectInMask = DetectFace(faceMaskMat);
        faceLandmarkPointsInMask = DetectFaceLandmarkPoints(faceMaskMat, faceRectInMask);
        if (faceRectInMask.width == 0 && faceRectInMask.height == 0)
        {
            RemoveFaceMask();
            Debug.Log("A face could not be detected from the input image.");
        }
    }
}
public void onTouch(Mat rgbaMat, Point touchPoint)
{
    int cols = rgbaMat.cols();
    int rows = rgbaMat.rows();
    int x = (int)touchPoint.x;
    int y = (int)touchPoint.y;
    if ((x < 0) || (y < 0) || (x > cols) || (y > rows))
    {
        return;
    }

    // Build a small (up to 10x10) rect around the touched pixel, clipped to the image bounds.
    OpenCVForUnity.Rect touchedRect = new OpenCVForUnity.Rect();
    touchedRect.x = (x > 5) ? x - 5 : 0;
    touchedRect.y = (y > 5) ? y - 5 : 0;
    touchedRect.width = (x + 5 < cols) ? x + 5 - touchedRect.x : cols - touchedRect.x;
    touchedRect.height = (y + 5 < rows) ? y + 5 - touchedRect.y : rows - touchedRect.y;

    Mat touchedRegionRgba = rgbaMat.submat(touchedRect);
    Mat touchedRegionHsv = new Mat();
    Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);

    // Average the HSV color over the touched region.
    blobColorHsv = Core.sumElems(touchedRegionHsv);
    int pointCount = touchedRect.width * touchedRect.height;
    for (int i = 0; i < blobColorHsv.val.Length; i++)
    {
        blobColorHsv.val[i] /= pointCount;
    }

    detector.setHsvColor(blobColorHsv);
    Imgproc.resize(detector.getSpectrum(), spectrumMat, SPECTRUM_SIZE);
    isColorSelected = true;

    touchedRegionRgba.release();
    touchedRegionHsv.release();
}
// Use this for initialization
void Start()
{
    fpsMonitor = GetComponent<FpsMonitor>();

    imageSizeDropdown.value = (int)imageSize;
    countDropdown.value = 2;

    fgTex = Resources.Load("lena") as Texture2D;
    bgTex = new Texture2D(fgTex.width, fgTex.height, TextureFormat.RGBA32, false);
    alphaTex = new Texture2D(fgTex.width, fgTex.height, TextureFormat.RGBA32, false);
    dstTex = new Texture2D(fgTex.width, fgTex.height, TextureFormat.RGBA32, false);

    fgMat = new Mat(fgTex.height, fgTex.width, CvType.CV_8UC3);
    bgMat = new Mat(fgTex.height, fgTex.width, CvType.CV_8UC3);
    alphaMat = new Mat(fgTex.height, fgTex.width, CvType.CV_8UC1);
    dstMat = new Mat(fgTex.height, fgTex.width, CvType.CV_8UC3, new Scalar(0, 0, 0));

    // Generate fgMat.
    Utils.texture2DToMat(fgTex, fgMat);

    // Generate bgMat.
    Core.flip(fgMat, bgMat, 1);
    Core.bitwise_not(bgMat, bgMat);

    // Generate alphaMat.
    for (int r = 0; r < alphaMat.rows(); r++)
    {
        alphaMat.row(r).setTo(new Scalar(r / (alphaMat.rows() / 256)));
    }
    Imgproc.linearPolar(alphaMat, alphaMat, new Point(alphaMat.cols() / 2, alphaMat.rows() / 2), alphaMat.rows(), Imgproc.INTER_CUBIC | Imgproc.WARP_FILL_OUTLIERS | Imgproc.WARP_INVERSE_MAP);

    // Generate large size Mat.
    fgMatLarge = new Mat();
    bgMatLarge = new Mat();
    alphaMatLarge = new Mat();
    dstMatLarge = new Mat();
    Imgproc.resize(fgMat, fgMatLarge, new Size(), 2, 2, 0);
    Imgproc.resize(bgMat, bgMatLarge, new Size(), 2, 2, 0);
    Imgproc.resize(alphaMat, alphaMatLarge, new Size(), 2, 2, 0);
    Imgproc.resize(dstMat, dstMatLarge, new Size(), 2, 2, 0);

    // Generate small size Mat (ROI).
    OpenCVForUnity.Rect rect = new OpenCVForUnity.Rect(127, 127, 256, 256);
    fgMatROI = new Mat(fgMat, rect);
    bgMatROI = new Mat(bgMat, rect);
    alphaMatROI = new Mat(alphaMat, rect);
    dstMatROI = new Mat(dstMat, rect);

    Utils.matToTexture2D(fgMat, fgTex, true, 0, true);
    Utils.matToTexture2D(bgMat, bgTex, true, 0, true);
    Utils.matToTexture2D(alphaMat, alphaTex, true, 0, true);
    Utils.matToTexture2D(dstMat, dstTex, true, 0, true);

    fgQuad.GetComponent<Renderer>().material.mainTexture = fgTex;
    bgQuad.GetComponent<Renderer>().material.mainTexture = bgTex;
    alphaQuad.GetComponent<Renderer>().material.mainTexture = alphaTex;
    dstQuad.GetComponent<Renderer>().material.mainTexture = dstTex;
}
//public int d = 60; // selection offset within the screen

// Use this for initialization
void Start()
{
    ss = 0;
    if (SaveVideoElaborated)
    {
        folder = folder + System.DateTime.Now.ToString("_yyyy-MM-dd_HH-mm-ss");
        if (System.IO.Directory.Exists(folder))
        {
            System.IO.Directory.Delete(folder, true);
        }
        System.IO.Directory.CreateDirectory(folder);
    }

    x_coordinate = 0;
    y_coordinate = 0;
    skipFrame = 0;
    roiRect = null;
    termination = new TermCriteria(TermCriteria.EPS | TermCriteria.COUNT, 10, 1);

#if UNITY_WEBGL && !UNITY_EDITOR
    StartCoroutine(Utils.getFilePathAsync("blobparams.yml", (result) => {
        blobparams_yml_filepath = result;
    }));
#else
    blobparams_yml_filepath = Utils.getFilePath("blobparams.yml");
    //Debug.Log(blobparams_yml_filepath);
#endif
}
// Search for just a single object in the image, such as the largest face, storing the result into 'largestObject'.
// Can use Haar cascades or LBP cascades for face detection, or even eye, mouth, or car detection.
// Input is temporarily shrunk to 'scaledWidth' for much faster detection, since a small width is enough to find faces.
// Note: DetectLargestObject() should be faster than detectManyObjects().
public static void DetectLargestObject(Mat img, CascadeClassifier cascade, out Rect largestObject, int scaledWidth = 320)
{
    // Only search for just 1 object (the biggest in the image).
    int flags = Objdetect.CASCADE_FIND_BIGGEST_OBJECT; // | CASCADE_DO_ROUGH_SEARCH;

    // Smallest object size.
    Size minFeatureSize = new Size(20, 20);
    // How detailed the search should be. Must be larger than 1.0.
    float searchScaleFactor = 1.1f;
    // How much the detections should be filtered out. This should depend on how bad false detections are for your system.
    // minNeighbors=2 means lots of good+bad detections, and minNeighbors=6 means only good detections are given but some are missed.
    int minNeighbors = 4;

    // Perform object or face detection, looking for just 1 object (the biggest in the image).
    List<Rect> objects;
    detectObjectsCustom(img, cascade, out objects, scaledWidth, flags, minFeatureSize, searchScaleFactor, minNeighbors);
    if (objects.Count > 0)
    {
        // Return the only detected object.
        largestObject = objects[0];
    }
    else
    {
        // Return an invalid rect.
        largestObject = new Rect(-1, -1, -1, -1);
    }
}
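A minimal usage sketch of this helper. The cascade file name and the surrounding method are assumptions; Utils.getFilePath and CascadeClassifier are the standard OpenCVForUnity APIs:

// Hypothetical caller: detect the largest face in a frame and draw its bounding box.
void DetectAndDrawLargestFace(Mat rgbaMat)
{
    CascadeClassifier cascade = new CascadeClassifier(Utils.getFilePath("haarcascade_frontalface_alt.xml"));
    Rect largestFace;
    DetectLargestObject(rgbaMat, cascade, out largestFace, 320);
    if (largestFace.width > 0) // invalid results come back as (-1, -1, -1, -1)
    {
        Imgproc.rectangle(rgbaMat, largestFace.tl(), largestFace.br(), new Scalar(0, 255, 0, 255), 2);
    }
}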
private static List<OpenCVForUnity.Rect> getROIList(List<Mat> partMaskList)
{
    List<OpenCVForUnity.Rect> roiList = new List<OpenCVForUnity.Rect>();
    for (var i = 0; i < partMaskList.Count; i++)
    {
        // Find contours
        List<MatOfPoint> contours = new List<MatOfPoint>();
        Mat hierarchy = new Mat();
        Mat mask = partMaskList[i].clone();
        Imgproc.findContours(mask, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

        // Guard against empty masks; indexing contours[maxIdx] below would otherwise throw.
        if (contours.Count == 0)
        {
            roiList.Add(new OpenCVForUnity.Rect());
            continue;
        }

        // Find the contour with the largest area
        double maxArea = 0.0;
        int maxIdx = 0;
        for (var j = 0; j < contours.Count; j++)
        {
            double area = Imgproc.contourArea(contours[j]);
            if (area > maxArea)
            {
                maxArea = area;
                maxIdx = j;
            }
        }

        OpenCVForUnity.Rect roi = Imgproc.boundingRect(contours[maxIdx]);
        roiList.Add(roi);
    }
    return roiList;
}
// Use this for initialization
void Start()
{
    // ROI variable initialization
    roiPointList = new List<Point>();
    termination = new TermCriteria(TermCriteria.EPS | TermCriteria.COUNT, 20, 1);

    // Unity graphic object initialization
    webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
    webCamTextureToMatHelper.Init(OnWebCamTextureToMatHelperInited, OnWebCamTextureToMatHelperDisposed);

    // Initialize the Kalman filter variables
    // Initialize roi objects
    roiRect = new OpenCVForUnity.Rect();
    roiPred = new OpenCVForUnity.Rect();
    roiSearch = new OpenCVForUnity.Rect();

    fps = 60f;    // expected frame rate
    dt = 1 / fps; // expected sampling interval

    // Observation matrix, H
    Matrix H2 = Matrix.Build.DenseOfArray(new float[,] { { 1, 0, 0 } });
    H = Matrix.Build.DenseOfMatrixArray(new Matrix[,] {
        { H2, zero1x3, zero1x3 },
        { zero1x3, H2, zero1x3 },
        { zero1x3, zero1x3, H2 }
    });
}
public Mat createBinaryMat(Mat cameraMat, OpenCVForUnity.Rect rect)
{
    // Narrow down the ROI
    Mat roiMat = new Mat(cameraMat, rect);

    // Channel sets used for binarization
    var hsvChannels = ARUtil.getHSVChannels(roiMat);
    var yCrCbChannels = ARUtil.getYCrCbChannels(roiMat);

    Mat S_Binary = new Mat(roiMat.size(), CvType.CV_8UC1);
    Core.inRange(hsvChannels[1], new Scalar(s_thresh_lower), new Scalar(s_thresh_upper), S_Binary);

    Mat Cr_Binary = new Mat(roiMat.size(), CvType.CV_8UC1);
    Core.inRange(yCrCbChannels[1], new Scalar(cr_thresh_lower), new Scalar(cr_thresh_upper), Cr_Binary);

    Mat V_Binary = new Mat(roiMat.size(), CvType.CV_8UC1);
    Core.inRange(hsvChannels[2], new Scalar(v_thresh_lower), new Scalar(v_thresh_upper), V_Binary);

    // TODO: make effective use of adaptiveThreshold
    // Imgproc.adaptiveThreshold (hsvChannels [2], coffeeBinaryMat, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY_INV, 5, 5);

    // Take the AND of the binary images above
    Mat wiseAnd = new Mat(roiMat.size(), CvType.CV_8UC1);
    Core.bitwise_and(S_Binary, Cr_Binary, wiseAnd);
    Core.bitwise_and(V_Binary, wiseAnd, wiseAnd);

    // Noise removal
    //Imgproc.morphologyEx(wiseAnd, wiseAnd, Imgproc.MORPH_OPEN, new Mat(), new Point(-1, -1), 2);

    return wiseAnd;
}
/** Converts a Rect from the OpenCVForUnity lib to a UnityEngine.Rect */
private static UnityEngine.Rect CVtoUnityRect(OpenCVForUnity.Rect rectangle)
{
    Vector3 topLeft = new Vector3(rectangle.x, rectangle.y);
    Vector3 bottomRight = new Vector3(rectangle.x + rectangle.width, rectangle.y + rectangle.height);
    return UnityEngine.Rect.MinMaxRect(topLeft.x, topLeft.y, bottomRight.x, bottomRight.y);
}
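A usage sketch for this converter (the rect values and the IMGUI caller are assumptions). Both OpenCV image coordinates and IMGUI screen coordinates put the origin at the top-left, so no Y-flip is needed:

// Hypothetical usage: show a detected OpenCV rect as an IMGUI box.
OpenCVForUnity.Rect cvRect = new OpenCVForUnity.Rect(40, 60, 100, 120);
UnityEngine.Rect guiRect = CVtoUnityRect(cvRect);
// In OnGUI():
//     GUI.Box(guiRect, "face");
// A real caller may still need to scale the rect from image pixels to screen pixels.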
public Rect Inflate(Rect rect, int x, int y)
{
    rect.x -= x;
    rect.y -= y;
    rect.width += (2 * x);
    rect.height += (2 * y);
    return rect;
}
public OpenCVForUnity.Rect predictNextSearchRect()
{
    // Expand the current rect by a 10% margin on each side, then clamp it to the parent Mat.
    int margin = Mathf.CeilToInt(rect.width * 0.1f);
    var searchRect = new OpenCVForUnity.Rect(rect.x - margin, rect.y - margin, rect.width + margin * 2, rect.height + margin * 2);
    searchRect = ARUtil.calcRectWithinMat(searchRect, parentMat);
    return searchRect;
}
Vector roi2center(OpenCVForUnity.Rect roi)
{
    float xU = 0.5f * (roi.x + (roi.x + roi.width));
    float yU = 0.5f * (roi.y + (roi.y + roi.height));
    Vector meas = Vector.Build.DenseOfArray(new float[] { xU, yU }); // measurement vector
    return meas;
}
/// <summary>
/// Convert a face rect to JSON
/// </summary>
/// <param name="face"></param>
/// <returns></returns>
private String jsonConvertFace(Rect face)
{
    Thread.Sleep(10);

    // Clipping the face
    Texture2D clipTex;
    Debug.Log("x:" + face.x + " y:" + face.y + " width:" + face.width + " height:" + face.height);
    // Texture2D texture_out = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);
    var pixel = texture.GetPixels(face.x, face.y, face.width, face.height);
    var quad = GameObject.Find("Quad2");
    clipTex = new Texture2D(face.width, face.height);
    clipTex.SetPixels(pixel);
    // TODO: choose bilinear or point
    // TextureScale.Point(clipTex, 32, 32);
    TextureScale.Bilinear(clipTex, 32, 32);
    clipTex.Apply();
    quad.GetComponent<Renderer>().material.mainTexture = clipTex;

    // TODO: convert pixels to a float array [32][32][3]
    Color[] pixels = clipTex.GetPixels();
    float[,,] array = new float[32, 32, 3];
    Debug.Log(pixels.Length);
    String jsonString = "{\"input\":[";
    for (int i = 0, len = pixels.Length; i < len; i++)
    {
        array[i / 32, i % 32, 0] = Mathf.FloorToInt(pixels[i].r * 255);
        array[i / 32, i % 32, 1] = Mathf.FloorToInt(pixels[i].g * 255);
        array[i / 32, i % 32, 2] = Mathf.FloorToInt(pixels[i].b * 255);
    }

    // Not used here, but kept for reference:
    // the multidimensional array could also be processed with LINQ.
    // GetPixels returns rows bottom-to-top, so indexing with 31 - i flips the image vertically.
    for (int i = 0; i < 32; i++)
    {
        jsonString += "[";
        for (int j = 0; j < 32; j++)
        {
            jsonString += "[";
            for (int k = 0; k < 3; k++)
            {
                jsonString += array[31 - i, j, k];
                jsonString += k == 2 ? "" : ",";
            }
            jsonString += "]";
            jsonString += j == 31 ? "" : ",";
        }
        jsonString += "]";
        jsonString += i == 31 ? "" : ",";
    }
    jsonString += "]}";
    Debug.Log(jsonString);
    return jsonString;
}
// Flood fill
public static Mat MyFloodFill(Mat img)
{
    OpenCVForUnity.Rect ccomp = new OpenCVForUnity.Rect();
    Mat mask = new Mat();
    Imgproc.floodFill(img, mask, new Point(50, 20), new Scalar(0, 0, 0), ccomp, new Scalar(10, 10, 10), new Scalar(10, 10, 10), 0);
    return img;
}
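A usage sketch, assuming a texture named "lena" exists in Resources. Note that the floodFill call above mutates and returns the same Mat instance:

Texture2D tex = Resources.Load("lena") as Texture2D;
Mat img = new Mat(tex.height, tex.width, CvType.CV_8UC3);
Utils.texture2DToMat(tex, img);
Mat filled = MyFloodFill(img); // 'filled' and 'img' are the same object
// The 'ccomp' rect inside MyFloodFill receives the bounding box of the repainted region.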
// Display the age above the head
void PutAgeOnHead(Rect rect, int age)
{
    _ageText.text = age.ToString();
    // The -320/240 offsets appear to assume a 640x480 camera image centered at the origin.
    var pos = new Vector3(rect.x - 320 + rect.width / 2f, 240 - rect.y, 0f);
    var text = Instantiate(_ageText, pos, Quaternion.identity);
    text.transform.SetParent(_ageCanvas, false);
    text.GetComponent<RectTransform>().position = RectTransformUtility.WorldToScreenPoint(Camera.main, pos);
}
public RegionCandidate()
{
    index = 0;
    contour = new MatOfPoint();
    contour2f = new MatOfPoint2f();
    _area = 0.0;
    _circularity = 0.0;
    _boundingRect = null;
}
private void zoomCropped(ref Mat croppedImage, ref OpenCVForUnity.Rect bb)
{
    int croppedWidth = croppedImage.cols();
    int croppedHeight = croppedImage.rows();
    OpenCVForUnity.Rect expandedBB;

    if (croppedWidth > croppedHeight)
    {
        int topMargin = (croppedWidth - croppedHeight) / 2;
        int botMargin = topMargin;
        // Needed due to precision loss when dividing by 2
        if ((croppedHeight + topMargin * 2) != croppedWidth)
        {
            botMargin = croppedWidth - croppedHeight - topMargin;
        }
        Core.copyMakeBorder(croppedImage, croppedImage, topMargin, botMargin, 0, 0, Core.BORDER_REPLICATE);
        expandedBB = new OpenCVForUnity.Rect(
            new Point(bb.tl().x, bb.tl().y - topMargin),
            new Point(bb.br().x, bb.br().y + botMargin));
    }
    else if (croppedHeight > croppedWidth)
    {
        int lefMargin = (croppedHeight - croppedWidth) / 2;
        int rigMargin = lefMargin;
        // Needed due to precision loss when dividing by 2
        if ((croppedWidth + lefMargin * 2) != croppedHeight)
        {
            rigMargin = croppedHeight - croppedWidth - lefMargin;
        }
        Core.copyMakeBorder(croppedImage, croppedImage, 0, 0, lefMargin, rigMargin, Core.BORDER_REPLICATE);
        expandedBB = new OpenCVForUnity.Rect(
            new Point(bb.tl().x - lefMargin, bb.tl().y),
            new Point(bb.br().x + rigMargin, bb.br().y));
    }
    else
    {
        expandedBB = bb;
    }

    // We have the originPoint & originalSize in the frame coordinates here.
    originPoint = expandedBB.tl();
    originImage = croppedImage.clone();
    originalSize = expandedBB.size();

    Mat scaleImage = new Mat();
    Imgproc.resize(croppedImage, scaleImage, new Size(Constant.MODEL_HEIGHT, Constant.MODEL_WIDTH));

    // Returns croppedImage [224*224*3] and bb (the expandedBB in the original coordinates)
    croppedImage = scaleImage;
    bb = expandedBB;
}
// Get a Rect whose min/max corners are clamped to lie within the mat area
public static OpenCVForUnity.Rect calcRectWithinMat(OpenCVForUnity.Rect rect, Mat mat)
{
    int minLimitX = (int)Mathf.Min(Mathf.Max((float)0.0, (float)rect.tl().x), (float)mat.cols());
    int maxLimitX = (int)Mathf.Min(Mathf.Max((float)0.0, (float)rect.br().x), (float)mat.cols());
    int minLimitY = (int)Mathf.Min(Mathf.Max((float)0.0, (float)rect.tl().y), (float)mat.rows());
    int maxLimitY = (int)Mathf.Min(Mathf.Max((float)0.0, (float)rect.br().y), (float)mat.rows());
    return new OpenCVForUnity.Rect(minLimitX, minLimitY, maxLimitX - minLimitX, maxLimitY - minLimitY);
}
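A usage sketch (the mat size and rect values are assumptions):

// Hypothetical usage: clamp a search window that extends past the image border.
Mat frame = new Mat(480, 640, CvType.CV_8UC4);
OpenCVForUnity.Rect search = new OpenCVForUnity.Rect(600, -20, 100, 100);
OpenCVForUnity.Rect clamped = ARUtil.calcRectWithinMat(search, frame);
// clamped is now (600, 0, 40, 80), safe to use with new Mat(frame, clamped)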
void Update()
{
    Image camImg = CameraDevice.Instance.GetCameraImage(Image.PIXEL_FORMAT.RGBA8888);
    if (camImg != null)
    {
        if (camImageMat == null)
        {
            camImageMat = new Mat(camImg.Height, camImg.Width, CvType.CV_8UC4); // Note: rows=height, cols=width
        }
        camImageMat.put(0, 0, camImg.Pixels);

        // Read from videoCap and save in mat
        videoCapture.read(frontCameraImgMat);

        Face.getFacesHAAR(frontCameraImgMat, faces, faceXML);
        Debug.Log("Faces " + faces.height());
        if (faces.height() > 0)
        {
            for (var i = 0; i < faces.height(); i++)
            {
                double[] faceRect = faces.get(i, 0);
                Point faceRectPoint1 = new Point(faceRect[0], faceRect[1]);
                Point faceRectPoint2 = new Point(faceRect[0] + faceRect[2], faceRect[1] + faceRect[3]);
                Imgproc.rectangle(frontCameraImgMat, faceRectPoint1, faceRectPoint2, new Scalar(0, 0, 255), 5);
                roi = new OpenCVForUnity.Rect(faceRectPoint1, faceRectPoint2);
            }

            faceWithMarkings = new Mat(frontCameraImgMat, roi);

            Face.getFacesHAAR(faceWithMarkings, eyes, eyeXML);
            Debug.Log("Eyes " + eyes.height());
            if (eyes.height() != 0)
            {
                for (var i = 0; i < eyes.height(); i++)
                {
                    if (i < 2)
                    {
                        double[] eyeRect = eyes.get(i, 0);
                        Point eyeCenter = new Point(eyeRect[2] * 0.5F + eyeRect[0], eyeRect[3] * 0.5F + eyeRect[1]);
                        int radius = (int)Mathf.Sqrt(Mathf.Pow(((float)eyeRect[2]) * 0.5F, 2F) + Mathf.Pow(((float)eyeRect[3]) * 0.5F, 2F));
                        Imgproc.circle(faceWithMarkings, new Point(eyeCenter.x, eyeCenter.y), radius, new Scalar(255, 0, 0), 5);
                    }
                }
            }

            MatDisplay.MatToTexture(faceWithMarkings, ref unwarpedTexture);
            faceTargetPlane.GetComponent<Renderer>().material.mainTexture = unwarpedTexture;
        }
    }

    MatDisplay.DisplayMat(camImageMat, MatDisplaySettings.FULL_BACKGROUND);
    MatDisplay.DisplayMat(frontCameraImgMat, MatDisplaySettings.BOTTOM_LEFT);
}
/** System call executed in polling, one time per frame */
void Update()
{
    ProcessKeyboardInput();
    if (IsHandoffRequired())
    {
        doHandoff();
    }
    rectanglesToPrint.Clear();
    frame = getCurrentFrame();

#if VERBOSE
    Debug.Log("camera_in_position: " + IsBackToStart(CurrentCamera()));
#endif

    if (!getTrackingState())
    {
        if (IsBackToStart(CurrentCamera()))
        {
            roiRect = BgSub();
            if (roiRect != null)
            {
                InitCamShift();
            }
            if (saveVideo)
            {
                SaveMatToFile("rgbMat", frame);
            }
        }
    }
    else
    {
        if (!isCamshiftPaused)
        {
            hsvMat = new Mat(frame.rows(), frame.cols(), CvType.CV_8UC3);
            Imgproc.cvtColor(frame, hsvMat, Imgproc.COLOR_RGBA2RGB);
            Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);
            RunCamShift();
            iterations++;
            if (iterations > maxIterations || roiRect.height > 2 * (Screen.height / 3))
            {
                Camera master = cameraArray[0];
                ChangeCamera(master);
                setTracking(false);
            }
            rectanglesToPrint.Add(new ColoredRect(roiRect, Color.green));
            PassCoordinatesToCamera();
        }
    }
}
public static void affineTransform(Mat src, Mat dst, OpenCVForUnity.Rect roi)
{
    // Get the affine matrix from three corner correspondences:
    // src top-left, top-right, bottom-right -> roi top-left, top-right, bottom-right
    var srcPoints = new MatOfPoint2f(new Point(0.0, 0.0), new Point(src.cols() - 1, 0.0), new Point(src.cols() - 1, src.rows() - 1));
    var dstPoints = new MatOfPoint2f(roi.tl(), new Point(roi.x + roi.width, roi.y), roi.br());
    Mat transform = Imgproc.getAffineTransform(srcPoints, dstPoints);

    // Apply the affine transform
    Imgproc.warpAffine(src, dst, transform, dst.size(), Imgproc.INTER_LINEAR);
}
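A usage sketch (the canvas size, logo size, and ROI values are all assumptions): paste a small image into a rectangular region of a larger destination Mat.

// Hypothetical usage: warp 'logo' into a 100x50 region of 'canvas' at (20, 30).
Mat canvas = Mat.zeros(480, 640, CvType.CV_8UC3);
Mat logo = new Mat(64, 128, CvType.CV_8UC3, new Scalar(255, 255, 255));
OpenCVForUnity.Rect roi = new OpenCVForUnity.Rect(20, 30, 100, 50);
ARUtil.affineTransform(logo, canvas, roi);
// Note: warpAffine writes over the whole dst; pixels outside the ROI become the border value.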
public virtual Texture2D UpdateLUTTex(int id, Mat src, Mat dst, List<Vector2> src_landmarkPoints, List<Vector2> dst_landmarkPoints)
{
    if (src_mask != null && (src.width() != src_mask.width() || src.height() != src_mask.height()))
    {
        src_mask.Dispose();
        src_mask = null;
    }
    src_mask = src_mask ?? new Mat(src.rows(), src.cols(), CvType.CV_8UC1, Scalar.all(0));

    if (dst_mask != null && (dst.width() != dst_mask.width() || dst.height() != dst_mask.height()))
    {
        dst_mask.Dispose();
        dst_mask = null;
    }
    dst_mask = dst_mask ?? new Mat(dst.rows(), dst.cols(), CvType.CV_8UC1, Scalar.all(0));

    // Get facial contour points.
    GetFacialContourPoints(src_landmarkPoints, src_facialContourPoints);
    GetFacialContourPoints(dst_landmarkPoints, dst_facialContourPoints);

    // Get facial contour rect.
    OpenCVForUnity.Rect src_facialContourRect = Imgproc.boundingRect(new MatOfPoint(src_facialContourPoints));
    OpenCVForUnity.Rect dst_facialContourRect = Imgproc.boundingRect(new MatOfPoint(dst_facialContourPoints));
    src_facialContourRect = src_facialContourRect.intersect(new OpenCVForUnity.Rect(0, 0, src.width(), src.height()));
    dst_facialContourRect = dst_facialContourRect.intersect(new OpenCVForUnity.Rect(0, 0, dst.width(), dst.height()));

    Mat src_ROI = new Mat(src, src_facialContourRect);
    Mat dst_ROI = new Mat(dst, dst_facialContourRect);
    Mat src_mask_ROI = new Mat(src_mask, src_facialContourRect);
    Mat dst_mask_ROI = new Mat(dst_mask, dst_facialContourRect);

    GetPointsInFrame(src_mask_ROI, src_facialContourPoints, src_facialContourPoints);
    GetPointsInFrame(dst_mask_ROI, dst_facialContourPoints, dst_facialContourPoints);

    src_mask_ROI.setTo(new Scalar(0));
    dst_mask_ROI.setTo(new Scalar(0));
    Imgproc.fillConvexPoly(src_mask_ROI, new MatOfPoint(src_facialContourPoints), new Scalar(255));
    Imgproc.fillConvexPoly(dst_mask_ROI, new MatOfPoint(dst_facialContourPoints), new Scalar(255));

    Texture2D LUTTex;
    if (LUTTexDict.ContainsKey(id))
    {
        LUTTex = LUTTexDict[id];
    }
    else
    {
        LUTTex = new Texture2D(256, 1, TextureFormat.RGB24, false);
        LUTTexDict.Add(id, LUTTex);
    }

    FaceMaskShaderUtils.CalculateLUT(src_ROI, dst_ROI, src_mask_ROI, dst_mask_ROI, LUTTex);

    return LUTTex;
}
private Mat cropTexToModelSizeMat(Texture2D sourceTex, List<int> thresList)
{
    Mat sourceImage = new Mat(sourceTex.height, sourceTex.width, CvType.CV_8UC3);
    Utils.texture2DToMat(sourceTex, sourceImage);

    // BGR to HSV
    Mat hsvImage = new Mat(sourceImage.rows(), sourceImage.cols(), CvType.CV_8UC3);
    List<Mat> hsvList = new List<Mat>();
    Imgproc.cvtColor(sourceImage, hsvImage, Imgproc.COLOR_BGR2HSV);

    // InRange
    Mat grayImage = new Mat(sourceImage.rows(), sourceImage.cols(), CvType.CV_8UC1);
    Core.inRange(hsvImage, new Scalar(thresList[0], thresList[2], thresList[4]), new Scalar(thresList[1], thresList[3], thresList[5]), grayImage);
    Imgproc.morphologyEx(grayImage, grayImage, Imgproc.MORPH_OPEN, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5)));

    // Find Contours
    List<MatOfPoint> contours = new List<MatOfPoint>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(grayImage, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE, new Point(0, 0));

    // Find the largest contour (assumes at least one contour was found)
    int maxAreaIdex = 0;
    double maxArea = 0;
    for (var i = 0; i < contours.Count; i++)
    {
        double area = Imgproc.contourArea(contours[i]);
        if (area > maxArea)
        {
            maxArea = area;
            maxAreaIdex = i;
        }
    }

    // Find the bounding box, padded by 50 px on each side and clipped to the image
    OpenCVForUnity.Rect roi = Imgproc.boundingRect(contours[maxAreaIdex]);
    OpenCVForUnity.Rect bb = new OpenCVForUnity.Rect(
        new Point(Math.Max(roi.tl().x - 50.0, 0), Math.Max(roi.tl().y - 50.0, 0)),
        new Point(Math.Min(roi.br().x + 50.0, sourceImage.cols()), Math.Min(roi.br().y + 50.0, sourceImage.rows())));
    Mat croppedImage = new Mat(sourceImage, bb);

    // Zoom to 224*224
    zoomCropped(ref croppedImage, ref bb);

    return croppedImage;
}
private void detectInRegion(Mat img, Rect r, List<Rect> detectedObjectsInRegions)
{
    Rect r0 = new Rect(new Point(), img.size());
    Rect r1 = new Rect(r.x, r.y, r.width, r.height);
    Inflate(r1, (int)((r1.width * innerParameters.coeffTrackingWindowSize) - r1.width) / 2,
        (int)((r1.height * innerParameters.coeffTrackingWindowSize) - r1.height) / 2);
    r1 = Intersect(r0, r1);

    if ((r1.width <= 0) || (r1.height <= 0))
    {
        Debug.Log("DetectionBasedTracker::detectInRegion: Empty intersection");
        return;
    }

    int d = Math.Min(r.width, r.height);
    d = (int)Math.Round(d * innerParameters.coeffObjectSizeToTrack);

    MatOfRect tmpobjects = new MatOfRect();
    Mat img1 = new Mat(img, r1); // subimage for rectangle -- without data copying

    regionCascade.detectMultiScale(img1, tmpobjects, parameters.scaleFactor, parameters.minNeighbors,
        0 | Objdetect.CASCADE_DO_CANNY_PRUNING | Objdetect.CASCADE_SCALE_IMAGE | Objdetect.CASCADE_FIND_BIGGEST_OBJECT,
        new Size(d, d), new Size());

    Rect[] tmpobjectsArray = tmpobjects.toArray();
    int len = tmpobjectsArray.Length;
    for (int i = 0; i < len; i++)
    {
        Rect tmp = tmpobjectsArray[i];
        Rect curres = new Rect(new Point(tmp.x + r1.x, tmp.y + r1.y), tmp.size());
        detectedObjectsInRegions.Add(curres);
    }
}
private IEnumerator init()
{
    if (webCamTexture != null)
    {
        webCamTexture.Stop();
        initDone = false;
        rgbaMat.Dispose();
        hsvMat.Dispose();
        if (roiHistMat != null)
            roiHistMat.Dispose();
        roiPointList.Clear();
    }

    // Checks how many and which cameras are available on the device
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
    {
        if (WebCamTexture.devices[cameraIndex].isFrontFacing == isFrontFacing)
        {
            Debug.Log(cameraIndex + " name " + WebCamTexture.devices[cameraIndex].name + " isFrontFacing " + WebCamTexture.devices[cameraIndex].isFrontFacing);
            webCamDevice = WebCamTexture.devices[cameraIndex];
            webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            break;
        }
    }

    if (webCamTexture == null)
    {
        webCamDevice = WebCamTexture.devices[0];
        webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
    }

    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

    // Starts the camera
    webCamTexture.Play();

    while (true)
    {
        // If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1; otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
        if (webCamTexture.width > 16 && webCamTexture.height > 16)
        {
#else
        if (webCamTexture.didUpdateThisFrame)
        {
#endif
            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            hsvMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC3);
            texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

            gameObject.transform.eulerAngles = new Vector3(0, 0, 0);
#if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
            gameObject.transform.eulerAngles = new Vector3(0, 0, -90);
#endif
            // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

            gameObject.transform.localScale = new Vector3(webCamTexture.width, webCamTexture.height, 1);

            // bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            // float scaleX = 1;
            // float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
            // if (webCamTexture.videoRotationAngle == 270)
            //     scaleY = -1.0f;
            // gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

            gameObject.GetComponent<Renderer>().material.mainTexture = texture;

#if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
            Camera.main.orthographicSize = (((float)Screen.height / (float)Screen.width) * (float)webCamTexture.height) / 2.0f;
#else
            Camera.main.orthographicSize = webCamTexture.height / 2;
#endif

            initDone = true;
            break;
        }
        else
        {
            yield return 0;
        }
    }
}

// Update is called once per frame
void Update()
{
    if (!initDone)
        return;

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16)
    {
#else
    if (webCamTexture.didUpdateThisFrame)
    {
#endif
        Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

        if (webCamTexture.videoVerticallyMirrored)
        {
            if (webCamDevice.isFrontFacing)
            {
                if (webCamTexture.videoRotationAngle == 0)
                {
                    Core.flip(rgbaMat, rgbaMat, 1);
                }
                else if (webCamTexture.videoRotationAngle == 90)
                {
                    Core.flip(rgbaMat, rgbaMat, 0);
                }
                else if (webCamTexture.videoRotationAngle == 270)
                {
                    Core.flip(rgbaMat, rgbaMat, 1);
                }
            }
            else
            {
                if (webCamTexture.videoRotationAngle == 90)
                {
                }
                else if (webCamTexture.videoRotationAngle == 270)
                {
                    Core.flip(rgbaMat, rgbaMat, -1);
                }
            }
        }
        else
        {
            if (webCamDevice.isFrontFacing)
            {
                if (webCamTexture.videoRotationAngle == 0)
                {
                    Core.flip(rgbaMat, rgbaMat, 1);
                }
                else if (webCamTexture.videoRotationAngle == 90)
                {
                    Core.flip(rgbaMat, rgbaMat, 0);
                }
                else if (webCamTexture.videoRotationAngle == 270)
                {
                    Core.flip(rgbaMat, rgbaMat, 1);
                }
            }
            else
            {
                if (webCamTexture.videoRotationAngle == 90)
                {
                }
                else if (webCamTexture.videoRotationAngle == 270)
                {
                    Core.flip(rgbaMat, rgbaMat, -1);
                }
            }
        }

        Imgproc.cvtColor(rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
        Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);

        Point[] points = roiPointList.ToArray();

        if (roiPointList.Count == 4)
        {
            using (Mat backProj = new Mat())
            {
                Imgproc.calcBackProject(new List<Mat>(new Mat[] { hsvMat }), new MatOfInt(0), roiHistMat, backProj, new MatOfFloat(0, 180), 1.0);
                RotatedRect r = Video.CamShift(backProj, roiRect, termination);
                r.points(points);
            }

#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
            //Touch
            int touchCount = Input.touchCount;
            if (touchCount == 1)
            {
                if (Input.GetTouch(0).phase == TouchPhase.Ended)
                {
                    roiPointList.Clear();
                }
            }
#else
            if (Input.GetMouseButtonUp(0))
            {
                roiPointList.Clear();
            }
#endif
        }

        if (roiPointList.Count < 4)
        {
#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
            //Touch
            int touchCount = Input.touchCount;
            if (touchCount == 1)
            {
                Touch t = Input.GetTouch(0);
                if (t.phase == TouchPhase.Ended)
                {
                    roiPointList.Add(convertScreenPoint(new Point(t.position.x, t.position.y), gameObject, Camera.main));
                    // Debug.Log ("touch X " + t.position.x);
                    // Debug.Log ("touch Y " + t.position.y);
                    if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList[roiPointList.Count - 1])))
                    {
                        roiPointList.RemoveAt(roiPointList.Count - 1);
                    }
                }
            }
#else
            //Mouse
            if (Input.GetMouseButtonUp(0))
            {
                roiPointList.Add(convertScreenPoint(new Point(Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));
                // Debug.Log ("mouse X " + Input.mousePosition.x);
                // Debug.Log ("mouse Y " + Input.mousePosition.y);
                if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList[roiPointList.Count - 1])))
                {
                    roiPointList.RemoveAt(roiPointList.Count - 1);
                }
            }
#endif

            if (roiPointList.Count == 4)
            {
                using (MatOfPoint roiPointMat = new MatOfPoint(roiPointList.ToArray()))
                {
                    roiRect = Imgproc.boundingRect(roiPointMat);
                }

                if (roiHistMat != null)
                {
                    roiHistMat.Dispose();
                    roiHistMat = null;
                }
                roiHistMat = new Mat();

                using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                using (Mat maskMat = new Mat())
                {
                    Imgproc.calcHist(new List<Mat>(new Mat[] { roiHSVMat }), new MatOfInt(0), maskMat, roiHistMat, new MatOfInt(16), new MatOfFloat(0, 180));
                    Core.normalize(roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);
                    // Debug.Log ("roiHist " + roiHistMat.ToString ());
                }
            }
        }

        if (points.Length < 4)
        {
            for (int i = 0; i < points.Length; i++)
            {
                Core.circle(rgbaMat, points[i], 6, new Scalar(0, 0, 255, 255), 2);
            }
        }
        else
        {
            for (int i = 0; i < 4; i++)
            {
                Core.line(rgbaMat, points[i], points[(i + 1) % 4], new Scalar(255, 0, 0, 255), 2);
            }
            Core.rectangle(rgbaMat, roiRect.tl(), roiRect.br(), new Scalar(0, 255, 0, 255), 2);
        }

        Core.putText(rgbaMat, "PLEASE TOUCH 4 POINTS", new Point(5, 25), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);

        Utils.matToTexture2D(rgbaMat, texture, colors);

        gameObject.GetComponent<Renderer>().material.mainTexture = texture;
    }
}

void OnDisable()
{
    webCamTexture.Stop();
}

void OnGUI()
{
    float screenScale = Screen.width / 240.0f;
    Matrix4x4 scaledMatrix = Matrix4x4.Scale(new Vector3(screenScale, screenScale, screenScale));
    GUI.matrix = scaledMatrix;

    GUILayout.BeginVertical();
    if (GUILayout.Button("back"))
    {
        Application.LoadLevel("OpenCVForUnitySample");
    }
    if (GUILayout.Button("change camera"))
    {
        isFrontFacing = !isFrontFacing;
        StartCoroutine(init());
    }
    GUILayout.EndVertical();
}

/// <summary>
/// Converts the screen point.
/// </summary>
/// <returns>The screen point.</returns>
/// <param name="screenPoint">Screen point.</param>
/// <param name="quad">Quad.</param>
/// <param name="cam">Cam.</param>
static Point convertScreenPoint(Point screenPoint, GameObject quad, Camera cam)
{
#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
    Vector2 tl = cam.WorldToScreenPoint(new Vector3(quad.transform.localPosition.x + quad.transform.localScale.y / 2, quad.transform.localPosition.y + quad.transform.localScale.x / 2, quad.transform.localPosition.z));
    Vector2 tr = cam.WorldToScreenPoint(new Vector3(quad.transform.localPosition.x + quad.transform.localScale.y / 2, quad.transform.localPosition.y - quad.transform.localScale.x / 2, quad.transform.localPosition.z));
    Vector2 br = cam.WorldToScreenPoint(new Vector3(quad.transform.localPosition.x - quad.transform.localScale.y / 2, quad.transform.localPosition.y - quad.transform.localScale.x / 2, quad.transform.localPosition.z));
    Vector2 bl = cam.WorldToScreenPoint(new Vector3(quad.transform.localPosition.x - quad.transform.localScale.y / 2, quad.transform.localPosition.y + quad.transform.localScale.x / 2, quad.transform.localPosition.z));
#else
    Vector2 tl = cam.WorldToScreenPoint(new Vector3(quad.transform.localPosition.x - quad.transform.localScale.x / 2, quad.transform.localPosition.y + quad.transform.localScale.y / 2, quad.transform.localPosition.z));
    Vector2 tr = cam.WorldToScreenPoint(new Vector3(quad.transform.localPosition.x + quad.transform.localScale.x / 2, quad.transform.localPosition.y + quad.transform.localScale.y / 2, quad.transform.localPosition.z));
    Vector2 br = cam.WorldToScreenPoint(new Vector3(quad.transform.localPosition.x + quad.transform.localScale.x / 2, quad.transform.localPosition.y - quad.transform.localScale.y / 2, quad.transform.localPosition.z));
    Vector2 bl = cam.WorldToScreenPoint(new Vector3(quad.transform.localPosition.x - quad.transform.localScale.x / 2, quad.transform.localPosition.y - quad.transform.localScale.y / 2, quad.transform.localPosition.z));
#endif

    Mat srcRectMat = new Mat(4, 1, CvType.CV_32FC2);
    Mat dstRectMat = new Mat(4, 1, CvType.CV_32FC2);

    srcRectMat.put(0, 0, tl.x, tl.y, tr.x, tr.y, br.x, br.y, bl.x, bl.y);
    dstRectMat.put(0, 0, 0.0, 0.0, quad.transform.localScale.x, 0.0, quad.transform.localScale.x, quad.transform.localScale.y, 0.0, quad.transform.localScale.y);

    Mat perspectiveTransform = Imgproc.getPerspectiveTransform(srcRectMat, dstRectMat);
    // Debug.Log ("srcRectMat " + srcRectMat.dump ());
    // Debug.Log ("dstRectMat " + dstRectMat.dump ());
    // Debug.Log ("perspectiveTransform " + perspectiveTransform.dump ());

    MatOfPoint2f srcPointMat = new MatOfPoint2f(screenPoint);
    MatOfPoint2f dstPointMat = new MatOfPoint2f();
    Core.perspectiveTransform(srcPointMat, dstPointMat, perspectiveTransform);
    // Debug.Log ("srcPointMat " + srcPointMat.dump ());
    // Debug.Log ("dstPointMat " + dstPointMat.dump ());

    return dstPointMat.toArray()[0];
}
// Use this for initialization
void Start()
{
    Texture2D imageTexture = Resources.Load("lena") as Texture2D;
    Mat image = new Mat(imageTexture.height, imageTexture.width, CvType.CV_8UC3);
    Utils.texture2DToMat(imageTexture, image);
    Debug.Log("image.ToString() " + image.ToString());

    Texture2D maskTexture = Resources.Load("lena_grabcut_mask") as Texture2D;
    Mat mask = new Mat(imageTexture.height, imageTexture.width, CvType.CV_8UC1);
    Utils.texture2DToMat(maskTexture, mask);
    Debug.Log("mask.ToString() " + mask.ToString());

    OpenCVForUnity.Rect rectangle = new OpenCVForUnity.Rect(10, 10, image.cols() - 20, image.rows() - 20);

    Mat bgdModel = new Mat(); // extracted features for background
    Mat fgdModel = new Mat(); // extracted features for foreground

    convertToGrabCutValues(mask); // from grayscale values to grabcut values

    int iterCount = 5;
    // Imgproc.grabCut (image, mask, rectangle, bgdModel, fgdModel, iterCount, Imgproc.GC_INIT_WITH_RECT);
    Imgproc.grabCut(image, mask, rectangle, bgdModel, fgdModel, iterCount, Imgproc.GC_INIT_WITH_MASK);

    convertToGrayScaleValues(mask); // back to grayscale values
    Imgproc.threshold(mask, mask, 128, 255, Imgproc.THRESH_TOZERO);

    Mat foreground = new Mat(image.size(), CvType.CV_8UC3, new Scalar(0, 0, 0));
    image.copyTo(foreground, mask);

    Texture2D texture = new Texture2D(image.cols(), image.rows(), TextureFormat.RGBA32, false);
    Utils.matToTexture2D(foreground, texture);
    gameObject.GetComponent<Renderer>().material.mainTexture = texture;
}
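The convertToGrabCutValues/convertToGrayScaleValues helpers are not part of this snippet. A plausible sketch under the assumption that the grayscale mask encodes the four grabCut classes as brightness bands (the band thresholds here are made up, not the sample's actual values):

// Hypothetical helpers: map grayscale mask values to Imgproc.GC_* labels and back.
private static void convertToGrabCutValues(Mat mask)
{
    Mat fgd = new Mat(), prFgd = new Mat(), prBgd = new Mat();
    Core.inRange(mask, new Scalar(200), new Scalar(255), fgd);   // bright -> sure foreground
    Core.inRange(mask, new Scalar(100), new Scalar(199), prFgd); // mid-bright -> probable foreground
    Core.inRange(mask, new Scalar(1), new Scalar(99), prBgd);    // mid-dark -> probable background
    mask.setTo(new Scalar(Imgproc.GC_BGD));                      // everything else -> sure background
    mask.setTo(new Scalar(Imgproc.GC_PR_BGD), prBgd);
    mask.setTo(new Scalar(Imgproc.GC_PR_FGD), prFgd);
    mask.setTo(new Scalar(Imgproc.GC_FGD), fgd);
}

private static void convertToGrayScaleValues(Mat mask)
{
    Mat fgd = new Mat(), prFgd = new Mat();
    Core.compare(mask, new Scalar(Imgproc.GC_FGD), fgd, Core.CMP_EQ);
    Core.compare(mask, new Scalar(Imgproc.GC_PR_FGD), prFgd, Core.CMP_EQ);
    mask.setTo(new Scalar(0));          // background classes -> black
    mask.setTo(new Scalar(128), prFgd); // probable foreground -> gray (zeroed by THRESH_TOZERO at 128)
    mask.setTo(new Scalar(255), fgd);   // sure foreground -> white
}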
// Update is called once per frame
void Update()
{
    if (roiPointList.Count == 4)
    {
#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
        //Touch
        int touchCount = Input.touchCount;
        if (touchCount == 1)
        {
            Touch t = Input.GetTouch(0);
            if (t.phase == TouchPhase.Ended && !EventSystem.current.IsPointerOverGameObject(t.fingerId))
            {
                roiPointList.Clear();
            }
        }
#else
        if (Input.GetMouseButtonUp(0) && !EventSystem.current.IsPointerOverGameObject())
        {
            roiPointList.Clear();
        }
#endif
    }

    if (roiPointList.Count < 4)
    {
#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
        //Touch
        int touchCount = Input.touchCount;
        if (touchCount == 1)
        {
            Touch t = Input.GetTouch(0);
            if (t.phase == TouchPhase.Ended && !EventSystem.current.IsPointerOverGameObject(t.fingerId))
            {
                roiPointList.Add(convertScreenPoint(new Point(t.position.x, t.position.y), gameObject, Camera.main));
                //Debug.Log ("touch X " + t.position.x);
                //Debug.Log ("touch Y " + t.position.y);
                if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList[roiPointList.Count - 1])))
                {
                    roiPointList.RemoveAt(roiPointList.Count - 1);
                }
            }
        }
#else
        //Mouse
        if (Input.GetMouseButtonUp(0) && !EventSystem.current.IsPointerOverGameObject())
        {
            roiPointList.Add(convertScreenPoint(new Point(Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));
            // Debug.Log ("mouse X " + Input.mousePosition.x);
            // Debug.Log ("mouse Y " + Input.mousePosition.y);
            if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList[roiPointList.Count - 1])))
            {
                roiPointList.RemoveAt(roiPointList.Count - 1);
            }
        }
#endif

        if (roiPointList.Count == 4)
        {
            shouldStartCamShift = true;
        }
    }

    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        Imgproc.cvtColor(rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
        Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);

        Point[] points = roiPointList.ToArray();

        if (shouldStartCamShift)
        {
            shouldStartCamShift = false;

            using (MatOfPoint roiPointMat = new MatOfPoint(roiPointList.ToArray()))
            {
                roiRect = Imgproc.boundingRect(roiPointMat);
            }

            if (roiHistMat != null)
            {
                roiHistMat.Dispose();
                roiHistMat = null;
            }
            roiHistMat = new Mat();

            using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
            using (Mat maskMat = new Mat())
            {
                Imgproc.calcHist(new List<Mat>(new Mat[] { roiHSVMat }), new MatOfInt(0), maskMat, roiHistMat, new MatOfInt(16), new MatOfFloat(0, 180));
                Core.normalize(roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);
                //Debug.Log ("roiHist " + roiHistMat.ToString ());
            }
        }
        else if (roiPointList.Count == 4)
        {
            using (Mat backProj = new Mat())
            {
                Imgproc.calcBackProject(new List<Mat>(new Mat[] { hsvMat }), new MatOfInt(0), roiHistMat, backProj, new MatOfFloat(0, 180), 1.0);
                RotatedRect r = Video.CamShift(backProj, roiRect, termination);
                r.points(points);
            }
        }

        if (points.Length < 4)
        {
            for (int i = 0; i < points.Length; i++)
            {
                Imgproc.circle(rgbaMat, points[i], 6, new Scalar(0, 0, 255, 255), 2);
            }
        }
        else
        {
            for (int i = 0; i < 4; i++)
            {
                Imgproc.line(rgbaMat, points[i], points[(i + 1) % 4], new Scalar(255, 0, 0, 255), 2);
            }
            Imgproc.rectangle(rgbaMat, roiRect.tl(), roiRect.br(), new Scalar(0, 255, 0, 255), 2);
        }

        Imgproc.putText(rgbaMat, "Please touch the 4 points surrounding the tracking object.", new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
        // Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

        Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
    }
}
/// <summary>
/// Handles a touch on the image.
/// </summary>
/// <param name="touchPoint">Touch point.</param>
public void onTouch(Mat rgbaMat, Point touchPoint)
{
    int cols = rgbaMat.cols();
    int rows = rgbaMat.rows();
    int x = (int)touchPoint.x;
    int y = (int)touchPoint.y;
    // Debug.Log ("Touch image coordinates: (" + x + ", " + y + ")");

    if ((x < 0) || (y < 0) || (x > cols) || (y > rows))
        return;

    OpenCVForUnity.Rect touchedRect = new OpenCVForUnity.Rect();
    touchedRect.x = (x > 5) ? x - 5 : 0;
    touchedRect.y = (y > 5) ? y - 5 : 0;
    touchedRect.width = (x + 5 < cols) ? x + 5 - touchedRect.x : cols - touchedRect.x;
    touchedRect.height = (y + 5 < rows) ? y + 5 - touchedRect.y : rows - touchedRect.y;

    Mat touchedRegionRgba = rgbaMat.submat(touchedRect);
    Mat touchedRegionHsv = new Mat();
    Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);

    // Calculate the average color of the touched region
    blobColorHsv = Core.sumElems(touchedRegionHsv);
    int pointCount = touchedRect.width * touchedRect.height;
    for (int i = 0; i < blobColorHsv.val.Length; i++)
        blobColorHsv.val[i] /= pointCount;

    //blobColorRgba = converScalarHsv2Rgba (blobColorHsv);
    // Debug.Log ("Touched rgba color: (" + mBlobColorRgba.val [0] + ", " + mBlobColorRgba.val [1] +
    //     ", " + mBlobColorRgba.val [2] + ", " + mBlobColorRgba.val [3] + ")");

    detector.setHsvColor(blobColorHsv);
    Imgproc.resize(detector.getSpectrum(), spectrumMat, SPECTRUM_SIZE);
    isColorSelected = true;

    touchedRegionRgba.release();
    touchedRegionHsv.release();
}
private Rect calcTrackedObjectPositionToShow(int i)
{
    if ((i < 0) || (i >= trackedObjects.Count))
    {
        Debug.Log("DetectionBasedTracker::calcTrackedObjectPositionToShow: ERROR: wrong i=" + i);
        return new Rect();
    }
    if (trackedObjects[i].numDetectedFrames <= innerParameters.numStepsToWaitBeforeFirstShow)
    {
        //Debug.Log("DetectionBasedTracker::calcTrackedObjectPositionToShow: " + "trackedObjects[" + i + "].numDetectedFrames=" + trackedObjects[i].numDetectedFrames + " <= numStepsToWaitBeforeFirstShow=" + innerParameters.numStepsToWaitBeforeFirstShow + " --- return empty Rect()");
        return new Rect();
    }
    if (trackedObjects[i].numFramesNotDetected > innerParameters.numStepsToShowWithoutDetecting)
    {
        return new Rect();
    }

    List<Rect> lastPositions = trackedObjects[i].lastPositions;

    int N = lastPositions.Count;
    if (N <= 0)
    {
        Debug.Log("DetectionBasedTracker::calcTrackedObjectPositionToShow: ERROR: no positions for i=" + i);
        return new Rect();
    }

    int Nsize = Math.Min(N, (int)weightsSizesSmoothing.Count);
    int Ncenter = Math.Min(N, (int)weightsPositionsSmoothing.Count);

    Point center = new Point();
    double w = 0, h = 0;
    if (Nsize > 0)
    {
        double sum = 0;
        for (int j = 0; j < Nsize; j++)
        {
            int k = N - j - 1;
            w += lastPositions[k].width * weightsSizesSmoothing[j];
            h += lastPositions[k].height * weightsSizesSmoothing[j];
            sum += weightsSizesSmoothing[j];
        }
        w /= sum;
        h /= sum;
    }
    else
    {
        w = lastPositions[N - 1].width;
        h = lastPositions[N - 1].height;
    }

    if (Ncenter > 0)
    {
        double sum = 0;
        for (int j = 0; j < Ncenter; j++)
        {
            int k = N - j - 1;
            Point tl = lastPositions[k].tl();
            Point br = lastPositions[k].br();
            Point c1;
            //c1 = tl; c1 = c1 * 0.5f;
            c1 = new Point(tl.x * 0.5f, tl.y * 0.5f);
            Point c2;
            //c2 = br; c2 = c2 * 0.5f;
            c2 = new Point(br.x * 0.5f, br.y * 0.5f);
            //c1 = c1 + c2;
            c1 = new Point(c1.x + c2.x, c1.y + c2.y);

            //center = center + (c1 * weightsPositionsSmoothing[j]);
            center = new Point(center.x + (c1.x * weightsPositionsSmoothing[j]), center.y + (c1.y * weightsPositionsSmoothing[j]));
            sum += weightsPositionsSmoothing[j];
        }
        //center *= (float)(1 / sum);
        center = new Point(center.x * (1 / sum), center.y * (1 / sum));
    }
    else
    {
        int k = N - 1;
        Point tl = lastPositions[k].tl();
        Point br = lastPositions[k].br();
        Point c1;
        //c1 = tl; c1 = c1 * 0.5f;
        c1 = new Point(tl.x * 0.5f, tl.y * 0.5f);
        Point c2;
        //c2 = br; c2 = c2 * 0.5f;
        c2 = new Point(br.x * 0.5f, br.y * 0.5f);
        //center = c1 + c2;
        center = new Point(c1.x + c2.x, c1.y + c2.y);
    }

    //Point2f tl = center - (Point2f(w, h) * 0.5);
    Point tl2 = new Point(center.x - (w * 0.5f), center.y - (h * 0.5f));
    //Rect res(cvRound(tl.x), cvRound(tl.y), cvRound(w), cvRound(h));
    Rect res = new Rect((int)Math.Round(tl2.x), (int)Math.Round(tl2.y), (int)Math.Round(w), (int)Math.Round(h));
    //LOGD("DetectionBasedTracker::calcTrackedObjectPositionToShow: Result for i=%d: {%d, %d, %d x %d}", i, res.x, res.y, res.width, res.height);

    return res;
}
public Point centerRect(Rect r)
{
    return new Point(r.x + (r.width / 2), r.y + (r.height / 2));
}
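A quick worked check (values assumed): centerRect(new Rect(10, 20, 5, 5)) returns (12, 22) rather than (12.5, 22.5), because width / 2 and height / 2 are integer divisions before the Point is constructed.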
// Update is called once per frame
void Update()
{
    if (!initDone)
        return;

    if (screenOrientation != Screen.orientation)
    {
        screenOrientation = Screen.orientation;
        updateLayout();
    }

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16)
    {
#else
    if (webCamTexture.didUpdateThisFrame)
    {
#endif
        Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

        // Flip to the correct direction.
        if (webCamDevice.isFrontFacing)
        {
            if (webCamTexture.videoRotationAngle == 0)
            {
                Core.flip(rgbaMat, rgbaMat, 1);
            }
            else if (webCamTexture.videoRotationAngle == 90)
            {
                Core.flip(rgbaMat, rgbaMat, 0);
            }
            if (webCamTexture.videoRotationAngle == 180)
            {
                Core.flip(rgbaMat, rgbaMat, 0);
            }
            else if (webCamTexture.videoRotationAngle == 270)
            {
                Core.flip(rgbaMat, rgbaMat, 1);
            }
        }
        else
        {
            if (webCamTexture.videoRotationAngle == 180)
            {
                Core.flip(rgbaMat, rgbaMat, -1);
            }
            else if (webCamTexture.videoRotationAngle == 270)
            {
                Core.flip(rgbaMat, rgbaMat, -1);
            }
        }

        Imgproc.cvtColor(rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
        Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);

        Point[] points = roiPointList.ToArray();

        if (roiPointList.Count == 4)
        {
            using (Mat backProj = new Mat())
            {
                Imgproc.calcBackProject(new List<Mat>(new Mat[] { hsvMat }), new MatOfInt(0), roiHistMat, backProj, new MatOfFloat(0, 180), 1.0);
                RotatedRect r = Video.CamShift(backProj, roiRect, termination);
                r.points(points);
            }

#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
            //Touch
            int touchCount = Input.touchCount;
            if (touchCount == 1)
            {
                if (Input.GetTouch(0).phase == TouchPhase.Ended)
                {
                    roiPointList.Clear();
                }
            }
#else
            if (Input.GetMouseButtonUp(0))
            {
                roiPointList.Clear();
            }
#endif
        }

        if (roiPointList.Count < 4)
        {
#if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
            //Touch
            int touchCount = Input.touchCount;
            if (touchCount == 1)
            {
                Touch t = Input.GetTouch(0);
                if (t.phase == TouchPhase.Ended)
                {
                    roiPointList.Add(convertScreenPoint(new Point(t.position.x, t.position.y), gameObject, Camera.main));
                    // Debug.Log ("touch X " + t.position.x);
                    // Debug.Log ("touch Y " + t.position.y);
                    if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList[roiPointList.Count - 1])))
                    {
                        roiPointList.RemoveAt(roiPointList.Count - 1);
                    }
                }
            }
#else
            //Mouse
            if (Input.GetMouseButtonUp(0))
            {
                roiPointList.Add(convertScreenPoint(new Point(Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));
                // Debug.Log ("mouse X " + Input.mousePosition.x);
                // Debug.Log ("mouse Y " + Input.mousePosition.y);
                if (!(new OpenCVForUnity.Rect(0, 0, hsvMat.width(), hsvMat.height()).contains(roiPointList[roiPointList.Count - 1])))
                {
                    roiPointList.RemoveAt(roiPointList.Count - 1);
                }
            }
#endif

            if (roiPointList.Count == 4)
            {
                using (MatOfPoint roiPointMat = new MatOfPoint(roiPointList.ToArray()))
                {
                    roiRect = Imgproc.boundingRect(roiPointMat);
                }

                if (roiHistMat != null)
                {
                    roiHistMat.Dispose();
                    roiHistMat = null;
                }
                roiHistMat = new Mat();

                using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                using (Mat maskMat = new Mat())
                {
                    Imgproc.calcHist(new List<Mat>(new Mat[] { roiHSVMat }), new MatOfInt(0), maskMat, roiHistMat, new MatOfInt(16), new MatOfFloat(0, 180));
                    Core.normalize(roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);
                    // Debug.Log ("roiHist " + roiHistMat.ToString ());
                }
            }
        }

        if (points.Length < 4)
        {
            for (int i = 0; i < points.Length; i++)
            {
                Core.circle(rgbaMat, points[i], 6, new Scalar(0, 0, 255, 255), 2);
            }
        }
        else
        {
            for (int i = 0; i < 4; i++)
            {
                Core.line(rgbaMat, points[i], points[(i + 1) % 4], new Scalar(255, 0, 0, 255), 2);
            }
            Core.rectangle(rgbaMat, roiRect.tl(), roiRect.br(), new Scalar(0, 255, 0, 255), 2);
        }

        Core.putText(rgbaMat, "PLEASE TOUCH 4 POINTS", new Point(5, 25), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);

        Utils.matToTexture2D(rgbaMat, texture, colors);

        gameObject.GetComponent<Renderer>().material.mainTexture = texture;
    }
}

void OnDisable()
{
    webCamTexture.Stop();
}

void OnGUI()
{
    float screenScale = Screen.height / 240.0f;
    Matrix4x4 scaledMatrix = Matrix4x4.Scale(new Vector3(screenScale, screenScale, screenScale));
    GUI.matrix = scaledMatrix;

    GUILayout.BeginVertical();
    if (GUILayout.Button("back"))
    {
        Application.LoadLevel("OpenCVForUnitySample");
    }
    if (GUILayout.Button("change camera"))
    {
        shouldUseFrontFacing = !shouldUseFrontFacing;
        StartCoroutine(init());
    }
    GUILayout.EndVertical();
}
public Rect Intersect(Rect a, Rect b)
{
    int x1 = Math.Max(a.x, b.x);
    int x2 = Math.Min(a.x + a.width, b.x + b.width);
    int y1 = Math.Max(a.y, b.y);
    int y2 = Math.Min(a.y + a.height, b.y + b.height);

    if (x2 >= x1 && y2 >= y1)
        return new Rect(x1, y1, x2 - x1, y2 - y1);
    else
        return new Rect();
}
public Rect Inflate(Rect rect, int x, int y)
{
    rect.x -= x;
    rect.y -= y;
    rect.width += (2 * x);
    rect.height += (2 * y);
    return rect;
}
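A combined usage sketch of Inflate and Intersect (all values assumed), mirroring how detectInRegion above grows a tracking window and clips it back to the frame:

// Hypothetical usage: expand a face rect by 10 px, then clip it to a 640x480 frame.
Rect face = new Rect(600, 20, 60, 60);
Rect expanded = Inflate(face, 10, 10);     // (590, 10, 80, 80)
Rect frame = new Rect(0, 0, 640, 480);
Rect clipped = Intersect(frame, expanded); // (590, 10, 50, 80)
// Note: Inflate mutates and returns the same Rect instance, since
// OpenCVForUnity.Rect is a class (reference type).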