/// <summary>
/// Searches <paramref name="frame"/> for a quadrilateral region whose pixels fall inside the
/// configured HSV range (HueLower..HueUpper etc.). On success, stores the bounding box in
/// TrackWindow ({x, y, width, height}) and the polygon/contour in Approx/Contour.
/// </summary>
/// <param name="frame">Input BGR camera frame.</param>
/// <param name="isKeyboardFound">Ignored on input; the return value carries the result (kept for interface compatibility).</param>
/// <returns>True when a suitable quadrilateral was found and accepted.</returns>
public bool GetPosition(Mat frame, bool isKeyboardFound)
{
    Mat frameProc = new Mat();
    Mat frameMasked = new Mat();
    Mat frameMask = new Mat();
    Mat hierarchy = new Mat();

    // Threshold in HSV space.
    Imgproc.cvtColor(frame, frameProc, Imgproc.COLOR_BGR2HSV);
    Scalar lowerB = new Scalar(HueLower, SatLower, ValLower);
    Scalar upperB = new Scalar(HueUpper, SatUpper, ValUpper);
    Core.inRange(frameProc, lowerB, upperB, frameMask);

    // Keep only the in-range pixels of the original frame. BUGFIX: the HSV mat itself
    // was previously reused as the destination; bitwise_and leaves destination pixels
    // untouched where mask == 0, so stale HSV values survived and produced phantom contours.
    Core.bitwise_and(frame, frame, frameMasked, frameMask);

    // Morphological opening (named constant instead of magic number 2) removes speckle noise.
    Imgproc.morphologyEx(frameMasked, frameMasked, Imgproc.MORPH_OPEN, Mat.ones(5, 5, CvType.CV_8U));
    //Imgproc.bilateralFilter(frameMasked, frameMasked, 9, 50, 100);
    Imgproc.cvtColor(frameMasked, frameMasked, Imgproc.COLOR_BGR2GRAY);

    List<MatOfPoint> contoursList = new List<MatOfPoint>();
    Imgproc.findContours(frameMasked, contoursList, hierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_SIMPLE);

    int count = 0;
    foreach (MatOfPoint contour in contoursList)
    {
        // A candidate must approximate to exactly 4 vertices and be large enough.
        MatOfPoint2f approx = new MatOfPoint2f();
        MatOfPoint2f contourf = new MatOfPoint2f(contour.toArray());
        Imgproc.approxPolyDP(contourf, approx, 0.01 * Imgproc.arcLength(contourf, true), true);
        if (approx.rows() != 4 || Imgproc.contourArea(contour) < min_area)
        {
            continue;
        }

        count++;
        if (count >= 2)
        {
            // Only the first qualifying quadrilateral is considered (original behavior).
            continue;
        }

        OpenCVForUnity.CoreModule.Rect track_win = Imgproc.boundingRect(approx);
        TrackWindow = new int[] { track_win.x, track_win.y, track_win.width, track_win.height };

        // Reject boxes that touch a 5px border of the frame. BUGFIX: x extents are now
        // compared against frame.width() and y extents against frame.height(); the
        // original had the two dimensions swapped.
        int xMax = TrackWindow[0] + TrackWindow[2];
        int yMax = TrackWindow[1] + TrackWindow[3];
        bool touchesBorder =
            (frame.width() - 5 < xMax && xMax <= frame.width()) ||
            (0 <= TrackWindow[0] && TrackWindow[0] < 5) ||
            (frame.height() - 5 < yMax && yMax <= frame.height()) ||
            (0 <= TrackWindow[1] && TrackWindow[1] < 5);
        if (touchesBorder)
        {
            continue;
        }

        Approx = approx;
        Contour = contour;
        return true;
    }
    return false;
}
// Chooses the circle-pattern grid size for the current state and (re)allocates the
// point buffers that depend on it, then re-renders the pattern texture and rescales
// the board transform to match.
void UpdateCirclePatternSize()
{
    if (_state == State.Initiating || _state == State.BlindCalibration) {
        // Early states: use the configured default grid size.
        _circlePatternSize = defaultCirclesPatternSize;
    } else {
        const int circlePatternSizeYMin = 7;
        const int desiredPixelsPerCirclePatternSegment = 25; // 50 px is recommended, but for a 720p camera this gives too few dots.
        float desiredCirclePatternNumAspect = _chessPatternSize.x / (_chessPatternSize.y) / 2f; // 3f / 4f / 2f; halved — presumably because asymmetric grid columns are offset (TODO confirm).
        // Estimate how many pixels tall the physical pattern appears in the camera image,
        // using the camera FOV and the distance to the pattern.
        float patternDistance = Vector3.Distance(_mainCamera.transform.position, _circlePatternTransform.position);
        float patternHeight = _chessPatternTransform.localScale.y;
        float viewHeightAtPatternPosition = Mathf.Tan(_mainCamera.fieldOfView * Mathf.Deg2Rad * 0.5f) * patternDistance * 2;
        int circlePatternPixelHeight = (int)((patternHeight / viewHeightAtPatternPosition) * _cameraTexture.height);
        // Pick the largest Y count that keeps each segment near the desired pixel size, clamped to the minimum.
        int optimalPatternSizeY = Mathf.Max(circlePatternSizeYMin, Mathf.FloorToInt(circlePatternPixelHeight / (float)desiredPixelsPerCirclePatternSegment));
        int optimalPatternSizeX = Mathf.FloorToInt(optimalPatternSizeY * desiredCirclePatternNumAspect);
        _circlePatternSize = TrackingToolsHelper.GetClosestValidPatternSize(new Vector2Int(optimalPatternSizeX, optimalPatternSizeY), TrackingToolsHelper.PatternType.AsymmetricCircleGrid);
    }
    _circlePatternPointCount = _circlePatternSize.x * _circlePatternSize.y;

    // Buffers already match the new point count — nothing to reallocate or re-render.
    if (_circlePointsProjectorRenderImageMat != null && _circlePointsProjectorRenderImageMat.rows() == _circlePatternPointCount) {
        return;
    }
    if (_circlePointsProjectorRenderImageMat != null) {
        _circlePointsProjectorRenderImageMat.release();
    }
    if (_circlePointsRealModelMat != null) {
        _circlePointsRealModelMat.release();
    }
    if (_circlePointsDetectedWorldMat != null) {
        _circlePointsDetectedWorldMat.release();
    }
    // NOTE(review): alloc() is called unconditionally below even though the mats were
    // null-checked above — if any of them can actually be null here this will throw.
    // Confirm they are always constructed before this method runs.
    _circlePointsProjectorRenderImageMat.alloc(_circlePatternPointCount);
    _circlePointsRealModelMat.alloc(_circlePatternPointCount);
    _circlePointsDetectedWorldMat.alloc(_circlePatternPointCount);

    // Render pattern to texture.
    _circlePatternBorderSizeUV = TrackingToolsHelper.RenderPattern(_circlePatternSize, TrackingToolsHelper.PatternType.AsymmetricCircleGrid, 2048, ref _circlePatternTexture, ref _patternRenderMaterial, circlePatternBorder, true);
    _circlePatternBoardMaterial.mainTexture = _circlePatternTexture;

    // Update transform to match.
    float circleTextureAspect = _circlePatternTexture.width / (float)_circlePatternTexture.height;
    float borderProportion = (_circlePatternSize.y - 1 + 2f) / (_circlePatternSize.y - 1f); // Asymmetric pattern tiles are half the height.
    _circlePatternTransform.localScale = new Vector3(circleTextureAspect, 1, 0) * _chessPatternTransform.localScale.y * borderProportion;
    if (_state == State.TrackedCalibration || _state == State.Testing) {
        _circlePatternTransform.localPosition = -Vector3.right * (_chessCirclePatternCenterOffset / 1000f);
    }
}
/// <summary>
/// Builds matched 2D-image / 3D-world point lists for a detected chessboard.
/// World coordinates lie on the X/Z plane (y = 0), spaced by <paramref name="distBetweenCorners"/>,
/// rotated so the corner identified as "pin 1" plays the same reference role in all four orientations.
/// </summary>
/// <param name="positions">Detected corner pixel positions, gridSize*gridSize rows.</param>
/// <param name="pin1idx">Linear index of the pin-1 corner: one of the four board corners.</param>
/// <param name="distBetweenCorners">Physical spacing between adjacent corners, in meters.</param>
/// <param name="realWorldPointArray">Out: 2D world points (x, z).</param>
/// <param name="realWorldPointArray3">Out: 3D world points (x, z, 0).</param>
/// <param name="imagePointArray">Out: corner pixel positions copied out of <paramref name="positions"/>.</param>
/// <param name="gridSize">Corners per side of the square board; default 7 preserves the original hard-coded behavior.</param>
void getChessBoardWorldPositions(MatOfPoint2f positions, int pin1idx, float distBetweenCorners, ref Point[] realWorldPointArray, ref Point3[] realWorldPointArray3, ref Point[] imagePointArray, int gridSize = 7)
{
    realWorldPointArray = new Point[positions.rows()];
    realWorldPointArray3 = new Point3[positions.rows()];
    imagePointArray = new Point[positions.rows()];

    // Corner indices of the board (for the default 7x7: 0, 6, 42, 48).
    int last = gridSize - 1;
    int bottomLeft = gridSize * last;
    int bottomRight = gridSize * gridSize - 1;

    for (int i = 0; i < positions.rows(); i++)
    {
        double xp = 0.0;
        double zp = 0.0;
        // Map the linear corner index to grid coordinates, with one of four
        // rotations depending on which board corner is pin 1.
        if (pin1idx == 0)
        {
            xp = i % gridSize;
            zp = -(i / gridSize);
        }
        if (pin1idx == last)
        {
            xp = i / gridSize;
            zp = (i % gridSize) - last;
        }
        if (pin1idx == bottomLeft)
        {
            xp = -(i / gridSize) + last;
            zp = -(i % gridSize);
        }
        if (pin1idx == bottomRight)
        {
            xp = -(i % gridSize) + last;
            zp = (i / gridSize) - last;
        }
        xp = xp * distBetweenCorners;
        zp = zp * distBetweenCorners;
        realWorldPointArray[i] = new Point(xp, zp);
        realWorldPointArray3[i] = new Point3(xp, zp, 0.0);
        imagePointArray[i] = new Point(positions.get(i, 0)[0], positions.get(i, 0)[1]);
    }
}
// Reverses the row order of the given point matrix in place.
public static void ReverseOrder(MatOfPoint2f points)
{
    int head = 0;
    int tail = points.rows() - 1;
    // Swap symmetric pairs, walking inward from both ends.
    while (head < tail)
    {
        Vector2 headValue = points.ReadVector2(head);
        points.WriteVector2(points.ReadVector2(tail), head);
        points.WriteVector2(headValue, tail);
        head++;
        tail--;
    }
}
// Finds the blob nearest to any chessboard corner (reported via closestBlob),
// then returns the index of the board corner (0, 6, 42 or 48 on a 7x7 board)
// nearest to that blob — that corner is "pin 1".
int getPin1(MatOfPoint2f positions, Vector2[] blobs, ref Point closestBlob)
{
    int cornerCount = positions.rows();
    Vector2 firstCorner = new Vector2((float)positions.get(0, 0)[0], (float)positions.get(0, 0)[1]);

    // Pass 1: which blob lies closest to any detected corner?
    int bestBlob = 0;
    float bestBlobDist = (blobs[0] - firstCorner).magnitude;
    for (int b = 0; b < blobs.Length; b++)
    {
        for (int c = 0; c < cornerCount; c++)
        {
            Vector2 corner = new Vector2((float)positions.get(c, 0)[0], (float)positions.get(c, 0)[1]);
            float dist = (blobs[b] - corner).magnitude;
            if (dist < bestBlobDist)
            {
                bestBlob = b;
                bestBlobDist = dist;
            }
        }
    }
    closestBlob = new Point(blobs[bestBlob].x, blobs[bestBlob].y);

    // Pass 2: which of the four outer board corners is closest to that blob?
    int pin1idx = 0;
    float pin1dist = (blobs[bestBlob] - firstCorner).magnitude;
    for (int c = 0; c < cornerCount; c++)
    {
        if (c != 0 && c != 6 && c != 42 && c != 48)
        {
            continue; // only the four board corners are candidates
        }
        Vector2 corner = new Vector2((float)positions.get(c, 0)[0], (float)positions.get(c, 0)[1]);
        float dist = (blobs[bestBlob] - corner).magnitude;
        if (dist < pin1dist)
        {
            pin1idx = c;
            pin1dist = dist;
        }
    }
    return pin1idx;
}
// Loads the current face landmarks into _imagePoints for pose solving.
private void SetImagePoints(List<Vector2> landmarks)
{
    // Copy the landmarks into a plain array first, so the code below never
    // does random access into the List.
    int count = 0;
    foreach (var landmark in landmarks)
    {
        _landmarks[count] = landmark;
        count++;
    }

    // NOTE: picks eyes, nose and ears out of the 17-point model, in the same
    // order as _objPoints. The two leading points are midpoints of landmark
    // pairs (2,3) and (4,5).
    float midAx = (_landmarks[2].x + _landmarks[3].x) / 2;
    float midAy = (_landmarks[2].y + _landmarks[3].y) / 2;
    float midBx = (_landmarks[4].x + _landmarks[5].x) / 2;
    float midBy = (_landmarks[4].y + _landmarks[5].y) / 2;

    if (_imagePoints.rows() == 0)
    {
        // First call: build the matrix properly, which also reserves its storage.
        _imagePoints.fromArray(
            new Point(midAx, midAy),
            new Point(midBx, midBy),
            new Point(_landmarks[0].x, _landmarks[0].y),
            new Point(_landmarks[1].x, _landmarks[1].y),
            new Point(_landmarks[6].x, _landmarks[6].y),
            new Point(_landmarks[8].x, _landmarks[8].y)
            );
    }
    else
    {
        // Subsequent calls: fromArray / new Point would GC-alloc, so write into
        // the preallocated setter buffer and put() it in one go instead.
        _imagePointsSetter[0] = midAx;
        _imagePointsSetter[1] = midAy;
        _imagePointsSetter[2] = midBx;
        _imagePointsSetter[3] = midBy;
        _imagePointsSetter[4] = _landmarks[0].x;
        _imagePointsSetter[5] = _landmarks[0].y;
        _imagePointsSetter[6] = _landmarks[1].x;
        _imagePointsSetter[7] = _landmarks[1].y;
        _imagePointsSetter[8] = _landmarks[6].x;
        _imagePointsSetter[9] = _landmarks[6].y;
        _imagePointsSetter[10] = _landmarks[8].x;
        _imagePointsSetter[11] = _landmarks[8].y;
        _imagePoints.put(0, 0, _imagePointsSetter);
    }
}
// Refreshes the optical-flow feature state for the current frame.
// First call (mMOP2fptsPrev empty): grayscale the frame into both "this" and "prev"
// mats and seed the previous-corner set. Later calls: rotate this -> prev, detect
// fresh corners on the new frame, and restore prev corners from the safe copy.
private void updateFeatures()
{
    if (mMOP2fptsPrev.rows() == 0)
    {
        // First frame: "prev" and "this" are the same image.
        Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);
        matOpFlowThis.copyTo(matOpFlowPrev);
        Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, maxCorners, qualityLevel, minDistance);
        mMOP2fptsPrev.fromArray(MOPcorners.toArray());
        // Keep a safe copy so the next pass can restore these corners without re-detecting.
        mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
    }
    else
    {
        // Subsequent frames: last frame's "this" image becomes "prev".
        matOpFlowThis.copyTo(matOpFlowPrev);
        Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, maxCorners, qualityLevel, minDistance);
        mMOP2fptsThis.fromArray(MOPcorners.toArray());
        // Previous corners come from the safe copy (saves re-detecting them),
        // then this frame's corners are stashed for next time.
        mMOP2fptsSafe.copyTo(mMOP2fptsPrev);
        mMOP2fptsThis.copyTo(mMOP2fptsSafe);
    }
}
// Classifies a contour by the vertex count of its coarse polygon approximation
// (epsilon = 4% of the perimeter).
// NOTE(review): as written, only triangles are detected, and only via the log —
// the returned label is never updated from "Não identificado". Branches for
// squares/rectangles (aspect-ratio test on the bounding rect), pentagons and
// circles were sketched out but are currently disabled.
public string Detect(MatOfPoint2f c)
{
    var shape = "Não identificado";

    var perimeter = Imgproc.arcLength(c, true);
    var approximated = new MatOfPoint2f();
    Imgproc.approxPolyDP(c, approximated, 0.04 * perimeter, true);

    if (approximated.rows() == 3)
    {
        UnityEngine.Debug.Log("Triângulo");
    }

    return shape;
}
// Coroutine that (re)initializes the webcam and all optical-flow state.
// If a camera was already running, it is stopped and every mat is disposed first.
private IEnumerator init()
{
    if (webCamTexture != null) {
        // Tear down the previous session before re-initializing.
        webCamTexture.Stop();
        initDone = false;
        rgbaMat.Dispose();
        matOpFlowThis.Dispose();
        matOpFlowPrev.Dispose();
        MOPcorners.Dispose();
        mMOP2fptsThis.Dispose();
        mMOP2fptsPrev.Dispose();
        mMOP2fptsSafe.Dispose();
        mMOBStatus.Dispose();
        mMOFerr.Dispose();
    }
    // Checks how many and which cameras are available on the device.
    for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
        if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {
            Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
            webCamDevice = WebCamTexture.devices [cameraIndex];
            webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            break;
        }
    }
    if (webCamTexture == null) {
        // No camera matched the requested facing — fall back to the first device.
        webCamDevice = WebCamTexture.devices [0];
        webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
    }
    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
    // Starts the camera.
    webCamTexture.Play();
    while (true) {
        // If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait
        // until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16.
        // (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IPHONE && !UNITY_EDITOR
        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
        if (webCamTexture.didUpdateThisFrame) {
#endif
            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
            Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);
            // Allocate the frame buffers now that the real camera resolution is known.
            colors = new Color32[webCamTexture.width * webCamTexture.height];
            rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
            matOpFlowThis = new Mat();
            matOpFlowPrev = new Mat();
            MOPcorners = new MatOfPoint();
            mMOP2fptsThis = new MatOfPoint2f();
            mMOP2fptsPrev = new MatOfPoint2f();
            mMOP2fptsSafe = new MatOfPoint2f();
            mMOBStatus = new MatOfByte();
            mMOFerr = new MatOfFloat();
            texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);
            gameObject.transform.eulerAngles = new Vector3(0, 0, 0);
#if (UNITY_ANDROID || UNITY_IPHONE) && !UNITY_EDITOR
            gameObject.transform.eulerAngles = new Vector3(0, 0, -90);
#endif
            // gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);
            gameObject.transform.localScale = new Vector3(webCamTexture.width, webCamTexture.height, 1);
            // bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            // float scaleX = 1;
            // float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
            // if (webCamTexture.videoRotationAngle == 270)
            //     scaleY = -1.0f;
            // gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);
            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
#if (UNITY_ANDROID || UNITY_IPHONE) && !UNITY_EDITOR
            Camera.main.orthographicSize = webCamTexture.width / 2;
#else
            Camera.main.orthographicSize = webCamTexture.height / 2;
#endif
            initDone = true;
            break;
        } else {
            // Camera not ready yet — try again next frame.
            yield return(0);
        }
    }
}

// Update is called once per frame.
// Converts the webcam frame to a mat, normalizes its orientation, runs sparse
// optical flow (Lucas-Kanade) against the previous frame, and draws the tracks.
void Update()
{
    if (!initDone) {
        return;
    }
#if UNITY_IPHONE && !UNITY_EDITOR
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
    if (webCamTexture.didUpdateThisFrame) {
#endif
        Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);
        // Flip the image depending on mirroring, facing and rotation so it appears upright.
        if (webCamTexture.videoVerticallyMirrored) {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip(rgbaMat, rgbaMat, 1);
                } else if (webCamTexture.videoRotationAngle == 90) {
                    Core.flip(rgbaMat, rgbaMat, 0);
                } else if (webCamTexture.videoRotationAngle == 270) {
                    Core.flip(rgbaMat, rgbaMat, 1);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 90) {
                } else if (webCamTexture.videoRotationAngle == 270) {
                    Core.flip(rgbaMat, rgbaMat, -1);
                }
            }
        } else {
            if (webCamDevice.isFrontFacing) {
                if (webCamTexture.videoRotationAngle == 0) {
                    Core.flip(rgbaMat, rgbaMat, 1);
                } else if (webCamTexture.videoRotationAngle == 90) {
                    Core.flip(rgbaMat, rgbaMat, 0);
                } else if (webCamTexture.videoRotationAngle == 270) {
                    Core.flip(rgbaMat, rgbaMat, 1);
                }
            } else {
                if (webCamTexture.videoRotationAngle == 90) {
                } else if (webCamTexture.videoRotationAngle == 270) {
                    Core.flip(rgbaMat, rgbaMat, -1);
                }
            }
        }
        if (mMOP2fptsPrev.rows() == 0) {
            // First time through the loop, so we need prev and this mats plus prev points.
            // Get this mat.
            Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);
            // Copy that to prev mat.
            matOpFlowThis.copyTo(matOpFlowPrev);
            // Get prev corners.
            Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
            mMOP2fptsPrev.fromArray(MOPcorners.toArray());
            // Get safe copy of this corners.
            mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
        } else {
            // We've been through before so this mat is valid. Copy it to prev mat.
            matOpFlowThis.copyTo(matOpFlowPrev);
            // Get this mat.
            Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);
            // Get the corners for this mat.
            Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
            mMOP2fptsThis.fromArray(MOPcorners.toArray());
            // Retrieve the corners from the prev mat (saves calculating them again).
            mMOP2fptsSafe.copyTo(mMOP2fptsPrev);
            // And save this corners for next time through.
            mMOP2fptsThis.copyTo(mMOP2fptsSafe);
        }
        // calcOpticalFlowPyrLK: tracks mMOP2fptsPrev from matOpFlowPrev into matOpFlowThis,
        // writing new positions to mMOP2fptsThis, per-point found-flags to mMOBStatus
        // (1 = flow found) and per-point error values to mMOFerr.
        Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);
        if (!mMOBStatus.empty()) {
            List <Point> cornersPrev = mMOP2fptsPrev.toList();
            List <Point> cornersThis = mMOP2fptsThis.toList();
            List <byte> byteStatus = mMOBStatus.toList();
            int x = 0;
            int y = byteStatus.Count - 1;
            // Draw each successfully tracked point and the line from its previous position.
            for (x = 0; x < y; x++) {
                if (byteStatus [x] == 1) {
                    Point pt = cornersThis [x];
                    Point pt2 = cornersPrev [x];
                    Core.circle(rgbaMat, pt, 5, colorRed, iLineThickness - 1);
                    Core.line(rgbaMat, pt, pt2, colorRed, iLineThickness);
                }
            }
        }
        Utils.matToTexture2D(rgbaMat, texture, colors);
        gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
    }
}

// Stop the camera when this component is disabled.
void OnDisable()
{
    webCamTexture.Stop();
}

// Simple scaled IMGUI: back button and camera-switch button (restarts init()).
void OnGUI()
{
    float screenScale = Screen.width / 240.0f;
    Matrix4x4 scaledMatrix = Matrix4x4.Scale(new Vector3(screenScale, screenScale, screenScale));
    GUI.matrix = scaledMatrix;
    GUILayout.BeginVertical();
    if (GUILayout.Button("back")) {
        Application.LoadLevel("OpenCVForUnitySample");
    }
    if (GUILayout.Button("change camera")) {
        isFrontFacing = !isFrontFacing;
        StartCoroutine(init());
    }
    GUILayout.EndVertical();
}
}
}
// Update is called once per frame.
// Helper-based variant: pulls the frame from webCamTextureToMatHelper, runs sparse
// Lucas-Kanade optical flow against the previous frame, and draws the point tracks.
void Update()
{
    if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame()) {
        Mat rgbaMat = webCamTextureToMatHelper.GetMat();
        if (mMOP2fptsPrev.rows() == 0) {
            // First time through the loop, so we need prev and this mats plus prev points.
            // Get this mat.
            Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);
            // Copy that to prev mat.
            matOpFlowThis.copyTo(matOpFlowPrev);
            // Get prev corners.
            Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
            mMOP2fptsPrev.fromArray(MOPcorners.toArray());
            // Get safe copy of this corners.
            mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
        } else {
            // We've been through before so this mat is valid. Copy it to prev mat.
            matOpFlowThis.copyTo(matOpFlowPrev);
            // Get this mat.
            Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);
            // Get the corners for this mat.
            Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
            mMOP2fptsThis.fromArray(MOPcorners.toArray());
            // Retrieve the corners from the prev mat (saves calculating them again).
            mMOP2fptsSafe.copyTo(mMOP2fptsPrev);
            // And save this corners for next time through.
            mMOP2fptsThis.copyTo(mMOP2fptsSafe);
        }
        // calcOpticalFlowPyrLK: tracks mMOP2fptsPrev from matOpFlowPrev into matOpFlowThis,
        // writing new positions to mMOP2fptsThis, per-point found-flags to mMOBStatus
        // (1 = flow found) and per-point error values to mMOFerr.
        Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);
        if (!mMOBStatus.empty()) {
            List <Point> cornersPrev = mMOP2fptsPrev.toList();
            List <Point> cornersThis = mMOP2fptsThis.toList();
            List <byte> byteStatus = mMOBStatus.toList();
            int x = 0;
            int y = byteStatus.Count - 1;
            // Draw each successfully tracked point and the line from its previous position.
            for (x = 0; x < y; x++) {
                if (byteStatus [x] == 1) {
                    Point pt = cornersThis [x];
                    Point pt2 = cornersPrev [x];
                    Imgproc.circle(rgbaMat, pt, 5, colorRed, iLineThickness - 1);
                    Imgproc.line(rgbaMat, pt, pt2, colorRed, iLineThickness);
                }
            }
        }
        // Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
        Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
    }
}
// For every connected Azure Kinect camera: grab its color frame, find a 7x7
// chessboard plus the orange marker blobs, solve PnP for the color camera pose,
// derive the depth-camera pose via the color extrinsics, and update both the
// camera visualization transforms and the calibration UI textures.
void handleCalibration()
{
    for (int i = 0; i < AK_receiver.GetComponent <akplay>().camInfoList.Count; i++) {
        // Create color mat from the raw texture bytes (pinned so OpenCV can read them directly).
        byte[] colorBytes = ((Texture2D)(AK_receiver.GetComponent <akplay>().camInfoList[i].colorCube.GetComponent <Renderer>().material.mainTexture)).GetRawTextureData();
        GCHandle ch = GCHandle.Alloc(colorBytes, GCHandleType.Pinned);
        Mat colorMat = new Mat(AK_receiver.GetComponent <akplay>().camInfoList[i].color_height, AK_receiver.GetComponent <akplay>().camInfoList[i].color_width, CvType.CV_8UC4);
        Utils.copyToMat(ch.AddrOfPinnedObject(), colorMat);
        ch.Free();
        //OpenCVForUnity.CoreModule.Core.flip(colorMat, colorMat, 0);

        // Detect a chessboard in the image, refine the points, and save the pixel positions.
        MatOfPoint2f positions = new MatOfPoint2f();
        int resizer = 4;
        resizer = 1; // no resize! (detection on a downscaled copy was tried; ~27 ms each)
        Mat colorMatSmall = new Mat();
        Imgproc.resize(colorMat, colorMatSmall, new Size(colorMat.cols() / resizer, colorMat.rows() / resizer));
        bool success = Calib3d.findChessboardCorners(colorMatSmall, new Size(7, 7), positions);
        // Scale corner positions back up to full resolution (no-op while resizer == 1).
        for (int ss = 0; ss < positions.rows(); ss++) {
            double[] data = positions.get(ss, 0);
            data[0] = data[0] * resizer;
            data[1] = data[1] * resizer;
            positions.put(ss, 0, data);
        }
        // Subpixel refinement, drawing the chessboard, and getting orange blobs takes ~14 ms.
        // NOTE(review): TermCriteria is default-constructed here — confirm that gives the
        // intended iteration/epsilon limits for cornerSubPix.
        TermCriteria tc = new TermCriteria();
        Imgproc.cornerSubPix(colorMat, positions, new Size(5, 5), new Size(-1, -1), tc);
        Mat chessboardResult = new Mat();
        colorMat.copyTo(chessboardResult);
        Calib3d.drawChessboardCorners(chessboardResult, new Size(7, 7), positions, success);

        // Find the orange blobs:
        Mat orangeMask = new Mat();
        Vector2[] blobs = getOrangeBlobs(ref colorMat, ref orangeMask);

        // Find blob closest to chessboard.
        if (success && (blobs.Length > 0)) {
            Debug.Log("found a chessboard and blobs for camera: " + i);
            // Time to get pin1 and chessboard positions: ~27 ms.
            // Find pin 1 (the board corner nearest the marker blob):
            Point closestBlob = new Point();
            int pin1idx = getPin1(positions, blobs, ref closestBlob);
            // Mark pin-1 corner (red) and the blob itself (yellow) on the debug image.
            Imgproc.circle(chessboardResult, new Point(positions.get(pin1idx, 0)[0], positions.get(pin1idx, 0)[1]), 10, new Scalar(255, 0, 0), -1);
            Imgproc.circle(chessboardResult, closestBlob, 10, new Scalar(255, 255, 0), -1);

            // Get world positions of chessboard.
            Point[] realWorldPointArray = new Point[positions.rows()];
            Point3[] realWorldPointArray3 = new Point3[positions.rows()];
            Point[] imagePointArray = new Point[positions.rows()];
            //getChessBoardWorldPositions(positions, pin1idx, 0.0498f, ref realWorldPointArray, ref realWorldPointArray3, ref imagePointArray); //green and white checkerboard.
            getChessBoardWorldPositions(positions, pin1idx, 0.07522f, ref realWorldPointArray, ref realWorldPointArray3, ref imagePointArray); //black and white checkerboard.

            // Annotate the four board corners with their world coordinates (3 decimals).
            string text = "";
            float decimals = 1000.0f;
            int text_red = 255;
            int text_green = 0;
            int text_blue = 0;
            text = ((int)(realWorldPointArray3[0].x * decimals)) / decimals + "," + ((int)(realWorldPointArray3[0].y * decimals)) / decimals + "," + ((int)(realWorldPointArray3[0].z * decimals)) / decimals;
            Imgproc.putText(chessboardResult, text, new Point(positions.get(0, 0)[0], positions.get(0, 0)[1]), 0, .6, new Scalar(text_red, text_green, text_blue));
            text = ((int)(realWorldPointArray3[6].x * decimals)) / decimals + "," + ((int)(realWorldPointArray3[6].y * decimals)) / decimals + "," + ((int)(realWorldPointArray3[6].z * decimals)) / decimals;
            Imgproc.putText(chessboardResult, text, new Point(positions.get(6, 0)[0], positions.get(6, 0)[1]), 0, .6, new Scalar(text_red, text_green, text_blue));
            text = ((int)(realWorldPointArray3[42].x * decimals)) / decimals + "," + ((int)(realWorldPointArray3[42].y * decimals)) / decimals + "," + ((int)(realWorldPointArray3[42].z * decimals)) / decimals;
            Imgproc.putText(chessboardResult, text, new Point(positions.get(42, 0)[0], positions.get(42, 0)[1]), 0, .6, new Scalar(text_red, text_green, text_blue));
            text = ((int)(realWorldPointArray3[48].x * decimals)) / decimals + "," + ((int)(realWorldPointArray3[48].y * decimals)) / decimals + "," + ((int)(realWorldPointArray3[48].z * decimals)) / decimals;
            Imgproc.putText(chessboardResult, text, new Point(positions.get(48, 0)[0], positions.get(48, 0)[1]), 0, .6, new Scalar(text_red, text_green, text_blue));

            // Intrinsics and distortion coefficients from the device.
            Mat cameraMatrix = Mat.eye(3, 3, CvType.CV_64F);
            cameraMatrix.put(0, 0, AK_receiver.GetComponent <akplay>().camInfoList[i].color_fx);
            cameraMatrix.put(1, 1, AK_receiver.GetComponent <akplay>().camInfoList[i].color_fy);
            cameraMatrix.put(0, 2, AK_receiver.GetComponent <akplay>().camInfoList[i].color_cx);
            cameraMatrix.put(1, 2, AK_receiver.GetComponent <akplay>().camInfoList[i].color_cy);
            double[] distortion = new double[8];
            distortion[0] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_k1;
            distortion[1] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_k2;
            distortion[2] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_p1;
            distortion[3] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_p2;
            distortion[4] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_k3;
            distortion[5] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_k4;
            distortion[6] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_k5;
            distortion[7] = AK_receiver.GetComponent <akplay>().camInfoList[i].color_k6;

            // ~1 ms to solve for pnp.
            // NOTE(review): solvepnpSucces is never checked — a failed solve still updates transforms.
            Mat rvec = new Mat();
            Mat tvec = new Mat();
            bool solvepnpSucces = Calib3d.solvePnP(new MatOfPoint3f(realWorldPointArray3), new MatOfPoint2f(imagePointArray), cameraMatrix, new MatOfDouble(distortion), rvec, tvec);
            Mat R = new Mat();
            Calib3d.Rodrigues(rvec, R);

            // Calculate unity vectors, and camera transforms.
            // Camera center in world space: C = -R^T * t.
            Mat camCenter = -R.t() * tvec;
            Mat forwardOffset = new Mat(3, 1, tvec.type());
            forwardOffset.put(0, 0, 0);
            forwardOffset.put(1, 0, 0);
            forwardOffset.put(2, 0, 1);
            Mat upOffset = new Mat(3, 1, tvec.type());
            upOffset.put(0, 0, 0);
            upOffset.put(1, 0, -1);
            upOffset.put(2, 0, 0);
            // Transform unit forward/up offsets into world space, then make them
            // direction vectors relative to the camera center.
            Mat forwardVectorCV = R.t() * (forwardOffset - tvec);
            forwardVectorCV = forwardVectorCV - camCenter;
            Mat upVectorCV = R.t() * (upOffset - tvec);
            upVectorCV = upVectorCV - camCenter;
            Vector3 forwardVectorUnity = new Vector3((float)forwardVectorCV.get(0, 0)[0], (float)forwardVectorCV.get(2, 0)[0], (float)forwardVectorCV.get(1, 0)[0]); //need to flip y and z due to unity coordinate system
            Vector3 upVectorUnity = new Vector3((float)upVectorCV.get(0, 0)[0], (float)upVectorCV.get(2, 0)[0], (float)upVectorCV.get(1, 0)[0]); //need to flip y and z due to unity coordinate system
            Vector3 camCenterUnity = new Vector3((float)camCenter.get(0, 0)[0], (float)camCenter.get(2, 0)[0], (float)camCenter.get(1, 0)[0]);
            Quaternion rotationUnity = Quaternion.LookRotation(forwardVectorUnity, upVectorUnity);

            // Blue cube marks the solved color-camera pose.
            GameObject colorMarker = GameObject.CreatePrimitive(PrimitiveType.Cube);
            //colorMarker.transform.localScale = new Vector3(0.1f, 0.1f, 0.2f);
            //colorMarker.transform.parent = AK_receiver.transform;
            colorMarker.layer = LayerMask.NameToLayer("Debug");
            colorMarker.transform.position = camCenterUnity;
            colorMarker.transform.rotation = Quaternion.LookRotation(forwardVectorUnity, upVectorUnity);
            colorMarker.GetComponent <Renderer>().material.color = Color.blue;

            Vector3 forwardDepth = AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.MultiplyPoint(forwardVectorUnity);
            Vector3 upDepth = AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.MultiplyPoint(upVectorUnity);
            Vector3 camCenterDepth = AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.MultiplyPoint(camCenterUnity);
            Quaternion rotationDepth = Quaternion.LookRotation(forwardDepth, upDepth);

            // Red cube marks the derived depth-camera pose: parent it to the color marker,
            // apply the inverse color extrinsics locally, then detach.
            GameObject depthMarker = GameObject.CreatePrimitive(PrimitiveType.Cube);
            depthMarker.layer = LayerMask.NameToLayer("Debug");
            depthMarker.transform.parent = colorMarker.transform;
            //depthMarker.transform.localScale = AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.lossyScale;
            depthMarker.transform.localRotation = AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.inverse.rotation;
            Vector3 matrixPosition = new Vector3(AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.inverse.GetColumn(3).x, AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.inverse.GetColumn(3).y, AK_receiver.GetComponent <akplay>().camInfoList[i].color_extrinsics.inverse.GetColumn(3).z);
            /*
             * depthMarker.transform.localRotation = AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.rotation;
             *
             * Vector3 matrixPosition = new Vector3(AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.GetColumn(3).x,
             *                                      AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.GetColumn(3).y,
             *                                      AK_receiver.GetComponent<akplay>().camInfoList[i].color_extrinsics.GetColumn(3).z);
             */
            depthMarker.transform.localPosition = -matrixPosition;
            depthMarker.transform.parent = null;
            colorMarker.transform.localScale = new Vector3(0.1f, 0.1f, 0.2f);
            depthMarker.transform.localScale = new Vector3(0.1f, 0.1f, 0.2f);
            //depthMarker.transform.parent = AK_receiver.transform;
            //depthMarker.transform.position = camCenterDepth;
            //depthMarker.transform.rotation = Quaternion.LookRotation(forwardDepth-camCenterDepth, upDepth-camCenterDepth);
            depthMarker.GetComponent <Renderer>().material.color = Color.red;

            // Move the live camera visualization to the solved depth pose.
            AK_receiver.GetComponent <akplay>().camInfoList[i].visualization.transform.position = depthMarker.transform.position; //need to flip y and z due to unity coordinate system
            AK_receiver.GetComponent <akplay>().camInfoList[i].visualization.transform.rotation = depthMarker.transform.rotation;
        }

        // Draw chessboard result to calibration ui:
        Texture2D colorTexture = new Texture2D(chessboardResult.cols(), chessboardResult.rows(), TextureFormat.BGRA32, false);
        colorTexture.LoadRawTextureData((IntPtr)chessboardResult.dataAddr(), (int)chessboardResult.total() * (int)chessboardResult.elemSize());
        colorTexture.Apply();
        checkerboard_display_list[i].GetComponent <Renderer>().material.mainTexture = colorTexture;

        // Draw threshold to calibration ui:
        Texture2D orangeTexture = new Texture2D(orangeMask.cols(), orangeMask.rows(), TextureFormat.R8, false);
        orangeTexture.LoadRawTextureData((IntPtr)orangeMask.dataAddr(), (int)orangeMask.total() * (int)orangeMask.elemSize());
        orangeTexture.Apply();
        threshold_display_list[i].GetComponent <Renderer>().material.mainTexture = orangeTexture;
    }
}
// Update is called once per frame.
// Pulls the latest webcam frame, normalizes its orientation for the device
// camera, tracks good features between the previous and current frame with
// pyramidal Lucas-Kanade optical flow, and draws the tracked motion vectors
// onto the output texture.
void Update()
{
    if (!initDone)
    {
        return;
    }

    if (screenOrientation != Screen.orientation)
    {
        screenOrientation = Screen.orientation;
        updateLayout();
    }

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    if (webCamTexture.width > 16 && webCamTexture.height > 16)
    {
#else
    if (webCamTexture.didUpdateThisFrame)
    {
#endif
        Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

        // Flip to the correct direction depending on camera facing and rotation.
        if (webCamDevice.isFrontFacing)
        {
            if (webCamTexture.videoRotationAngle == 0)
            {
                Core.flip(rgbaMat, rgbaMat, 1);
            }
            else if (webCamTexture.videoRotationAngle == 90)
            {
                Core.flip(rgbaMat, rgbaMat, 0);
            }
            // Original used a plain "if" for 180; the angles are mutually
            // exclusive, so chaining as "else if" is behavior-identical.
            else if (webCamTexture.videoRotationAngle == 180)
            {
                Core.flip(rgbaMat, rgbaMat, 0);
            }
            else if (webCamTexture.videoRotationAngle == 270)
            {
                Core.flip(rgbaMat, rgbaMat, 1);
            }
        }
        else
        {
            if (webCamTexture.videoRotationAngle == 180 || webCamTexture.videoRotationAngle == 270)
            {
                Core.flip(rgbaMat, rgbaMat, -1);
            }
        }

        if (mMOP2fptsPrev.rows() == 0)
        {
            // First time through the loop: we need prev and this mats plus
            // prev points. Both grays start as the same frame.
            Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);
            matOpFlowThis.copyTo(matOpFlowPrev);

            // Seed the tracker with corners from the (identical) prev frame.
            Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
            mMOP2fptsPrev.fromArray(MOPcorners.toArray());

            // Keep a safe copy of these corners for the next frame.
            mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
        }
        else
        {
            // We've been through before, so "this" mat is valid: it becomes
            // the new "prev", and we grab a fresh current frame.
            matOpFlowThis.copyTo(matOpFlowPrev);
            Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

            Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
            mMOP2fptsThis.fromArray(MOPcorners.toArray());

            // Retrieve the corners saved last frame (saves recomputing them),
            // then stash this frame's corners for next time through.
            mMOP2fptsSafe.copyTo(mMOP2fptsPrev);
            mMOP2fptsThis.copyTo(mMOP2fptsSafe);
        }

        // Track mMOP2fptsPrev from the previous gray frame into the current
        // one; mMOBStatus[i] == 1 when the flow for feature i was found.
        Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

        if (!mMOBStatus.empty())
        {
            List<Point> cornersPrev = mMOP2fptsPrev.toList();
            List<Point> cornersThis = mMOP2fptsThis.toList();
            List<byte> byteStatus = mMOBStatus.toList();

            // BUGFIX: the loop previously ran only to byteStatus.Count - 1,
            // silently skipping the last tracked feature. Status/prev/this
            // vectors all have the same length per calcOpticalFlowPyrLK.
            for (int i = 0; i < byteStatus.Count; i++)
            {
                if (byteStatus[i] == 1)
                {
                    Point pt = cornersThis[i];
                    Point pt2 = cornersPrev[i];
                    Core.circle(rgbaMat, pt, 5, colorRed, iLineThickness - 1);
                    Core.line(rgbaMat, pt, pt2, colorRed, iLineThickness);
                }
            }
        }

        Utils.matToTexture2D(rgbaMat, texture, colors);
        gameObject.GetComponent<Renderer>().material.mainTexture = texture;
    }
}

// Stop the webcam feed when this component is disabled.
void OnDisable()
{
    webCamTexture.Stop();
}
// Optical flow coroutine.
// Tracks good features between the previous and current frame (pyramidal
// Lucas-Kanade), colors each motion vector by its angle and speed, stores
// tracers for rendering, and reports the mean speed/angle of all vectors
// that passed the maxSpeed filter this frame.
IEnumerator OpticalFlow()
{
    Scalar tempHue;
    Scalar tempSpeed;
    int iCountTrackedPoints = 0;
    int vecCount = 0;

    if (mMOP2fptsPrev.rows() == 0)
    {
        // First time through the loop: we need prev and this mats.
        Imgproc.cvtColor(openCVCreateMat.rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);
        matOpFlowThis.copyTo(matOpFlowPrev);

        // Get prev corners (quality level / min distance come from sliders).
        Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, qLevel, minDistCorners);
        mMOP2fptsPrev.fromArray(MOPcorners.toArray());

        // Keep a safe copy of these corners for the next frame.
        mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
    }
    else
    {
        // We've been through before, so "this" mat is valid: it becomes
        // the new "prev", and we grab a fresh current frame.
        matOpFlowThis.copyTo(matOpFlowPrev);
        Imgproc.cvtColor(openCVCreateMat.rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

        // Get the corners for this mat (slider input).
        Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, qLevel, minDistCorners);
        mMOP2fptsThis.fromArray(MOPcorners.toArray());

        // Retrieve the corners saved last frame (saves recomputing them),
        // then stash this frame's corners for next time through.
        mMOP2fptsSafe.copyTo(mMOP2fptsPrev);
        mMOP2fptsThis.copyTo(mMOP2fptsSafe);
    }

    Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

    if (mMOBStatus.rows() > 0)
    {
        List<Point> cornersPrev = mMOP2fptsPrev.toList();
        List<Point> cornersThis = mMOP2fptsThis.toList();
        List<byte> byteStatus = mMOBStatus.toList();
        double absX;
        double absY; // used to compute the vector angle below

        // BUGFIX: the loop previously ran only to byteStatus.Count - 1,
        // silently skipping the last tracked feature.
        for (int i = 0; i < byteStatus.Count; i++)
        {
            if (byteStatus[i] == 1)
            {
                Point pt = cornersThis[i];
                Point pt2 = cornersPrev[i];

                float mySpeed = CalculateSpeedFloat(pt, pt2);
                absX = pt.x - pt2.x;
                absY = pt.y - pt2.y;
                float angle = Mathf.Atan2((float)absX, (float)absY) * Mathf.Rad2Deg;
                angle = Mathf.RoundToInt(angle);

                // Map angle and speed to display colors.
                tempHue = GetHueColor((int)angle);
                tempSpeed = GetSpeedColor((int)mySpeed);

                if (mySpeed > maxSpeed)
                {
                    // NOTE(review): this suspends the coroutine for a frame
                    // mid-loop; it looks like "continue" (skip this vector)
                    // may have been intended — confirm before changing.
                    yield return null;
                }
                else
                {
                    // Store so we can add tracers, and accumulate the means.
                    tracerPoints.AddTracersToStorage(pt, pt2, tempHue, tempSpeed, videoPlayer.frame, angle, mySpeed);
                    speedVec = speedVec + mySpeed;
                    angleVec = angleVec + angle;
                    vecCount++;
                }
                iCountTrackedPoints++;
            }
        }
    }

    // BUGFIX: guard against division by zero when no vectors passed the
    // speed filter this frame; previous mean values are retained instead.
    if (vecCount > 0)
    {
        meanSpeed = (int)(speedVec / vecCount);
        meanAngle = (int)(angleVec / vecCount);
    }

    sTrackingLogger = "Speed: " + meanSpeed.ToString() + " Angle: " + meanAngle.ToString() + "";
    textTrackedPoints.text = sTrackingLogger;

    yield return null;
}
// Update is called once per frame.
// Converts the current camera mat to grayscale, tracks good features
// between the previous and current frame with pyramidal Lucas-Kanade
// optical flow, draws the motion vectors, and logs "movement detected"
// when any vector is longer than 20 pixels.
void Update()
{
    if (!IsStarted)
    {
        return;
    }

    Mat grayMat = webCamTextureToMat.GetMat();
    Imgproc.cvtColor(grayMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

    if (mMOP2fptsPrev.rows() == 0)
    {
        // First time through the loop: prev and this start as the same frame.
        // BUGFIX: removed the no-op "matOpFlowPrev.empty();" — Mat.empty()
        // is a query returning bool, not a clearing operation, so the
        // discarded result had no effect; the redundant second
        // copyTo(matOpFlowPrev) is also gone.
        grayMat.copyTo(matOpFlowThis);
        matOpFlowThis.copyTo(matOpFlowPrev);

        // Get prev corners.
        Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.1, 100);
        mMOP2fptsPrev.fromArray(MOPcorners.toArray());

        // Keep a safe copy of these corners for the next frame.
        mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
    }
    else
    {
        // We've been through before, so "this" mat is valid: it becomes
        // the new "prev", and we grab a fresh current frame.
        matOpFlowThis.copyTo(matOpFlowPrev);
        grayMat.copyTo(matOpFlowThis);

        // Get the corners for this mat.
        Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.1, 100);
        mMOP2fptsThis.fromArray(MOPcorners.toArray());

        // Retrieve the corners saved last frame (saves recomputing them),
        // then stash this frame's corners for next time through.
        mMOP2fptsSafe.copyTo(mMOP2fptsPrev);
        mMOP2fptsThis.copyTo(mMOP2fptsSafe);
    }

    // Track mMOP2fptsPrev from the previous gray frame into the current one;
    // mMOBStatus[i] == 1 when the flow for feature i was found.
    Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

    if (!mMOBStatus.empty())
    {
        List<Point> cornersPrev = mMOP2fptsPrev.toList();
        List<Point> cornersThis = mMOP2fptsThis.toList();
        List<byte> byteStatus = mMOBStatus.toList();
        int num_distance = 0;

        // BUGFIX: the loop previously ran only to byteStatus.Count - 1,
        // silently skipping the last tracked feature.
        for (int i = 0; i < byteStatus.Count; i++)
        {
            if (byteStatus[i] == 1)
            {
                Point pt = cornersThis[i];
                Point pt2 = cornersPrev[i];
                Imgproc.circle(grayMat, pt, 5, colorRed, iLineThickness - 1);
                Imgproc.line(grayMat, pt, pt2, colorRed, iLineThickness);

                // Euclidean displacement of this feature between frames.
                double distance = System.Math.Sqrt(System.Math.Pow((pt2.x - pt.x), 2.0) + System.Math.Pow((pt2.y - pt.y), 2.0));
                if (distance > 20)
                {
                    num_distance++;
                }
            }
        }

        Debug.Log("Num of Distance : " + num_distance);
        if (num_distance > 0)
        {
            Debug.Log("Movement Detected !!");
        }
    }

    Utils.matToTexture2D(grayMat, texture, colors);
    gameObject.GetComponent<Renderer>().material.mainTexture = texture;
}