/// <summary>
/// Raises the web cam texture to mat helper initialized event.
/// Sets up the display texture, fits the orthographic camera to the frame,
/// and allocates the working buffers for the optical-flow pipeline.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
    UnityEngine.Debug.Log("OnWebCamTextureToMatHelperInitialized");

    OpenCVForUnity.CoreModule.Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

    // Allocate the RGBA display texture at the camera frame size and bind it
    // to this object's material; scale the quad to match the frame aspect.
    texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
    gameObject.GetComponent<Renderer>().material.mainTexture = texture;
    gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);

    UnityEngine.Debug.Log($"Screen.width {Screen.width} Screen.height {Screen.height} Screen.orientation {Screen.orientation}");

    if (fpsMonitor != null)
    {
        fpsMonitor.Add("width", webCamTextureMat.width().ToString());
        fpsMonitor.Add("height", webCamTextureMat.height().ToString());
        fpsMonitor.Add("orientation", Screen.orientation.ToString());
    }

    // Fit the orthographic camera so the whole frame is visible: when the
    // screen is relatively narrower than the frame, size by width; otherwise
    // size by height.
    float frameWidth = webCamTextureMat.width();
    float frameHeight = webCamTextureMat.height();
    float scaleX = (float)Screen.width / frameWidth;
    float scaleY = (float)Screen.height / frameHeight;
    Camera.main.orthographicSize = scaleX < scaleY
        ? (frameWidth * (float)Screen.height / (float)Screen.width) / 2
        : frameHeight / 2;

    // Working buffers for Shi-Tomasi corner detection and pyramidal
    // Lucas-Kanade optical flow (see Update()).
    matOpFlowThis = new OpenCVForUnity.CoreModule.Mat();
    matOpFlowPrev = new OpenCVForUnity.CoreModule.Mat();
    MOPcorners = new MatOfPoint();
    mMOP2fptsThis = new MatOfPoint2f();
    mMOP2fptsPrev = new MatOfPoint2f();
    mMOP2fptsSafe = new MatOfPoint2f();
    mMOBStatus = new MatOfByte();
    mMOFerr = new MatOfFloat();
}
// Update is called once per frame.
// Runs one step of sparse optical flow: rotates the gray-frame / corner
// buffers, calls calcOpticalFlowPyrLK, pushes the frame to the display
// texture, and every ~5 seconds pauses the feed to run snapshot/shape
// detection.
void Update()
{
    // Only process when the camera feed is live and delivered a new frame.
    if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
    {
        OpenCVForUnity.CoreModule.Mat rgbaMat = webCamTextureToMatHelper.GetMat();

        if (mMOP2fptsPrev.rows() == 0)
        {
            // First time through the loop: no previous frame or points exist
            // yet, so seed both gray mats from this frame and detect the
            // initial corners to track.

            // get this mat (grayscale of the current frame)
            Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

            // copy that to prev mat
            matOpFlowThis.copyTo(matOpFlowPrev);

            // get prev corners (Shi-Tomasi: up to iGFFTMax corners,
            // quality 0.05, min distance 20 px)
            Imgproc.goodFeaturesToTrack(matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
            mMOP2fptsPrev.fromArray(MOPcorners.toArray());

            // get safe copy of this corners
            mMOP2fptsPrev.copyTo(mMOP2fptsSafe);
        }
        else
        {
            // We've been through before, so this mat is valid.
            // NOTE: the order of the copies below is load-bearing — it rotates
            // this->prev for the gray frames and safe->prev / this->safe for
            // the tracked points.

            // Copy it to prev mat
            matOpFlowThis.copyTo(matOpFlowPrev);

            // get this mat
            Imgproc.cvtColor(rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

            // get the corners for this mat
            Imgproc.goodFeaturesToTrack(matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
            mMOP2fptsThis.fromArray(MOPcorners.toArray());

            // retrieve the corners from the prev mat
            // (saves calculating them again)
            mMOP2fptsSafe.copyTo(mMOP2fptsPrev);

            // and save this corners for next time through
            mMOP2fptsThis.copyTo(mMOP2fptsSafe);
        }

        /*
         * Parameters:
         *   prevImg  first 8-bit input image
         *   nextImg  second input image
         *   prevPts  vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
         *   nextPts  output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
         *   status   output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
         *   err      output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
         */
        Video.calcOpticalFlowPyrLK(matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

        // Visualization of the tracked flow (disabled).
        //if (mMOBStatus.rows() > 0)
        //{
        //    List<Point> cornersPrev = mMOP2fptsPrev.toList();
        //    List<Point> cornersThis = mMOP2fptsThis.toList();
        //    List<byte> byteStatus = mMOBStatus.toList();
        //    int x = 0;
        //    int y = byteStatus.Count - 1;
        //    for (x = 0; x < y; x++)
        //    {
        //        if (byteStatus[x] == 1)
        //        {
        //            Point pt = cornersThis[x];
        //            Point pt2 = cornersPrev[x];
        //            Imgproc.circle(rgbaMat, pt, 5, colorRed, iLineThickness - 1);
        //            Imgproc.line(rgbaMat, pt, pt2, colorRed, iLineThickness);
        //        }
        //    }
        //}

        // Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

        // Push the (possibly annotated) frame to the display texture.
        Utils.fastMatToTexture2D(rgbaMat, texture);

        // Every 5 seconds (wall clock), pause the feed, take a snapshot and
        // run shape detection, then resume.
        // NOTE(review): DateTime.Now is wall-clock time — a system clock
        // change skews this interval; Time.time (or DateTime.UtcNow) would be
        // more robust. TakeSnapshot()/DetectShapes() are defined elsewhere in
        // this class; presumably DetectShapes consumes the snapshot — verify.
        if (DateTime.Now >= dataAtual.AddSeconds(5))
        {
            webCamTextureToMatHelper.Pause();
            TakeSnapshot();
            DetectShapes();
            dataAtual = DateTime.Now;
            webCamTextureToMatHelper.Play();
        }
    }
}