Example #1
        protected virtual void Run()
        {
            if (string.IsNullOrEmpty(videoFilePath))
            {
                Debug.LogError("video file does not exist.");
            }

            captureMat = new Mat();
            resultMat  = new Mat();

            capture = new VideoCapture();
            capture.open(videoFilePath);

            if (!capture.isOpened())
            {
                Debug.LogError("capture.isOpened() false");
            }

            //Debug.Log("CAP_PROP_FORMAT: " + capture.get(Videoio.CAP_PROP_FORMAT));
            //Debug.Log("CAP_PROP_POS_MSEC: " + capture.get(Videoio.CAP_PROP_POS_MSEC));
            //Debug.Log("CAP_PROP_POS_FRAMES: " + capture.get(Videoio.CAP_PROP_POS_FRAMES));
            //Debug.Log("CAP_PROP_POS_AVI_RATIO: " + capture.get(Videoio.CAP_PROP_POS_AVI_RATIO));
            //Debug.Log("CAP_PROP_FRAME_COUNT: " + capture.get(Videoio.CAP_PROP_FRAME_COUNT));
            //Debug.Log("CAP_PROP_FPS: " + capture.get(Videoio.CAP_PROP_FPS));
            //Debug.Log("CAP_PROP_FRAME_WIDTH: " + capture.get(Videoio.CAP_PROP_FRAME_WIDTH));
            //Debug.Log("CAP_PROP_FRAME_HEIGHT: " + capture.get(Videoio.CAP_PROP_FRAME_HEIGHT));

            // Grab one frame up front so captureMat has the stream's size/format, then rewind.
            capture.grab();
            capture.retrieve(captureMat, 0);
            capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);

            StartCoroutine("WaitFrameTime");
        }
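
The body of the WaitFrameTime coroutine started above is not included in this example. A minimal sketch of what such a playback loop could look like, assuming it paces grab()/retrieve() at the clip's frame rate and rewinds at end of stream (the coroutine body, loop policy, and fallback frame rate are assumptions, not the sample's actual code; requires using System.Collections;):

        // Hypothetical sketch only: paces frame grabs at CAP_PROP_FPS and loops the clip.
        IEnumerator WaitFrameTime()
        {
            float fps = (float)capture.get(Videoio.CAP_PROP_FPS);
            float frameTime = 1f / (fps > 0 ? fps : 30f); // fall back to 30 fps if unknown

            while (true)
            {
                if (capture.grab())
                {
                    // Decode the grabbed frame into captureMat for processing.
                    capture.retrieve(captureMat, 0);
                }
                else
                {
                    // End of file: rewind and keep playing.
                    capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
                }
                yield return new WaitForSeconds(frameTime);
            }
        }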
Example #2
        void DoProcess()
        {
            if (!(owner.Value is OpenCVForUnityPlayMakerActions.VideoCapture))
            {
                LogError("owner is not initialized. Add Action \"newVideoCapture\".");
                return;
            }
            OpenCVForUnity.VideoioModule.VideoCapture wrapped_owner = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.VideoCapture, OpenCVForUnity.VideoioModule.VideoCapture>(owner);

            storeResult.Value = wrapped_owner.getBackendName();
        }
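
For reference, the PlayMaker action above is a thin wrapper over VideoCapture.getBackendName(), which reports the backend actually chosen for an opened capture. A plain C# sketch (the file path is a hypothetical placeholder):

        var capture = new OpenCVForUnity.VideoioModule.VideoCapture();
        if (capture.open("movie.mp4")) // hypothetical path
        {
            Debug.Log(capture.getBackendName()); // e.g. "FFMPEG" or "MSMF", platform-dependent
            capture.release();
        }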
Example #3
        void DoProcess()
        {
            if (!(owner.Value is OpenCVForUnityPlayMakerActions.VideoCapture))
            {
                LogError("owner is not initialized. Add Action \"newVideoCapture\".");
                return;
            }
            OpenCVForUnity.VideoioModule.VideoCapture wrapped_owner = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.VideoCapture, OpenCVForUnity.VideoioModule.VideoCapture>(owner);

            wrapped_owner.setExceptionMode(enable.Value);
        }
Example #4
        void DoProcess()
        {
            if (!(owner.Value is OpenCVForUnityPlayMakerActions.VideoCapture))
            {
                LogError("owner is not initialized. Add Action \"newVideoCapture\".");
                return;
            }
            OpenCVForUnity.VideoioModule.VideoCapture wrapped_owner = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.VideoCapture, OpenCVForUnity.VideoioModule.VideoCapture>(owner);

            storeResult.Value = wrapped_owner.open(index.Value);

            Fsm.Event(storeResult.Value ? trueEvent : falseEvent);
        }
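
open(int) addresses a camera by device index rather than by file path. A minimal usage sketch outside the FSM wrapper (index 0 is an assumption about the host machine):

        var capture = new OpenCVForUnity.VideoioModule.VideoCapture();
        bool opened = capture.open(0); // default camera; returns false if unavailable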
Example #5
        // Use this for initialization
        void Start()
        {
            capture = new VideoCapture();

            #if UNITY_WEBGL && !UNITY_EDITOR
            getFilePath_Coroutine = GetFilePath();
            StartCoroutine(getFilePath_Coroutine);
            #else
            haarcascade_frontalface_alt_xml_filepath = OpenCVForUnity.UnityUtils.Utils.getFilePath("haarcascade_frontalface_alt.xml");
            sp_human_face_68_dat_filepath            = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePath("sp_human_face_68.dat");
            couple_avi_filepath = OpenCVForUnity.UnityUtils.Utils.getFilePath("dance.avi");
            Run();
            #endif
        }
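
On WebGL the sample defers to a GetFilePath coroutine whose body is not shown here, since synchronous StreamingAssets access is unavailable on that platform. A sketch of how the same three assets could be resolved asynchronously, assuming the async counterparts of the getFilePath calls used above (the real coroutine may differ in detail):

        // Hypothetical sketch of the GetFilePath coroutine referenced above.
        private IEnumerator GetFilePath()
        {
            var getFilePathAsync_0 = OpenCVForUnity.UnityUtils.Utils.getFilePathAsync(
                "haarcascade_frontalface_alt.xml",
                (result) => { haarcascade_frontalface_alt_xml_filepath = result; });
            yield return getFilePathAsync_0;

            var getFilePathAsync_1 = DlibFaceLandmarkDetector.UnityUtils.Utils.getFilePathAsync(
                "sp_human_face_68.dat",
                (result) => { sp_human_face_68_dat_filepath = result; });
            yield return getFilePathAsync_1;

            var getFilePathAsync_2 = OpenCVForUnity.UnityUtils.Utils.getFilePathAsync(
                "dance.avi",
                (result) => { couple_avi_filepath = result; });
            yield return getFilePathAsync_2;

            getFilePath_Coroutine = null;
            Run();
        }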
Example #6
        void DoProcess()
        {
            if (!(owner.Value is OpenCVForUnityPlayMakerActions.VideoCapture))
            {
                LogError("owner is not initialized. Add Action \"newVideoCapture\".");
                return;
            }
            OpenCVForUnity.VideoioModule.VideoCapture wrapped_owner = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.VideoCapture, OpenCVForUnity.VideoioModule.VideoCapture>(owner);

            if (!(storeResult.Value is OpenCVForUnityPlayMakerActions.Double))
            {
                storeResult.Value = new OpenCVForUnityPlayMakerActions.Double();
            }
            ((OpenCVForUnityPlayMakerActions.Double)storeResult.Value).wrappedObject = wrapped_owner.get(propId.Value);
        }
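
get(propId) returns the requested property as a double, which the action above boxes into a PlayMaker Double wrapper. A direct usage sketch:

        double fps        = capture.get(Videoio.CAP_PROP_FPS);
        double frameCount = capture.get(Videoio.CAP_PROP_FRAME_COUNT);
        // Properties unsupported by the backend come back as 0.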
Example #7
        void DoProcess()
        {
            if (!(owner.Value is OpenCVForUnityPlayMakerActions.VideoCapture))
            {
                LogError("owner is not initialized. Add Action \"newVideoCapture\".");
                return;
            }
            OpenCVForUnity.VideoioModule.VideoCapture wrapped_owner = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.VideoCapture, OpenCVForUnity.VideoioModule.VideoCapture>(owner);

            if (!(_params.Value is OpenCVForUnityPlayMakerActions.MatOfInt))
            {
                LogError("_params is not initialized. Add Action \"newMatOfInt\".");
                return;
            }
            OpenCVForUnity.CoreModule.MatOfInt wrapped__params = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.MatOfInt, OpenCVForUnity.CoreModule.MatOfInt>(_params);

            storeResult.Value = wrapped_owner.open(filename.Value, apiPreference.Value, wrapped__params);

            Fsm.Event(storeResult.Value ? trueEvent : falseEvent);
        }
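
This overload passes an extra MatOfInt of (propertyId, value) pairs that the backend applies at open time. An illustrative construction (the file name, API preference, and property choices are assumptions; which properties a given backend honors varies):

        var openParams = new MatOfInt(
            Videoio.CAP_PROP_FRAME_WIDTH, 1280,
            Videoio.CAP_PROP_FRAME_HEIGHT, 720);
        bool ok = capture.open("movie.mp4", Videoio.CAP_ANY, openParams);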
Example #8
        // Use this for initialization
        void Start()
        {
            capture = new VideoCapture();

#if UNITY_WEBGL
            getFilePath_Coroutine = Utils.getFilePathAsync(fileName, (result) =>
            {
                getFilePath_Coroutine = null;

                Debug.Log("result " + result);

                capture.open(result);
                Init();
            });
            StartCoroutine(getFilePath_Coroutine);
#else
            capture.open(Utils.getFilePath(fileName));
            Init();
#endif
        }
Example #9
        void DoProcess()
        {
            if (!(owner.Value is OpenCVForUnityPlayMakerActions.VideoCapture))
            {
                LogError("owner is not initialized. Add Action \"newVideoCapture\".");
                return;
            }
            OpenCVForUnity.VideoioModule.VideoCapture wrapped_owner = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.VideoCapture, OpenCVForUnity.VideoioModule.VideoCapture>(owner);

            if (!(value.Value is OpenCVForUnityPlayMakerActions.Double))
            {
                LogError("value is not initialized. Add Action \"newDouble\".");
                return;
            }
            System.Double wrapped_value = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Double, System.Double>(value);

            storeResult.Value = wrapped_owner.set(propId.Value, wrapped_value);

            Fsm.Event(storeResult.Value ? trueEvent : falseEvent);
        }
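
set(propId, value) is the mirror of the get wrapper above; it returns false when the backend does not support or accept the property. For example:

        bool ok = capture.set(Videoio.CAP_PROP_POS_FRAMES, 0); // rewind to the first frame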
Example #10
        void DoProcess()
        {
            if (!(owner.Value is OpenCVForUnityPlayMakerActions.VideoCapture))
            {
                LogError("owner is not initialized. Add Action \"newVideoCapture\".");
                return;
            }
            OpenCVForUnity.VideoioModule.VideoCapture wrapped_owner = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.VideoCapture, OpenCVForUnity.VideoioModule.VideoCapture>(owner);

            if (!(image.Value is OpenCVForUnityPlayMakerActions.Mat))
            {
                LogError("image is not initialized. Add Action \"newMat\".");
                return;
            }
            OpenCVForUnity.CoreModule.Mat wrapped_image = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.Mat, OpenCVForUnity.CoreModule.Mat>(image);

            storeResult.Value = wrapped_owner.retrieve(wrapped_image, flag.Value);

            Fsm.Event(storeResult.Value ? trueEvent : falseEvent);
        }
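
grab() advances the stream by one frame while retrieve() decodes the grabbed frame; splitting the two lets multi-camera code grab from all devices first and decode afterwards. A minimal pairing (flag 0 selects the default stream/channel):

        if (capture.grab())
        {
            capture.retrieve(frame, 0); // frame is a pre-allocated Mat
        }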
Example #11
        public VideoCapture(OpenCVForUnity.VideoioModule.VideoCapture nativeObj) : base(nativeObj)
        {
        }
Example #12
        private void Run()
        {
            if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
            {
                Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }

            faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

            rgbMat = new Mat();

            capture = new VideoCapture();
            capture.open(video_filepath);

            if (!capture.isOpened())
            {
                Debug.LogError("capture.isOpened() is false. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }


            Debug.Log("CAP_PROP_FORMAT: " + capture.get(Videoio.CAP_PROP_FORMAT));
            Debug.Log("CAP_PROP_POS_MSEC: " + capture.get(Videoio.CAP_PROP_POS_MSEC));
            Debug.Log("CAP_PROP_POS_FRAMES: " + capture.get(Videoio.CAP_PROP_POS_FRAMES));
            Debug.Log("CAP_PROP_POS_AVI_RATIO: " + capture.get(Videoio.CAP_PROP_POS_AVI_RATIO));
            Debug.Log("CAP_PROP_FRAME_COUNT: " + capture.get(Videoio.CAP_PROP_FRAME_COUNT));
            Debug.Log("CAP_PROP_FPS: " + capture.get(Videoio.CAP_PROP_FPS));
            Debug.Log("CAP_PROP_FRAME_WIDTH: " + capture.get(Videoio.CAP_PROP_FRAME_WIDTH));
            Debug.Log("CAP_PROP_FRAME_HEIGHT: " + capture.get(Videoio.CAP_PROP_FRAME_HEIGHT));
            double ext = capture.get(Videoio.CAP_PROP_FOURCC);

            // Unpack the FOURCC value byte by byte into the four-character codec code.
            Debug.Log("CAP_PROP_FOURCC: " + (char)((int)ext & 0XFF) + (char)(((int)ext & 0XFF00) >> 8) + (char)(((int)ext & 0XFF0000) >> 16) + (char)(((int)ext & 0XFF000000) >> 24));

            capture.grab();
            capture.retrieve(rgbMat, 0);
            int frameWidth  = rgbMat.cols();
            int frameHeight = rgbMat.rows();

            texture = new Texture2D(frameWidth, frameHeight, TextureFormat.RGB24, false);
            gameObject.transform.localScale = new Vector3((float)frameWidth, (float)frameHeight, 1);
            float widthScale  = (float)Screen.width / (float)frameWidth;
            float heightScale = (float)Screen.height / (float)frameHeight;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = ((float)frameWidth * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = (float)frameHeight / 2;
            }
            capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);

            gameObject.GetComponent <Renderer>().material.mainTexture = texture;

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("dlib shape predictor", dlibShapePredictorFileName);
                fpsMonitor.Add("width", frameWidth.ToString());
                fpsMonitor.Add("height", frameHeight.ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }
        }
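
The FOURCC unpacking inside Run() above can be factored into a small helper; a sketch (the helper name is an assumption, not part of the sample):

        // Decode the CAP_PROP_FOURCC double into its four-character codec code, e.g. "MJPG".
        static string FourccToString(double fourcc)
        {
            int v = (int)fourcc;
            return new string(new char[] {
                (char)(v & 0xFF),
                (char)((v >> 8) & 0xFF),
                (char)((v >> 16) & 0xFF),
                (char)((v >> 24) & 0xFF)
            });
        }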
Example #13
        private void Run()
        {
            if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
            {
                Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }

            // Set 3D face object points (right-handed coordinate system).
            objectPoints68 = new MatOfPoint3f(
                new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
                new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
                new Point3(0.0, 50, 117), //nose (Tip)
                new Point3(0.0, 32, 97),  //nose (Subnasale)
                new Point3(-79, 90, 10),  //l ear (Bitragion breadth)
                new Point3(79, 90, 10)    //r ear (Bitragion breadth)
                );

            objectPoints17 = new MatOfPoint3f(
                new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
                new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
                new Point3(0.0, 50, 117), //nose (Tip)
                new Point3(0.0, 32, 97),  //nose (Subnasale)
                new Point3(-79, 90, 10),  //l ear (Bitragion breadth)
                new Point3(79, 90, 10)    //r ear (Bitragion breadth)
                );

            objectPoints6 = new MatOfPoint3f(
                new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
                new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
                new Point3(0.0, 50, 117), //nose (Tip)
                new Point3(0.0, 32, 97)   //nose (Subnasale)
                );

            objectPoints5 = new MatOfPoint3f(
                new Point3(-23, 90, 83), //l eye (Inner corner of the eye)
                new Point3(23, 90, 83),  //r eye (Inner corner of the eye)
                new Point3(-50, 90, 80), //l eye (Tail of the eye)
                new Point3(50, 90, 80),  //r eye (Tail of the eye)
                new Point3(0.0, 32, 97)  //nose (Subnasale)
                );

            imagePoints = new MatOfPoint2f();

            faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

            rgbMat = new Mat();

            capture = new VideoCapture();
            capture.open(video_filepath);

            if (!capture.isOpened())
            {
                Debug.LogError("capture.isOpened() is false. Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }


            Debug.Log("CAP_PROP_FORMAT: " + capture.get(Videoio.CAP_PROP_FORMAT));
            Debug.Log("CAP_PROP_POS_MSEC: " + capture.get(Videoio.CAP_PROP_POS_MSEC));
            Debug.Log("CAP_PROP_POS_FRAMES: " + capture.get(Videoio.CAP_PROP_POS_FRAMES));
            Debug.Log("CAP_PROP_POS_AVI_RATIO: " + capture.get(Videoio.CAP_PROP_POS_AVI_RATIO));
            Debug.Log("CAP_PROP_FRAME_COUNT: " + capture.get(Videoio.CAP_PROP_FRAME_COUNT));
            Debug.Log("CAP_PROP_FPS: " + capture.get(Videoio.CAP_PROP_FPS));
            Debug.Log("CAP_PROP_FRAME_WIDTH: " + capture.get(Videoio.CAP_PROP_FRAME_WIDTH));
            Debug.Log("CAP_PROP_FRAME_HEIGHT: " + capture.get(Videoio.CAP_PROP_FRAME_HEIGHT));

            capture.grab();
            capture.retrieve(rgbMat, 0);
            int frameWidth  = rgbMat.cols();
            int frameHeight = rgbMat.rows();

            texture = new Texture2D(frameWidth, frameHeight, TextureFormat.RGB24, false);
            gameObject.transform.localScale = new Vector3((float)frameWidth, (float)frameHeight, 1);
            capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);

            gameObject.GetComponent <Renderer>().material.mainTexture = texture;

            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("dlib shape predictor", dlibShapePredictorFileName);
                fpsMonitor.Add("width", frameWidth.ToString());
                fpsMonitor.Add("height", frameHeight.ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }


            float width  = (float)frameWidth;
            float height = (float)frameHeight;

            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }


            // Set camera parameters: approximate the focal length by the larger image dimension, principal point at the image center.
            int    max_d = (int)Mathf.Max(width, height);
            double fx    = max_d;
            double fy    = max_d;
            double cx    = width / 2.0f;
            double cy    = height / 2.0f;

            camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());


            distCoeffs = new MatOfDouble(0, 0, 0, 0);
            Debug.Log("distCoeffs " + distCoeffs.dump());

            // create AR camera P * V Matrix
            Matrix4x4 P = ARUtils.CalculateProjectionMatrixFromCameraMatrixValues((float)fx, (float)fy, (float)cx, (float)cy, width, height, 0.3f, 2000f);
            Matrix4x4 V = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));

            VP = P * V;

            // Query calibration values (FOV, focal length, principal point) from the camera matrix.
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx[0]);
            Debug.Log("fovy " + fovy[0]);
            Debug.Log("focalLength " + focalLength[0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio[0]);


            // Scale factors to reconcile the FOV conventions of OpenCV and Unity.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            Debug.Log("fovXScale " + fovXScale);
            Debug.Log("fovYScale " + fovYScale);


            // Adjust the Unity camera FOV (see https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4).
            if (widthScale < heightScale)
            {
                ARCamera.fieldOfView = (float)(fovx[0] * fovXScale);
            }
            else
            {
                ARCamera.fieldOfView = (float)(fovy[0] * fovYScale);
            }


            invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
            Debug.Log("invertYM " + invertYM.ToString());

            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
            Debug.Log("invertZM " + invertZM.ToString());


            axes.SetActive(false);
            head.SetActive(false);
            rightEye.SetActive(false);
            leftEye.SetActive(false);
            mouth.SetActive(false);

            mouthParticleSystem = mouth.GetComponentsInChildren <ParticleSystem>(true);
        }
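
Run() above only prepares the model points, camera matrix, and conversion matrices; the per-frame pose fit itself is not shown. A hypothetical sketch of the step these matrices feed into, fitting detected 2D landmarks to the 3D model points with Calib3d.solvePnP (variable choices and the conversion step are assumptions):

        // After filling imagePoints from the detected landmarks for the current frame:
        Mat rvec = new Mat();
        Mat tvec = new Mat();
        Calib3d.solvePnP(objectPoints68, imagePoints, camMatrix, distCoeffs, rvec, tvec);
        // rvec/tvec give the head pose in OpenCV coordinates; converting them to a
        // Matrix4x4 and combining with invertYM/invertZM/VP drives the AR objects.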