        private void Init()
        {
            rgbMat = new Mat();

            if (capture.isOpened())
            {
                Debug.Log("capture.isOpened() true");

                Debug.Log("CAP_PROP_FORMAT: " + capture.get(Videoio.CAP_PROP_FORMAT));
                Debug.Log("CAP_PROP_POS_MSEC: " + capture.get(Videoio.CAP_PROP_POS_MSEC));
                Debug.Log("CAP_PROP_POS_FRAMES: " + capture.get(Videoio.CAP_PROP_POS_FRAMES));
                Debug.Log("CAP_PROP_POS_AVI_RATIO: " + capture.get(Videoio.CAP_PROP_POS_AVI_RATIO));
                Debug.Log("CAP_PROP_FRAME_COUNT: " + capture.get(Videoio.CAP_PROP_FRAME_COUNT));
                Debug.Log("CAP_PROP_FPS: " + capture.get(Videoio.CAP_PROP_FPS));
                Debug.Log("CAP_PROP_FRAME_WIDTH: " + capture.get(Videoio.CAP_PROP_FRAME_WIDTH));
                Debug.Log("CAP_PROP_FRAME_HEIGHT: " + capture.get(Videoio.CAP_PROP_FRAME_HEIGHT));

                capture.grab();
                capture.retrieve(rgbMat, 0);
                colors  = new Color32[rgbMat.cols() * rgbMat.rows()];
                texture = new Texture2D(rgbMat.cols(), rgbMat.rows(), TextureFormat.RGBA32, false);
                gameObject.transform.localScale = new Vector3(-((float)rgbMat.cols() / (float)rgbMat.rows()), -1, -1);
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);


                gameObject.GetComponent <Renderer>().material.mainTexture = texture;

                isPlaying = true;
            }
            else
            {
                Debug.Log("capture.isOpened() false");
            }
        }
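
        // A minimal sketch (not part of the original snippet) of the per-frame loop
        // that would pair with Init() above; it assumes the same fields: capture,
        // rgbMat, colors, texture, isPlaying.
        void Update()
        {
            if (!isPlaying || capture == null)
            {
                return;
            }

            // Loop playback: rewind once the last frame has been consumed.
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                // OpenCV decodes video frames as BGR; convert to RGBA to match the
                // RGBA32 texture created in Init().
                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGBA);

                // Reusing the preallocated Color32 buffer avoids a per-frame allocation.
                OpenCVForUnity.UnityUtils.Utils.matToTexture2D(rgbMat, texture, colors);
            }
        }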
Example #2
        public override void UpdateValue()
        {
            if (capture == null)
            {
                return;
            }

            didUpdateResultMat = false;

            if (shouldUpdateVideoFrame)
            {
                shouldUpdateVideoFrame = false;

                //Loop play
                if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
                {
                    capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
                }

                if (capture.grab() && !imageOptimizationHelper.IsCurrentFrameSkipped())
                {
                    capture.retrieve(captureMat, 0);

                    Imgproc.cvtColor(captureMat, resultMat, Imgproc.COLOR_BGR2RGBA);
                    downScaleResultMat = imageOptimizationHelper.GetDownScaleMat(resultMat);

                    didUpdateResultMat = true;
                }
            }
        }
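
        // A sketch (not from the original snippet) of how a caller might consume the
        // flag set above; "texture" is an assumed Texture2D sized to resultMat.
        void LateUpdate()
        {
            if (didUpdateResultMat)
            {
                // Detection work would typically use downScaleResultMat for speed;
                // the full-resolution RGBA frame is what gets displayed.
                OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(resultMat, texture);
            }
        }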
        // Update is called once per frame
        void Update()
        {
            if (capture == null)
            {
                return;
            }

            //Loop play
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            // Note: on iOS this path can raise the error "PlayerLoop called recursively!"; WebCamTexture is recommended there instead.
            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                //Debug.Log ("Mat toString " + rgbMat.ToString ());


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                foreach (var rect in detectResult)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                    //draw landmark points
                    OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, points, new Scalar(0, 255, 0), 2);

                    //draw face rect
                    OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 0, 0), 2);
                }

                //Imgproc.putText (rgbMat, "W:" + rgbMat.width () + " H:" + rgbMat.height () + " SO:" + Screen.orientation, new Point (5, rgbMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(rgbMat, texture);
            }
        }
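Note: fastMatToTexture2D() copies the Mat's bytes straight into the texture, so the Mat's channel layout has to match the texture format; that is one reason the snippet converts BGR to RGB before the upload. The slower Utils.matToTexture2D() overload handles the channel conversion itself.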
Example #4
        void DoProcess()
        {
            if (!(owner.Value is OpenCVForUnityPlayMakerActions.VideoCapture))
            {
                LogError("owner is not initialized. Add Action \"newVideoCapture\".");
                return;
            }
            OpenCVForUnity.VideoioModule.VideoCapture wrapped_owner = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.VideoCapture, OpenCVForUnity.VideoioModule.VideoCapture>(owner);

            storeResult.Value = (float)wrapped_owner.get(propId.Value);
        }
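
        // A sketch of the matching "set" action's DoProcess under the same PlayMaker
        // conventions (hypothetical, for a separate action class; "value" is an
        // assumed FsmFloat field, and VideoCapture.set(int, double) is the
        // underlying OpenCV call).
        void DoProcess()
        {
            if (!(owner.Value is OpenCVForUnityPlayMakerActions.VideoCapture))
            {
                LogError("owner is not initialized. Add Action \"newVideoCapture\".");
                return;
            }
            OpenCVForUnity.VideoioModule.VideoCapture wrapped_owner = OpenCVForUnityPlayMakerActionsUtils.GetWrappedObject <OpenCVForUnityPlayMakerActions.VideoCapture, OpenCVForUnity.VideoioModule.VideoCapture>(owner);

            wrapped_owner.set(propId.Value, (double)value.Value);
        }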
        private void Run()
        {
            if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
            {
                Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }

            faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

            rgbMat = new Mat();

            capture = new VideoCapture();
            capture.open(video_filepath);

            if (!capture.isOpened())
            {
                Debug.LogError("capture.isOpened() is false. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }


            Debug.Log("CAP_PROP_FORMAT: " + capture.get(Videoio.CAP_PROP_FORMAT));
            Debug.Log("CAP_PROP_POS_MSEC: " + capture.get(Videoio.CAP_PROP_POS_MSEC));
            Debug.Log("CAP_PROP_POS_FRAMES: " + capture.get(Videoio.CAP_PROP_POS_FRAMES));
            Debug.Log("CAP_PROP_POS_AVI_RATIO: " + capture.get(Videoio.CAP_PROP_POS_AVI_RATIO));
            Debug.Log("CAP_PROP_FRAME_COUNT: " + capture.get(Videoio.CAP_PROP_FRAME_COUNT));
            Debug.Log("CAP_PROP_FPS: " + capture.get(Videoio.CAP_PROP_FPS));
            Debug.Log("CAP_PROP_FRAME_WIDTH: " + capture.get(Videoio.CAP_PROP_FRAME_WIDTH));
            Debug.Log("CAP_PROP_FRAME_HEIGHT: " + capture.get(Videoio.CAP_PROP_FRAME_HEIGHT));
            double ext = capture.get(Videoio.CAP_PROP_FOURCC);

            Debug.Log("CAP_PROP_FOURCC: " + (char)((int)ext & 0XFF) + (char)(((int)ext & 0XFF00) >> 8) + (char)(((int)ext & 0XFF0000) >> 16) + (char)(((int)ext & 0XFF000000) >> 24));

            capture.grab();
            capture.retrieve(rgbMat, 0);
            int frameWidth  = rgbMat.cols();
            int frameHeight = rgbMat.rows();

            texture = new Texture2D(frameWidth, frameHeight, TextureFormat.RGB24, false);
            gameObject.transform.localScale = new Vector3((float)frameWidth, (float)frameHeight, 1);
            float widthScale  = (float)Screen.width / (float)frameWidth;
            float heightScale = (float)Screen.height / (float)frameHeight;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = ((float)frameWidth * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = (float)frameHeight / 2;
            }
            capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);

            gameObject.GetComponent <Renderer>().material.mainTexture = texture;

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("dlib shape predictor", dlibShapePredictorFileName);
                fpsMonitor.Add("width", frameWidth.ToString());
                fpsMonitor.Add("height", frameHeight.ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }
        }
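For concreteness (illustrative numbers, not from the example): a 640×480 clip on a 1920×1080 screen gives widthScale = 1920/640 = 3.0 and heightScale = 1080/480 = 2.25; since widthScale < heightScale is false, the else branch sets Camera.main.orthographicSize = 480 / 2 = 240, half the quad's 480-unit height, so the video exactly fills the screen vertically.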
Example #6
        // Update is called once per frame
        void Update()
        {
            if (capture == null)
            {
                return;
            }

            //Loop play
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            // Note: on iOS this path can raise the error "PlayerLoop called recursively!"; WebCamTexture is recommended there instead.
            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                //Debug.Log ("Mat toString " + rgbMat.ToString ());


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                UnityEngine.Rect rect   = new UnityEngine.Rect();
                List <Vector2>   points = null;
                if (detectResult.Count > 0)
                {
                    rect = detectResult [0];

                    //detect landmark points
                    points = faceLandmarkDetector.DetectLandmark(rect);

                    skippedFrames = 0;
                }
                else
                {
                    skippedFrames++;
                    if (skippedFrames == maximumAllowedSkippedFrames)
                    {
                        if (drawLowPassFilter)
                        {
                            lowPassFilter.Reset();
                        }
                        if (drawKalmanFilter)
                        {
                            kalmanFilter.Reset();
                        }
                        if (drawOpticalFlowFilter)
                        {
                            opticalFlowFilter.Reset();
                        }
                        if (drawOFAndLPFilter)
                        {
                            // The OF+LP chain uses both filters, so reset both together.
                            opticalFlowFilter.Reset();
                            lowPassFilter.Reset();
                        }
                    }
                }

                if (drawLowPassFilter)
                {
                    lowPassFilter.Process(rgbMat, points, lowPassFilteredPoints, isDebugMode);
                }
                if (drawKalmanFilter)
                {
                    kalmanFilter.Process(rgbMat, points, kalmanFilteredPoints, isDebugMode);
                }
                if (drawOpticalFlowFilter)
                {
                    opticalFlowFilter.Process(rgbMat, points, opticalFlowFilteredPoints, isDebugMode);
                }
                if (drawOFAndLPFilter)
                {
                    opticalFlowFilter.Process(rgbMat, points, points, false);
                    lowPassFilter.Process(rgbMat, points, ofAndLPFilteredPoints, isDebugMode);
                }


                if (points != null && !isDebugMode)
                {
                    // draw raw landmark points.
                    OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, points, new Scalar(0, 255, 0), 2);
                }

                // draw face rect.
                //OpenCVForUnityUtils.DrawFaceRect (rgbMat, rect, new Scalar (255, 0, 0), 2);

                // draw filtered landmark points.
                if (points != null && !isDebugMode)
                {
                    if (drawLowPassFilter)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, lowPassFilteredPoints, new Scalar(0, 255, 255), 2);
                    }
                    if (drawKalmanFilter)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, kalmanFilteredPoints, new Scalar(0, 0, 255), 2);
                    }
                    if (drawOpticalFlowFilter)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, opticalFlowFilteredPoints, new Scalar(255, 0, 0), 2);
                    }
                    if (drawOFAndLPFilter)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, ofAndLPFilteredPoints, new Scalar(255, 0, 255), 2);
                    }
                }

                //Imgproc.putText (rgbMat, "W:" + rgbMat.width () + " H:" + rgbMat.height () + " SO:" + Screen.orientation, new Point (5, rgbMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(rgbMat, texture);
            }
        }
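
        // A sketch of how these filters are typically constructed (an assumption:
        // each constructor takes the landmark count, here read from the loaded
        // shape predictor; KFPointsFilter is the assumed Kalman filter type).
        void InitFilters()
        {
            int numPoints = (int)faceLandmarkDetector.GetShapePredictorNumParts();
            lowPassFilter     = new LowPassPointsFilter(numPoints);
            kalmanFilter      = new KFPointsFilter(numPoints);
            opticalFlowFilter = new OFPointsFilter(numPoints);
        }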
Example #7
        // Update is called once per frame
        void Update()
        {
            if (capture == null)
            {
                return;
            }

            //Loop play
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                //Debug.Log ("Mat toString " + rgbMat.ToString ());


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                if (detectResult.Count > 0)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult[0]);

                    if (displayFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, points, new Scalar(0, 255, 0), 2);
                    }

                    MatOfPoint3f objectPoints   = null;
                    bool         isRightEyeOpen = false;
                    bool         isLeftEyeOpen  = false;
                    bool         isMouthOpen    = false;
                    if (points.Count == 68)
                    {
                        objectPoints = objectPoints68;

                        imagePoints.fromArray(
                            new Point((points[38].x + points[41].x) / 2, (points[38].y + points[41].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points[43].x + points[46].x) / 2, (points[43].y + points[46].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points[30].x, points[30].y),                                           //nose (Tip)
                            new Point(points[33].x, points[33].y),                                           //nose (Subnasale)
                            new Point(points[0].x, points[0].y),                                             //l ear (Bitragion breadth)
                            new Point(points[16].x, points[16].y)                                            //r ear (Bitragion breadth)
                            );

                        if (Mathf.Abs((float)(points[43].y - points[46].y)) > Mathf.Abs((float)(points[42].x - points[45].x)) / 5.0)
                        {
                            isRightEyeOpen = true;
                        }

                        if (Mathf.Abs((float)(points[38].y - points[41].y)) > Mathf.Abs((float)(points[39].x - points[36].x)) / 5.0)
                        {
                            isLeftEyeOpen = true;
                        }

                        float noseDistance  = Mathf.Abs((float)(points[27].y - points[33].y));
                        float mouthDistance = Mathf.Abs((float)(points[62].y - points[66].y));
                        isMouthOpen = mouthDistance > noseDistance / 5.0;
                    }
                    else if (points.Count == 17)
                    {
                        objectPoints = objectPoints17;

                        imagePoints.fromArray(
                            new Point((points[2].x + points[3].x) / 2, (points[2].y + points[3].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points[4].x + points[5].x) / 2, (points[4].y + points[5].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points[0].x, points[0].y),                                         //nose (Tip)
                            new Point(points[1].x, points[1].y),                                         //nose (Subnasale)
                            new Point(points[6].x, points[6].y),                                         //l ear (Bitragion breadth)
                            new Point(points[8].x, points[8].y)                                          //r ear (Bitragion breadth)
                            );

                        if (Mathf.Abs((float)(points[11].y - points[12].y)) > Mathf.Abs((float)(points[4].x - points[5].x)) / 5.0)
                        {
                            isRightEyeOpen = true;
                        }

                        if (Mathf.Abs((float)(points[9].y - points[10].y)) > Mathf.Abs((float)(points[2].x - points[3].x)) / 5.0)
                        {
                            isLeftEyeOpen = true;
                        }

                        float noseDistance  = Mathf.Abs((float)(points[3].y - points[1].y));
                        float mouthDistance = Mathf.Abs((float)(points[14].y - points[16].y));
                        isMouthOpen = mouthDistance > noseDistance / 2.0;
                    }
                    else if (points.Count == 6)
                    {
                        objectPoints = objectPoints6;

                        imagePoints.fromArray(
                            new Point((points[2].x + points[3].x) / 2, (points[2].y + points[3].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points[4].x + points[5].x) / 2, (points[4].y + points[5].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points[0].x, points[0].y),                                         //nose (Tip)
                            new Point(points[1].x, points[1].y)                                          //nose (Subnasale)
                            );
                    }
                    else if (points.Count == 5)
                    {
                        objectPoints = objectPoints5;

                        imagePoints.fromArray(
                            new Point(points[3].x, points[3].y), //l eye (Inner corner of the eye)
                            new Point(points[1].x, points[1].y), //r eye (Inner corner of the eye)
                            new Point(points[2].x, points[2].y), //l eye (Tail of the eye)
                            new Point(points[0].x, points[0].y), //r eye (Tail of the eye)
                            new Point(points[4].x, points[4].y)  //nose (Nose top)
                            );

                        if (fpsMonitor != null)
                        {
                            fpsMonitor.consoleText = "This example mainly supports the 68-point face landmarks.";
                        }
                    }

                    // estimate head pose
                    if (rvec == null || tvec == null)
                    {
                        rvec = new Mat(3, 1, CvType.CV_64FC1);
                        tvec = new Mat(3, 1, CvType.CV_64FC1);
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }


                    double tvec_x = tvec.get(0, 0)[0], tvec_y = tvec.get(1, 0)[0], tvec_z = tvec.get(2, 0)[0];

                    bool    isNotInViewport = false;
                    Vector4 pos             = VP * new Vector4((float)tvec_x, (float)tvec_y, (float)tvec_z, 1.0f);
                    if (pos.w != 0)
                    {
                        float x = pos.x / pos.w, y = pos.y / pos.w, z = pos.z / pos.w;
                        if (x < -1.0f || x > 1.0f || y < -1.0f || y > 1.0f || z < -1.0f || z > 1.0f)
                        {
                            isNotInViewport = true;
                        }
                    }

                    if (double.IsNaN(tvec_z) || isNotInViewport)
                    { // If tvec is invalid or the estimated object lies outside the camera's field of view, re-solve from scratch rather than reusing the previous pose as an extrinsic guess.
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }
                    else
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                    }

                    //Debug.Log (tvec.dump());

                    if (!isNotInViewport)
                    {
                        // Display effects.
                        if (displayHead)
                        {
                            head.SetActive(true);
                        }
                        if (displayAxes)
                        {
                            axes.SetActive(true);
                        }

                        if (displayEffects)
                        {
                            rightEye.SetActive(isRightEyeOpen);
                            leftEye.SetActive(isLeftEyeOpen);

                            if (isMouthOpen)
                            {
                                mouth.SetActive(true);
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = true;
#if UNITY_5_5_OR_NEWER
                                    var main = ps.main;
                                    main.startSizeMultiplier = 20;
#else
                                    ps.startSize = 20;
#endif
                                }
                            }
                            else
                            {
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = false;
                                }
                            }
                        }

                        // Convert to unity pose data.
                        double[] rvecArr = new double[3];
                        rvec.get(0, 0, rvecArr);
                        double[] tvecArr = new double[3];
                        tvec.get(0, 0, tvecArr);
                        PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                        // Changes in pos/rot below these thresholds are ignored.
                        if (enableLowPassFilter)
                        {
                            ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                        }
                        oldPoseData = poseData;

                        // Create transform matrix.
                        transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);
                    }


                    // Convert the right-handed coordinate system (OpenCV) to the left-handed one (Unity).
                    // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
                    ARM = invertYM * transformationM * invertYM;

                    // Apply the Y-axis and Z-axis reflection matrices (adjusts the posture of the AR object).
                    ARM = ARM * invertYM * invertZM;

                    if (shouldMoveARCamera)
                    {
                        ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                        ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                    }
                    else
                    {
                        ARM = ARCamera.transform.localToWorldMatrix * ARM;
                        ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                    }
                }
                else
                {
                    // No face detected this frame: hide all AR objects.
                    rightEye.SetActive(false);
                    leftEye.SetActive(false);
                    head.SetActive(false);
                    mouth.SetActive(false);
                    axes.SetActive(false);
                }

                //Imgproc.putText (rgbMat, "W:" + rgbMat.width () + " H:" + rgbMat.height () + " SO:" + Screen.orientation, new Point (5, rgbMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(rgbMat, texture);
            }
        }
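
        // A sketch extracting the viewport test used above into a helper. VP is the
        // projection * view matrix built in Run(); after the perspective divide, a
        // point is on-screen iff its normalized device coordinates lie in [-1, 1]^3.
        bool IsInViewport(Vector3 worldPos)
        {
            Vector4 pos = VP * new Vector4(worldPos.x, worldPos.y, worldPos.z, 1.0f);
            if (pos.w == 0)
            {
                return true; // degenerate projection; treat as visible, as above
            }
            float x = pos.x / pos.w, y = pos.y / pos.w, z = pos.z / pos.w;
            return -1.0f <= x && x <= 1.0f && -1.0f <= y && y <= 1.0f && -1.0f <= z && z <= 1.0f;
        }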
Example #8
        private void Run()
        {
            if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
            {
                Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }

            //set 3D face object points (right-handed coordinate system).
            objectPoints68 = new MatOfPoint3f(
                new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
                new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
                new Point3(0.0, 50, 117), //nose (Tip)
                new Point3(0.0, 32, 97),  //nose (Subnasale)
                new Point3(-79, 90, 10),  //l ear (Bitragion breadth)
                new Point3(79, 90, 10)    //r ear (Bitragion breadth)
                );

            objectPoints17 = new MatOfPoint3f(
                new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
                new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
                new Point3(0.0, 50, 117), //nose (Tip)
                new Point3(0.0, 32, 97),  //nose (Subnasale)
                new Point3(-79, 90, 10),  //l ear (Bitragion breadth)
                new Point3(79, 90, 10)    //r ear (Bitragion breadth)
                );

            objectPoints6 = new MatOfPoint3f(
                new Point3(-34, 90, 83),  //l eye (Interpupillary breadth)
                new Point3(34, 90, 83),   //r eye (Interpupillary breadth)
                new Point3(0.0, 50, 117), //nose (Tip)
                new Point3(0.0, 32, 97)   //nose (Subnasale)
                );

            objectPoints5 = new MatOfPoint3f(
                new Point3(-23, 90, 83), //l eye (Inner corner of the eye)
                new Point3(23, 90, 83),  //r eye (Inner corner of the eye)
                new Point3(-50, 90, 80), //l eye (Tail of the eye)
                new Point3(50, 90, 80),  //r eye (Tail of the eye)
                new Point3(0.0, 32, 97)  //nose (Subnasale)
                );

            imagePoints = new MatOfPoint2f();

            faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

            rgbMat = new Mat();

            capture = new VideoCapture();
            capture.open(video_filepath);

            if (!capture.isOpened())
            {
                Debug.LogError("capture.isOpened() is false. Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }


            Debug.Log("CAP_PROP_FORMAT: " + capture.get(Videoio.CAP_PROP_FORMAT));
            Debug.Log("CAP_PROP_POS_MSEC: " + capture.get(Videoio.CAP_PROP_POS_MSEC));
            Debug.Log("CAP_PROP_POS_FRAMES: " + capture.get(Videoio.CAP_PROP_POS_FRAMES));
            Debug.Log("CAP_PROP_POS_AVI_RATIO: " + capture.get(Videoio.CAP_PROP_POS_AVI_RATIO));
            Debug.Log("CAP_PROP_FRAME_COUNT: " + capture.get(Videoio.CAP_PROP_FRAME_COUNT));
            Debug.Log("CAP_PROP_FPS: " + capture.get(Videoio.CAP_PROP_FPS));
            Debug.Log("CAP_PROP_FRAME_WIDTH: " + capture.get(Videoio.CAP_PROP_FRAME_WIDTH));
            Debug.Log("CAP_PROP_FRAME_HEIGHT: " + capture.get(Videoio.CAP_PROP_FRAME_HEIGHT));

            capture.grab();
            capture.retrieve(rgbMat, 0);
            int frameWidth  = rgbMat.cols();
            int frameHeight = rgbMat.rows();

            texture = new Texture2D(frameWidth, frameHeight, TextureFormat.RGB24, false);
            gameObject.transform.localScale = new Vector3((float)frameWidth, (float)frameHeight, 1);
            capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);

            gameObject.GetComponent <Renderer>().material.mainTexture = texture;

            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("dlib shape predictor", dlibShapePredictorFileName);
                fpsMonitor.Add("width", frameWidth.ToString());
                fpsMonitor.Add("height", frameHeight.ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }


            float width  = (float)frameWidth;
            float height = (float)frameHeight;

            float imageSizeScale = 1.0f;
            float widthScale     = (float)Screen.width / width;
            float heightScale    = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }


            //set camera parameters
            int    max_d = (int)Mathf.Max(width, height);
            double fx    = max_d;
            double fy    = max_d;
            double cx    = width / 2.0f;
            double cy    = height / 2.0f;

            camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, fx);
            camMatrix.put(0, 1, 0);
            camMatrix.put(0, 2, cx);
            camMatrix.put(1, 0, 0);
            camMatrix.put(1, 1, fy);
            camMatrix.put(1, 2, cy);
            camMatrix.put(2, 0, 0);
            camMatrix.put(2, 1, 0);
            camMatrix.put(2, 2, 1.0f);
            Debug.Log("camMatrix " + camMatrix.dump());


            distCoeffs = new MatOfDouble(0, 0, 0, 0);
            Debug.Log("distCoeffs " + distCoeffs.dump());

            // create AR camera P * V Matrix
            Matrix4x4 P = ARUtils.CalculateProjectionMatrixFromCameraMatrixValues((float)fx, (float)fy, (float)cx, (float)cy, width, height, 0.3f, 2000f);
            Matrix4x4 V = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));

            VP = P * V;

            //compute camera calibration values
            Size   imageSize      = new Size(width * imageSizeScale, height * imageSizeScale);
            double apertureWidth  = 0;
            double apertureHeight = 0;

            double[] fovx           = new double[1];
            double[] fovy           = new double[1];
            double[] focalLength    = new double[1];
            Point    principalPoint = new Point(0, 0);

            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log("imageSize " + imageSize.ToString());
            Debug.Log("apertureWidth " + apertureWidth);
            Debug.Log("apertureHeight " + apertureHeight);
            Debug.Log("fovx " + fovx[0]);
            Debug.Log("fovy " + fovy[0]);
            Debug.Log("focalLength " + focalLength[0]);
            Debug.Log("principalPoint " + principalPoint.ToString());
            Debug.Log("aspectratio " + aspectratio[0]);


            //Scale factors to convert between the OpenCV and Unity FOV conventions.
            double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));

            Debug.Log("fovXScale " + fovXScale);
            Debug.Log("fovYScale " + fovYScale);


            //Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale < heightScale)
            {
                ARCamera.fieldOfView = (float)(fovx[0] * fovXScale);
            }
            else
            {
                ARCamera.fieldOfView = (float)(fovy[0] * fovYScale);
            }


            invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
            Debug.Log("invertYM " + invertYM.ToString());

            invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
            Debug.Log("invertZM " + invertZM.ToString());


            axes.SetActive(false);
            head.SetActive(false);
            rightEye.SetActive(false);
            leftEye.SetActive(false);
            mouth.SetActive(false);

            mouthParticleSystem = mouth.GetComponentsInChildren <ParticleSystem>(true);
        }
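Worked through with illustrative numbers: a 640×480 frame gives max_d = 640, hence fx = fy = 640, cx = 320, cy = 240. Calib3d.calibrationMatrixValues then reports fovy = 2·atan(480 / (2·640)) ≈ 41.1 degrees, and because the principal point is exactly centered, fovXScale and fovYScale both reduce to 1.0, so ARCamera.fieldOfView ends up equal to the OpenCV estimate.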
Example #9
        private void Run()
        {
            meshOverlay = this.GetComponent <TrackedMeshOverlay> ();

            shader_FadeID            = Shader.PropertyToID("_Fade");
            shader_ColorCorrectionID = Shader.PropertyToID("_ColorCorrection");
            shader_LUTTexID          = Shader.PropertyToID("_LUTTex");

            rectangleTracker = new RectangleTracker();

            frontalFaceChecker = new FrontalFaceChecker((float)frameWidth, (float)frameHeight);

            faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);

            lowPassFilterDict     = new Dictionary <int, LowPassPointsFilter> ();
            opticalFlowFilterDict = new Dictionary <int, OFPointsFilter> ();

            faceMaskColorCorrector = new FaceMaskColorCorrector();

            rgbMat = new Mat();

            capture.open(couple_avi_filepath);

            if (capture.isOpened())
            {
                Debug.Log("capture.isOpened() true");
            }
            else
            {
                Debug.Log("capture.isOpened() false");
            }


            Debug.Log("CAP_PROP_FORMAT: " + capture.get(Videoio.CAP_PROP_FORMAT));
            Debug.Log("CAP_PROP_POS_MSEC: " + capture.get(Videoio.CAP_PROP_POS_MSEC));
            Debug.Log("CAP_PROP_POS_FRAMES: " + capture.get(Videoio.CAP_PROP_POS_FRAMES));
            Debug.Log("CAP_PROP_POS_AVI_RATIO: " + capture.get(Videoio.CAP_PROP_POS_AVI_RATIO));
            Debug.Log("CAP_PROP_FRAME_COUNT: " + capture.get(Videoio.CAP_PROP_FRAME_COUNT));
            Debug.Log("CAP_PROP_FPS: " + capture.get(Videoio.CAP_PROP_FPS));
            Debug.Log("CAP_PROP_FRAME_WIDTH: " + capture.get(Videoio.CAP_PROP_FRAME_WIDTH));
            Debug.Log("CAP_PROP_FRAME_HEIGHT: " + capture.get(Videoio.CAP_PROP_FRAME_HEIGHT));


            texture = new Texture2D((int)(frameWidth), (int)(frameHeight), TextureFormat.RGB24, false);
            gameObject.transform.localScale = new Vector3((float)frameWidth, (float)frameHeight, 1);
            float widthScale  = (float)Screen.width / (float)frameWidth;
            float heightScale = (float)Screen.height / (float)frameHeight;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = ((float)frameWidth * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = (float)frameHeight / 2;
            }

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

            meshOverlay.UpdateOverlayTransform(gameObject.transform);


            grayMat = new Mat((int)frameHeight, (int)frameWidth, CvType.CV_8UC1);
            cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
//            if (cascade.empty ()) {
//                Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
//            }

            displayFaceRectsToggle.isOn       = displayFaceRects;
            useDlibFaceDetecterToggle.isOn    = useDlibFaceDetecter;
            enableNoiseFilterToggle.isOn      = enableNoiseFilter;
            enableColorCorrectionToggle.isOn  = enableColorCorrection;
            filterNonFrontalFacesToggle.isOn  = filterNonFrontalFaces;
            displayDebugFacePointsToggle.isOn = displayDebugFacePoints;

            OnChangeFaceMaskButtonClick();
        }
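
        // A sketch of the matching teardown (assumed; the original class would
        // typically release these native resources in OnDestroy()).
        void OnDestroy()
        {
            if (capture != null)
                capture.release();
            if (rgbMat != null)
                rgbMat.Dispose();
            if (grayMat != null)
                grayMat.Dispose();
            if (faceLandmarkDetector != null)
                faceLandmarkDetector.Dispose();
        }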