Example #1
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                // detect faces.
                List <Rect> detectResult = new List <Rect>();
                if (useDlibFaceDetecter)
                {
                    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
                    List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                    foreach (var unityRect in result)
                    {
                        detectResult.Add(new Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                    }
                }
                else
                {
                    // convert image to greyscale.
                    Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect())
                        {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                            detectResult = faces.toList();
                        }

                    // correct the vertical offset between the OpenCV and Dlib detection results.
                    foreach (Rect r in detectResult)
                    {
                        r.y += (int)(r.height * 0.1f);
                    }
                }


                // face tracking.
                rectangleTracker.UpdateTrackedObjects(detectResult);
                List <TrackedRect> trackedRects = new List <TrackedRect>();
                rectangleTracker.GetObjects(trackedRects, true);

                // create noise filter.
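                // Per-face filter instances are keyed by tracker id: created when a rect
                // first appears (TrackedState.NEW) and disposed when the tracker drops it (DELETED).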
                foreach (var openCVRect in trackedRects)
                {
                    if (openCVRect.state == TrackedState.NEW)
                    {
                        if (!lowPassFilterDict.ContainsKey(openCVRect.id))
                        {
                            lowPassFilterDict.Add(openCVRect.id, new LowPassPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
                        }
                        if (!opticalFlowFilterDict.ContainsKey(openCVRect.id))
                        {
                            opticalFlowFilterDict.Add(openCVRect.id, new OFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
                        }
                    }
                    else if (openCVRect.state == TrackedState.DELETED)
                    {
                        if (lowPassFilterDict.ContainsKey(openCVRect.id))
                        {
                            lowPassFilterDict[openCVRect.id].Dispose();
                            lowPassFilterDict.Remove(openCVRect.id);
                        }
                        if (opticalFlowFilterDict.ContainsKey(openCVRect.id))
                        {
                            opticalFlowFilterDict[openCVRect.id].Dispose();
                            opticalFlowFilterDict.Remove(openCVRect.id);
                        }
                    }
                }

                // create LUT texture.
                foreach (var openCVRect in trackedRects)
                {
                    if (openCVRect.state == TrackedState.NEW)
                    {
                        faceMaskColorCorrector.CreateLUTTex(openCVRect.id);
                    }
                    else if (openCVRect.state == TrackedState.DELETED)
                    {
                        faceMaskColorCorrector.DeleteLUTTex(openCVRect.id);
                    }
                }

                // detect face landmark points.
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
                List <List <Vector2> > landmarkPoints = new List <List <Vector2> >();
                for (int i = 0; i < trackedRects.Count; i++)
                {
                    TrackedRect      tr   = trackedRects[i];
                    UnityEngine.Rect rect = new UnityEngine.Rect(tr.x, tr.y, tr.width, tr.height);

                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                    // apply noise filter.
                    if (enableNoiseFilter)
                    {
                        if (tr.state > TrackedState.NEW && tr.state < TrackedState.DELETED)
                        {
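                            // Smooth only established tracks (past NEW, not yet DELETED);
                            // both filters update the landmark list in place.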
                            opticalFlowFilterDict[tr.id].Process(rgbaMat, points, points);
                            lowPassFilterDict[tr.id].Process(rgbaMat, points, points);
                        }
                    }

                    landmarkPoints.Add(points);
                }

                // face masking.
                if (faceMaskTexture != null && landmarkPoints.Count >= 1)
                { // Apply face masking between detected faces and a face mask image.
                    float maskImageWidth  = faceMaskTexture.width;
                    float maskImageHeight = faceMaskTexture.height;

                    TrackedRect tr;

                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        tr = trackedRects[i];

                        if (tr.state == TrackedState.NEW)
                        {
                            meshOverlay.CreateObject(tr.id, faceMaskTexture);
                        }
                        if (tr.state < TrackedState.DELETED)
                        {
                            MaskFace(meshOverlay, tr, landmarkPoints[i], faceLandmarkPointsInMask, maskImageWidth, maskImageHeight);

                            if (enableColorCorrection)
                            {
                                CorrectFaceMaskColor(tr.id, faceMaskMat, rgbaMat, faceLandmarkPointsInMask, landmarkPoints[i]);
                            }
                        }
                        else if (tr.state == TrackedState.DELETED)
                        {
                            meshOverlay.DeleteObject(tr.id);
                        }
                    }
                }
                else if (landmarkPoints.Count >= 1)
                { // Apply face masking using the first detected face as the mask.
                    float maskImageWidth  = texture.width;
                    float maskImageHeight = texture.height;

                    TrackedRect tr;

                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        tr = trackedRects[i];

                        if (tr.state == TrackedState.NEW)
                        {
                            meshOverlay.CreateObject(tr.id, texture);
                        }
                        if (tr.state < TrackedState.DELETED)
                        {
                            MaskFace(meshOverlay, tr, landmarkPoints[i], landmarkPoints[0], maskImageWidth, maskImageHeight);

                            if (enableColorCorrection)
                            {
                                CorrectFaceMaskColor(tr.id, rgbaMat, rgbaMat, landmarkPoints[0], landmarkPoints[i]);
                            }
                        }
                        else if (tr.state == TrackedState.DELETED)
                        {
                            meshOverlay.DeleteObject(tr.id);
                        }
                    }
                }

                // draw face rects.
                if (displayFaceRects)
                {
                    for (int i = 0; i < detectResult.Count; i++)
                    {
                        UnityEngine.Rect rect = new UnityEngine.Rect(detectResult[i].x, detectResult[i].y, detectResult[i].width, detectResult[i].height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect, new Scalar(255, 0, 0, 255), 2);
                    }

                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        UnityEngine.Rect rect = new UnityEngine.Rect(trackedRects[i].x, trackedRects[i].y, trackedRects[i].width, trackedRects[i].height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect, new Scalar(255, 255, 0, 255), 2);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceChecker.GetFrontalFaceAngles (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceChecker.GetFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    }
                }

                // draw face points.
                if (displayDebugFacePoints)
                {
                    for (int i = 0; i < landmarkPoints.Count; i++)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, landmarkPoints[i], new Scalar(0, 255, 0, 255), 2);
                    }
                }


                // display face mask image.
                if (faceMaskTexture != null && faceMaskMat != null)
                {
                    if (displayFaceRects)
                    {
                        OpenCVForUnityUtils.DrawFaceRect(faceMaskMat, faceRectInMask, new Scalar(255, 0, 0, 255), 2);
                    }
                    if (displayDebugFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(faceMaskMat, faceLandmarkPointsInMask, new Scalar(0, 255, 0, 255), 2);
                    }

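                    // Build a 2x3 affine matrix [scale, 0, tx; 0, scale, ty] that shrinks
                    // the mask image to a quarter of the frame width and pins it to the
                    // top-right corner of the camera image.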
                    float scale = (rgbaMat.width() / 4f) / faceMaskMat.width();
                    float tx    = rgbaMat.width() - faceMaskMat.width() * scale;
                    float ty    = 0.0f;
                    Mat   trans = new Mat(2, 3, CvType.CV_32F); //scale, 0.0, tx, 0.0, scale, ty
                    trans.put(0, 0, scale);
                    trans.put(0, 1, 0.0f);
                    trans.put(0, 2, tx);
                    trans.put(1, 0, 0.0f);
                    trans.put(1, 1, scale);
                    trans.put(1, 2, ty);

                    Imgproc.warpAffine(faceMaskMat, rgbaMat, trans, rgbaMat.size(), Imgproc.INTER_LINEAR, Core.BORDER_TRANSPARENT, new Scalar(0));

                    if (displayFaceRects || displayDebugFacePoints)
                    {
                        OpenCVForUnity.UnityUtils.Utils.texture2DToMat(faceMaskTexture, faceMaskMat);
                    }
                }

                //Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(rgbaMat, texture);
            }
        }
Example #2
        // Update is called once per frame
        void Update()
        {
            if (capture == null)
            {
                return;
            }

            // Loop playback: rewind when the last frame has been played.
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            // Note: a 'PlayerLoop called recursively!' error can occur on iOS; WebCamTexture is recommended there instead of VideoCapture.
            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                //Debug.Log ("Mat toString " + rgbMat.ToString ());


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                UnityEngine.Rect rect   = new UnityEngine.Rect();
                List <Vector2>   points = null;
                if (detectResult.Count > 0)
                {
                    rect = detectResult[0];

                    //detect landmark points
                    points = faceLandmarkDetector.DetectLandmark(rect);

                    skippedFrames = 0;
                }
                else
                {
                    skippedFrames++;
                    if (skippedFrames == maximumAllowedSkippedFrames)
                    {
                        if (drawLowPassFilter)
                        {
                            lowPassFilter.Reset();
                        }
                        if (drawKalmanFilter)
                        {
                            kalmanFilter.Reset();
                        }
                        if (drawOpticalFlowFilter)
                        {
                            opticalFlowFilter.Reset();
                        }
                        if (drawOFAndLPFilter)
                        {
                            opticalFlowFilter.Reset();
                            lowPassFilter.Reset();
                        }
                    }
                }

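                // Each enabled filter writes its smoothed copy to a separate list so raw and
                // filtered landmarks can be drawn side by side; points is still null here if
                // no face was detected in this frame.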
                if (drawLowPassFilter)
                {
                    lowPassFilter.Process(rgbMat, points, lowPassFilteredPoints, isDebugMode);
                }
                if (drawKalmanFilter)
                {
                    kalmanFilter.Process(rgbMat, points, kalmanFilteredPoints, isDebugMode);
                }
                if (drawOpticalFlowFilter)
                {
                    opticalFlowFilter.Process(rgbMat, points, opticalFlowFilteredPoints, isDebugMode);
                }
                if (drawOFAndLPFilter)
                {
                    opticalFlowFilter.Process(rgbMat, points, points, false);
                    lowPassFilter.Process(rgbMat, points, ofAndLPFilteredPoints, isDebugMode);
                }


                if (points != null && !isDebugMode)
                {
                    // draw raw landmark points.
                    OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, points, new Scalar(0, 255, 0), 2);
                }

                // draw face rect.
                //OpenCVForUnityUtils.DrawFaceRect (rgbMat, rect, new Scalar (255, 0, 0), 2);

                // draw filtered landmark points.
                if (points != null && !isDebugMode)
                {
                    if (drawLowPassFilter)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, lowPassFilteredPoints, new Scalar(0, 255, 255), 2);
                    }
                    if (drawKalmanFilter)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, kalmanFilteredPoints, new Scalar(0, 0, 255), 2);
                    }
                    if (drawOpticalFlowFilter)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, opticalFlowFilteredPoints, new Scalar(255, 0, 0), 2);
                    }
                    if (drawOFAndLPFilter)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, ofAndLPFilteredPoints, new Scalar(255, 0, 255), 2);
                    }
                }

                //Imgproc.putText (rgbMat, "W:" + rgbMat.width () + " H:" + rgbMat.height () + " SO:" + Screen.orientation, new Point (5, rgbMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(rgbMat, texture);
            }
        }
Example #3
        // Update is called once per frame
        void Update()
        {
            if (capture == null)
            {
                return;
            }

            // Loop playback: rewind when the last frame has been played.
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                //Debug.Log ("Mat toString " + rgbMat.ToString ());


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                if (detectResult.Count > 0)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult[0]);

                    if (displayFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, points, new Scalar(0, 255, 0), 2);
                    }

                    MatOfPoint3f objectPoints   = null;
                    bool         isRightEyeOpen = false;
                    bool         isLeftEyeOpen  = false;
                    bool         isMouthOpen    = false;
                    if (points.Count == 68)
                    {
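                        // iBUG 68-point layout: indices 36-47 are the eyes, 27-35 the nose,
                        // 48-67 the mouth (62/66 are the inner upper/lower lip).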
                        objectPoints = objectPoints68;

                        imagePoints.fromArray(
                            new Point((points[38].x + points[41].x) / 2, (points[38].y + points[41].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points[43].x + points[46].x) / 2, (points[43].y + points[46].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points[30].x, points[30].y),                                           //nose (Tip)
                            new Point(points[33].x, points[33].y),                                           //nose (Subnasale)
                            new Point(points[0].x, points[0].y),                                             //l ear (Bitragion breadth)
                            new Point(points[16].x, points[16].y)                                            //r ear (Bitragion breadth)
                            );

                        if (Mathf.Abs((float)(points[43].y - points[46].y)) > Mathf.Abs((float)(points[42].x - points[45].x)) / 5.0)
                        {
                            isRightEyeOpen = true;
                        }

                        if (Mathf.Abs((float)(points[38].y - points[41].y)) > Mathf.Abs((float)(points[39].x - points[36].x)) / 5.0)
                        {
                            isLeftEyeOpen = true;
                        }

                        float noseDistance  = Mathf.Abs((float)(points[27].y - points[33].y));
                        float mouthDistance = Mathf.Abs((float)(points[62].y - points[66].y));
                        if (mouthDistance > noseDistance / 5.0)
                        {
                            isMouthOpen = true;
                        }
                        else
                        {
                            isMouthOpen = false;
                        }
                    }
                    else if (points.Count == 17)
                    {
                        objectPoints = objectPoints17;

                        imagePoints.fromArray(
                            new Point((points[2].x + points[3].x) / 2, (points[2].y + points[3].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points[4].x + points[5].x) / 2, (points[4].y + points[5].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points[0].x, points[0].y),                                         //nose (Tip)
                            new Point(points[1].x, points[1].y),                                         //nose (Subnasale)
                            new Point(points[6].x, points[6].y),                                         //l ear (Bitragion breadth)
                            new Point(points[8].x, points[8].y)                                          //r ear (Bitragion breadth)
                            );

                        if (Mathf.Abs((float)(points[11].y - points[12].y)) > Mathf.Abs((float)(points[4].x - points[5].x)) / 5.0)
                        {
                            isRightEyeOpen = true;
                        }

                        if (Mathf.Abs((float)(points[9].y - points[10].y)) > Mathf.Abs((float)(points[2].x - points[3].x)) / 5.0)
                        {
                            isLeftEyeOpen = true;
                        }

                        float noseDistance  = Mathf.Abs((float)(points[3].y - points[1].y));
                        float mouthDistance = Mathf.Abs((float)(points[14].y - points[16].y));
                        if (mouthDistance > noseDistance / 2.0)
                        {
                            isMouthOpen = true;
                        }
                        else
                        {
                            isMouthOpen = false;
                        }
                    }
                    else if (points.Count == 6)
                    {
                        objectPoints = objectPoints6;

                        imagePoints.fromArray(
                            new Point((points[2].x + points[3].x) / 2, (points[2].y + points[3].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points[4].x + points[5].x) / 2, (points[4].y + points[5].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points[0].x, points[0].y),                                         //nose (Tip)
                            new Point(points[1].x, points[1].y)                                          //nose (Subnasale)
                            );
                    }
                    else if (points.Count == 5)
                    {
                        objectPoints = objectPoints5;

                        imagePoints.fromArray(
                            new Point(points[3].x, points[3].y), //l eye (Inner corner of the eye)
                            new Point(points[1].x, points[1].y), //r eye (Inner corner of the eye)
                            new Point(points[2].x, points[2].y), //l eye (Tail of the eye)
                            new Point(points[0].x, points[0].y), //r eye (Tail of the eye)
                            new Point(points[4].x, points[4].y)  //nose (Nose top)
                            );

                        if (fpsMonitor != null)
                        {
                            fpsMonitor.consoleText = "This example mainly supports the 68-point face landmark model.";
                        }
                    }

                    // estimate head pose
                    if (rvec == null || tvec == null)
                    {
                        rvec = new Mat(3, 1, CvType.CV_64FC1);
                        tvec = new Mat(3, 1, CvType.CV_64FC1);
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }


                    double tvec_x = tvec.get(0, 0)[0], tvec_y = tvec.get(1, 0)[0], tvec_z = tvec.get(2, 0)[0];

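                    // Project tvec with the view-projection matrix; after the homogeneous
                    // divide, points inside the camera frustum fall in [-1, 1] on every axis.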
                    bool    isNotInViewport = false;
                    Vector4 pos             = VP * new Vector4((float)tvec_x, (float)tvec_y, (float)tvec_z, 1.0f);
                    if (pos.w != 0)
                    {
                        float x = pos.x / pos.w, y = pos.y / pos.w, z = pos.z / pos.w;
                        if (x < -1.0f || x > 1.0f || y < -1.0f || y > 1.0f || z < -1.0f || z > 1.0f)
                        {
                            isNotInViewport = true;
                        }
                    }

                    if (double.IsNaN(tvec_z) || isNotInViewport)
                    { // tvec is invalid or the object is outside the camera frustum, so solve again without an extrinsic guess.
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }
                    else
                    {
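                        // Reuse the previous rvec/tvec as the extrinsic guess for iterative refinement.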
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                    }

                    //Debug.Log (tvec.dump());

                    if (!isNotInViewport)
                    {
                        // Display effects.
                        if (displayHead)
                        {
                            head.SetActive(true);
                        }
                        if (displayAxes)
                        {
                            axes.SetActive(true);
                        }

                        if (displayEffects)
                        {
                            rightEye.SetActive(isRightEyeOpen);
                            leftEye.SetActive(isLeftEyeOpen);

                            if (isMouthOpen)
                            {
                                mouth.SetActive(true);
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = true;
#if UNITY_5_5_OR_NEWER
                                    var main = ps.main;
                                    main.startSizeMultiplier = 20;
#else
                                    ps.startSize = 20;
#endif
                                }
                            }
                            else
                            {
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = false;
                                }
                            }
                        }

                        // Convert to unity pose data.
                        double[] rvecArr = new double[3];
                        rvec.get(0, 0, rvecArr);
                        double[] tvecArr = new double[3];
                        tvec.get(0, 0, tvecArr);
                        PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                        // Changes in pos/rot below these thresholds are ignored.
                        if (enableLowPassFilter)
                        {
                            ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                        }
                        oldPoseData = poseData;

                        // Create transform matrix.
                        transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);
                    }


                    // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                    // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
                    ARM = invertYM * transformationM * invertYM;

            // Apply Y-axis and Z-axis reflection matrices. (Adjusts the pose of the AR object.)
                    ARM = ARM * invertYM * invertZM;

                    if (shouldMoveARCamera)
                    {
                        ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                        ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                    }
                    else
                    {
                        ARM = ARCamera.transform.localToWorldMatrix * ARM;
                        ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                    }
                }
                else
                {
                    rightEye.SetActive(false);
                    leftEye.SetActive(false);
                    head.SetActive(false);
                    mouth.SetActive(false);
                    axes.SetActive(false);
                }

                //Imgproc.putText (rgbMat, "W:" + rgbMat.width () + " H:" + rgbMat.height () + " SO:" + Screen.orientation, new Point (5, rgbMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(rgbMat, texture);
            }
        }
Example #4
        private void Run()
        {
            if (string.IsNullOrEmpty(object_detector_filepath))
            {
                Debug.LogError("object detecter file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }
            if (string.IsNullOrEmpty(shape_predictor_filepath))
            {
                Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }

            Texture2D dstTexture2D = new Texture2D(texture2D.width, texture2D.height, texture2D.format, false);

            dstTexture2D.SetPixels32(texture2D.GetPixels32());
            dstTexture2D.Apply();

            gameObject.transform.localScale = new Vector3(texture2D.width, texture2D.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = gameObject.transform.localScale.x;
            float height = gameObject.transform.localScale.y;

            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }

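            // Create a detector from a dlib object-detector model (a cat-face SVM, per the
            // fpsMonitor labels below) and a matching shape-predictor model.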
            FaceLandmarkDetector faceLandmarkDetector = new FaceLandmarkDetector(object_detector_filepath, shape_predictor_filepath);

            faceLandmarkDetector.SetImage(texture2D);

            //detect face rects
            List <Rect> detectResult = faceLandmarkDetector.Detect();

            foreach (var rect in detectResult)
            {
                Debug.Log("face : " + rect);

                //detect landmark points
                List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                Debug.Log("face points count : " + points.Count);
                foreach (var point in points)
                {
                    Debug.Log("face point : x " + point.x + " y " + point.y);
                }

                //draw landmark points
                faceLandmarkDetector.DrawDetectLandmarkResult(dstTexture2D, 0, 255, 0, 255);
            }

            //draw face rects
            faceLandmarkDetector.DrawDetectResult(dstTexture2D, 255, 0, 0, 255, 3);

            faceLandmarkDetector.Dispose();

            gameObject.GetComponent <Renderer>().material.mainTexture = dstTexture2D;

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("dlib object detector", "frontal_cat_face.svm");
                fpsMonitor.Add("dlib shape predictor", "sp_cat_face_68.dat");
                fpsMonitor.Add("width", width.ToString());
                fpsMonitor.Add("height", height.ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }
        }
        public void OnFrameMatAcquired(Mat grayMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix, CameraIntrinsics cameraIntrinsics)
        {
            isDetectingInFrameArrivedThread = true;

            DebugUtils.VideoTick();

            Mat   downScaleMat = null;
            float DOWNSCALE_RATIO;

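            // Optionally detect on a downscaled copy of the frame; DOWNSCALE_RATIO is used
            // below to map the resulting rects back to full resolution.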
            if (enableDownScale)
            {
                downScaleMat    = imageOptimizationHelper.GetDownScaleMat(grayMat);
                DOWNSCALE_RATIO = imageOptimizationHelper.downscaleRatio;
            }
            else
            {
                downScaleMat    = grayMat;
                DOWNSCALE_RATIO = 1.0f;
            }

            if (useOpenCVDetector)
            {
                Imgproc.equalizeHist(downScaleMat, downScaleMat);
            }

            if (enableDetection && !isDetecting)
            {
                isDetecting = true;

                downScaleMat.copyTo(grayMat4Thread);

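                // Run the detector on a worker thread over a copy of the frame; the result
                // is picked up on a later frame once OnDetectionDone signals completion.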
                System.Threading.Tasks.Task.Run(() =>
                {
                    isThreadRunning = true;

                    if (useOpenCVDetector)
                    {
                        DetectObject(grayMat4Thread, out detectionResult, cascade4Thread, true);
                    }
                    else
                    {
                        DetectObject(grayMat4Thread, out detectionResult, faceLandmarkDetector4Thread);
                    }

                    isThreadRunning = false;
                    OnDetectionDone();
                });
            }

            if (!useSeparateDetection)
            {
                if (hasUpdatedDetectionResult)
                {
                    hasUpdatedDetectionResult = false;

                    lock (rectangleTracker)
                    {
                        rectangleTracker.UpdateTrackedObjects(detectionResult);
                    }
                }

                lock (rectangleTracker)
                {
                    rectangleTracker.GetObjects(resultObjects, true);
                }

                // set original size image
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, grayMat);

                resultFaceLandmarkPoints.Clear();
                foreach (Rect rect in resultObjects)
                {
                    // restore to original size rect
                    rect.x      = (int)(rect.x * DOWNSCALE_RATIO);
                    rect.y      = (int)(rect.y * DOWNSCALE_RATIO);
                    rect.width  = (int)(rect.width * DOWNSCALE_RATIO);
                    rect.height = (int)(rect.height * DOWNSCALE_RATIO);

                    // detect face landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));
                    resultFaceLandmarkPoints.Add(points);
                }

                if (!displayCameraImage)
                {
                    // fill all black
                    Imgproc.rectangle(grayMat, new Point(0, 0), new Point(grayMat.width(), grayMat.height()), new Scalar(0, 0, 0, 0), -1);
                }

                if (displayDetectedFaceRect)
                {
                    // draw face rects
                    foreach (Rect rect in resultObjects)
                    {
                        OpenCVForUnityUtils.DrawFaceRect(grayMat, new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height), COLOR_GRAY, 2);
                    }
                }

                // draw face landmark points
                foreach (List <Vector2> points in resultFaceLandmarkPoints)
                {
                    OpenCVForUnityUtils.DrawFaceLandmark(grayMat, points, COLOR_WHITE, 4);
                }
            }
            else
            {
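                // Separate-detection mode: between full detections, faces are searched only
                // inside regions predicted from the tracker's previous rects.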
                Rect[] rectsWhereRegions;

                if (hasUpdatedDetectionResult)
                {
                    hasUpdatedDetectionResult = false;

                    //Enqueue(() =>
                    //{
                    //    Debug.Log("process: get rectsWhereRegions were got from detectionResult");
                    //});

                    lock (rectangleTracker)
                    {
                        rectsWhereRegions = detectionResult.ToArray();
                    }
                }
                else
                {
                    //Enqueue(() =>
                    //{
                    //    Debug.Log("process: get rectsWhereRegions from previous positions");
                    //});

                    if (useOpenCVDetector)
                    {
                        lock (rectangleTracker)
                        {
                            rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects();
                        }
                    }
                    else
                    {
                        lock (rectangleTracker)
                        {
                            rectsWhereRegions = rectangleTracker.CreateRawRects();
                        }
                    }
                }

                detectedObjectsInRegions.Clear();
                foreach (Rect rect in rectsWhereRegions)
                {
                    if (useOpenCVDetector)
                    {
                        DetectInRegion(downScaleMat, rect, detectedObjectsInRegions, cascade, true);
                    }
                    else
                    {
                        DetectInRegion(downScaleMat, rect, detectedObjectsInRegions, faceLandmarkDetector);
                    }
                }

                lock (rectangleTracker)
                {
                    rectangleTracker.UpdateTrackedObjects(detectedObjectsInRegions);
                    rectangleTracker.GetObjects(resultObjects, false);
                }

                // set original size image
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, grayMat);

                resultFaceLandmarkPoints.Clear();
                foreach (Rect rect in resultObjects)
                {
                    // restore to original size rect
                    rect.x      = (int)(rect.x * DOWNSCALE_RATIO);
                    rect.y      = (int)(rect.y * DOWNSCALE_RATIO);
                    rect.width  = (int)(rect.width * DOWNSCALE_RATIO);
                    rect.height = (int)(rect.height * DOWNSCALE_RATIO);

                    // detect face landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));
                    resultFaceLandmarkPoints.Add(points);
                }

                if (!displayCameraImage)
                {
                    // fill all black
                    Imgproc.rectangle(grayMat, new Point(0, 0), new Point(grayMat.width(), grayMat.height()), new Scalar(0, 0, 0, 0), -1);
                }

                if (displayDetectedFaceRect)
                {
                    // draw previous rects
                    DrawDownScaleFaceRects(grayMat, rectsWhereRegions, DOWNSCALE_RATIO, COLOR_GRAY, 1);

                    // draw face rects
                    foreach (Rect rect in resultObjects)
                    {
                        OpenCVForUnityUtils.DrawFaceRect(grayMat, new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height), COLOR_GRAY, 2);
                    }
                }

                // draw face landmark points
                foreach (List <Vector2> points in resultFaceLandmarkPoints)
                {
                    OpenCVForUnityUtils.DrawFaceLandmark(grayMat, points, COLOR_WHITE, 4);
                }
            }

            DebugUtils.TrackTick();

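            // Marshal Unity API calls (texture upload, transform updates) back to the main
            // thread; grayMat is disposed there once it has been uploaded.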
            Enqueue(() =>
            {
                if (!webCamTextureToMatHelper.IsPlaying())
                {
                    return;
                }

                Utils.fastMatToTexture2D(grayMat, texture);
                grayMat.Dispose();

                Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

                quad_renderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);

                // Position the canvas object slightly in front
                // of the real world web camera.
                Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2) * 2.2f;

                // Rotate the canvas object so that it faces the user.
                Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

                gameObject.transform.position = position;
                gameObject.transform.rotation = rotation;
            });

            isDetectingInFrameArrivedThread = false;
        }
Example #6
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

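                // Detection runs only on frames that are not skipped; skipped frames reuse
                // the rects from the previous detection for landmark fitting below.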
                // detect faces on the downscaled image
                if (!enableSkipFrame || !imageOptimizationHelper.IsCurrentFrameSkipped())
                {
                    Mat   downScaleRgbaMat = null;
                    float DOWNSCALE_RATIO  = 1.0f;
                    if (enableDownScale)
                    {
                        downScaleRgbaMat = imageOptimizationHelper.GetDownScaleMat(rgbaMat);
                        DOWNSCALE_RATIO  = imageOptimizationHelper.downscaleRatio;
                    }
                    else
                    {
                        downScaleRgbaMat = rgbaMat;
                        DOWNSCALE_RATIO  = 1.0f;
                    }

                    // set the downscale mat
                    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, downScaleRgbaMat);

                    //detect face rects
                    if (useOpenCVFaceDetector)
                    {
                        // convert image to greyscale.
                        Imgproc.cvtColor(downScaleRgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                        using (Mat equalizeHistMat = new Mat())
                            using (MatOfRect faces = new MatOfRect())
                            {
                                Imgproc.equalizeHist(grayMat, equalizeHistMat);

                                cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                                List <OpenCVForUnity.CoreModule.Rect> opencvDetectResult = faces.toList();

                                // correct the offset between the OpenCV and Dlib face rectangle results.
                                detectionResult.Clear();
                                foreach (var opencvRect in opencvDetectResult)
                                {
                                    detectionResult.Add(new UnityEngine.Rect((float)opencvRect.x, (float)opencvRect.y + (float)(opencvRect.height * 0.1f), (float)opencvRect.width, (float)opencvRect.height));
                                }
                            }
                    }
                    else
                    {
                        // Dlib's face detection processing time increases in proportion to image size.
                        detectionResult = faceLandmarkDetector.Detect();
                    }

                    if (enableDownScale)
                    {
                        for (int i = 0; i < detectionResult.Count; ++i)
                        {
                            var rect = detectionResult[i];
                            detectionResult[i] = new UnityEngine.Rect(
                                rect.x * DOWNSCALE_RATIO,
                                rect.y * DOWNSCALE_RATIO,
                                rect.width * DOWNSCALE_RATIO,
                                rect.height * DOWNSCALE_RATIO);
                        }
                    }
                }

                // set the original scale image
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
                // detect face landmarks on the original image
                foreach (var rect in detectionResult)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                    //draw landmark points
                    OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
                    //draw face rect
                    OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect, new Scalar(255, 0, 0, 255), 2);
                }

                //Imgproc.putText (rgbaMat, "Original:(" + rgbaMat.width () + "," + rgbaMat.height () + ") DownScale:(" + rgbaMat.width () / imageOptimizationHelper.downscaleRatio + "," + rgbaMat.height () / imageOptimizationHelper.downscaleRatio + ") FrameSkipping: " + imageOptimizationHelper.frameSkippingRatio, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(rgbaMat, texture);
            }
        }
Example #7
        private void Run()
        {
            if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
            {
                Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }

            // If true, error logs from the native Dlib side are displayed in the Unity Editor console.
            Utils.setDebugMode(true);

            Texture2D dstTexture2D = new Texture2D(texture2D.width, texture2D.height, texture2D.format, false);

            Graphics.CopyTexture(texture2D, dstTexture2D);

            gameObject.transform.localScale = new Vector3(texture2D.width, texture2D.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = gameObject.transform.localScale.x;
            float height = gameObject.transform.localScale.y;

            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }

            FaceLandmarkDetector faceLandmarkDetector = new FaceLandmarkDetector(dlibShapePredictorFilePath);

            faceLandmarkDetector.SetImage(texture2D);

            //detect face rects
            List <Rect> detectResult = faceLandmarkDetector.Detect();


            foreach (var rect in detectResult)
            {
                Debug.Log("face : " + rect);

                //detect landmark points
                List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                Debug.Log("face points count : " + points.Count);
                foreach (var point in points)
                {
                    Debug.Log("face point : x " + point.x + " y " + point.y);
                }

                //draw landmark points
                faceLandmarkDetector.DrawDetectLandmarkResult(dstTexture2D, 0, 255, 0, 255);
            }

            //draw face rect
            faceLandmarkDetector.DrawDetectResult(dstTexture2D, 255, 0, 0, 255, 2);

            faceLandmarkDetector.Dispose();

            gameObject.GetComponent <Renderer> ().material.mainTexture = dstTexture2D;


            Utils.setDebugMode(false);

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("dlib shape predictor", dlibShapePredictorFileName);
                fpsMonitor.Add("width", width.ToString());
                fpsMonitor.Add("height", height.ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }
        }
Example #8
        // Update is called once per frame
        void Update()
        {
            // Loop playback: rewind when the last frame has been played.
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                //Debug.Log ("Mat toString " + rgbMat.ToString ());


                // detect faces.
                List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect> ();
                if (useDlibFaceDetecter)
                {
                    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
                    List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                    foreach (var unityRect in result)
                    {
                        detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                    }
                }
                else
                {
                    // convert image to greyscale.
                    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);

                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect()) {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                            detectResult = faces.toList();

                            // adjust to Dlib's result.
                            foreach (OpenCVForUnity.Rect r in detectResult)
                            {
                                r.y += (int)(r.height * 0.1f);
                            }
                        }
                }

                // face tracking.
                if (enableTracking)
                {
                    rectangleTracker.UpdateTrackedObjects(detectResult);
                    detectResult = new List <OpenCVForUnity.Rect> ();
                    rectangleTracker.GetObjects(detectResult, true);
                }

                // detect face landmark points.
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
                List <List <Vector2> > landmarkPoints = new List <List <Vector2> > ();
                foreach (var openCVRect in detectResult)
                {
                    UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);

                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
                    landmarkPoints.Add(points);
                }

                // filter out non-frontal faces.
                if (filterNonFrontalFaces)
                {
                    for (int i = 0; i < landmarkPoints.Count; i++)
                    {
                        if (frontalFaceChecker.GetFrontalFaceRate(landmarkPoints [i]) < frontalFaceRateLowerLimit)
                        {
                            detectResult.RemoveAt(i);
                            landmarkPoints.RemoveAt(i);
                            i--;
                        }
                    }
                }

                // face swapping.
                if (landmarkPoints.Count >= 2)
                {
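                    // Swap faces pairwise in detection order: (0,1), (2,3), ...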
                    int ann = 0, bob = 1;
                    for (int i = 0; i < landmarkPoints.Count - 1; i += 2)
                    {
                        ann = i;
                        bob = i + 1;

                        faceSwapper.SwapFaces(rgbMat, landmarkPoints [ann], landmarkPoints [bob], 1);
                    }
                }

                // draw face rects.
                if (displayFaceRects)
                {
                    for (int i = 0; i < detectResult.Count; i++)
                    {
                        UnityEngine.Rect rect = new UnityEngine.Rect(detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 0, 0, 255), 2);
                        //Imgproc.putText (rgbMat, " " + frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255), 2, Imgproc.LINE_AA, false);
                    }
                }

                Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbMat, texture);
            }
        }
        private void Run()
        {
            displayFaceRectsToggle.isOn       = displayFaceRects;
            useDlibFaceDetecterToggle.isOn    = useDlibFaceDetecter;
            filterNonFrontalFacesToggle.isOn  = filterNonFrontalFaces;
            displayDebugFacePointsToggle.isOn = displayDebugFacePoints;

            if (imgTexture == null)
            {
                imgTexture = Resources.Load("family") as Texture2D;
            }

            gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = gameObject.transform.localScale.x;
            float height = gameObject.transform.localScale.y;


            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }

            Mat rgbaMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);

            OpenCVForUnity.Utils.texture2DToMat(imgTexture, rgbaMat);
            Debug.Log("rgbaMat ToString " + rgbaMat.ToString());

            if (faceLandmarkDetector == null)
            {
                faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);
            }

            FrontalFaceChecker frontalFaceChecker = new FrontalFaceChecker(width, height);

            // detect faces.
            List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect> ();

            if (useDlibFaceDetecter)
            {
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
                List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                foreach (var unityRect in result)
                {
                    detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                }
            }
            else
            {
                if (cascade == null)
                {
                    cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
                }
//                if (cascade.empty ()) {
//                    Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
//                }

                // convert image to greyscale.
                Mat gray = new Mat();
                Imgproc.cvtColor(rgbaMat, gray, Imgproc.COLOR_RGBA2GRAY);

                MatOfRect faces = new MatOfRect();
                Imgproc.equalizeHist(gray, gray);
                cascade.detectMultiScale(gray, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(gray.cols() * 0.05, gray.cols() * 0.05), new Size());
                //Debug.Log ("faces " + faces.dump ());

                detectResult = faces.toList();

            // adjust to Dlib's result.
                foreach (OpenCVForUnity.Rect r in detectResult)
                {
                    r.y += (int)(r.height * 0.1f);
                }

                gray.Dispose();
            }

            // detect face landmark points.
            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
            List <List <Vector2> > landmarkPoints = new List <List <Vector2> > ();

            foreach (var openCVRect in detectResult)
            {
                UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);

                Debug.Log("face : " + rect);
                //OpenCVForUnityUtils.DrawFaceRect(imgMat, rect, new Scalar(255, 0, 0, 255), 2);

                List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
                //OpenCVForUnityUtils.DrawFaceLandmark(imgMat, points, new Scalar(0, 255, 0, 255), 2);
                landmarkPoints.Add(points);
            }


            // filter non-frontal faces.
            if (filterNonFrontalFaces)
            {
                for (int i = 0; i < landmarkPoints.Count; i++)
                {
                    if (frontalFaceChecker.GetFrontalFaceRate(landmarkPoints [i]) < frontalFaceRateLowerLimit)
                    {
                        detectResult.RemoveAt(i);
                        landmarkPoints.RemoveAt(i);
                        i--;
                    }
                }
            }


            // change faces.
            //Debug.Log("face points count : " + points.Count);
            int[] face_nums = new int[landmarkPoints.Count];
            for (int i = 0; i < face_nums.Length; i++)
            {
                face_nums [i] = i;
            }
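            // Shuffle the face indices (ordering by fresh Guids is a cheap random
            // permutation), so the source/target pairing changes on every run.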
            face_nums = face_nums.OrderBy(i => System.Guid.NewGuid()).ToArray();
            if (landmarkPoints.Count >= 2)
            {
                DlibFaceChanger faceChanger = new DlibFaceChanger();
                faceChanger.isShowingDebugFacePoints = displayDebugFacePoints;

                faceChanger.SetTargetImage(rgbaMat);

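                // Swap every other face in the shuffled order onto the first one (alpha = 1).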
                for (int i = 1; i < face_nums.Length; i++)
                {
                    faceChanger.AddFaceChangeData(rgbaMat, landmarkPoints [face_nums [0]], landmarkPoints [face_nums [i]], 1);
                }

                faceChanger.ChangeFace();
                faceChanger.Dispose();
            }

            // draw face rects.
            if (displayFaceRects && face_nums.Length > 0)
            {
                int ann = face_nums[0];
                UnityEngine.Rect rect_ann = new UnityEngine.Rect(detectResult [ann].x, detectResult [ann].y, detectResult [ann].width, detectResult [ann].height);
                OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect_ann, new Scalar(255, 255, 0, 255), 2);

                int bob = 0;
                for (int i = 1; i < face_nums.Length; i++)
                {
                    bob = face_nums [i];
                    UnityEngine.Rect rect_bob = new UnityEngine.Rect(detectResult [bob].x, detectResult [bob].y, detectResult [bob].width, detectResult [bob].height);
                    OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect_bob, new Scalar(255, 0, 0, 255), 2);
                }
            }

            frontalFaceChecker.Dispose();

            Texture2D texture = new Texture2D(rgbaMat.cols(), rgbaMat.rows(), TextureFormat.RGBA32, false);

            OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture);
            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

            rgbaMat.Dispose();
        }
        public void OnFrameMatAcquired(Mat bgraMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix)
        {
            Mat downScaleFrameMat = imageOptimizationHelper.GetDownScaleMat(bgraMat);

            Imgproc.cvtColor(downScaleFrameMat, grayMat, Imgproc.COLOR_BGRA2GRAY);
            Imgproc.equalizeHist(grayMat, grayMat);

            if (enableDetection && !isDetecting)
            {
                isDetecting = true;

                grayMat.copyTo(grayMat4Thread);

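                // Detect on a worker thread against a copy of the gray frame,
                // so this frame callback is never blocked by the cascade.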
                System.Threading.Tasks.Task.Run(() => {
                    isThreadRunning = true;
                    DetectObject();
                    isThreadRunning = false;
                    OnDetectionDone();
                });
            }

            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, grayMat);

            Mat bgraMat4preview = null;

            if (displayCameraPreview)
            {
                bgraMat4preview = new Mat();
                downScaleFrameMat.copyTo(bgraMat4preview);
            }

            List <Vector2> points = null;

            Rect[] rects;
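            // Two tracking modes: feed the async detector output straight into the tracker,
            // or (useSeparateDetection) re-detect only inside regions predicted by the tracker.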
            if (!useSeparateDetection)
            {
                if (hasUpdatedDetectionResult)
                {
                    hasUpdatedDetectionResult = false;

                    lock (rectangleTracker) {
                        rectangleTracker.UpdateTrackedObjects(detectionResult.toList());
                    }
                }

                lock (rectangleTracker) {
                    rectangleTracker.GetObjects(resultObjects, true);
                }
                rects = resultObjects.ToArray();

                if (rects.Length > 0)
                {
                    OpenCVForUnity.Rect rect = rects [0];

                    // Adjust to Dlib's result.
                    rect.y += (int)(rect.height * 0.1f);

                    //detect landmark points
                    points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

                    if (displayCameraPreview && bgraMat4preview != null)
                    {
                        //draw landmark points
                        OpenCVForUnityUtils.DrawFaceLandmark(bgraMat4preview, points, new Scalar(0, 255, 0, 255), 2);
                    }
                }
            }
            else
            {
                if (hasUpdatedDetectionResult)
                {
                    hasUpdatedDetectionResult = false;

                    //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
                    //    Debug.Log("process: get rectsWhereRegions were got from detectionResult");
                    //}, true);

                    lock (rectangleTracker) {
                        rectsWhereRegions = detectionResult.toArray();
                    }
                }
                else
                {
                    //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
                    //    Debug.Log("process: get rectsWhereRegions from previous positions");
                    //}, true);

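                    // No fresh detection result this frame: predict the search regions
                    // from the speed of the currently tracked rects.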
                    lock (rectangleTracker) {
                        rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects();
                    }
                }

                detectedObjectsInRegions.Clear();
                if (rectsWhereRegions.Length > 0)
                {
                    int len = rectsWhereRegions.Length;
                    for (int i = 0; i < len; i++)
                    {
                        DetectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                    }
                }

                lock (rectangleTracker) {
                    rectangleTracker.UpdateTrackedObjects(detectedObjectsInRegions);
                    rectangleTracker.GetObjects(resultObjects, true);
                }

                if (resultObjects.Count > 0)
                {
                    OpenCVForUnity.Rect rect = resultObjects [0];

                    // Adjust to Dlib's result.
                    rect.y += (int)(rect.height * 0.1f);

                    //detect landmark points
                    points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

                    if (displayCameraPreview && bgraMat4preview != null)
                    {
                        //draw landmark points
                        OpenCVForUnityUtils.DrawFaceLandmark(bgraMat4preview, points, new Scalar(0, 255, 0, 255), 2);
                    }
                }
            }


            UnityEngine.WSA.Application.InvokeOnAppThread(() => {
                if (!webCamTextureToMatHelper.IsPlaying())
                {
                    return;
                }

                if (displayCameraPreview && bgraMat4preview != null)
                {
                    OpenCVForUnity.Utils.fastMatToTexture2D(bgraMat4preview, texture);
                }

                if (points != null)
                {
                    UpdateARHeadTransform(points);
                }

                bgraMat.Dispose();
                if (bgraMat4preview != null)
                {
                    bgraMat4preview.Dispose();
                }
            }, false);
        }
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                UnityEngine.Rect rect              = new UnityEngine.Rect();
                List <Vector2>   points            = null;
                bool             shouldResetFilter = false;
                if (detectResult.Count > 0)
                {
                    rect = detectResult [0];

                    //detect landmark points
                    points = faceLandmarkDetector.DetectLandmark(rect);

                    skippedFrames = 0;
                }
                else
                {
                    skippedFrames++;
                    if (skippedFrames == maximumAllowedSkippedFrames)
                    {
                        shouldResetFilter = true;
                    }
                }

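                // Stabilize the raw landmark points with the selected noise filter;
                // the filter was flagged for reset above once too many frames had no face.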
                switch (filterMode)
                {
                default:
                case FilterMode.None:
                    break;

                case FilterMode.LowPassFilter:
                    if (shouldResetFilter)
                    {
                        lowPassFilter.Reset();
                    }
                    lowPassFilter.Process(rgbaMat, points, lowPassFilteredPoints, isDebugMode);
                    break;

                case FilterMode.KalmanFilter:
                    if (shouldResetFilter)
                    {
                        kalmanFilter.Reset();
                    }
                    kalmanFilter.Process(rgbaMat, points, kalmanFilteredPoints, isDebugMode);
                    break;

                case FilterMode.OpticalFlowFilter:
                    if (shouldResetFilter)
                    {
                        opticalFlowFilter.Reset();
                    }
                    opticalFlowFilter.Process(rgbaMat, points, opticalFlowFilteredPoints, isDebugMode);
                    break;

                case FilterMode.OFAndLPFilter:
                    if (shouldResetFilter)
                    {
                        opticalFlowFilter.Reset();
                        lowPassFilter.Reset();
                    }

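                    // Chain both filters: optical flow first (smoothing points in place),
                    // then a low-pass over the stabilized points.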
                    opticalFlowFilter.Process(rgbaMat, points, points, false);
                    lowPassFilter.Process(rgbaMat, points, ofAndLPFilteredPoints, isDebugMode);
                    break;
                }


                if (points != null && !isDebugMode)
                {
                    // draw raw landmark points.
                    OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
                }

                // draw face rect.
//                OpenCVForUnityUtils.DrawFaceRect (rgbaMat, rect, new Scalar (255, 0, 0, 255), 2);

                // draw filtered landmark points.
                if (points != null && !isDebugMode)
                {
                    switch (filterMode)
                    {
                    default:
                    case FilterMode.None:
                        break;

                    case FilterMode.LowPassFilter:
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, lowPassFilteredPoints, new Scalar(0, 255, 255, 255), 2);
                        break;

                    case FilterMode.KalmanFilter:
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, kalmanFilteredPoints, new Scalar(0, 0, 255, 255), 2);
                        break;

                    case FilterMode.OpticalFlowFilter:
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, opticalFlowFilteredPoints, new Scalar(255, 0, 0, 255), 2);
                        break;

                    case FilterMode.OFAndLPFilter:
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, ofAndLPFilteredPoints, new Scalar(255, 0, 255, 255), 2);
                        break;
                    }
                }


                //Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(rgbaMat, texture);
            }
        }
Example #12
        public void OnFrameMatAcquired(Mat bgraMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix)
        {
            Mat downScaleFrameMat = bgraMat;

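            // Downscale the incoming frame to cut per-frame cost;
            // a downscaleRatio of 1 means the original frame is used as-is.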
            if (imageOptimizationHelper.downscaleRatio != 1)
            {
                downScaleFrameMat = new Mat();
                Imgproc.resize(bgraMat, downScaleFrameMat, new Size(), 1.0 / imageOptimizationHelper.downscaleRatio, 1.0 / imageOptimizationHelper.downscaleRatio, Imgproc.INTER_LINEAR);
            }

            Imgproc.cvtColor(downScaleFrameMat, grayMat, Imgproc.COLOR_BGRA2GRAY);
            Imgproc.equalizeHist(grayMat, grayMat);

            if (enableDetection && !isDetecting)
            {
                isDetecting = true;

                grayMat.copyTo(grayMat4Thread);

                System.Threading.Tasks.Task.Run(() => {
                    isThreadRunning = true;
                    DetectObject();
                    isThreadRunning = false;
                    OnDetectionDone();
                });
            }

            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, grayMat);

            if (!displayCameraImage)
            {
                // fill all black.
                Imgproc.rectangle(downScaleFrameMat, new Point(0, 0), new Point(downScaleFrameMat.width(), downScaleFrameMat.height()), new Scalar(0, 0, 0, 0), -1);
            }


            Rect[] rects;
            if (!useSeparateDetection)
            {
                if (hasUpdatedDetectionResult)
                {
                    hasUpdatedDetectionResult = false;

                    lock (rectangleTracker) {
                        rectangleTracker.UpdateTrackedObjects(detectionResult.toList());
                    }
                }

                lock (rectangleTracker) {
                    rectangleTracker.GetObjects(resultObjects, true);
                }
                rects = resultObjects.ToArray();

                if (displayDetectedFaceRect)
                {
                    for (int i = 0; i < rects.Length; i++)
                    {
                        //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
                        //    Debug.Log ("detected face[" + i + "] " + rects [i]);
                        //}, true);

                        Imgproc.rectangle(downScaleFrameMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 0, 255, 255), 3);
                    }
                }

                // Adjust to Dlib's result.
                foreach (OpenCVForUnity.Rect r in resultObjects)
                {
                    r.y += (int)(r.height * 0.1f);
                }

                foreach (var rect in resultObjects)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

                    //draw landmark points
                    OpenCVForUnityUtils.DrawFaceLandmark(downScaleFrameMat, points, new Scalar(0, 255, 0, 255), 2);

                    if (displayDetectedFaceRect)
                    {
                        //draw face rect
                        OpenCVForUnityUtils.DrawFaceRect(downScaleFrameMat, new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height), new Scalar(0, 0, 255, 255), 2);
                    }
                }
            }
            else
            {
                if (hasUpdatedDetectionResult)
                {
                    hasUpdatedDetectionResult = false;

                    //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
                    //    Debug.Log("process: get rectsWhereRegions were got from detectionResult");
                    //}, true);

                    lock (rectangleTracker) {
                        rectsWhereRegions = detectionResult.toArray();
                    }

                    if (displayDetectedFaceRect)
                    {
                        rects = rectsWhereRegions;
                        for (int i = 0; i < rects.Length; i++)
                        {
                            Imgproc.rectangle(downScaleFrameMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 1);
                        }
                    }
                }
                else
                {
                    //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
                    //    Debug.Log("process: get rectsWhereRegions from previous positions");
                    //}, true);

                    lock (rectangleTracker) {
                        rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects();
                    }

                    if (displayDetectedFaceRect)
                    {
                        rects = rectsWhereRegions;
                        for (int i = 0; i < rects.Length; i++)
                        {
                            Imgproc.rectangle(downScaleFrameMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 255, 0, 255), 1);
                        }
                    }
                }

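                // Re-run the cascade only inside the predicted regions,
                // then feed those hits to the tracker.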
                detectedObjectsInRegions.Clear();
                if (rectsWhereRegions.Length > 0)
                {
                    int len = rectsWhereRegions.Length;
                    for (int i = 0; i < len; i++)
                    {
                        DetectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                    }
                }

                lock (rectangleTracker) {
                    rectangleTracker.UpdateTrackedObjects(detectedObjectsInRegions);
                    rectangleTracker.GetObjects(resultObjects, true);
                }

                if (displayDetectedFaceRect)
                {
                    rects = resultObjects.ToArray();
                    for (int i = 0; i < rects.Length; i++)
                    {
                        //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
                        //    Debug.Log ("detected face[" + i + "] " + rects [i]);
                        //}, true);

                        Imgproc.rectangle(downScaleFrameMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 0, 255, 255), 3);
                    }
                }

                // Adjust to Dlib's result.
                foreach (OpenCVForUnity.Rect r in resultObjects)
                {
                    r.y += (int)(r.height * 0.1f);
                }

                foreach (var rect in resultObjects)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

                    //draw landmark points
                    OpenCVForUnityUtils.DrawFaceLandmark(downScaleFrameMat, points, new Scalar(0, 255, 0, 255), 2);

                    if (displayDetectedFaceRect)
                    {
                        //draw face rect
                        OpenCVForUnityUtils.DrawFaceRect(downScaleFrameMat, new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height), new Scalar(0, 0, 255, 255), 2);
                    }
                }
            }


            UnityEngine.WSA.Application.InvokeOnAppThread(() => {
                if (!webCamTextureToMatHelper.IsPlaying())
                {
                    return;
                }

                OpenCVForUnity.Utils.fastMatToTexture2D(downScaleFrameMat, texture);
                bgraMat.Dispose();
                if (imageOptimizationHelper.downscaleRatio != 1)
                {
                    downScaleFrameMat.Dispose();
                }

                Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

                quad_renderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);

                // Position the canvas object slightly in front
                // of the real world web camera.
                Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2);

                // Rotate the canvas object so that it faces the user.
                Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

                gameObject.transform.position = position;
                gameObject.transform.rotation = rotation;
            }, false);
        }
Example #13
        // Update is called once per frame
        void Update()
        {
            if (capture == null)
            {
                return;
            }

            //Loop play
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                //Debug.Log ("Mat toString " + rgbMat.ToString ());


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                if (detectResult.Count > 0)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult [0]);

                    if (displayFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, points, new Scalar(0, 255, 0), 2);
                    }

                    imagePoints.fromArray(
                        new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye (Interpupillary breadth)
                        new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye (Interpupillary breadth)
                        new Point(points [30].x, points [30].y),                                             //nose (Nose top)
                        new Point(points [48].x, points [48].y),                                             //l mouth (Mouth breadth)
                        new Point(points [54].x, points [54].y),                                             //r mouth (Mouth breadth)
                        new Point(points [0].x, points [0].y),                                               //l ear (Bitragion breadth)
                        new Point(points [16].x, points [16].y)                                              //r ear (Bitragion breadth)
                        );

                    // Estimate head pose.
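                    // First frame: solve with no initial guess. Later frames reuse the
                    // previous rvec/tvec as an extrinsic guess, unless tvec looks invalid.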
                    if (rvec == null || tvec == null)
                    {
                        rvec = new Mat(3, 1, CvType.CV_64FC1);
                        tvec = new Mat(3, 1, CvType.CV_64FC1);
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }

                    double tvec_z = tvec.get(2, 0) [0];

                    if (double.IsNaN(tvec_z) || tvec_z < 0)    // if tvec holds bad data, solve again without an extrinsic guess.
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }
                    else
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                    }

//                    Debug.Log (tvec.dump());

                    if (!double.IsNaN(tvec_z))
                    {
                        if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 6.0)
                        {
                            if (displayEffects)
                            {
                                rightEye.SetActive(true);
                            }
                        }

                        if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 6.0)
                        {
                            if (displayEffects)
                            {
                                leftEye.SetActive(true);
                            }
                        }
                        if (displayHead)
                        {
                            head.SetActive(true);
                        }
                        if (displayAxes)
                        {
                            axes.SetActive(true);
                        }



                        float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                        float mouthDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                        if (mouthDistance > noseDistance / 5.0)
                        {
                            if (displayEffects)
                            {
                                mouth.SetActive(true);
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled   = true;
                                    ps.startSize = 40 * (mouthDistance / noseDistance);
                                }
                            }
                        }
                        else
                        {
                            if (displayEffects)
                            {
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = false;
                                }
                            }
                        }

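                        // Convert the rotation vector to a 3x3 matrix and assemble the
                        // 4x4 transform (rotation + translation) in OpenCV camera space.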
                        Calib3d.Rodrigues(rvec, rotMat);

                        transformationM.SetRow(0, new Vector4((float)rotMat.get(0, 0) [0], (float)rotMat.get(0, 1) [0], (float)rotMat.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                        transformationM.SetRow(1, new Vector4((float)rotMat.get(1, 0) [0], (float)rotMat.get(1, 1) [0], (float)rotMat.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                        transformationM.SetRow(2, new Vector4((float)rotMat.get(2, 0) [0], (float)rotMat.get(2, 1) [0], (float)rotMat.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                        transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                        // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                        ARM = invertYM * transformationM;

                        // Apply Z axis inverted matrix.
                        ARM = ARM * invertZM;

                        if (shouldMoveARCamera)
                        {
                            ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;

                            ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                        }
                        else
                        {
                            ARM = ARCamera.transform.localToWorldMatrix * ARM;

                            ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                        }
                    }
                }
                else
                {
                    rightEye.SetActive(false);
                    leftEye.SetActive(false);
                    head.SetActive(false);
                    mouth.SetActive(false);
                    axes.SetActive(false);
                }

                Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbMat, texture, colors);
            }
        }
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                // detect faces.
                List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect> ();
                if (useDlibFaceDetecter)
                {
                    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
                    List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                    foreach (var unityRect in result)
                    {
                        detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                    }
                }
                else
                {
                    // convert image to greyscale.
                    Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect()) {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                            detectResult = faces.toList();
                        }

                    // Adjust to Dlib's result.
                    foreach (OpenCVForUnity.Rect r in detectResult)
                    {
                        r.y += (int)(r.height * 0.1f);
                    }
                }

                // face tracking.
                rectangleTracker.UpdateTrackedObjects(detectResult);
                List <TrackedRect> trackedRects = new List <TrackedRect> ();
                rectangleTracker.GetObjects(trackedRects, true);

                // detect face landmark.
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
                List <List <Vector2> > landmarkPoints = new List <List <Vector2> > ();
                for (int i = 0; i < trackedRects.Count; i++)
                {
                    TrackedRect      tr   = trackedRects [i];
                    UnityEngine.Rect rect = new UnityEngine.Rect(tr.x, tr.y, tr.width, tr.height);

                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
                    landmarkPoints.Add(points);
                }

                // face masking.
                if (faceMaskTexture != null && landmarkPoints.Count >= 1)
                {
                    OpenCVForUnity.Utils.texture2DToMat(faceMaskTexture, faceMaskMat);

                    float offsetX         = meshOverlay.Width / 2f;
                    float offsetY         = meshOverlay.Height / 2f;
                    float maskImageWidth  = faceMaskTexture.width;
                    float maskImageHeight = faceMaskTexture.height;

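                    // For each tracked face: mesh vertices follow the live landmarks,
                    // while UVs sample the matching landmark positions in the mask image.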
                    TrackedRect tr;
                    TrackedMesh tm;
                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        tr = trackedRects [i];

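                        // One overlay mesh per track: created when the track appears,
                        // updated while it lives, deleted once the tracker drops it.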
                        if (tr.state == TrackedState.NEW)
                        {
                            meshOverlay.CreateObject(tr.id, faceMaskTexture);
                        }
                        if (tr.state < TrackedState.DELETED)
                        {
                            tm = meshOverlay.GetObjectById(tr.id);

                            Vector3[] vertices = tm.MeshFilter.mesh.vertices;
                            if (vertices.Length == landmarkPoints [i].Count)
                            {
                                for (int j = 0; j < vertices.Length; j++)
                                {
                                    vertices [j].x = landmarkPoints [i] [j].x - offsetX;
                                    vertices [j].y = offsetY - landmarkPoints [i] [j].y;
                                }
                            }
                            Vector2[] uv = tm.MeshFilter.mesh.uv;
                            if (uv.Length == faceLandmarkPointsInMask.Count)
                            {
                                for (int jj = 0; jj < uv.Length; jj++)
                                {
                                    uv [jj].x = faceLandmarkPointsInMask [jj].x / maskImageWidth;
                                    uv [jj].y = (maskImageHeight - faceLandmarkPointsInMask [jj].y) / maskImageHeight;
                                }
                            }
                            meshOverlay.UpdateObject(tr.id, vertices, null, uv);

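                            // Fade the mask with the number of missed frames
                            // (assuming the shader's _Fade of 1 means fully faded out).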
                            if (tr.numFramesNotDetected > 3)
                            {
                                tm.Material.SetFloat(shader_FadeID, 1f);
                            }
                            else if (tr.numFramesNotDetected > 0 && tr.numFramesNotDetected <= 3)
                            {
                                tm.Material.SetFloat(shader_FadeID, 0.3f + (0.7f / 4f) * tr.numFramesNotDetected);
                            }
                            else
                            {
                                tm.Material.SetFloat(shader_FadeID, 0.3f);
                            }

                            // filter non-frontal faces.
                            if (isFilteringNonFrontalFaces && frontalFaceParam.getFrontalFaceRate(landmarkPoints [i]) < frontalFaceRateLowerLimit)
                            {
                                tm.Material.SetFloat(shader_FadeID, 1f);
                            }
                        }
                        else if (tr.state == TrackedState.DELETED)
                        {
                            meshOverlay.DeleteObject(tr.id);
                        }
                    }
                }
                else if (landmarkPoints.Count >= 1)
                {
                    float offsetX         = meshOverlay.Width / 2f;
                    float offsetY         = meshOverlay.Height / 2f;
                    float maskImageWidth  = texture.width;
                    float maskImageHeight = texture.height;

                    TrackedRect tr;
                    TrackedMesh tm;
                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        tr = trackedRects [i];

                        if (tr.state == TrackedState.NEW)
                        {
                            meshOverlay.CreateObject(tr.id, texture);
                        }
                        if (tr.state < TrackedState.DELETED)
                        {
                            tm = meshOverlay.GetObjectById(tr.id);

                            Vector3[] vertices = tm.MeshFilter.mesh.vertices;
                            if (vertices.Length == landmarkPoints [i].Count)
                            {
                                for (int j = 0; j < vertices.Length; j++)
                                {
                                    vertices [j].x = landmarkPoints[i][j].x - offsetX;
                                    vertices [j].y = offsetY - landmarkPoints[i][j].y;
                                }
                            }
                            Vector2[] uv = tm.MeshFilter.mesh.uv;
                            if (uv.Length == landmarkPoints [0].Count)
                            {
                                for (int jj = 0; jj < uv.Length; jj++)
                                {
                                    uv [jj].x = landmarkPoints[0][jj].x / maskImageWidth;
                                    uv [jj].y = (maskImageHeight - landmarkPoints[0][jj].y) / maskImageHeight;
                                }
                            }
                            meshOverlay.UpdateObject(tr.id, vertices, null, uv);

                            if (tr.numFramesNotDetected > 3)
                            {
                                tm.Material.SetFloat(shader_FadeID, 1f);
                            }
                            else if (tr.numFramesNotDetected > 0 && tr.numFramesNotDetected <= 3)
                            {
                                tm.Material.SetFloat(shader_FadeID, 0.3f + (0.7f / 4f) * tr.numFramesNotDetected);
                            }
                            else
                            {
                                tm.Material.SetFloat(shader_FadeID, 0.3f);
                            }

                            // filter non-frontal faces.
                            if (isFilteringNonFrontalFaces && frontalFaceParam.getFrontalFaceRate(landmarkPoints [i]) < frontalFaceRateLowerLimit)
                            {
                                tm.Material.SetFloat(shader_FadeID, 1f);
                            }
                        }
                        else if (tr.state == TrackedState.DELETED)
                        {
                            meshOverlay.DeleteObject(tr.id);
                        }
                    }
                }

                // draw face rects.
                if (isShowingFaceRects)
                {
                    for (int i = 0; i < detectResult.Count; i++)
                    {
                        UnityEngine.Rect rect = new UnityEngine.Rect(detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect, new Scalar(255, 0, 0, 255), 2);
                    }

                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        UnityEngine.Rect rect = new UnityEngine.Rect(trackedRects [i].x, trackedRects [i].y, trackedRects [i].width, trackedRects [i].height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect, new Scalar(255, 255, 0, 255), 2);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceParam.getAngleOfFrontalFace (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    }
                }

                // draw face points.
                if (isShowingDebugFacePoints)
                {
                    for (int i = 0; i < landmarkPoints.Count; i++)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, landmarkPoints [i], new Scalar(0, 255, 0, 255), 2);
                    }
                }


                // display face mask image.
                if (faceMaskTexture != null && faceMaskMat != null)
                {
                    if (isShowingFaceRects)
                    {
                        OpenCVForUnityUtils.DrawFaceRect(faceMaskMat, faceRectInMask, new Scalar(255, 0, 0, 255), 2);
                    }
                    if (isShowingDebugFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(faceMaskMat, faceLandmarkPointsInMask, new Scalar(0, 255, 0, 255), 2);
                    }

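                    // Preview the mask image in the top-right corner at a quarter of the
                    // frame width, via warpAffine with a scale-and-translate matrix.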
                    float scale = (rgbaMat.width() / 4f) / faceMaskMat.width();
                    float tx    = rgbaMat.width() - faceMaskMat.width() * scale;
                    float ty    = 0.0f;
                    Mat   trans = new Mat(2, 3, CvType.CV_32F); // 2x3 affine: [scale, 0, tx; 0, scale, ty]
                    trans.put(0, 0, scale);
                    trans.put(0, 1, 0.0f);
                    trans.put(0, 2, tx);
                    trans.put(1, 0, 0.0f);
                    trans.put(1, 1, scale);
                    trans.put(1, 2, ty);

                    Imgproc.warpAffine(faceMaskMat, rgbaMat, trans, rgbaMat.size(), Imgproc.INTER_LINEAR, Core.BORDER_TRANSPARENT, new Scalar(0));
                }

                Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, colors);
            }
        }
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                if (detectResult.Count > 0)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult [0]);

                    if (displayFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
                    }

                    MatOfPoint3f objectPoints   = null;
                    bool         isRightEyeOpen = false;
                    bool         isLeftEyeOpen  = false;
                    bool         isMouthOpen    = false;
                    if (points.Count == 68)
                    {
                        objectPoints = objectPoints68;

                        imagePoints.fromArray(
                            new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye (Interpupillary breadth)
                            new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye (Interpupillary breadth)
                            new Point(points [30].x, points [30].y),                                             //nose (Nose top)
                            new Point(points [48].x, points [48].y),                                             //l mouth (Mouth breadth)
                            new Point(points [54].x, points [54].y),                                             //r mouth (Mouth breadth)
                            new Point(points [0].x, points [0].y),                                               //l ear (Bitragion breadth)
                            new Point(points [16].x, points [16].y)                                              //r ear (Bitragion breadth)
                            );

                        if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 5.0)
                        {
                            isRightEyeOpen = true;
                        }

                        if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 5.0)
                        {
                            isLeftEyeOpen = true;
                        }

                        float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                        float mouthDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                        if (mouthDistance > noseDistance / 5.0)
                        {
                            isMouthOpen = true;
                        }
                        else
                        {
                            isMouthOpen = false;
                        }
                    }
                    else if (points.Count == 5)
                    {
                        objectPoints = objectPoints5;

                        imagePoints.fromArray(
                            new Point(points [3].x, points [3].y), //l eye (Inner corner of the eye)
                            new Point(points [1].x, points [1].y), //r eye (Inner corner of the eye)
                            new Point(points [2].x, points [2].y), //l eye (Tail of the eye)
                            new Point(points [0].x, points [0].y), //r eye (Tail of the eye)
                            new Point(points [4].x, points [4].y)  //nose (Nose top)
                            );

                        if (fpsMonitor != null)
                        {
                            fpsMonitor.consoleText = "This example mainly supports the 68-point face landmark model.";
                        }
                    }

                    // Estimate head pose.
                    if (rvec == null || tvec == null)
                    {
                        rvec = new Mat(3, 1, CvType.CV_64FC1);
                        tvec = new Mat(3, 1, CvType.CV_64FC1);
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }

                    double tvec_z = tvec.get(2, 0) [0];

                    if (double.IsNaN(tvec_z) || tvec_z < 0)    // if tvec holds bad data, solve again without an extrinsic guess.
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
                    }
                    else
                    {
                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec, true, Calib3d.SOLVEPNP_ITERATIVE);
                    }
                    //Debug.Log (tvec.dump());

                    if (!double.IsNaN(tvec_z))
                    {
                        if (displayHead)
                        {
                            head.SetActive(true);
                        }
                        if (displayAxes)
                        {
                            axes.SetActive(true);
                        }

                        if (displayEffects)
                        {
                            rightEye.SetActive(isRightEyeOpen);
                            leftEye.SetActive(isLeftEyeOpen);

                            if (isMouthOpen)
                            {
                                mouth.SetActive(true);
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = true;
                                    #if UNITY_5_5_OR_NEWER
                                    var main = ps.main;
                                    main.startSizeMultiplier = 20;
                                    #else
                                    ps.startSize = 20;
                                    #endif
                                }
                            }
                            else
                            {
                                foreach (ParticleSystem ps in mouthParticleSystem)
                                {
                                    var em = ps.emission;
                                    em.enabled = false;
                                }
                            }
                        }

                        // Convert to unity pose data.
                        double[] rvecArr = new double[3];
                        rvec.get(0, 0, rvecArr);
                        double[] tvecArr = new double[3];
                        tvec.get(0, 0, tvecArr);
                        PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

                        // Changes in pos/rot below these thresholds are ignored.
                        if (enableLowPassFilter)
                        {
                            ARUtils.LowpassPoseData(ref oldPoseData, ref poseData, positionLowPass, rotationLowPass);
                        }
                        oldPoseData = poseData;

                        // Create transform matrix.
                        transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);
                    }


                    // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                    ARM = invertYM * transformationM;

                    // Apply Z-axis inverted matrix.
                    ARM = ARM * invertZM;

                    if (shouldMoveARCamera)
                    {
                        ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                        ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                    }
                    else
                    {
                        ARM = ARCamera.transform.localToWorldMatrix * ARM;
                        ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                    }
                }

                //Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.fastMatToTexture2D(rgbaMat, texture);
            }
        }
        void OnPhotoCaptured(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
        {
            Matrix4x4 cameraToWorldMatrix;

            photoCaptureFrame.TryGetCameraToWorldMatrix(out cameraToWorldMatrix);
            Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

            Matrix4x4 projectionMatrix;

            photoCaptureFrame.TryGetProjectionMatrix(out projectionMatrix);
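            // The captured frame's camera-to-world and projection matrices let us
            // pin the canvas at the pose the photo was taken from.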

            photoCaptureFrame.UploadImageDataToTexture(m_Texture);


            OpenCVForUnity.Utils.texture2DToMat(m_Texture, rgbaMat);

            // fill all black.
//            Imgproc.rectangle (rgbaMat, new Point (0, 0), new Point (rgbaMat.width (), rgbaMat.height ()), new Scalar (0, 0, 0, 0), -1);
            // draw an edge lines.
            Imgproc.rectangle(rgbaMat, new Point(0, 0), new Point(rgbaMat.width(), rgbaMat.height()), new Scalar(255, 0, 0, 255), 2);
            // draw a diagonal line.
//            Imgproc.line (rgbaMat, new Point (0, 0), new Point (rgbaMat.cols (), rgbaMat.rows ()), new Scalar (255, 0, 0, 255));


            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

            //detect face
            List <FaceLandmarkDetector.RectDetection> detectResult = faceLandmarkDetector.DetectRectDetection();

            foreach (var r in detectResult)
            {
                Debug.Log("rect : " + r.rect);

                //detect landmark points
                List <Vector2> points = faceLandmarkDetector.DetectLandmark(r.rect);

                Debug.Log("face points count : " + points.Count);
                //draw landmark points
                OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);

                //draw face rect
                OpenCVForUnityUtils.DrawFaceRect(rgbaMat, r.rect, new Scalar(255, 0, 0, 255), 2);
            }


            Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.5, new Scalar(0, 255, 0, 255), 2, Imgproc.LINE_AA, false);

            OpenCVForUnity.Utils.matToTexture2D(rgbaMat, m_Texture, colors);



            m_Texture.wrapMode = TextureWrapMode.Clamp;

            m_CanvasRenderer.enabled = true;
            m_CanvasRenderer.sharedMaterial.SetTexture("_MainTex", m_Texture);
            m_CanvasRenderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);
            m_CanvasRenderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
            m_CanvasRenderer.sharedMaterial.SetFloat("_VignetteScale", 0.0f);

            // Position the canvas object slightly in front
            // of the real world web camera.
            Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2);

            // Rotate the canvas object so that it faces the user.
            Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

            m_Canvas.transform.position = position;
            m_Canvas.transform.rotation = rotation;

            Debug.Log("Took picture!");
            Debug.Log("projectionMatrix:\n" + projectionMatrix.ToString());
            m_CapturingPhoto = false;
        }