Example #1
    void DetectFaces()
    {
        detectResult = faceLandmarkDetector.Detect();

        spawnParent = new GameObject("Spawn Parent").transform;
        spawnParent.SetParent(targetImage.transform, false);

        for (int i = 0; i < detectResult.Count; i++)
        {
            Debug.Log("face : " + (rect = detectResult[i]));

            points = faceLandmarkDetector.DetectLandmark(rect);
            Debug.Log("face points count : " + points.Count);
            //for (int j = 0; j < points.Count; j++)
            {
                //Debug.Log($"Face Point {j} : {points[j]}");

                //if (j == 2 || j == 36)
                {
                    switch (library)
                    {
                    case LibraryName.Dlib_6:
                        leftPoint  = points[2];
                        rightPoint = points[5];
                        break;

                    case LibraryName.Dlib_17:
                        leftPoint  = points[2];
                        rightPoint = points[5];
                        break;

                    case LibraryName.Dlib_68:
                        leftPoint  = points[36];
                        rightPoint = points[45];
                        break;
                    }

                    var spex = new GameObject("Glass " + i).AddComponent <Image>();
                    spex.preserveAspect = true;
                    spex.transform.SetParent(spawnParent, false);
                    spex.sprite = glassSprite;
                    spexMap.Add(spex);

                    ReOrient(ref spex, ref leftPoint, ref rightPoint);
                }
            }
        }
    }
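The snippet above depends on a ReOrient helper that is not shown. A minimal sketch of what such a helper could look like, assuming the glasses Image sits under a parent whose RectTransform uses the detector's pixel coordinates (the y-axis flip and the 2x width factor are illustrative assumptions, not taken from the original code):

    // Hypothetical sketch: place, scale, and rotate the glasses Image between the two eye points.
    void ReOrient(ref Image spex, ref Vector2 leftPoint, ref Vector2 rightPoint)
    {
        Vector2 eyeVector = rightPoint - leftPoint;
        Vector2 center    = (leftPoint + rightPoint) * 0.5f;

        RectTransform rt = spex.rectTransform;
        // Assumption: UI y grows upward while detector y grows downward, hence the sign flip.
        rt.anchoredPosition = new Vector2(center.x, -center.y);
        // Assumption: make the glasses roughly twice as wide as the eye distance.
        rt.sizeDelta = new Vector2(eyeVector.magnitude * 2f, eyeVector.magnitude);
        rt.localRotation = Quaternion.Euler(0f, 0f, -Mathf.Atan2(eyeVector.y, eyeVector.x) * Mathf.Rad2Deg);
    }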
Example #2
        // Update is called once per frame
        void Update()
        {
            if (Input.GetKeyDown(KeyCode.Escape))
            {
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
                SceneManager.LoadScene("FacemojiStart");
#else
                Application.LoadLevel("FacemojiStart");
#endif
            }

            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                foreach (var rect in detectResult)
                {
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                    if (points.Count > 0)
                    {
                        live2DModelUpdate(points);

                        //currentFacePoints = points;

                        break;
                    }
                }
            }
        }
Example #3
    private void Update()
    {
        if (!hasInit || !streamManager.WebCam.didUpdateThisFrame)
        {
            return;
        }

        //colors = streamManager.WebCam.GetPixels32();

        //faceLandmarkDetector.SetImage(colors, textureWidth, textureHeight, 4, true);

        faceLandmarkDetector.SetImage(streamManager.WebCam);
        detectResult = faceLandmarkDetector.Detect();

        reArrange = true;

        //foreach (var rect in detectResult)
        //{
        //    faceLandmarkDetector.DetectLandmark(rect);

        //    faceLandmarkDetector.DrawDetectLandmarkResult(colors, textureWidth, textureHeight, 4, true, 0, 255, 0, 255);
        //}

        //faceLandmarkDetector.DrawDetectResult(colors, textureWidth, textureHeight, 4, true, 255, 0, 0, 255, 2);

        //texture.SetPixels32(colors);
        //texture.Apply(false);
    }
Example #4
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                foreach (var rect in detectResult)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                    if (points.Count > 0)
                    {
                        //draw landmark points
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
                    }

                    //draw face rect
                    OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect, new Scalar(255, 0, 0, 255), 2);
                }

                Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, colors);
            }
        }
        private void Run()
        {
            gameObject.transform.localScale = new Vector3(texture2D.width, texture2D.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = gameObject.transform.localScale.x;
            float height = gameObject.transform.localScale.y;

            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }

            FaceLandmarkDetector faceLandmarkDetector = new FaceLandmarkDetector(frontal_cat_face_svm_filepath, sp_cat_face_68_dat_filepath);

            faceLandmarkDetector.SetImage(texture2D);

            //detect face rects
            List <Rect> detectResult = faceLandmarkDetector.Detect();

            foreach (var rect in detectResult)
            {
                Debug.Log("face : " + rect);

                //detect landmark points
                List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                Debug.Log("face points count : " + points.Count);
                foreach (var point in points)
                {
                    Debug.Log("face point : x " + point.x + " y " + point.y);
                }

                //draw landmark points
                faceLandmarkDetector.DrawDetectLandmarkResult(texture2D, 0, 255, 0, 255);
            }

            //draw face rects
            faceLandmarkDetector.DrawDetectResult(texture2D, 255, 0, 0, 255, 3);

            faceLandmarkDetector.Dispose();

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture2D;

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("dlib object detector", "frontal_cat_face.svm");
                fpsMonitor.Add("dlib shape predictor", "sp_cat_face_68.dat");
                fpsMonitor.Add("width", width.ToString());
                fpsMonitor.Add("height", height.ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }
        }
Example #6
        private void DetectInRegion(Mat img, Rect region, List <Rect> detectedObjectsInRegions, FaceLandmarkDetector landmarkDetector)
        {
            Rect r0 = new Rect(new Point(), img.size());
            Rect r1 = new Rect(region.x, region.y, region.width, region.height);

            Rect.inflate(r1, (int)((r1.width * coeffTrackingWindowSize) - r1.width) / 2,
                         (int)((r1.height * coeffTrackingWindowSize) - r1.height) / 2);
            r1 = Rect.intersect(r0, r1);

            if ((r1.width <= 0) || (r1.height <= 0))
            {
                Debug.Log("detectInRegion: Empty intersection");
                return;
            }

            using (Mat img1_roi = new Mat(img, r1))
                using (Mat img1 = new Mat(r1.size(), img.type()))
                {
                    img1_roi.copyTo(img1);

                    OpenCVForUnityUtils.SetImage(landmarkDetector, img1);

                    List <UnityEngine.Rect> detectResult = landmarkDetector.Detect();

                    int len = detectResult.Count;
                    for (int i = 0; i < len; i++)
                    {
                        UnityEngine.Rect tmp = detectResult[i];
                        Rect             r   = new Rect((int)(tmp.x + r1.x), (int)(tmp.y + r1.y), (int)tmp.width, (int)tmp.height);
                        detectedObjectsInRegions.Add(r);
                    }
                }
        }
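A possible call pattern for DetectInRegion above, running the detector only around previously tracked rectangles (a sketch under the assumption that trackedRects comes from a rectangle tracker and that faceLandmarkDetector is a field of the class):

        // Hypothetical sketch: collect detections from the neighbourhood of each tracked rectangle.
        private List <Rect> DetectAroundTrackedRects(Mat img, List <Rect> trackedRects)
        {
            List <Rect> detectedObjectsInRegions = new List <Rect>();
            foreach (Rect region in trackedRects)
            {
                DetectInRegion(img, region, detectedObjectsInRegions, faceLandmarkDetector);
            }
            return detectedObjectsInRegions;
        }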
        // Update is called once per frame
        void Update()
        {
            if (!initDone)
            {
                return;
            }

            if (screenOrientation != Screen.orientation)
            {
                screenOrientation = Screen.orientation;
                updateLayout();
            }

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
#else
            if (webCamTexture.didUpdateThisFrame)
            {
#endif


                webCamTexture.GetPixels32(colors);
                faceLandmarkDetector.SetImage <Color32> (colors, webCamTexture.width, webCamTexture.height, 4, flip);

                //detect face rects
                List <Rect> detectResult = faceLandmarkDetector.Detect();

                foreach (var rect in detectResult)
                {
//			Debug.Log ("face : " + rect);

                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);


//			Debug.Log ("face point : " + points.Count);
                    if (points.Count > 0)
                    {
//				Debug.Log ("face points : x " + point.x + " y " + point.y);

                        //draw landmark points
                        faceLandmarkDetector.DrawDetectLandmarkResult <Color32> (colors, webCamTexture.width, webCamTexture.height, 4, flip, 0, 255, 0, 255);
                    }

                    //draw face rect
                    faceLandmarkDetector.DrawDetectResult <Color32> (colors, webCamTexture.width, webCamTexture.height, 4, flip, 255, 0, 0, 255, 2);
                }

                texture2D.SetPixels32(colors);
                texture2D.Apply();
            }
        }

        void OnDisable()
        {
            webCamTexture.Stop();
            faceLandmarkDetector.Dispose();
        }
Example #8
        private void Run()
        {
            // If true, error logs from the native Dlib side are displayed in the Unity Editor Console.
            Utils.setDebugMode(true);


            gameObject.transform.localScale = new Vector3(texture2D.width, texture2D.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = gameObject.transform.localScale.x;
            float height = gameObject.transform.localScale.y;

            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }

            FaceLandmarkDetector faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);

            faceLandmarkDetector.SetImage(texture2D);

            //detect face rects
            List <Rect> detectResult = faceLandmarkDetector.Detect();

            foreach (var rect in detectResult)
            {
                Debug.Log("face : " + rect);

                //detect landmark points
                List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                Debug.Log("face points count : " + points.Count);
                foreach (var point in points)
                {
                    Debug.Log("face point : x " + point.x + " y " + point.y);
                }

                //draw landmark points
                faceLandmarkDetector.DrawDetectLandmarkResult(texture2D, 0, 255, 0, 255);
            }

            //draw face rect
            faceLandmarkDetector.DrawDetectResult(texture2D, 255, 0, 0, 255, 2);

            faceLandmarkDetector.Dispose();

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture2D;


            Utils.setDebugMode(false);
        }
        // Use this for initialization
        void Start()
        {
            gameObject.transform.localScale = new Vector3(texture2D.width, texture2D.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = gameObject.transform.localScale.x;
            float height = gameObject.transform.localScale.y;

            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }


            FaceLandmarkDetector faceLandmarkDetector = new FaceLandmarkDetector(Utils.getFilePath("frontal_cat_face.svm"), Utils.getFilePath("shape_predictor_68_cat_face_landmarks.dat"));

            faceLandmarkDetector.SetImage(texture2D);

            //detect face rects
            List <Rect> detectResult = faceLandmarkDetector.Detect();

            foreach (var rect in detectResult)
            {
                Debug.Log("face : " + rect);

                //detect landmark points
                List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                Debug.Log("face points count : " + points.Count);
                if (points.Count > 0)
                {
                    foreach (var point in points)
                    {
                        Debug.Log("face point : x " + point.x + " y " + point.y);
                    }

                    //draw landmark points
                    faceLandmarkDetector.DrawDetectLandmarkResult(texture2D, 0, 255, 0, 255);
                }

                //draw face rects
                faceLandmarkDetector.DrawDetectResult(texture2D, 255, 0, 0, 255, 3);
            }

            faceLandmarkDetector.Dispose();

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture2D;
        }
Example #10
        // Update is called once per frame
        void Update()
        {
            if (!hasInitDone)
            {
                return;
            }

            if (screenOrientation != Screen.orientation)
            {
                StartCoroutine(initWebCamTextureContract());
            }


            if (webCamTexture.didUpdateThisFrame)
            {
                webCamTexture.GetPixels32(colors);

                //Adjust an array of color pixels according to screen orientation and WebCamDevice parameter.
                colors = RotateAndFlip(colors, webCamTexture.width, webCamTexture.height);


                faceLandmarkDetector.SetImage <Color32>(colors, texture2D.width, texture2D.height, 4, true);

                //detect face rects
                List <Rect> detectResult = faceLandmarkDetector.Detect();

                bool eventTrigger = true;
                foreach (var rect in detectResult)
                {
                    //Debug.Log ("face : " + rect);

                    //detect landmark points
                    var landmarkList = faceLandmarkDetector.DetectLandmark(rect);
                    if (eventTrigger)
                    {
                        if (OnFacelandmarkUpdated != null)
                        {
                            OnFacelandmarkUpdated(this, new FacelandmarkResultEventArgs(rect, landmarkList));
                        }
                        eventTrigger = false;
                    }
                    //draw landmark points
                    faceLandmarkDetector.DrawDetectLandmarkResult <Color32>(colors, texture2D.width, texture2D.height, 4, true, 0, 255, 0, 255);
                }

                //draw face rect
                faceLandmarkDetector.DrawDetectResult <Color32>(colors, texture2D.width, texture2D.height, 4, true, 255, 0, 0, 255, 2);



                texture2D.SetPixels32(colors);
                texture2D.Apply();
            }
        }
Example #11
        void PointsThread()
        {
            pointsthread_running = true;

            // This pattern lets us interrupt the work at a safe point if needed.
            while (pointsthread_running)
            {
                if (texture != null)
                {
                    //detect face rects
                    List <Rect>    detectResult = faceLandmarkDetector.Detect();
                    List <Vector2> localpoints;
                    float          localHeight;
                    float          localWidth;
                    Rect           localRect;
                    if (detectResult.Count > 0)
                    {
                        Rect imgRect = detectResult[0];

                        localpoints = faceLandmarkDetector.DetectLandmark(imgRect);
                        localHeight = detectResult[0].height;
                        localWidth  = detectResult[0].width;
                        localRect   = detectResult[0];
                        lock (copyablepointslocker)
                        {
                            updatedpoints  = true;
                            copyablepoints = localpoints;
                            copyableHeight = localHeight;
                            copyableWidth  = localWidth;
                            copyableRect   = localRect;
                        }
                    }
                    else
                    {
                        lock (copyablepointslocker)
                        {
                            updatedpoints  = true;
                            copyablepoints = new List <Vector2>();
                            copyableHeight = 0;
                            copyableWidth  = 0;
                            copyableRect   = new Rect();
                        }
                    }
                }
                // Prevent thread spinning
                Thread.Sleep(period_between_cv);
            }

            pointsthread_running = false;
        }
        private string Benchmark(Texture2D targetImg, FaceLandmarkDetector detector, int times = 100)
        {
            System.Diagnostics.Stopwatch sw = new System.Diagnostics.Stopwatch();

            string result = "sp_name: " + dlibShapePredictorFileName + "\n";

            result += "times: " + times + " (size: " + targetImg.width + "*" + targetImg.height + ")" + "\n";

            detector.SetImage(targetImg);

            // FaceLandmarkDetector.Detect() benchmark.
            sw.Start();
            for (int i = 0; i < times; ++i)
            {
                detector.Detect();
            }
            sw.Stop();
            result += " Detect(): " + sw.ElapsedMilliseconds + "ms" + " Avg:" + sw.ElapsedMilliseconds / times + "ms" + "\n";


            // FaceLandmarkDetector.DetectLandmark() benchmark.
            List <Rect> detectResult = detector.Detect();

            sw.Reset();
            sw.Start();
            for (int i = 0; i < times; ++i)
            {
                foreach (var rect in detectResult)
                {
                    detector.DetectLandmark(rect);
                }
            }
            sw.Stop();
            result += " DetectLandmark(): " + sw.ElapsedMilliseconds + "ms" + " Avg:" + sw.ElapsedMilliseconds / times + "ms";

            return(result);
        }
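A hedged usage sketch for the Benchmark helper above; the targetTexture and faceLandmarkDetector fields are assumed to be initialized elsewhere in the class:

        // Hypothetical call site: run the benchmark once and log the report.
        private void RunBenchmark()
        {
            string report = Benchmark(targetTexture, faceLandmarkDetector, 100);
            Debug.Log(report);
        }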
Example #13
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                // Resize image for face detection
                Imgproc.resize(rgbaMat, rgbaMat_downscale, new Size(), 1.0 / FACE_DOWNSAMPLE_RATIO, 1.0 / FACE_DOWNSAMPLE_RATIO, Imgproc.INTER_LINEAR);


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat_downscale);


                // Detect faces on the resized image
                if (count % SKIP_FRAMES == 0)
                {
                    //detect face rects
                    detectResult = faceLandmarkDetector.Detect();
                }

                foreach (var rect in detectResult)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                    if (points.Count > 0)
                    {
                        List <Vector2> originalPoints = new List <Vector2> (points.Count);
                        foreach (var point in points)
                        {
                            originalPoints.Add(new Vector2(point.x * FACE_DOWNSAMPLE_RATIO, point.y * FACE_DOWNSAMPLE_RATIO));
                        }

                        //draw landmark points
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, originalPoints, new Scalar(0, 255, 0, 255), 2);
                    }

                    UnityEngine.Rect originalRect = new UnityEngine.Rect(rect.x * FACE_DOWNSAMPLE_RATIO, rect.y * FACE_DOWNSAMPLE_RATIO, rect.width * FACE_DOWNSAMPLE_RATIO, rect.height * FACE_DOWNSAMPLE_RATIO);
                    //draw face rect
                    OpenCVForUnityUtils.DrawFaceRect(rgbaMat, originalRect, new Scalar(255, 0, 0, 255), 2);
                }

                Imgproc.putText(rgbaMat, "Original: (" + rgbaMat.width() + "," + rgbaMat.height() + ") DownScale; (" + rgbaMat_downscale.width() + "," + rgbaMat_downscale.height() + ") SkipFrames: " + SKIP_FRAMES, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, colors);

                count++;
            }
        }
    public List <Rect> DetectFaces(Texture2D imgTexture)
    {
        if (string.IsNullOrEmpty(dlibShapePredictorFilePath))
        {
            Debug.LogError(
                "shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
        }

        faceLandmarkDetector.SetImage(imgTexture);

        //detect face rects
        List <Rect> faceRects = faceLandmarkDetector.Detect();

        return(faceRects);
    }
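For illustration, the DetectFaces helper above could be consumed like this (a sketch; the method name and logging are assumptions):

    // Hypothetical usage: detect faces in a Texture2D and log the resulting rectangles.
    public void LogDetectedFaces(Texture2D imgTexture)
    {
        List <Rect> faceRects = DetectFaces(imgTexture);
        Debug.Log("detected face count : " + faceRects.Count);
        foreach (var rect in faceRects)
        {
            Debug.Log("face rect : " + rect);
        }
    }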
Example #15
        private void DetectObject(Mat img, out List <Rect> detectedObjects, FaceLandmarkDetector landmarkDetector)
        {
            OpenCVForUnityUtils.SetImage(landmarkDetector, img);

            List <UnityEngine.Rect> detectResult = landmarkDetector.Detect();

            detectedObjects = new List <Rect>();

            int len = detectResult.Count;

            for (int i = 0; i < len; i++)
            {
                UnityEngine.Rect r = detectResult[i];
                detectedObjects.Add(new Rect((int)r.x, (int)r.y, (int)r.width, (int)r.height));
            }
        }
        // Update is called once per frame
        void Update()
        {
            if (adjustPixelsDirection)
            {
                // Catch the orientation change of the screen.
                if (screenOrientation != Screen.orientation && (screenWidth != Screen.width || screenHeight != Screen.height))
                {
                    Initialize();
                }
                else
                {
                    screenWidth  = Screen.width;
                    screenHeight = Screen.height;
                }
            }


            if (hasInitDone && webCamTexture.isPlaying && webCamTexture.didUpdateThisFrame)
            {
                Color32[] colors = GetColors();

                if (colors != null)
                {
                    faceLandmarkDetector.SetImage <Color32> (colors, texture.width, texture.height, 4, true);

                    //detect face rects
                    List <Rect> detectResult = faceLandmarkDetector.Detect();

                    foreach (var rect in detectResult)
                    {
                        //Debug.Log ("face : " + rect);

                        //detect landmark points
                        faceLandmarkDetector.DetectLandmark(rect);

                        //draw landmark points
                        faceLandmarkDetector.DrawDetectLandmarkResult <Color32> (colors, texture.width, texture.height, 4, true, 0, 255, 0, 255);
                    }

                    //draw face rect
                    faceLandmarkDetector.DrawDetectResult <Color32> (colors, texture.width, texture.height, 4, true, 255, 0, 0, 255, 2);

                    texture.SetPixels32(colors);
                    texture.Apply(false);
                }
            }
        }
        private void ShowImage(Texture2D texture2D)
        {
            if (dstTexture2D != null)
            {
                Texture2D.Destroy(dstTexture2D);
            }
            dstTexture2D = new Texture2D(texture2D.width, texture2D.height, texture2D.format, false);
            dstTexture2D.SetPixels32(texture2D.GetPixels32());
            dstTexture2D.Apply();

            gameObject.transform.localScale = new Vector3(texture2D.width, texture2D.height, 1);

            float width  = gameObject.transform.localScale.x;
            float height = gameObject.transform.localScale.y;

            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }

            faceLandmarkDetector.SetImage(texture2D);

            //detect face rects
            List <Rect> detectResult = faceLandmarkDetector.Detect();

            foreach (var rect in detectResult)
            {
                //detect landmark points
                faceLandmarkDetector.DetectLandmark(rect);
                // draw landmark points
                faceLandmarkDetector.DrawDetectLandmarkResult(dstTexture2D, 0, 255, 0, 255);
            }

            // draw face rect
            faceLandmarkDetector.DrawDetectResult(dstTexture2D, 255, 0, 0, 255, 2);

            gameObject.GetComponent <Renderer>().material.mainTexture = dstTexture2D;
        }
Example #18
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.isPlaying() && webCamTextureToMatHelper.didUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);

                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                foreach (var rect in detectResult)
                {
                    OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect, new Scalar(255, 0, 0, 255), 2);

                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                    if (points.Count > 0)
                    {
                        live2DModelUpdate(points);

                        currentFacePoints = points;

                        break;
                    }
                }



                if (isHideCameraImage)
                {
                    Imgproc.rectangle(rgbaMat, new Point(0, 0), new Point(rgbaMat.width(), rgbaMat.height()), new Scalar(0, 0, 0, 255), -1);
                }

                if (currentFacePoints != null)
                {
                    OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, currentFacePoints, new Scalar(0, 255, 0, 255), 2);
                }


                Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, colors);
            }
        }
Example #19
    void Update()
    {
        // Get the screen orientation
        // If the orientation has changed
        if (_screenOrientation != Screen.orientation &&
            (_screenWidth != Screen.width || _screenHeight != Screen.height))
        {
            Initialize();
        }
        else
        {
            _screenWidth  = Screen.width;
            _screenHeight = Screen.height;
        }

        // Landmark estimation
        if (_hasInitDone && _webCamTexture.isPlaying && _webCamTexture.didUpdateThisFrame)
        {
            Color32[] colors = GetColors();
            if (colors != null)
            {
                faceLandmarkDetector.SetImage <Color32>(colors, _texture.width, _texture.height, 4, true);

                // Get the regions that contain faces
                List <Rect> detectResult = faceLandmarkDetector.Detect();

                foreach (var rect in detectResult)
                {
                    // Face recognition (landmark estimation)
                    Landmarks = faceLandmarkDetector.DetectLandmark(rect);
                    // Draw the landmark points
                    faceLandmarkDetector.DrawDetectLandmarkResult <Color32>(colors, _texture.width, _texture.height, 4,
                                                                            true, 0, 255, 0, 255);
                }

                // Draw the face regions as rectangles
                faceLandmarkDetector.DrawDetectResult <Color32>(colors, _texture.width, _texture.height, 4, true, 255, 0,
                                                                0, 255, 2);

                _texture.SetPixels32(colors);
                _texture.Apply(false);
            }
        }
    }
        // Update is called once per frame
        void Update()
        {
            if (capture == null)
            {
                return;
            }

            //Loop play
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            // Note: VideoCapture can trigger a "PlayerLoop called recursively!" error on iOS; WebCamTexture is recommended there.
            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                //Debug.Log ("Mat toString " + rgbMat.ToString ());


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                foreach (var rect in detectResult)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                    //draw landmark points
                    OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, points, new Scalar(0, 255, 0), 2);

                    //draw face rect
                    OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 0, 0), 2);
                }

                Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbMat, texture, colors);
            }
        }
        // Update is called once per frame
        void Update()
        {
            Debug.Log("called update func");
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat       rgbaMat = webCamTextureToMatHelper.GetMat();
                Color32[] rgbabuf = webCamTextureToMatHelper.GetBufferColors();

                if (rgbabuf != null && faceLandmarkDetector != null && texture != null)
                {
                    Debug.Log("on Update above SetImage");
                    faceLandmarkDetector.SetImage <Color32> (rgbabuf, texture.width, texture.height, 4, true);

                    //detect face rects
                    List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                    foreach (var rect in detectResult)
                    {
                        Debug.Log("face : " + rect);

                        //detect landmark points
                        faceLandmarkDetector.DetectLandmark(rect);

                        //draw landmark points
                        faceLandmarkDetector.DrawDetectLandmarkResult <Color32> (rgbabuf, texture.width, texture.height, 4, true, 255, 255, 255, 255);
                        //faceLandmarkDetector.DrawDetectLandmarkResult<Color32>(drawbuf, texture.width, texture.height, 4, true, 255, 255, 255, 255);

                        List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                        if (points.Count > 0)
                        {
                            live2DModelUpdate(points);
                        }
                    }


                    if (isHideCameraImage == false)
                    {
                        texture.SetPixels32(rgbabuf);
                        texture.Apply(false);
                    }
                }
            }
        }
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Mat downScaleRgbaMat = webCamTextureToMatHelper.GetDownScaleMat(rgbaMat);

                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, downScaleRgbaMat);

                // Detect faces on the resized image
                if (!webCamTextureToMatHelper.IsSkipFrame())
                {
                    //detect face rects
                    detectResult = faceLandmarkDetector.Detect();
                }

                float DOWNSCALE_RATIO = webCamTextureToMatHelper.DOWNSCALE_RATIO;

                foreach (var rect in detectResult)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                    List <Vector2> originalPoints = new List <Vector2> (points.Count);
                    foreach (var point in points)
                    {
                        originalPoints.Add(new Vector2(point.x * DOWNSCALE_RATIO, point.y * DOWNSCALE_RATIO));
                    }

                    //draw landmark points
                    OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, originalPoints, new Scalar(0, 255, 0, 255), 2);

                    UnityEngine.Rect originalRect = new UnityEngine.Rect(rect.x * DOWNSCALE_RATIO, rect.y * DOWNSCALE_RATIO, rect.width * DOWNSCALE_RATIO, rect.height * DOWNSCALE_RATIO);
                    //draw face rect
                    OpenCVForUnityUtils.DrawFaceRect(rgbaMat, originalRect, new Scalar(255, 0, 0, 255), 2);
                }

                Imgproc.putText(rgbaMat, "Original: (" + rgbaMat.width() + "," + rgbaMat.height() + ") DownScale; (" + downScaleRgbaMat.width() + "," + downScaleRgbaMat.height() + ") SkipFrames: " + webCamTextureToMatHelper.SKIP_FRAMES, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }
        }
        private void DetectFaces(Mat rgbaMat, out List <Rect> detectResult, bool useDlibFaceDetecter)
        {
            detectResult = new List <Rect>();

            if (useDlibFaceDetecter)
            {
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
                List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                foreach (var unityRect in result)
                {
                    detectResult.Add(new Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                }
            }
            else
            {
                // convert image to greyscale.
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                using (Mat equalizeHistMat = new Mat())
                    using (MatOfRect faces = new MatOfRect())
                    {
                        Imgproc.equalizeHist(grayMat, equalizeHistMat);

                        cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                        detectResult = faces.toList();

                        // correct the offset between the OpenCV and Dlib face rectangle results.
                        foreach (Rect r in detectResult)
                        {
                            r.y += (int)(r.height * 0.1f);
                        }
                    }
            }
        }
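A minimal sketch of how this DetectFaces overload might be called per frame; rgbaMat and useDlibFaceDetecter are assumed to be available as in the surrounding examples:

        // Hypothetical call site: wrap the DetectFaces overload above and log the result.
        private void DetectAndLogFaces(Mat rgbaMat, bool useDlibFaceDetecter)
        {
            List <Rect> faceRects;
            DetectFaces(rgbaMat, out faceRects, useDlibFaceDetecter);
            Debug.Log("detected face count : " + faceRects.Count);
        }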
        // Update is called once per frame
        void Update()
        {
            //Loop play
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                //Debug.Log ("Mat toString " + rgbMat.ToString ());


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);

                //detect face rects
                List <UnityEngine.Rect> detectResult = faceLandmarkDetector.Detect();

                if (detectResult.Count > 0)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(detectResult [0]);

                    if (points.Count > 0)
                    {
                        if (shouldDrawFacePoints)
                        {
                            OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, points, new Scalar(0, 255, 0), 2);
                        }

                        imagePoints.fromArray(
                            new Point((points [38].x + points [41].x) / 2, (points [38].y + points [41].y) / 2), //l eye
                            new Point((points [43].x + points [46].x) / 2, (points [43].y + points [46].y) / 2), //r eye
                            new Point(points [33].x, points [33].y),                                             //nose
                            new Point(points [48].x, points [48].y),                                             //l mouth
                            new Point(points [54].x, points [54].y)                                              //r mouth
                            ,
                            new Point(points [0].x, points [0].y),                                               //l ear
                            new Point(points [16].x, points [16].y)                                              //r ear
                            );


                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);


                        if (tvec.get(2, 0) [0] > 0)
                        {
                            if (Mathf.Abs((float)(points [43].y - points [46].y)) > Mathf.Abs((float)(points [42].x - points [45].x)) / 6.0)
                            {
                                if (shouldDrawEffects)
                                {
                                    rightEye.SetActive(true);
                                }
                            }

                            if (Mathf.Abs((float)(points [38].y - points [41].y)) > Mathf.Abs((float)(points [39].x - points [36].x)) / 6.0)
                            {
                                if (shouldDrawEffects)
                                {
                                    leftEye.SetActive(true);
                                }
                            }
                            if (shouldDrawHead)
                            {
                                head.SetActive(true);
                            }
                            if (shouldDrawAxes)
                            {
                                axes.SetActive(true);
                            }



                            float noseDistance  = Mathf.Abs((float)(points [27].y - points [33].y));
                            float mouseDistance = Mathf.Abs((float)(points [62].y - points [66].y));
                            if (mouseDistance > noseDistance / 5.0)
                            {
                                if (shouldDrawEffects)
                                {
                                    mouth.SetActive(true);
                                    foreach (ParticleSystem ps in mouthParticleSystem)
                                    {
                                        ps.enableEmission = true;
                                        ps.startSize      = 40 * (mouseDistance / noseDistance);
                                    }
                                }
                            }
                            else
                            {
                                if (shouldDrawEffects)
                                {
                                    foreach (ParticleSystem ps in mouthParticleSystem)
                                    {
                                        ps.enableEmission = false;
                                    }
                                }
                            }


                            Calib3d.Rodrigues(rvec, rotM);

                            transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                            transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                            transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                            if (shouldMoveARCamera)
                            {
                                if (ARGameObject != null)
                                {
                                    ARM = ARGameObject.transform.localToWorldMatrix * invertZM * transformationM.inverse * invertYM;
                                    ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                                    ARGameObject.SetActive(true);
                                }
                            }
                            else
                            {
                                ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;

                                if (ARGameObject != null)
                                {
                                    ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                                    ARGameObject.SetActive(true);
                                }
                            }
                        }
                    }
                }
                else
                {
                    rightEye.SetActive(false);
                    leftEye.SetActive(false);
                    head.SetActive(false);
                    mouth.SetActive(false);
                    axes.SetActive(false);
                }

                Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbMat, texture, colors);
            }
        }
Example #25
        // Update is called once per frame
        void Update()
        {
            // loop play.
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                //Debug.Log ("Mat toString " + rgbMat.ToString ());


                // detect faces.
                List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect> ();
                if (useDlibFaceDetecter)
                {
                    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
                    List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                    foreach (var unityRect in result)
                    {
                        detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                    }
                }
                else
                {
                    // convert image to greyscale.
                    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);

                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect()) {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                            detectResult = faces.toList();

                            // adjust to Dlib's result.
                            foreach (OpenCVForUnity.Rect r in detectResult)
                            {
                                r.y += (int)(r.height * 0.1f);
                            }
                        }
                }

                // face tracking.
                rectangleTracker.UpdateTrackedObjects(detectResult);
                List <TrackedRect> trackedRects = new List <TrackedRect> ();
                rectangleTracker.GetObjects(trackedRects, true);

                // create noise filter.
                foreach (var openCVRect in trackedRects)
                {
                    if (openCVRect.state == TrackedState.NEW)
                    {
                        if (!lowPassFilterDict.ContainsKey(openCVRect.id))
                        {
                            lowPassFilterDict.Add(openCVRect.id, new LowPassPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
                        }
                        if (!opticalFlowFilterDict.ContainsKey(openCVRect.id))
                        {
                            opticalFlowFilterDict.Add(openCVRect.id, new OFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
                        }
                    }
                    else if (openCVRect.state == TrackedState.DELETED)
                    {
                        if (lowPassFilterDict.ContainsKey(openCVRect.id))
                        {
                            lowPassFilterDict [openCVRect.id].Dispose();
                            lowPassFilterDict.Remove(openCVRect.id);
                        }
                        if (opticalFlowFilterDict.ContainsKey(openCVRect.id))
                        {
                            opticalFlowFilterDict [openCVRect.id].Dispose();
                            opticalFlowFilterDict.Remove(openCVRect.id);
                        }
                    }
                }

                // create LUT texture.
                foreach (var openCVRect in trackedRects)
                {
                    if (openCVRect.state == TrackedState.NEW)
                    {
                        faceMaskColorCorrector.CreateLUTTex(openCVRect.id);
                    }
                    else if (openCVRect.state == TrackedState.DELETED)
                    {
                        faceMaskColorCorrector.DeleteLUTTex(openCVRect.id);
                    }
                }


                // detect face landmark points.
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
                List <List <Vector2> > landmarkPoints = new List <List <Vector2> > ();
                for (int i = 0; i < trackedRects.Count; i++)
                {
                    TrackedRect      tr   = trackedRects [i];
                    UnityEngine.Rect rect = new UnityEngine.Rect(tr.x, tr.y, tr.width, tr.height);

                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                    // apply noise filter.
                    if (enableNoiseFilter)
                    {
                        if (tr.state > TrackedState.NEW && tr.state < TrackedState.DELETED)
                        {
                            opticalFlowFilterDict [tr.id].Process(rgbMat, points, points);
                            lowPassFilterDict [tr.id].Process(rgbMat, points, points);
                        }
                    }

                    landmarkPoints.Add(points);
                }

                // face masking.
                if (faceMaskTexture != null && landmarkPoints.Count >= 1)   // Apply face masking between detected faces and a face mask image.
                {
                    float maskImageWidth  = faceMaskTexture.width;
                    float maskImageHeight = faceMaskTexture.height;

                    TrackedRect tr;

                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        tr = trackedRects [i];

                        if (tr.state == TrackedState.NEW)
                        {
                            meshOverlay.CreateObject(tr.id, faceMaskTexture);
                        }
                        if (tr.state < TrackedState.DELETED)
                        {
                            MaskFace(meshOverlay, tr, landmarkPoints [i], faceLandmarkPointsInMask, maskImageWidth, maskImageHeight);

                            if (enableColorCorrection)
                            {
                                CorrectFaceMaskColor(tr.id, faceMaskMat, rgbMat, faceLandmarkPointsInMask, landmarkPoints [i]);
                            }
                        }
                        else if (tr.state == TrackedState.DELETED)
                        {
                            meshOverlay.DeleteObject(tr.id);
                        }
                    }
                }
                else if (landmarkPoints.Count >= 1)     // Apply face masking between detected faces.
                {
                    float maskImageWidth  = texture.width;
                    float maskImageHeight = texture.height;

                    TrackedRect tr;

                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        tr = trackedRects [i];

                        if (tr.state == TrackedState.NEW)
                        {
                            meshOverlay.CreateObject(tr.id, texture);
                        }
                        if (tr.state < TrackedState.DELETED)
                        {
                            MaskFace(meshOverlay, tr, landmarkPoints [i], landmarkPoints [0], maskImageWidth, maskImageHeight);

                            if (enableColorCorrection)
                            {
                                CorrectFaceMaskColor(tr.id, rgbMat, rgbMat, landmarkPoints [0], landmarkPoints [i]);
                            }
                        }
                        else if (tr.state == TrackedState.DELETED)
                        {
                            meshOverlay.DeleteObject(tr.id);
                        }
                    }
                }

                // draw face rects.
                if (displayFaceRects)
                {
                    for (int i = 0; i < detectResult.Count; i++)
                    {
                        UnityEngine.Rect rect = new UnityEngine.Rect(detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 0, 0, 255), 2);
                    }

                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        UnityEngine.Rect rect = new UnityEngine.Rect(trackedRects [i].x, trackedRects [i].y, trackedRects [i].width, trackedRects [i].height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 255, 0, 255), 2);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceChecker.GetFrontalFaceAngles (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceChecker.GetFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    }
                }

                // draw face points.
                if (displayDebugFacePoints)
                {
                    for (int i = 0; i < landmarkPoints.Count; i++)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, landmarkPoints [i], new Scalar(0, 255, 0, 255), 2);
                    }
                }


                // display face mask image.
                if (faceMaskTexture != null && faceMaskMat != null)
                {
                    if (displayFaceRects)
                    {
                        OpenCVForUnityUtils.DrawFaceRect(faceMaskMat, faceRectInMask, new Scalar(255, 0, 0, 255), 2);
                    }
                    if (displayDebugFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(faceMaskMat, faceLandmarkPointsInMask, new Scalar(0, 255, 0, 255), 2);
                    }

                    float scale = (rgbMat.width() / 4f) / faceMaskMat.width();
                    float tx    = rgbMat.width() - faceMaskMat.width() * scale;
                    float ty    = 0.0f;
                    Mat   trans = new Mat(2, 3, CvType.CV_32F);//1.0, 0.0, tx, 0.0, 1.0, ty);
                    trans.put(0, 0, scale);
                    trans.put(0, 1, 0.0f);
                    trans.put(0, 2, tx);
                    trans.put(1, 0, 0.0f);
                    trans.put(1, 1, scale);
                    trans.put(1, 2, ty);

                    Imgproc.warpAffine(faceMaskMat, rgbMat, trans, rgbMat.size(), Imgproc.INTER_LINEAR, Core.BORDER_TRANSPARENT, new Scalar(0));

                    if (displayFaceRects || displayDebugFacePointsToggle)
                    {
                        OpenCVForUnity.Utils.texture2DToMat(faceMaskTexture, faceMaskMat);
                    }
                }

//                Imgproc.putText (rgbMat, "W:" + rgbMat.width () + " H:" + rgbMat.height () + " SO:" + Screen.orientation, new Point (5, rgbMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.fastMatToTexture2D(rgbMat, texture);
            }
        }
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                // detect faces.
                List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect> ();
                if (useDlibFaceDetecter)
                {
                    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
                    List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                    foreach (var unityRect in result)
                    {
                        detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                    }
                }
                else
                {
                    // convert image to greyscale.
                    Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect()) {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                            detectResult = faces.toList();

                            // adjust to Dlib's result.
                            foreach (OpenCVForUnity.Rect r in detectResult)
                            {
                                r.y += (int)(r.height * 0.1f);
                            }
                        }
                }

                // face tracking.
                if (enableTracking)
                {
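                    // Replace the raw detections with the tracker's smoothed rectangles so faces keep stable indices between frames.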
                    rectangleTracker.UpdateTrackedObjects(detectResult);
                    detectResult = new List <OpenCVForUnity.Rect> ();
                    rectangleTracker.GetObjects(detectResult, true);
                }

                // detect face landmark points.
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
                List <List <Vector2> > landmarkPoints = new List <List <Vector2> > ();
                foreach (var openCVRect in detectResult)
                {
                    UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);

                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
                    landmarkPoints.Add(points);
                }

                // filter non frontal faces.
                if (filterNonFrontalFaces)
                {
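                    // Drop faces whose frontal-face rate is below the threshold; i is decremented because RemoveAt shifts the remaining entries left.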
                    for (int i = 0; i < landmarkPoints.Count; i++)
                    {
                        if (frontalFaceChecker.GetFrontalFaceRate(landmarkPoints [i]) < frontalFaceRateLowerLimit)
                        {
                            detectResult.RemoveAt(i);
                            landmarkPoints.RemoveAt(i);
                            i--;
                        }
                    }
                }

                // face swapping.
                if (landmarkPoints.Count >= 2)
                {
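                    // Swap faces two at a time (0<->1, 2<->3, ...); an unpaired last face is left unchanged.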
                    int ann = 0, bob = 1;
                    for (int i = 0; i < landmarkPoints.Count - 1; i += 2)
                    {
                        ann = i;
                        bob = i + 1;

                        faceSwapper.SwapFaces(rgbaMat, landmarkPoints [ann], landmarkPoints [bob], 1);
                    }
                }

                // draw face rects.
                if (displayFaceRects)
                {
                    for (int i = 0; i < detectResult.Count; i++)
                    {
                        UnityEngine.Rect rect = new UnityEngine.Rect(detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect, new Scalar(255, 0, 0, 255), 2);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceParam.getAngleOfFrontalFace (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    }
                }

                Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, colors);
            }
        }
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Mat   downScaleRgbaMat = null;
                float DOWNSCALE_RATIO  = 1.0f;
                if (enableDownScale)
                {
                    downScaleRgbaMat = imageOptimizationHelper.GetDownScaleMat(rgbaMat);
                    DOWNSCALE_RATIO  = imageOptimizationHelper.downscaleRatio;
                }
                else
                {
                    downScaleRgbaMat = rgbaMat;
                    DOWNSCALE_RATIO  = 1.0f;
                }
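                // Detection runs on the (optionally) downscaled frame; DOWNSCALE_RATIO is used below to map the
                // resulting rects and landmark points back to the full-resolution rgbaMat.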


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, downScaleRgbaMat);

                // Detect faces on the resized image; when frame skipping is enabled, skipped frames reuse the previous detectionResult.
                if (!enableSkipFrame || !imageOptimizationHelper.IsCurrentFrameSkipped())
                {
                    //detect face rects
                    if (useOpenCVFaceDetector)
                    {
                        // convert image to greyscale.
                        Imgproc.cvtColor(downScaleRgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                        using (Mat equalizeHistMat = new Mat())
                            using (MatOfRect faces = new MatOfRect()) {
                                Imgproc.equalizeHist(grayMat, equalizeHistMat);

                                cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                                List <OpenCVForUnity.Rect> opencvDetectResult = faces.toList();

                                // adjust to Dlib's result.
                                detectionResult.Clear();
                                foreach (var opencvRect in opencvDetectResult)
                                {
                                    detectionResult.Add(new UnityEngine.Rect((float)opencvRect.x, (float)opencvRect.y + (float)(opencvRect.height * 0.1f), (float)opencvRect.width, (float)opencvRect.height));
                                }
                            }
                    }
                    else
                    {
                        detectionResult = faceLandmarkDetector.Detect();
                    }
                }


                foreach (var rect in detectionResult)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                    List <Vector2> originalPoints = new List <Vector2> (points.Count);
                    foreach (var point in points)
                    {
                        originalPoints.Add(new Vector2(point.x * DOWNSCALE_RATIO, point.y * DOWNSCALE_RATIO));
                    }

                    //draw landmark points
                    OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, originalPoints, new Scalar(0, 255, 0, 255), 2);

                    UnityEngine.Rect originalRect = new UnityEngine.Rect(rect.x * DOWNSCALE_RATIO, rect.y * DOWNSCALE_RATIO, rect.width * DOWNSCALE_RATIO, rect.height * DOWNSCALE_RATIO);
                    //draw face rect
                    OpenCVForUnityUtils.DrawFaceRect(rgbaMat, originalRect, new Scalar(255, 0, 0, 255), 2);
                }

                //Imgproc.putText (rgbaMat, "Original:(" + rgbaMat.width () + "," + rgbaMat.height () + ") DownScale:(" + downScaleRgbaMat.width () + "," + downScaleRgbaMat.height () + ") FrameSkipping: " + imageOptimizationHelper.frameSkippingRatio, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.fastMatToTexture2D(rgbaMat, texture);
            }
        }
Example #28
        private void Run()
        {
            if (string.IsNullOrEmpty(object_detector_filepath))
            {
                Debug.LogError("object detecter file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }
            if (string.IsNullOrEmpty(shape_predictor_filepath))
            {
                Debug.LogError("shape predictor file does not exist. Please copy from “DlibFaceLandmarkDetector/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }

            Texture2D dstTexture2D = new Texture2D(texture2D.width, texture2D.height, texture2D.format, false);

            Graphics.CopyTexture(texture2D, dstTexture2D);

            gameObject.transform.localScale = new Vector3(texture2D.width, texture2D.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = gameObject.transform.localScale.x;
            float height = gameObject.transform.localScale.y;

            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;
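            // Fit the textured quad inside the screen: whichever dimension is the tighter fit drives the camera's orthographic size.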

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }

            FaceLandmarkDetector faceLandmarkDetector = new FaceLandmarkDetector(object_detector_filepath, shape_predictor_filepath);
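            // The detector is built from a custom dlib object-detector file plus a shape-predictor file; the fpsMonitor
            // labels below show these are the cat-face models (frontal_cat_face.svm / sp_cat_face_68.dat).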

            faceLandmarkDetector.SetImage(texture2D);

            //detect face rects
            List <Rect> detectResult = faceLandmarkDetector.Detect();

            foreach (var rect in detectResult)
            {
                Debug.Log("face : " + rect);

                //detect landmark points
                List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                Debug.Log("face points count : " + points.Count);
                foreach (var point in points)
                {
                    Debug.Log("face point : x " + point.x + " y " + point.y);
                }

                //draw landmark points
                faceLandmarkDetector.DrawDetectLandmarkResult(dstTexture2D, 0, 255, 0, 255);
            }

            //draw face rects
            faceLandmarkDetector.DrawDetectResult(dstTexture2D, 255, 0, 0, 255, 3);

            faceLandmarkDetector.Dispose();

            gameObject.GetComponent <Renderer> ().material.mainTexture = dstTexture2D;

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("dlib object detector", "frontal_cat_face.svm");
                fpsMonitor.Add("dlib shape predictor", "sp_cat_face_68.dat");
                fpsMonitor.Add("width", width.ToString());
                fpsMonitor.Add("height", height.ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }
        }
        // Update is called once per frame
        void Update()
        {
            // loop play.
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                //Debug.Log ("Mat toString " + rgbMat.ToString ());


                // detect faces.
                List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect> ();
                if (useDlibFaceDetecter)
                {
                    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
                    List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                    foreach (var unityRect in result)
                    {
                        detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                    }
                }
                else
                {
                    // convert image to greyscale.
                    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);

                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect()) {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                            detectResult = faces.toList();

                            // adjust to Dlib's result.
                            foreach (OpenCVForUnity.Rect r in detectResult)
                            {
                                r.y += (int)(r.height * 0.1f);
                            }
                        }
                }


                // face tracking.
                rectangleTracker.UpdateTrackedObjects(detectResult);
                List <TrackedRect> trackedRects = new List <TrackedRect> ();
                rectangleTracker.GetObjects(trackedRects, true);

                // detect face landmark points.
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
                List <List <Vector2> > landmarkPoints = new List <List <Vector2> > ();
                for (int i = 0; i < trackedRects.Count; i++)
                {
                    TrackedRect      tr   = trackedRects [i];
                    UnityEngine.Rect rect = new UnityEngine.Rect(tr.x, tr.y, tr.width, tr.height);

                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
                    landmarkPoints.Add(points);
                }

                // face masking.
                if (faceMaskTexture != null && landmarkPoints.Count >= 1)
                {
                    OpenCVForUnity.Utils.texture2DToMat(faceMaskTexture, faceMaskMat);

                    float imageWidth      = meshOverlay.width;
                    float imageHeight     = meshOverlay.height;
                    float maskImageWidth  = faceMaskTexture.width;
                    float maskImageHeight = faceMaskTexture.height;

                    TrackedRect tr;
                    TrackedMesh tm;
                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        tr = trackedRects [i];

                        if (tr.state == TrackedState.NEW)
                        {
                            meshOverlay.CreateObject(tr.id, faceMaskTexture);
                        }
                        if (tr.state < TrackedState.DELETED)
                        {
                            tm = meshOverlay.GetObjectById(tr.id);
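                            // Map landmark pixel coordinates into the overlay mesh: vertices are normalized to
                            // [-0.5, 0.5] mesh space, while UVs sample the mask texture at its precomputed landmark positions.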

                            Vector3[] vertices = tm.meshFilter.mesh.vertices;
                            if (vertices.Length == landmarkPoints [i].Count)
                            {
                                for (int j = 0; j < vertices.Length; j++)
                                {
                                    vertices [j].x = landmarkPoints [i] [j].x / imageWidth - 0.5f;
                                    vertices [j].y = 0.5f - landmarkPoints [i] [j].y / imageHeight;
                                }
                            }
                            Vector2[] uv = tm.meshFilter.mesh.uv;
                            if (uv.Length == faceLandmarkPointsInMask.Count)
                            {
                                for (int jj = 0; jj < uv.Length; jj++)
                                {
                                    uv [jj].x = faceLandmarkPointsInMask [jj].x / maskImageWidth;
                                    uv [jj].y = (maskImageHeight - faceLandmarkPointsInMask [jj].y) / maskImageHeight;
                                }
                            }
                            meshOverlay.UpdateObject(tr.id, vertices, null, uv);
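                            // _Fade ramps from 0.3 (mask mostly visible) toward 1 while the face goes undetected; after more
                            // than 3 missed frames it is set to 1, which the overlay shader presumably treats as fully faded out.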

                            if (tr.numFramesNotDetected > 3)
                            {
                                tm.material.SetFloat(shader_FadeID, 1f);
                            }
                            else if (tr.numFramesNotDetected > 0 && tr.numFramesNotDetected <= 3)
                            {
                                tm.material.SetFloat(shader_FadeID, 0.3f + (0.7f / 4f) * tr.numFramesNotDetected);
                            }
                            else
                            {
                                tm.material.SetFloat(shader_FadeID, 0.3f);
                            }

                            // filter non frontal faces.
                            if (filterNonFrontalFaces && frontalFaceChecker.GetFrontalFaceRate(landmarkPoints [i]) < frontalFaceRateLowerLimit)
                            {
                                tm.material.SetFloat(shader_FadeID, 1f);
                            }
                        }
                        else if (tr.state == TrackedState.DELETED)
                        {
                            meshOverlay.DeleteObject(tr.id);
                        }
                    }
                }
                else if (landmarkPoints.Count >= 1)
                {
                    float imageWidth      = meshOverlay.width;
                    float imageHeight     = meshOverlay.height;
                    float maskImageWidth  = texture.width;
                    float maskImageHeight = texture.height;

                    TrackedRect tr;
                    TrackedMesh tm;
                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        tr = trackedRects [i];

                        if (tr.state == TrackedState.NEW)
                        {
                            meshOverlay.CreateObject(tr.id, texture);
                        }
                        if (tr.state < TrackedState.DELETED)
                        {
                            tm = meshOverlay.GetObjectById(tr.id);
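                            // Fallback when no mask texture is set: the video frame texture itself is used as the mask,
                            // with UVs taken from the first detected face's landmarks (landmarkPoints[0]).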

                            Vector3[] vertices = tm.meshFilter.mesh.vertices;
                            if (vertices.Length == landmarkPoints [i].Count)
                            {
                                for (int j = 0; j < vertices.Length; j++)
                                {
                                    vertices [j].x = landmarkPoints[i][j].x / imageWidth - 0.5f;
                                    vertices [j].y = 0.5f - landmarkPoints[i][j].y / imageHeight;
                                }
                            }
                            Vector2[] uv = tm.meshFilter.mesh.uv;
                            if (uv.Length == landmarkPoints [0].Count)
                            {
                                for (int jj = 0; jj < uv.Length; jj++)
                                {
                                    uv [jj].x = landmarkPoints[0][jj].x / maskImageWidth;
                                    uv [jj].y = (maskImageHeight - landmarkPoints[0][jj].y) / maskImageHeight;
                                }
                            }
                            meshOverlay.UpdateObject(tr.id, vertices, null, uv);
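                            // Same fade behaviour as in the masked branch: ramp toward fully hidden while the face is not detected.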

                            if (tr.numFramesNotDetected > 3)
                            {
                                tm.material.SetFloat(shader_FadeID, 1f);
                            }
                            else if (tr.numFramesNotDetected > 0 && tr.numFramesNotDetected <= 3)
                            {
                                tm.material.SetFloat(shader_FadeID, 0.3f + (0.7f / 4f) * tr.numFramesNotDetected);
                            }
                            else
                            {
                                tm.material.SetFloat(shader_FadeID, 0.3f);
                            }

                            // filter non frontal faces.
                            if (filterNonFrontalFaces && frontalFaceChecker.GetFrontalFaceRate(landmarkPoints [i]) < frontalFaceRateLowerLimit)
                            {
                                tm.material.SetFloat(shader_FadeID, 1f);
                            }
                        }
                        else if (tr.state == TrackedState.DELETED)
                        {
                            meshOverlay.DeleteObject(tr.id);
                        }
                    }
                }

                // draw face rects.
                if (displayFaceRects)
                {
                    for (int i = 0; i < detectResult.Count; i++)
                    {
                        UnityEngine.Rect rect = new UnityEngine.Rect(detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 0, 0, 255), 2);
                    }

                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        UnityEngine.Rect rect = new UnityEngine.Rect(trackedRects [i].x, trackedRects [i].y, trackedRects [i].width, trackedRects [i].height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 255, 0, 255), 2);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceChecker.GetFrontalFaceAngles (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceChecker.GetFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    }
                }

                // draw face points.
                if (displayDebugFacePoints)
                {
                    for (int i = 0; i < landmarkPoints.Count; i++)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, landmarkPoints [i], new Scalar(0, 255, 0, 255), 2);
                    }
                }


                // display face mask image.
                if (faceMaskTexture != null && faceMaskMat != null)
                {
                    if (displayFaceRects)
                    {
                        OpenCVForUnityUtils.DrawFaceRect(faceMaskMat, faceRectInMask, new Scalar(255, 0, 0, 255), 2);
                    }
                    if (displayDebugFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(faceMaskMat, faceLandmarkPointsInMask, new Scalar(0, 255, 0, 255), 2);
                    }

                    float scale = (rgbMat.width() / 4f) / faceMaskMat.width();
                    float tx    = rgbMat.width() - faceMaskMat.width() * scale;
                    float ty    = 0.0f;
                    Mat   trans = new Mat(2, 3, CvType.CV_32F);//1.0, 0.0, tx, 0.0, 1.0, ty);
                    trans.put(0, 0, scale);
                    trans.put(0, 1, 0.0f);
                    trans.put(0, 2, tx);
                    trans.put(1, 0, 0.0f);
                    trans.put(1, 1, scale);
                    trans.put(1, 2, ty);
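                    // Scale the mask image to a quarter of the frame width and translate it into the top-right corner of rgbMat as a preview overlay.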

                    Imgproc.warpAffine(faceMaskMat, rgbMat, trans, rgbMat.size(), Imgproc.INTER_LINEAR, Core.BORDER_TRANSPARENT, new Scalar(0));
                }

                Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbMat, texture);
            }
        }
Example #30
        private void Run()
        {
            meshOverlay = this.GetComponent <TrackedMeshOverlay> ();

            displayFaceRectsToggle.isOn       = displayFaceRects;
            useDlibFaceDetecterToggle.isOn    = useDlibFaceDetecter;
            enableColorCorrectionToggle.isOn  = enableColorCorrection;
            filterNonFrontalFacesToggle.isOn  = filterNonFrontalFaces;
            displayDebugFacePointsToggle.isOn = displayDebugFacePoints;

            if (imgTexture == null)
            {
                imgTexture = Resources.Load("family") as Texture2D;
            }

            gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            meshOverlay.UpdateOverlayTransform(gameObject.transform);
            meshOverlay.Reset();


            float width  = gameObject.transform.localScale.x;
            float height = gameObject.transform.localScale.y;

            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;
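            // Fit the image quad inside the screen by picking the orthographic size from the tighter-fitting dimension.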

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }

            Mat rgbaMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);

            OpenCVForUnity.Utils.texture2DToMat(imgTexture, rgbaMat);
            Debug.Log("rgbaMat ToString " + rgbaMat.ToString());

            if (faceLandmarkDetector == null)
            {
                faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);
            }

            faceMaskColorCorrector = faceMaskColorCorrector ?? new FaceMaskColorCorrector();
            FrontalFaceChecker frontalFaceChecker = new FrontalFaceChecker(width, height);

            // detect faces.
            List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect> ();

            if (useDlibFaceDetecter)
            {
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
                List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                foreach (var unityRect in result)
                {
                    detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                }
            }
            else
            {
                if (cascade == null)
                {
                    cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
                }
//                if (cascade.empty ()) {
//                    Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
//                }

                // convert image to greyscale.
                Mat gray = new Mat();
                Imgproc.cvtColor(rgbaMat, gray, Imgproc.COLOR_RGBA2GRAY);

                MatOfRect faces = new MatOfRect();
                Imgproc.equalizeHist(gray, gray);
                cascade.detectMultiScale(gray, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(gray.cols() * 0.05, gray.cols() * 0.05), new Size());
                //Debug.Log ("faces " + faces.dump ());

                detectResult = faces.toList();

                // adjust to Dlib's result.
                foreach (OpenCVForUnity.Rect r in detectResult)
                {
                    r.y += (int)(r.height * 0.1f);
                }

                gray.Dispose();
            }

            // detect face landmark points.
            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
            List <List <Vector2> > landmarkPoints = new List <List <Vector2> > ();

            foreach (var openCVRect in detectResult)
            {
                UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);

                Debug.Log("face : " + rect);
                //OpenCVForUnityUtils.DrawFaceRect(imgMat, rect, new Scalar(255, 0, 0, 255), 2);

                List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
                //OpenCVForUnityUtils.DrawFaceLandmark(imgMat, points, new Scalar(0, 255, 0, 255), 2);
                landmarkPoints.Add(points);
            }

            // mask faces.
            int[] face_nums = new int[landmarkPoints.Count];
            for (int i = 0; i < face_nums.Length; i++)
            {
                face_nums [i] = i;
            }
            face_nums = face_nums.OrderBy(i => System.Guid.NewGuid()).ToArray();
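            // Randomly permute the face indices; the first index in the shuffled order becomes the source face
            // whose texture region is pasted onto every detected face.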

            float imageWidth      = meshOverlay.width;
            float imageHeight     = meshOverlay.height;
            float maskImageWidth  = imgTexture.width;
            float maskImageHeight = imgTexture.height;

            TrackedMesh tm;

            for (int i = 0; i < face_nums.Length; i++)
            {
                meshOverlay.CreateObject(i, imgTexture);
                tm = meshOverlay.GetObjectById(i);
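                // Vertices follow the target face (face_nums[i]); UVs always sample the source face (face_nums[0]),
                // so every mesh is textured with the same source face region.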

                Vector3[] vertices = tm.meshFilter.mesh.vertices;
                if (vertices.Length == landmarkPoints [face_nums [i]].Count)
                {
                    for (int j = 0; j < vertices.Length; j++)
                    {
                        vertices [j].x = landmarkPoints [face_nums [i]] [j].x / imageWidth - 0.5f;
                        vertices [j].y = 0.5f - landmarkPoints [face_nums [i]] [j].y / imageHeight;
                    }
                }
                Vector2[] uv = tm.meshFilter.mesh.uv;
                if (uv.Length == landmarkPoints [face_nums [0]].Count)
                {
                    for (int jj = 0; jj < uv.Length; jj++)
                    {
                        uv [jj].x = landmarkPoints [face_nums [0]] [jj].x / maskImageWidth;
                        uv [jj].y = (maskImageHeight - landmarkPoints [face_nums [0]] [jj].y) / maskImageHeight;
                    }
                }
                meshOverlay.UpdateObject(i, vertices, null, uv);
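                // Build a per-face LUT that shifts the source face's colours toward the target face before the mask is composited.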

                if (enableColorCorrection)
                {
                    faceMaskColorCorrector.CreateLUTTex(i);
                    Texture2D LUTTex = faceMaskColorCorrector.UpdateLUTTex(i, rgbaMat, rgbaMat, landmarkPoints [face_nums [0]], landmarkPoints [face_nums [i]]);
                    tm.sharedMaterial.SetTexture("_LUTTex", LUTTex);
                    tm.sharedMaterial.SetFloat("_ColorCorrection", 1f);
                }
                else
                {
                    tm.sharedMaterial.SetFloat("_ColorCorrection", 0f);
                }

                // filter non frontal faces (check the target face the mask is drawn over).
                if (filterNonFrontalFaces && frontalFaceChecker.GetFrontalFaceRate(landmarkPoints [face_nums [i]]) < frontalFaceRateLowerLimit)
                {
                    tm.sharedMaterial.SetFloat("_Fade", 1f);
                }
                else
                {
                    tm.sharedMaterial.SetFloat("_Fade", 0.3f);
                }
            }

            // draw face rects.
            if (displayFaceRects)
            {
                int ann = face_nums[0];
                UnityEngine.Rect rect_ann = new UnityEngine.Rect(detectResult [ann].x, detectResult [ann].y, detectResult [ann].width, detectResult [ann].height);
                OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect_ann, new Scalar(255, 255, 0, 255), 2);

                int bob = 0;
                for (int i = 1; i < face_nums.Length; i++)
                {
                    bob = face_nums [i];
                    UnityEngine.Rect rect_bob = new UnityEngine.Rect(detectResult [bob].x, detectResult [bob].y, detectResult [bob].width, detectResult [bob].height);
                    OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect_bob, new Scalar(255, 0, 0, 255), 2);
                }
            }

            // draw face points.
            if (displayDebugFacePoints)
            {
                for (int i = 0; i < landmarkPoints.Count; i++)
                {
                    OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, landmarkPoints [i], new Scalar(0, 255, 0, 255), 2);
                }
            }


            Texture2D texture = new Texture2D(rgbaMat.cols(), rgbaMat.rows(), TextureFormat.RGBA32, false);

            OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture);
            gameObject.transform.GetComponent <Renderer> ().material.mainTexture = texture;

            frontalFaceChecker.Dispose();
            rgbaMat.Dispose();
        }