Example #1
        public List <Point[]> convertMatOfRectToPoints(MatOfRect rects)
        {
            List <OpenCVForUnity.CoreModule.Rect> R = rects.toList();

            List <Point[]> points = new List <Point[]> (R.Count);

            int n = reference.rows() / 2;  // the reference shape stores x/y in alternating rows, so it holds rows()/2 points

            float[] reference_float = new float[reference.total()];
            Utils.copyFromMat <float> (reference, reference_float);

            foreach (var r in R)
            {
                Vector3 scale = detector_offset * r.width;
                Point[] p     = new Point[n];
                for (int i = 0; i < n; i++)
                {
                    p [i]   = new Point();
                    p [i].x = scale.z * reference_float [2 * i] + r.x + 0.5 * r.width + scale.x;
                    p [i].y = scale.z * reference_float [(2 * i) + 1] + r.y + 0.5 * r.height + scale.y;
                }

                points.Add(p);
            }

            return points;
        }
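
A hypothetical call site for the converter above, as a hedged sketch: 'cascade' and 'grayMat' are assumed fields of the host class, and the detections would normally come from CascadeClassifier.detectMultiScale.

            MatOfRect faces = new MatOfRect();
            cascade.detectMultiScale(grayMat, faces);
            // one Point[] (the scaled reference shape) per detected rectangle.
            List <Point[]> initialShapes = convertMatOfRectToPoints(faces);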
Example #2
        private UnityEngine.Rect DetectFace(Mat mat)
        {
            if (useDlibFaceDetecter)
            {
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, mat);
                List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();
                if (result.Count >= 1)
                {
                    return result [0];
                }
            }
            else
            {
                using (Mat grayMat = new Mat())
                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect()) {
                            // convert image to greyscale.
                            Imgproc.cvtColor(mat, grayMat, Imgproc.COLOR_RGB2GRAY);
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                            List <OpenCVForUnity.Rect> faceList = faces.toList();
                            if (faceList.Count >= 1)
                            {
                                UnityEngine.Rect r = new UnityEngine.Rect(faceList [0].x, faceList [0].y, faceList [0].width, faceList [0].height);
                                // adjust to Dlib's result.
                                r.y += (int)(r.height * 0.1f);
                                return r;
                            }
                        }
            }
            return new UnityEngine.Rect();  // an empty rect signals that no face was found.
        }
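
A hedged usage sketch for DetectFace above; 'mat' stands for the current camera frame and is an assumption for illustration. A default (zero-sized) UnityEngine.Rect means no face was found.

            UnityEngine.Rect faceRect = DetectFace(mat);
            if (faceRect.width > 0 && faceRect.height > 0)
            {
                // a face was found; e.g. pass faceRect to faceLandmarkDetector.DetectLandmark(faceRect).
            }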
Example #3
    void Update()
    {
        if (rawImage)
        {
            if (startCVCam && cvCameraMat && cvCameraMat.isPlaying() && cvCameraMat.didUpdateThisFrame() && texture != null)
            {
                Mat cvCamMat = cvCameraMat.GetMat();
                if (cvCamMat != null && !cvCamMat.empty() && !faceCascade.empty())
                {
                    Mat grayMat = new Mat();
                    Imgproc.cvtColor(cvCamMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                    Mat equalizeHistMat = new Mat();
                    Imgproc.equalizeHist(grayMat, equalizeHistMat);

                    MatOfRect faces = new MatOfRect();
                    faceCascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.13, equalizeHistMat.cols() * 0.13), new Size());

                    if (faces.rows() > 0)
                    {
                        List <OpenCVForUnity.Rect> rectsList = faces.toList();
                        for (int i = 0; i < rectsList.Count; i++)
                        {
                            OpenCVForUnity.Rect faceRect = rectsList[i];
                            x = faceRect.x;
                            y = faceRect.y;

                            if (i > 0)
                            {
                                // top-left is (0, 0), bottom-right is (100, 100)
                                OpenCVForUnity.Rect beforeFaceRect = rectsList [i - 1];
                                formerX = beforeFaceRect.x;
                                formerY = beforeFaceRect.y;
                                dx      = x - formerX;
                                dy      = y - formerY;
                                _gameManager.setDxDy(dx, dy);
                                Debug.Log(x + ":" + y);
                            }
                        }
                    }
                }

                if (cvCamMat != null && !cvCamMat.empty())
                {
                    try {
                        cvCameraMat.matToTexture2D(cvCamMat, texture);
                    } catch (System.ArgumentException e) {
                        Debug.Log(e.Message);
                    } catch (System.Exception e) {
                        Debug.Log("Other error: " + e.Message);
                    }
                    cvCamMat = null;
                }
            }
        }
        else
        {
            Debug.LogError("NotFound:rawImage");
        }
    }
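
The Update() above reads and writes several fields that the snippet does not declare. A minimal sketch of what it assumes (names are taken from the code; the GameManager type is a guess):

    int x, y;                  // current face-rect origin
    int formerX, formerY;      // origin of the previous rect in the list
    int dx, dy;                // deltas handed to the game manager
    GameManager _gameManager;  // hypothetical component exposing setDxDy(int, int)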
Example #4
        private void DetectObject(Mat img, out List <Rect> detectedObjects, CascadeClassifier cascade, bool correctToDlibResult = false)
        {
            int d = Mathf.Min(img.width(), img.height());

            d = (int)Mathf.Round(d * minDetectionSizeRatio);

            MatOfRect objects = new MatOfRect();

            if (cascade != null)
            {
                cascade.detectMultiScale(img, objects, 1.1, 2, Objdetect.CASCADE_SCALE_IMAGE, new Size(d, d), new Size());
            }

            detectedObjects = objects.toList();

            if (correctToDlibResult)
            {
                int len = detectedObjects.Count;
                for (int i = 0; i < len; i++)
                {
                    Rect r = detectedObjects[i];
                    // correct for the deviation between the face rectangles detected by OpenCV and by Dlib.
                    r.x     += (int)(r.width * 0.05f);
                    r.y     += (int)(r.height * 0.1f);
                    r.width  = (int)(r.width * 0.9f);
                    r.height = (int)(r.height * 0.9f);
                }
            }
        }
Example #5
        private void DetectObject(Mat img, out List <Rect> detectedObjects, CascadeClassifier cascade)
        {
            int d = Mathf.Min(img.width(), img.height());

            d = (int)Mathf.Round(d * minDetectionSizeRatio);

            MatOfRect objects = new MatOfRect();

            if (cascade != null)
            {
                cascade.detectMultiScale(img, objects, 1.1, 2, Objdetect.CASCADE_SCALE_IMAGE, new Size(d, d), new Size());
            }

            detectedObjects = objects.toList();
        }
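
A hypothetical call site for the two overloads above; 'grayMat' and 'cascade' are assumed fields of the host class:

            List <Rect> faces;
            DetectObject(grayMat, out faces, cascade, correctToDlibResult: true);
            // or, without the Dlib correction:
            DetectObject(grayMat, out faces, cascade);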
Example #6
	public List<Point[]> convertMatOfRectToPoints (MatOfRect rects)
	{
		List<OpenCVForUnity.Rect> R = rects.toList ();
		
		List<Point[]> points = new List<Point[]> (R.Count);
		
		foreach (var r in R) {
			
			Vector3 scale = detector_offset * r.width;
			int n = reference.rows () / 2;
			Point[] p = new Point[n];
			for (int i = 0; i < n; i++) {
				p [i] = new Point ();
				p [i].x = scale.z * reference.get (2 * i, 0) [0] + r.x + 0.5 * r.width + scale.x;
				p [i].y = scale.z * reference.get (2 * i + 1, 0) [0] + r.y + 0.5 * r.height + scale.y;
			}
			
			points.Add (p);
		}

		return points;
	}
Example #7
    public List <Point[]> convertMatOfRectToPoints(MatOfRect rects)
    {
        List <OpenCVForUnity.Rect> R = rects.toList();

        List <Point[]> points = new List <Point[]> (R.Count);

        foreach (var r in R)
        {
            Vector3 scale = detector_offset * r.width;
            int     n     = reference.rows() / 2;
            Point[] p     = new Point[n];
            for (int i = 0; i < n; i++)
            {
                p [i]   = new Point();
                p [i].x = scale.z * reference.get(2 * i, 0) [0] + r.x + 0.5 * r.width + scale.x;
                p [i].y = scale.z * reference.get(2 * i + 1, 0) [0] + r.y + 0.5 * r.height + scale.y;
            }

            points.Add(p);
        }

        return points;
    }
Example #8
        private void DetectFaces(Mat rgbaMat, out List <Rect> detectResult, bool useDlibFaceDetecter)
        {
            detectResult = new List <Rect>();

            if (useDlibFaceDetecter)
            {
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
                List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                foreach (var unityRect in result)
                {
                    detectResult.Add(new Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                }
            }
            else
            {
                // convert image to greyscale.
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                using (Mat equalizeHistMat = new Mat())
                    using (MatOfRect faces = new MatOfRect())
                    {
                        Imgproc.equalizeHist(grayMat, equalizeHistMat);

                        cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                        detectResult = faces.toList();

                        // correct for the deviation between the face rectangles detected by OpenCV and by Dlib.
                        foreach (Rect r in detectResult)
                        {
                            r.y += (int)(r.height * 0.1f);
                        }
                    }
            }
        }
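
A hedged usage sketch for DetectFaces above; 'rgbaMat' and the drawing step are assumptions for illustration:

            List <Rect> faces;
            DetectFaces(rgbaMat, out faces, useDlibFaceDetecter);
            foreach (var r in faces)
                Imgproc.rectangle(rgbaMat, r.tl(), r.br(), new Scalar(255, 0, 0, 255), 2);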
Example #9
        // Update is called once per frame
        void Update()
        {
            if (sourceToMatHelper.IsPlaying() && sourceToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbMat = sourceToMatHelper.GetMat();

                // detect faces.
                List <Rect> detectResult = new List <Rect>();
                if (useDlibFaceDetecter)
                {
                    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
                    List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                    foreach (var unityRect in result)
                    {
                        detectResult.Add(new Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                    }
                }
                else
                {
                    // convert image to greyscale.
                    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);

                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect())
                        {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                            detectResult = faces.toList();

                            // correct for the deviation between the face rectangles detected by OpenCV and by Dlib.
                            foreach (Rect r in detectResult)
                            {
                                r.y += (int)(r.height * 0.1f);
                            }
                        }
                }

                // face tracking.
                List <TrackedRect> trackedRects = new List <TrackedRect>();
                rectangleTracker.UpdateTrackedObjects(detectResult);
                rectangleTracker.GetObjects(trackedRects, true);

                // create noise filter.
                foreach (var openCVRect in trackedRects)
                {
                    if (openCVRect.state == TrackedState.NEW)
                    {
                        if (!lowPassFilterDict.ContainsKey(openCVRect.id))
                        {
                            lowPassFilterDict.Add(openCVRect.id, new LowPassPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
                        }
                        if (!opticalFlowFilterDict.ContainsKey(openCVRect.id))
                        {
                            opticalFlowFilterDict.Add(openCVRect.id, new OFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
                        }
                    }
                    else if (openCVRect.state == TrackedState.DELETED)
                    {
                        if (lowPassFilterDict.ContainsKey(openCVRect.id))
                        {
                            lowPassFilterDict[openCVRect.id].Dispose();
                            lowPassFilterDict.Remove(openCVRect.id);
                        }
                        if (opticalFlowFilterDict.ContainsKey(openCVRect.id))
                        {
                            opticalFlowFilterDict[openCVRect.id].Dispose();
                            opticalFlowFilterDict.Remove(openCVRect.id);
                        }
                    }
                }

                // detect face landmark points.
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
                List <List <Vector2> > landmarkPoints = new List <List <Vector2> >();
                foreach (var openCVRect in trackedRects)
                {
                    if (openCVRect.state > TrackedState.NEW_DISPLAYED && openCVRect.state < TrackedState.NEW_HIDED)
                    {
                        UnityEngine.Rect rect   = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
                        List <Vector2>   points = faceLandmarkDetector.DetectLandmark(rect);

                        // apply noise filter.
                        if (enableNoiseFilter)
                        {
                            opticalFlowFilterDict[openCVRect.id].Process(rgbMat, points, points);
                            lowPassFilterDict[openCVRect.id].Process(rgbMat, points, points);
                        }

                        landmarkPoints.Add(points);
                    }
                }

                // filter non frontal faces.
                if (filterNonFrontalFaces)
                {
                    for (int i = 0; i < landmarkPoints.Count; i++)
                    {
                        if (frontalFaceChecker.GetFrontalFaceRate(landmarkPoints[i]) < frontalFaceRateLowerLimit)
                        {
                            trackedRects.RemoveAt(i);
                            landmarkPoints.RemoveAt(i);
                            i--;
                        }
                    }
                }

                // face swapping.
                if (landmarkPoints.Count >= 2)
                {
                    int ann = 0, bob = 1;
                    for (int i = 0; i < landmarkPoints.Count - 1; i += 2)
                    {
                        ann = i;
                        bob = i + 1;

                        faceSwapper.SwapFaces(rgbMat, landmarkPoints[ann], landmarkPoints[bob], 1);
                    }
                }

                // draw face rects.
                if (displayFaceRects)
                {
                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        Rect             openCVRect = trackedRects[i];
                        UnityEngine.Rect rect       = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 0, 0, 255), 2);
                        //Imgproc.putText (rgbMat, " " + frontalFaceChecker.GetFrontalFaceAngles (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                        //Imgproc.putText (rgbMat, " " + frontalFaceChecker.GetFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    }
                }

                //Imgproc.putText (rgbMat, "W:" + rgbMat.width () + " H:" + rgbMat.height () + " SO:" + Screen.orientation, new Point (5, rgbMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(rgbMat, texture);
            }
        }
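
The Update() above relies on fields the snippet does not show. A minimal sketch of the declarations it assumes (types follow the OpenCVForUnity sample classes they are used with; the initializations are guesses):

        Dictionary<int, LowPassPointsFilter> lowPassFilterDict = new Dictionary<int, LowPassPointsFilter>();
        Dictionary<int, OFPointsFilter> opticalFlowFilterDict = new Dictionary<int, OFPointsFilter>();
        RectangleTracker rectangleTracker = new RectangleTracker();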
Example #10
        private void Run()
        {
            // if true, the error log from the native side of OpenCV is displayed in the Unity Editor console.
            Utils.setDebugMode(true);


            Mat img = Imgcodecs.imread(scenetext01_jpg_filepath);

            #if !UNITY_WSA_10_0
            if (img.empty())
            {
                Debug.LogError("text/scenetext01.jpg is not loaded.Please copy from “OpenCVForUnity/StreamingAssets/text/” to “Assets/StreamingAssets/” folder. ");
            }
            #endif

            //# for visualization
            Mat vis = new Mat();
            img.copyTo(vis);
            Imgproc.cvtColor(vis, vis, Imgproc.COLOR_BGR2RGB);


            //# Extract channels to be processed individually
            List <Mat> channels = new List <Mat> ();
            Text.computeNMChannels(img, channels);

            //# Append negative channels to detect ER- (bright regions over dark background)
            int cn = channels.Count;
            for (int i = 0; i < cn; i++)
            {
                channels.Add(new Scalar(255) - channels [i]);
            }

            //# Apply the default cascade classifier to each independent channel (could be done in parallel)
            Debug.Log("Extracting Class Specific Extremal Regions from " + channels.Count + " channels ...");
            Debug.Log("    (...) this may take a while (...)");
            foreach (var channel in channels)
            {
                ERFilter er1 = Text.createERFilterNM1(trained_classifierNM1_xml_filepath, 16, 0.00015f, 0.13f, 0.2f, true, 0.1f);

                ERFilter er2 = Text.createERFilterNM2(trained_classifierNM2_xml_filepath, 0.5f);

                List <MatOfPoint> regions = new List <MatOfPoint> ();
                Text.detectRegions(channel, er1, er2, regions);

                MatOfRect matOfRects = new MatOfRect();
                Text.erGrouping(img, channel, regions, matOfRects);
//                Text.erGrouping (img, channel, regions, matOfRects, Text.ERGROUPING_ORIENTATION_ANY, Utils.getFilePath ("text/trained_classifier_erGrouping.xml"), 0.5f);

                List <OpenCVForUnity.Rect> rects = matOfRects.toList();

                //#Visualization
                foreach (var rect in rects)
                {
                    Imgproc.rectangle(vis, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(255, 0, 0), 2);
                    Imgproc.rectangle(vis, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(255, 255, 255), 1);
                }
            }

            Texture2D texture = new Texture2D(vis.cols(), vis.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(vis, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;


            Utils.setDebugMode(false);
        }
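
A hedged cleanup sketch that could be appended at the end of Run() above: the method never releases its native Mats (or the per-channel ERFilter instances), which leaks unmanaged memory if it runs more than once. Assuming the standard Dispose() that OpenCVForUnity exposes on its wrapper types:

            foreach (var channel in channels)
                channel.Dispose();  // includes the appended negative channels
            img.Dispose();
            vis.Dispose();
            // er1, er2, regions and matOfRects created inside the loop should be disposed the same way.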
Example #11
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetDownScaleMat(webCamTextureToMatHelper.GetMat());


                Mat rgbaMatClipROI = new Mat(rgbaMat, processingAreaRect);

                rgbaMatClipROI.copyTo(processingAreaMat);


                // fill all black.
                Imgproc.rectangle(rgbaMat, new Point(0, 0), new Point(rgbaMat.width(), rgbaMat.height()), new Scalar(0, 0, 0, 0), -1);


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, processingAreaMat);

                // detect faces.
                List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect> ();
                if (isUsingDlibFaceDetecter)
                {
                    List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                    foreach (var unityRect in result)
                    {
                        detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                    }
                }
                else
                {
                    // convert image to greyscale.
                    Imgproc.cvtColor(processingAreaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);


                    Imgproc.equalizeHist(grayMat, grayMat);

                    cascade.detectMultiScale(grayMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(grayMat.cols() * 0.15, grayMat.cols() * 0.15), new Size());

                    detectResult = faces.toList();


                    // Adjust to Dlib's result.
                    foreach (OpenCVForUnity.Rect r in detectResult)
                    {
                        r.y += (int)(r.height * 0.1f);
                    }
                }


                foreach (var rect in detectResult)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

                    //draw landmark points
                    OpenCVForUnityUtils.DrawFaceLandmark(rgbaMatClipROI, points, new Scalar(0, 255, 0, 255), 2);

                    //draw face rect
                    OpenCVForUnityUtils.DrawFaceRect(rgbaMatClipROI, new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height), new Scalar(255, 0, 0, 255), 2);
                }

                Imgproc.putText(rgbaMatClipROI, "W:" + rgbaMatClipROI.width() + " H:" + rgbaMatClipROI.height() + " SO:" + Screen.orientation, new Point(5, rgbaMatClipROI.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 0, 0, 255), 1, Imgproc.LINE_AA, false);


                Imgproc.rectangle(rgbaMat, new Point(0, 0), new Point(rgbaMat.width(), rgbaMat.height()), new Scalar(255, 0, 0, 255), 2);

                // Draw the processing area rectangle.
                Imgproc.rectangle(rgbaMat, processingAreaRect.tl(), processingAreaRect.br(), new Scalar(255, 255, 0, 255), 2);

                OpenCVForUnity.Utils.fastMatToTexture2D(rgbaMat, texture);

                rgbaMatClipROI.Dispose();
            }

            if (webCamTextureToMatHelper.IsPlaying())
            {
                Matrix4x4 cameraToWorldMatrix = Camera.main.cameraToWorldMatrix;
                Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

                texture.wrapMode = TextureWrapMode.Clamp;

                quad_renderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);

                // Position the canvas object slightly in front
                // of the real world web camera.
                Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2);

                // Rotate the canvas object so that it faces the user.
                Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

                gameObject.transform.position = position;
                gameObject.transform.rotation = rotation;
            }
        }
Example #12
        // Update is called once per frame
        void Update()
        {
            lock (sync) {
                while (ExecuteOnMainThread.Count > 0)
                {
                    ExecuteOnMainThread.Dequeue().Invoke();
                }
            }

            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetDownScaleMat(webCamTextureToMatHelper.GetMat());

                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
                Imgproc.equalizeHist(grayMat, grayMat);

                if (enable && !detecting)
                {
                    detecting = true;

                    grayMat.copyTo(grayMat4Thread);

                    StartThread(ThreadWorker);
                }

                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, grayMat);

                Rect[] rects;
                if (!isUsingSeparateDetection)
                {
                    if (didUpdateTheDetectionResult)
                    {
                        didUpdateTheDetectionResult = false;

                        rectangleTracker.UpdateTrackedObjects(detectionResult.toList());
                    }

                    rectangleTracker.GetObjects(resultObjects, true);

                    rects = rectangleTracker.CreateCorrectionBySpeedOfRects();

                    if (rects.Length > 0)
                    {
                        OpenCVForUnity.Rect rect = rects [0];

                        // Adjust to Dlib's result.
                        rect.y += (int)(rect.height * 0.1f);

                        //detect landmark points
                        List <Vector2> points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

                        UpdateARHeadTransform(points);
                    }
                }
                else
                {
                    if (didUpdateTheDetectionResult)
                    {
                        didUpdateTheDetectionResult = false;

                        //Debug.Log("process: get rectsWhereRegions were got from detectionResult");
                        rectsWhereRegions = detectionResult.toArray();
                    }
                    else
                    {
                        //Debug.Log("process: get rectsWhereRegions from previous positions");
                        rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects();
                    }

                    detectedObjectsInRegions.Clear();
                    if (rectsWhereRegions.Length > 0)
                    {
                        int len = rectsWhereRegions.Length;
                        for (int i = 0; i < len; i++)
                        {
                            detectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                        }
                    }

                    rectangleTracker.UpdateTrackedObjects(detectedObjectsInRegions);
                    rectangleTracker.GetObjects(resultObjects, true);

                    if (resultObjects.Count > 0)
                    {
                        OpenCVForUnity.Rect rect = resultObjects [0];

                        // Adjust to Dlib's result.
                        rect.y += (int)(rect.height * 0.1f);

                        //detect landmark points
                        List <Vector2> points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

                        UpdateARHeadTransform(points);
                    }
                }
            }
        }
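
The snippet above calls a detectInRegion helper that is not shown on this page. A hedged sketch of the usual pattern: expand the tracked rect slightly, clamp it to the image, run the cascade on that ROI only, then shift the hits back to full-image coordinates. 'cascade', the names, and the 20% margin are illustrative assumptions, not from the source.

        private void detectInRegion(Mat img, Rect r, List <Rect> results)
        {
            int marginW = (int)(r.width * 0.2f), marginH = (int)(r.height * 0.2f);
            int x = Mathf.Max(r.x - marginW / 2, 0);
            int y = Mathf.Max(r.y - marginH / 2, 0);
            int w = Mathf.Min(r.width + marginW, img.cols() - x);
            int h = Mathf.Min(r.height + marginH, img.rows() - y);

            using (Mat roi = new Mat(img, new Rect(x, y, w, h)))
            using (MatOfRect hits = new MatOfRect())
            {
                cascade.detectMultiScale(roi, hits, 1.1, 2, Objdetect.CASCADE_SCALE_IMAGE, new Size(r.width / 2, r.height / 2), new Size());
                foreach (Rect hit in hits.toList())
                {
                    hit.x += x;  // shift back to full-image coordinates
                    hit.y += y;
                    results.Add(hit);
                }
            }
        }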
Example #13
        /// <summary>
        /// Raises the webcam texture to mat helper error occurred event.
        /// </summary>
        /// <param name="errorCode">Error code.</param>
        //        public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
        //        {
        //            Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
        //        }

        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                //convert image to greyscale
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);


                if (isAutoResetMode || faceTracker.getPoints().Count <= 0)
                {
                    //                                      Debug.Log ("detectFace");

                    //convert image to greyscale
                    using (var equalizeHistMat = new Mat())
                        using (var faces = new MatOfRect())
                        {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat,
                                                     faces,
                                                     1.1f,
                                                     2,
                                                     0 | Objdetect.CASCADE_SCALE_IMAGE,
                                                     new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15,
                                                                             equalizeHistMat.cols() * 0.15),
                                                     new Size()
                                                     );

                            if (faces.rows() > 0)
                            {
                                //                          Debug.Log ("faces " + faces.dump ());

                                List <OpenCVForUnity.Rect> rectsList  = faces.toList();
                                List <Point[]>             pointsList = faceTracker.getPoints();

                                if (isAutoResetMode)
                                {
                                    //add initial face points from MatOfRect
                                    if (pointsList.Count <= 0)
                                    {
                                        faceTracker.addPoints(faces);
                                        //	Debug.Log ("reset faces ");
                                    }
                                    else
                                    {
                                        for (int i = 0; i < rectsList.Count; i++)
                                        {
                                            var trackRect = new OpenCVForUnity.Rect(rectsList [i].x + rectsList [i].width / 3, rectsList [i].y + rectsList [i].height / 2, rectsList [i].width / 3, rectsList [i].height / 3);
                                            // check whether the nose point (index 67) still falls inside trackRect.
                                            if (i < pointsList.Count && !trackRect.contains(pointsList [i] [67]))
                                            {
                                                rectsList.RemoveAt(i);
                                                pointsList.RemoveAt(i);
                                                i--;  // step back so the next element is not skipped after removal.
                                                //Debug.Log ("remove " + i);
                                            }
                                            Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
                                        }
                                    }
                                }
                                else
                                {
                                    faceTracker.addPoints(faces);
                                }

                                // update the list of target meshes
                                {
                                    while (targetMeshList.Count < rectsList.Count)
                                    {
                                        var obj = Instantiate(targetMeshPrefab).GetComponent <MeshRenderer>();
                                        obj.transform.rotation = Quaternion.Euler(-90, 0, 0);
                                        targetMeshList.Add(obj);
                                    }

                                    for (int i = targetMeshList.Count - 1; i >= 0; --i)
                                    {
                                        if (i >= rectsList.Count)
                                        {
                                            targetMeshList[i].material.color = Color.clear;
                                        }
                                        else
                                        {
                                            targetMeshList[i].material.color = Color.red;
                                        }
                                    }
                                }

                                //draw face rect
                                for (int i = 0; i < rectsList.Count; i++)
                                {
                                                                #if OPENCV_2
                                    Core.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2);
                                                                #else
                                    Imgproc.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2);
                                                                #endif

                                    // move the object to the center of the face
                                    {
                                        var rect = rectsList[i];

//                                  Debug.Log( string.Format("Rect position ({0}, {1})  Rect size ({2}, {3})", rect.x, rect.y, rect.width, rect.height) );
                                        var pos = new Vector2(
                                            rect.x - (rect.width / 2)
                                            ,
//										rect.y + ( rect.height / 2 )
                                            rect.y
                                            );
                                        // move the object
                                        targetMeshList[i].transform.localPosition = Vector2ToVector3(pos);
                                    }
                                }

                                // move the object to the center position of the face (disabled below with if (false))
                                if (false)
                                {
                                    for (int i = 0; i < pointsList.Count; ++i)
                                    {
                                        Vector2 pos;

                                        // compute the center position
                                        {
                                            double sumX = 0, sumY = 0;
                                            for (int j = 0; j < pointsList[i].Length; ++j)
                                            {
                                                var point = pointsList[i][j];
                                                sumX += point.x;
                                                sumY += point.y;
                                            }

                                            var averageX = sumX / pointsList[i].Length;
                                            var averageY = sumY / pointsList[i].Length;

                                            pos = new Vector2((float)averageX, (float)averageY);
                                        }

                                        {
                                            double leftEnd = 0, topEnd = 0;
                                        }


                                        // move the object
                                        targetMeshList[i].transform.localPosition = Vector2ToVector3(pos);
                                    }
                                }
                            }
                            else
                            {
                                if (isAutoResetMode)
                                {
                                    faceTracker.reset();
                                }
                            }
                        }
                }

                // track face points. If there are no face points, track() always returns false.
                if (faceTracker.track(grayMat, faceTrackerParams))
                {
                    faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
                }

                                #if OPENCV_2
                Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);
                                #else
                Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                                #endif

                //              Core.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }

            if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
            {
                faceTracker.reset();
            }
        }
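
The Update() above uses a Vector2ToVector3 helper that the snippet does not include; it maps a point in image pixels to the target mesh's local space. A placeholder sketch, purely illustrative (the real mapping depends on how the camera quad is positioned and scaled):

        Vector3 Vector2ToVector3(Vector2 p)
        {
            // flip Y because image coordinates grow downward while Unity's Y grows upward.
            return new Vector3(p.x, -p.y, 0f);
        }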
Example #14
        // Update is called once per frame
        void Update()
        {
            // loop play.
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                //Debug.Log ("Mat toString " + rgbMat.ToString ());


                // detect faces.
                List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect> ();
                if (useDlibFaceDetecter)
                {
                    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
                    List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                    foreach (var unityRect in result)
                    {
                        detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                    }
                }
                else
                {
                    // convert image to greyscale.
                    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);

                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect()) {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                            detectResult = faces.toList();

                            // adjust to Dlib's result.
                            foreach (OpenCVForUnity.Rect r in detectResult)
                            {
                                r.y += (int)(r.height * 0.1f);
                            }
                        }
                }


                // face tracking.
                rectangleTracker.UpdateTrackedObjects(detectResult);
                List <TrackedRect> trackedRects = new List <TrackedRect> ();
                rectangleTracker.GetObjects(trackedRects, true);

                // detect face landmark points.
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
                List <List <Vector2> > landmarkPoints = new List <List <Vector2> > ();
                for (int i = 0; i < trackedRects.Count; i++)
                {
                    TrackedRect      tr   = trackedRects [i];
                    UnityEngine.Rect rect = new UnityEngine.Rect(tr.x, tr.y, tr.width, tr.height);

                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
                    landmarkPoints.Add(points);
                }

                // face masking.
                if (faceMaskTexture != null && landmarkPoints.Count >= 1)
                {
                    OpenCVForUnity.Utils.texture2DToMat(faceMaskTexture, faceMaskMat);

                    float imageWidth      = meshOverlay.width;
                    float imageHeight     = meshOverlay.height;
                    float maskImageWidth  = faceMaskTexture.width;
                    float maskImageHeight = faceMaskTexture.height;

                    TrackedRect tr;
                    TrackedMesh tm;
                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        tr = trackedRects [i];

                        if (tr.state == TrackedState.NEW)
                        {
                            meshOverlay.CreateObject(tr.id, faceMaskTexture);
                        }
                        if (tr.state < TrackedState.DELETED)
                        {
                            tm = meshOverlay.GetObjectById(tr.id);

                            Vector3[] vertices = tm.meshFilter.mesh.vertices;
                            if (vertices.Length == landmarkPoints [i].Count)
                            {
                                for (int j = 0; j < vertices.Length; j++)
                                {
                                    vertices [j].x = landmarkPoints [i] [j].x / imageWidth - 0.5f;
                                    vertices [j].y = 0.5f - landmarkPoints [i] [j].y / imageHeight;
                                }
                            }
                            Vector2[] uv = tm.meshFilter.mesh.uv;
                            if (uv.Length == faceLandmarkPointsInMask.Count)
                            {
                                for (int jj = 0; jj < uv.Length; jj++)
                                {
                                    uv [jj].x = faceLandmarkPointsInMask [jj].x / maskImageWidth;
                                    uv [jj].y = (maskImageHeight - faceLandmarkPointsInMask [jj].y) / maskImageHeight;
                                }
                            }
                            meshOverlay.UpdateObject(tr.id, vertices, null, uv);

                            if (tr.numFramesNotDetected > 3)
                            {
                                tm.material.SetFloat(shader_FadeID, 1f);
                            }
                            else if (tr.numFramesNotDetected > 0 && tr.numFramesNotDetected <= 3)
                            {
                                tm.material.SetFloat(shader_FadeID, 0.3f + (0.7f / 4f) * tr.numFramesNotDetected);
                            }
                            else
                            {
                                tm.material.SetFloat(shader_FadeID, 0.3f);
                            }

                            // filter non frontal faces.
                            if (filterNonFrontalFaces && frontalFaceChecker.GetFrontalFaceRate(landmarkPoints [i]) < frontalFaceRateLowerLimit)
                            {
                                tm.material.SetFloat(shader_FadeID, 1f);
                            }
                        }
                        else if (tr.state == TrackedState.DELETED)
                        {
                            meshOverlay.DeleteObject(tr.id);
                        }
                    }
                }
                else if (landmarkPoints.Count >= 1)
                {
                    float imageWidth      = meshOverlay.width;
                    float imageHeight     = meshOverlay.height;
                    float maskImageWidth  = texture.width;
                    float maskImageHeight = texture.height;

                    TrackedRect tr;
                    TrackedMesh tm;
                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        tr = trackedRects [i];

                        if (tr.state == TrackedState.NEW)
                        {
                            meshOverlay.CreateObject(tr.id, texture);
                        }
                        if (tr.state < TrackedState.DELETED)
                        {
                            tm = meshOverlay.GetObjectById(tr.id);

                            Vector3[] vertices = tm.meshFilter.mesh.vertices;
                            if (vertices.Length == landmarkPoints [i].Count)
                            {
                                for (int j = 0; j < vertices.Length; j++)
                                {
                                    vertices [j].x = landmarkPoints[i][j].x / imageWidth - 0.5f;
                                    vertices [j].y = 0.5f - landmarkPoints[i][j].y / imageHeight;
                                }
                            }
                            Vector2[] uv = tm.meshFilter.mesh.uv;
                            if (uv.Length == landmarkPoints [0].Count)
                            {
                                for (int jj = 0; jj < uv.Length; jj++)
                                {
                                    uv [jj].x = landmarkPoints[0][jj].x / maskImageWidth;
                                    uv [jj].y = (maskImageHeight - landmarkPoints[0][jj].y) / maskImageHeight;
                                }
                            }
                            meshOverlay.UpdateObject(tr.id, vertices, null, uv);

                            if (tr.numFramesNotDetected > 3)
                            {
                                tm.material.SetFloat(shader_FadeID, 1f);
                            }
                            else if (tr.numFramesNotDetected > 0 && tr.numFramesNotDetected <= 3)
                            {
                                tm.material.SetFloat(shader_FadeID, 0.3f + (0.7f / 4f) * tr.numFramesNotDetected);
                            }
                            else
                            {
                                tm.material.SetFloat(shader_FadeID, 0.3f);
                            }

                            // filter non frontal faces.
                            if (filterNonFrontalFaces && frontalFaceChecker.GetFrontalFaceRate(landmarkPoints [i]) < frontalFaceRateLowerLimit)
                            {
                                tm.material.SetFloat(shader_FadeID, 1f);
                            }
                        }
                        else if (tr.state == TrackedState.DELETED)
                        {
                            meshOverlay.DeleteObject(tr.id);
                        }
                    }
                }

                // draw face rects.
                if (displayFaceRects)
                {
                    for (int i = 0; i < detectResult.Count; i++)
                    {
                        UnityEngine.Rect rect = new UnityEngine.Rect(detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 0, 0, 255), 2);
                    }

                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        UnityEngine.Rect rect = new UnityEngine.Rect(trackedRects [i].x, trackedRects [i].y, trackedRects [i].width, trackedRects [i].height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 255, 0, 255), 2);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceChecker.GetFrontalFaceAngles (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceChecker.GetFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    }
                }

                // draw face points.
                if (displayDebugFacePoints)
                {
                    for (int i = 0; i < landmarkPoints.Count; i++)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, landmarkPoints [i], new Scalar(0, 255, 0, 255), 2);
                    }
                }


                // display face mask image.
                if (faceMaskTexture != null && faceMaskMat != null)
                {
                    if (displayFaceRects)
                    {
                        OpenCVForUnityUtils.DrawFaceRect(faceMaskMat, faceRectInMask, new Scalar(255, 0, 0, 255), 2);
                    }
                    if (displayDebugFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(faceMaskMat, faceLandmarkPointsInMask, new Scalar(0, 255, 0, 255), 2);
                    }

                    float scale = (rgbMat.width() / 4f) / faceMaskMat.width();
                    float tx    = rgbMat.width() - faceMaskMat.width() * scale;
                    float ty    = 0.0f;
                    Mat   trans = new Mat(2, 3, CvType.CV_32F);  // 2x3 affine matrix: [scale, 0, tx; 0, scale, ty]
                    trans.put(0, 0, scale);
                    trans.put(0, 1, 0.0f);
                    trans.put(0, 2, tx);
                    trans.put(1, 0, 0.0f);
                    trans.put(1, 1, scale);
                    trans.put(1, 2, ty);

                    Imgproc.warpAffine(faceMaskMat, rgbMat, trans, rgbMat.size(), Imgproc.INTER_LINEAR, Core.BORDER_TRANSPARENT, new Scalar(0));
                }

                Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbMat, texture);
            }
        }
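
The six put() calls above fill the 2x3 affine matrix [scale, 0, tx; 0, scale, ty] that warpAffine applies. Assuming the params-array overload Mat.put(int, int, params double[]) that OpenCVForUnity inherits from the OpenCV Java bindings, the same matrix can be written in one call:

                    Mat trans = new Mat(2, 3, CvType.CV_32F);
                    trans.put(0, 0, scale, 0.0, tx, 0.0, scale, ty);  // row-major fill of both rows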
Example #15
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Mat   downScaleRgbaMat = null;
                float DOWNSCALE_RATIO  = 1.0f;
                if (enableDownScale)
                {
                    downScaleRgbaMat = imageOptimizationHelper.GetDownScaleMat(rgbaMat);
                    DOWNSCALE_RATIO  = imageOptimizationHelper.downscaleRatio;
                }
                else
                {
                    downScaleRgbaMat = rgbaMat;
                    DOWNSCALE_RATIO  = 1.0f;
                }


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, downScaleRgbaMat);

                // Detect faces on resize image
                if (!enableSkipFrame || !imageOptimizationHelper.IsCurrentFrameSkipped())
                {
                    //detect face rects
                    if (useOpenCVFaceDetector)
                    {
                        // convert image to greyscale.
                        Imgproc.cvtColor(downScaleRgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                        using (Mat equalizeHistMat = new Mat())
                            using (MatOfRect faces = new MatOfRect()) {
                                Imgproc.equalizeHist(grayMat, equalizeHistMat);

                                cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                                List <OpenCVForUnity.Rect> opencvDetectResult = faces.toList();

                                // adjust to Dlib's result.
                                detectionResult.Clear();
                                foreach (var opencvRect in opencvDetectResult)
                                {
                                    detectionResult.Add(new UnityEngine.Rect((float)opencvRect.x, (float)opencvRect.y + (float)(opencvRect.height * 0.1f), (float)opencvRect.width, (float)opencvRect.height));
                                }
                            }
                    }
                    else
                    {
                        detectionResult = faceLandmarkDetector.Detect();
                    }
                }


                foreach (var rect in detectionResult)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                    List <Vector2> originalPoints = new List <Vector2> (points.Count);
                    foreach (var point in points)
                    {
                        originalPoints.Add(new Vector2(point.x * DOWNSCALE_RATIO, point.y * DOWNSCALE_RATIO));
                    }

                    //draw landmark points
                    OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, originalPoints, new Scalar(0, 255, 0, 255), 2);

                    UnityEngine.Rect originalRect = new UnityEngine.Rect(rect.x * DOWNSCALE_RATIO, rect.y * DOWNSCALE_RATIO, rect.width * DOWNSCALE_RATIO, rect.height * DOWNSCALE_RATIO);
                    //draw face rect
                    OpenCVForUnityUtils.DrawFaceRect(rgbaMat, originalRect, new Scalar(255, 0, 0, 255), 2);
                }

                //Imgproc.putText (rgbaMat, "Original:(" + rgbaMat.width () + "," + rgbaMat.height () + ") DownScale:(" + downScaleRgbaMat.width () + "," + downScaleRgbaMat.height () + ") FrameSkipping: " + imageOptimizationHelper.frameSkippingRatio, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.fastMatToTexture2D(rgbaMat, texture);
            }
        }
Example #16
        // Update is called once per frame
        void Update()
        {
            lock (sync) {
                while (ExecuteOnMainThread.Count > 0)
                {
                    ExecuteOnMainThread.Dequeue().Invoke();
                }
            }

            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetDownScaleMat(webCamTextureToMatHelper.GetMat());

                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
                Imgproc.equalizeHist(grayMat, grayMat);

                if (enable && !detecting)
                {
                    detecting = true;

                    grayMat.copyTo(grayMat4Thread);

                    StartThread(ThreadWorker);
                }

                if (!isShowingWebCamImage)
                {
                    // fill all black.
                    Imgproc.rectangle(rgbaMat, new Point(0, 0), new Point(rgbaMat.width(), rgbaMat.height()), new Scalar(0, 0, 0, 0), -1);
                }

                Rect[] rects;
                if (!isUsingSeparateDetection)
                {
                    if (didUpdateTheDetectionResult)
                    {
                        didUpdateTheDetectionResult = false;

                        rectangleTracker.UpdateTrackedObjects(detectionResult.toList());
                    }

                    rectangleTracker.GetObjects(resultObjects, true);
                    rects = resultObjects.ToArray();

//                    rects = rectangleTracker.CreateCorrectionBySpeedOfRects ();

                    for (int i = 0; i < rects.Length; i++)
                    {
                        //Debug.Log ("detected face[" + i + "] " + rects [i]);
                        Imgproc.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 2);
                    }
                }
                else
                {
                    if (didUpdateTheDetectionResult)
                    {
                        didUpdateTheDetectionResult = false;

                        //Debug.Log("process: get rectsWhereRegions were got from detectionResult");
                        rectsWhereRegions = detectionResult.toArray();

                        rects = rectsWhereRegions;
                        for (int i = 0; i < rects.Length; i++)
                        {
                            Imgproc.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 0, 255, 255), 1);
                        }
                    }
                    else
                    {
                        //Debug.Log("process: get rectsWhereRegions from previous positions");
                        rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects();

                        rects = rectsWhereRegions;
                        for (int i = 0; i < rects.Length; i++)
                        {
                            Imgproc.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 255, 0, 255), 1);
                        }
                    }

                    detectedObjectsInRegions.Clear();
                    if (rectsWhereRegions.Length > 0)
                    {
                        int len = rectsWhereRegions.Length;
                        for (int i = 0; i < len; i++)
                        {
                            detectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                        }
                    }

                    rectangleTracker.UpdateTrackedObjects(detectedObjectsInRegions);
                    rectangleTracker.GetObjects(resultObjects, true);

                    rects = resultObjects.ToArray();
                    for (int i = 0; i < rects.Length; i++)
                    {
                        //Debug.Log ("detected face[" + i + "] " + rects [i]);
                        Imgproc.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 2);
                    }
                }

                Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }

            if (webCamTextureToMatHelper.IsPlaying())
            {
                Matrix4x4 cameraToWorldMatrix = Camera.main.cameraToWorldMatrix;
                Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

                texture.wrapMode = TextureWrapMode.Clamp;

                quad_renderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);

                // Position the canvas object slightly in front
                // of the real world web camera.
                Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2);

                // Rotate the canvas object so that it faces the user.
                Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

                gameObject.transform.position = position;
                gameObject.transform.rotation = rotation;
            }
        }
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                //convert image to greyscale
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);


                if (isAutoResetMode || faceTracker.getPoints().Count <= 0)
                {
//                                      Debug.Log ("detectFace");

                    //equalize the histogram of the grayscale image
                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect())
                        {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
                                                     //                                                                                 | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                                     | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                            if (faces.rows() > 0)
                            {
//                                              Debug.Log ("faces " + faces.dump ());

                                List <OpenCVForUnity.Rect> rectsList  = faces.toList();
                                List <Point[]>             pointsList = faceTracker.getPoints();

                                if (isAutoResetMode)
                                {
                                    //add initial face points from MatOfRect
                                    if (pointsList.Count <= 0)
                                    {
                                        faceTracker.addPoints(faces);
//                                                                      Debug.Log ("reset faces ");
                                    }
                                    else
                                    {
                                        for (int i = 0; i < rectsList.Count; i++)
                                        {
                                            OpenCVForUnity.Rect trackRect = new OpenCVForUnity.Rect(rectsList [i].x + rectsList [i].width / 3, rectsList [i].y + rectsList [i].height / 2, rectsList [i].width / 3, rectsList [i].height / 3);
                                            // Check whether the nose point (landmark index 67) is inside trackRect.
                                            if (i < pointsList.Count && !trackRect.contains(pointsList [i] [67]))
                                            {
                                                rectsList.RemoveAt(i);
                                                pointsList.RemoveAt(i);
//                                                                                      Debug.Log ("remove " + i);
                                                i--; // step back so the element shifted into this slot is not skipped
                                                continue;
                                            }
                                            Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
                                        }
                                    }
                                }
                                else
                                {
                                    faceTracker.addPoints(faces);
                                }
                                //draw face rect
                                for (int i = 0; i < rectsList.Count; i++)
                                {
                                #if OPENCV_2
                                    Core.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2);
                                #else
                                    Imgproc.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2);
                                #endif
                                }
                            }
                            else
                            {
                                if (isAutoResetMode)
                                {
                                    faceTracker.reset();
                                }
                            }
                        }
                }

                //track face points. If no face points are available, track() always returns false.
                if (faceTracker.track(grayMat, faceTrackerParams))
                {
                    faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
                }


                #if OPENCV_2
                Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);
                #else
                Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                #endif


//                              Core.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

                Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }

            if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
            {
                faceTracker.reset();
            }
        }
        public void OnFrameMatAcquired(Mat bgraMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix)
        {
            Imgproc.cvtColor(bgraMat, grayMat, Imgproc.COLOR_BGRA2GRAY);
            Imgproc.equalizeHist(grayMat, grayMat);

            if (enableDetection && !isDetecting)
            {
                isDetecting = true;

                grayMat.copyTo(grayMat4Thread);

                System.Threading.Tasks.Task.Run(() => {
                    isThreadRunning = true;
                    DetectObject();
                    isThreadRunning = false;
                    OnDetectionDone();
                });
            }


            Rect[] rects;
            if (!useSeparateDetection)
            {
                if (hasUpdatedDetectionResult)
                {
                    hasUpdatedDetectionResult = false;

                    lock (rectangleTracker) {
                        rectangleTracker.UpdateTrackedObjects(detectionResult.toList());
                    }
                }

                lock (rectangleTracker) {
                    rectangleTracker.GetObjects(resultObjects, true);
                }
                rects = resultObjects.ToArray();
            }
            else
            {
                if (hasUpdatedDetectionResult)
                {
                    hasUpdatedDetectionResult = false;

                    //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
                    //    Debug.Log("process: get rectsWhereRegions were got from detectionResult");
                    //}, true);

                    lock (rectangleTracker) {
                        rectsWhereRegions = detectionResult.toArray();
                    }

                    rects = rectsWhereRegions;
                }
                else
                {
                    //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
                    //    Debug.Log("process: get rectsWhereRegions from previous positions");
                    //}, true);

                    lock (rectangleTracker) {
                        rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects();
                    }

                    rects = rectsWhereRegions;
                }

                detectedObjectsInRegions.Clear();
                if (rectsWhereRegions.Length > 0)
                {
                    int len = rectsWhereRegions.Length;
                    for (int i = 0; i < len; i++)
                    {
                        DetectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                    }
                }

                lock (rectangleTracker) {
                    rectangleTracker.UpdateTrackedObjects(detectedObjectsInRegions);
                    rectangleTracker.GetObjects(resultObjects, true);
                }

                rects = resultObjects.ToArray();
            }


            UnityEngine.WSA.Application.InvokeOnAppThread(() => {
                if (!webCamTextureToMatHelper.IsPlaying())
                {
                    return;
                }

                DrawRects(rects, bgraMat.width(), bgraMat.height());
                bgraMat.Dispose();

                Vector3 ccCameraSpacePos = UnProjectVector(projectionMatrix, new Vector3(0.0f, 0.0f, overlayDistance));
                Vector3 tlCameraSpacePos = UnProjectVector(projectionMatrix, new Vector3(-overlayDistance, overlayDistance, overlayDistance));

                //position
                Vector3 position = cameraToWorldMatrix.MultiplyPoint3x4(ccCameraSpacePos);
                gameObject.transform.position = position;

                //scale
                Vector3 scale = new Vector3(Mathf.Abs(tlCameraSpacePos.x - ccCameraSpacePos.x) * 2, Mathf.Abs(tlCameraSpacePos.y - ccCameraSpacePos.y) * 2, 1);
                gameObject.transform.localScale = scale;

                // Rotate the canvas object so that it faces the user.
                Quaternion rotation           = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));
                gameObject.transform.rotation = rotation;

                rectOverlay.UpdateOverlayTransform(gameObject.transform);
            }, false);
        }
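
The UnProjectVector helper called above is not included in this snippet. A minimal sketch follows, assuming a projection matrix without skew terms (only the diagonal and z-column entries are used); it mirrors the unprojection commonly seen in HoloLens camera-stream samples and is an illustration, not necessarily the exact helper this example ships with.

        // Maps a projected point back into camera space, assuming proj has no
        // skew: solve z from row 2, then back-substitute into rows 1 and 0.
        public static Vector3 UnProjectVector(Matrix4x4 proj, Vector3 to)
        {
            Vector3 from = new Vector3(0, 0, 0);
            var axsX = proj.GetRow(0);
            var axsY = proj.GetRow(1);
            var axsZ = proj.GetRow(2);
            from.z = to.z / axsZ.z;
            from.y = (to.y - (from.z * axsY.z)) / axsY.y;
            from.x = (to.x - (from.z * axsX.z)) / axsX.x;
            return from;
        }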
Ejemplo n.º 19
0
        public void OnFrameMatAcquired(Mat bgraMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix)
        {
            Mat downScaleFrameMat = imageOptimizationHelper.GetDownScaleMat(bgraMat);

            Imgproc.cvtColor(downScaleFrameMat, grayMat, Imgproc.COLOR_BGRA2GRAY);
            Imgproc.equalizeHist(grayMat, grayMat);

            if (enableDetection && !isDetecting)
            {
                isDetecting = true;

                grayMat.copyTo(grayMat4Thread);

                System.Threading.Tasks.Task.Run(() => {
                    isThreadRunning = true;
                    DetectObject();
                    isThreadRunning = false;
                    OnDetectionDone();
                });
            }

            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, grayMat);

            Mat bgraMat4preview = null;

            if (displayCameraPreview)
            {
                bgraMat4preview = new Mat();
                downScaleFrameMat.copyTo(bgraMat4preview);
            }

            List <Vector2> points = null;

            Rect[] rects;
            if (!useSeparateDetection)
            {
                if (hasUpdatedDetectionResult)
                {
                    hasUpdatedDetectionResult = false;

                    lock (rectangleTracker) {
                        rectangleTracker.UpdateTrackedObjects(detectionResult.toList());
                    }
                }

                lock (rectangleTracker) {
                    rectangleTracker.GetObjects(resultObjects, true);
                }
                rects = resultObjects.ToArray();

                if (rects.Length > 0)
                {
                    OpenCVForUnity.Rect rect = rects [0];

                    // correct the vertical offset between the OpenCV and Dlib face rectangle results.
                    rect.y += (int)(rect.height * 0.1f);

                    //detect landmark points
                    points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

                    if (enableOpticalFlowFilter)
                    {
                        opticalFlowFilter.Process(bgraMat, points, points, false);
                    }

                    if (displayCameraPreview && bgraMat4preview != null)
                    {
                        //draw landmark points
                        OpenCVForUnityUtils.DrawFaceLandmark(bgraMat4preview, points, new Scalar(0, 255, 0, 255), 2);
                    }
                }
            }
            else
            {
                if (hasUpdatedDetectionResult)
                {
                    hasUpdatedDetectionResult = false;

                    //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
                    //    Debug.Log("process: get rectsWhereRegions were got from detectionResult");
                    //}, true);

                    lock (rectangleTracker) {
                        rectsWhereRegions = detectionResult.toArray();
                    }
                }
                else
                {
                    //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
                    //    Debug.Log("process: get rectsWhereRegions from previous positions");
                    //}, true);

                    lock (rectangleTracker) {
                        rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects();
                    }
                }

                detectedObjectsInRegions.Clear();
                if (rectsWhereRegions.Length > 0)
                {
                    int len = rectsWhereRegions.Length;
                    for (int i = 0; i < len; i++)
                    {
                        DetectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                    }
                }

                lock (rectangleTracker) {
                    rectangleTracker.UpdateTrackedObjects(detectedObjectsInRegions);
                    rectangleTracker.GetObjects(resultObjects, true);
                }

                if (resultObjects.Count > 0)
                {
                    OpenCVForUnity.Rect rect = resultObjects [0];

                    // correct the vertical offset between the OpenCV and Dlib face rectangle results.
                    rect.y += (int)(rect.height * 0.1f);

                    //detect landmark points
                    points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

                    if (enableOpticalFlowFilter)
                    {
                        opticalFlowFilter.Process(bgraMat, points, points, false);
                    }

                    if (displayCameraPreview && bgraMat4preview != null)
                    {
                        //draw landmark points
                        OpenCVForUnityUtils.DrawFaceLandmark(bgraMat4preview, points, new Scalar(0, 255, 0, 255), 2);
                    }
                }
            }


            UnityEngine.WSA.Application.InvokeOnAppThread(() => {
                if (!webCamTextureToMatHelper.IsPlaying())
                {
                    return;
                }

                if (displayCameraPreview && bgraMat4preview != null)
                {
                    OpenCVForUnity.Utils.fastMatToTexture2D(bgraMat4preview, texture);
                }

                if (points != null)
                {
                    UpdateARHeadTransform(points, cameraToWorldMatrix);
                }

                bgraMat.Dispose();
                if (bgraMat4preview != null)
                {
                    bgraMat4preview.Dispose();
                }
            }, false);
        }
Ejemplo n.º 20
0
        ///// <summary>
        ///// Modify `data` by applying a posterization transformation to it.
        ///// </summary>
        ///// <param name="data">The byte array to modify.</param>
        ///// <param name="levels">The threshold levels to split each byte into.</param>
        //public static void ProcessImage(byte[] data, byte levels)
        //{
        //    byte factor = (byte) (byte.MaxValue / levels);
        //    for (int i = 0; i < data.Length; i++)
        //    {
        //        data[i] = (byte) (data[i] / factor * factor);
        //    }
        //}
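
If the posterization helper above were re-enabled, it could be applied to an 8-bit OpenCV Mat by round-tripping the pixel data through a managed buffer. A hedged usage sketch (the method name and the 4-level choice are illustrative assumptions):

        // Hypothetical caller for the posterization helper above: quantize an
        // 8-bit single-channel Mat to 4 intensity bands in place.
        private void PosterizeMat(Mat grayMat)
        {
            byte[] pixels = new byte[(int)grayMat.total()];
            grayMat.get(0, 0, pixels);   // copy Mat pixels into a managed buffer
            ProcessImage(pixels, 4);     // snap each byte to the floor of its band
            grayMat.put(0, 0, pixels);   // write the quantized pixels back
        }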

        private void ProcessMat(Mat frameMat)
        {
            if (grayMat == null)
            {
                grayMat = new Mat(frameMat.rows(), frameMat.cols(), CvType.CV_8UC1);
            }



            // detect faces on the downscale image
            if (!enableSkipFrame || !imageOptimizationHelper.IsCurrentFrameSkipped())
            {
                Mat   downScaleRgbaMat = null;
                float DOWNSCALE_RATIO  = 1.0f;
                if (enableDownScale)
                {
                    downScaleRgbaMat = imageOptimizationHelper.GetDownScaleMat(frameMat);
                    DOWNSCALE_RATIO  = imageOptimizationHelper.downscaleRatio;
                }
                else
                {
                    downScaleRgbaMat = frameMat;
                    DOWNSCALE_RATIO  = 1.0f;
                }

                // set the downscale mat
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, downScaleRgbaMat);

                //detect face rects
                if (useOpenCVFaceDetector)
                {
                    // convert image to greyscale.
                    Imgproc.cvtColor(downScaleRgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect())
                        {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                            List <OpenCVForUnity.CoreModule.Rect> opencvDetectResult = faces.toList();

                            // correct the vertical offset between the OpenCV and Dlib face rectangle results.
                            detectionResult.Clear();
                            foreach (var opencvRect in opencvDetectResult)
                            {
                                detectionResult.Add(new UnityEngine.Rect((float)opencvRect.x, (float)opencvRect.y + (float)(opencvRect.height * 0.1f), (float)opencvRect.width, (float)opencvRect.height));
                            }
                        }
                }
                else
                {
                    // Dlib's face detection processing time increases in proportion to image size.
                    detectionResult = faceLandmarkDetector.Detect();
                }

                if (enableDownScale)
                {
                    for (int i = 0; i < detectionResult.Count; ++i)
                    {
                        var rect = detectionResult[i];
                        detectionResult[i] = new UnityEngine.Rect(
                            rect.x * DOWNSCALE_RATIO,
                            rect.y * DOWNSCALE_RATIO,
                            rect.width * DOWNSCALE_RATIO,
                            rect.height * DOWNSCALE_RATIO);
                    }
                }
            }

            // set the original scale image
            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, frameMat);
            // detect face landmarks on the original image
            foreach (var rect in detectionResult)
            {
                //detect landmark points
                List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                //draw landmark points
                OpenCVForUnityUtils.DrawFaceLandmark(frameMat, points, new Scalar(0, 255, 0, 255), 2);
                //draw face rect
                OpenCVForUnityUtils.DrawFaceRect(frameMat, rect, new Scalar(255, 0, 0, 255), 2);
            }
        }
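
For context, ProcessMat above would typically be driven by the same per-frame pattern the other examples on this page use; a hedged sketch (the webCamTextureToMatHelper and texture fields are assumed to exist as in those examples):

        // Hypothetical per-frame caller for ProcessMat, following the pattern
        // of the other Update() methods on this page.
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();
                ProcessMat(rgbaMat);                        // detect faces and draw results
                Utils.fastMatToTexture2D(rgbaMat, texture); // upload the frame to the preview texture
            }
        }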
Ejemplo n.º 21
0
        private void Run()
        {
            meshOverlay = this.GetComponent <TrackedMeshOverlay> ();

            displayFaceRectsToggle.isOn       = displayFaceRects;
            useDlibFaceDetecterToggle.isOn    = useDlibFaceDetecter;
            enableColorCorrectionToggle.isOn  = enableColorCorrection;
            filterNonFrontalFacesToggle.isOn  = filterNonFrontalFaces;
            displayDebugFacePointsToggle.isOn = displayDebugFacePoints;

            if (imgTexture == null)
            {
                imgTexture = Resources.Load("family") as Texture2D;
            }

            gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            meshOverlay.UpdateOverlayTransform(gameObject.transform);
            meshOverlay.Reset();


            float width  = 0;
            float height = 0;

            width  = gameObject.transform.localScale.x;
            height = gameObject.transform.localScale.y;

            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }

            Mat rgbaMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);

            OpenCVForUnity.Utils.texture2DToMat(imgTexture, rgbaMat);
            Debug.Log("rgbaMat ToString " + rgbaMat.ToString());

            if (faceLandmarkDetector == null)
            {
                faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);
            }

            faceMaskColorCorrector = faceMaskColorCorrector ?? new FaceMaskColorCorrector();
            FrontalFaceChecker frontalFaceChecker = new FrontalFaceChecker(width, height);

            // detect faces.
            List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect> ();

            if (useDlibFaceDetecter)
            {
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
                List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                foreach (var unityRect in result)
                {
                    detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                }
            }
            else
            {
                if (cascade == null)
                {
                    cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
                }
//                if (cascade.empty ()) {
//                    Debug.LogError ("cascade file is not loaded. Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
//                }

                // convert image to greyscale.
                Mat gray = new Mat();
                Imgproc.cvtColor(rgbaMat, gray, Imgproc.COLOR_RGBA2GRAY);

                MatOfRect faces = new MatOfRect();
                Imgproc.equalizeHist(gray, gray);
                cascade.detectMultiScale(gray, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(gray.cols() * 0.05, gray.cols() * 0.05), new Size());
                //Debug.Log ("faces " + faces.dump ());

                detectResult = faces.toList();

                // adjust to Dlib's result.
                foreach (OpenCVForUnity.Rect r in detectResult)
                {
                    r.y += (int)(r.height * 0.1f);
                }

                gray.Dispose();
            }

            // detect face landmark points.
            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
            List <List <Vector2> > landmarkPoints = new List <List <Vector2> > ();

            foreach (var openCVRect in detectResult)
            {
                UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);

                Debug.Log("face : " + rect);
                //OpenCVForUnityUtils.DrawFaceRect(imgMat, rect, new Scalar(255, 0, 0, 255), 2);

                List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
                //OpenCVForUnityUtils.DrawFaceLandmark(imgMat, points, new Scalar(0, 255, 0, 255), 2);
                landmarkPoints.Add(points);
            }

            // mask faces.
            int[] face_nums = new int[landmarkPoints.Count];
            for (int i = 0; i < face_nums.Length; i++)
            {
                face_nums [i] = i;
            }
            // shuffle the face indices (ordering by a fresh Guid is a cheap random shuffle).
            face_nums = face_nums.OrderBy(i => System.Guid.NewGuid()).ToArray();

            float imageWidth      = meshOverlay.width;
            float imageHeight     = meshOverlay.height;
            float maskImageWidth  = imgTexture.width;
            float maskImageHeight = imgTexture.height;

            TrackedMesh tm;

            for (int i = 0; i < face_nums.Length; i++)
            {
                meshOverlay.CreateObject(i, imgTexture);
                tm = meshOverlay.GetObjectById(i);

                Vector3[] vertices = tm.meshFilter.mesh.vertices;
                if (vertices.Length == landmarkPoints [face_nums [i]].Count)
                {
                    for (int j = 0; j < vertices.Length; j++)
                    {
                        vertices [j].x = landmarkPoints [face_nums [i]] [j].x / imageWidth - 0.5f;
                        vertices [j].y = 0.5f - landmarkPoints [face_nums [i]] [j].y / imageHeight;
                    }
                }
                Vector2[] uv = tm.meshFilter.mesh.uv;
                if (uv.Length == landmarkPoints [face_nums [0]].Count)
                {
                    for (int jj = 0; jj < uv.Length; jj++)
                    {
                        uv [jj].x = landmarkPoints [face_nums [0]] [jj].x / maskImageWidth;
                        uv [jj].y = (maskImageHeight - landmarkPoints [face_nums [0]] [jj].y) / maskImageHeight;
                    }
                }
                meshOverlay.UpdateObject(i, vertices, null, uv);

                if (enableColorCorrection)
                {
                    faceMaskColorCorrector.CreateLUTTex(i);
                    Texture2D LUTTex = faceMaskColorCorrector.UpdateLUTTex(i, rgbaMat, rgbaMat, landmarkPoints [face_nums [0]], landmarkPoints [face_nums [i]]);
                    tm.sharedMaterial.SetTexture("_LUTTex", LUTTex);
                    tm.sharedMaterial.SetFloat("_ColorCorrection", 1f);
                }
                else
                {
                    tm.sharedMaterial.SetFloat("_ColorCorrection", 0f);
                }

                // filter non frontal faces.
                if (filterNonFrontalFaces && frontalFaceChecker.GetFrontalFaceRate(landmarkPoints [face_nums [i]]) < frontalFaceRateLowerLimit) // check the face actually being masked
                {
                    tm.sharedMaterial.SetFloat("_Fade", 1f);
                }
                else
                {
                    tm.sharedMaterial.SetFloat("_Fade", 0.3f);
                }
            }

            // draw face rects.
            if (displayFaceRects)
            {
                int ann = face_nums[0];
                UnityEngine.Rect rect_ann = new UnityEngine.Rect(detectResult [ann].x, detectResult [ann].y, detectResult [ann].width, detectResult [ann].height);
                OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect_ann, new Scalar(255, 255, 0, 255), 2);

                int bob = 0;
                for (int i = 1; i < face_nums.Length; i++)
                {
                    bob = face_nums [i];
                    UnityEngine.Rect rect_bob = new UnityEngine.Rect(detectResult [bob].x, detectResult [bob].y, detectResult [bob].width, detectResult [bob].height);
                    OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect_bob, new Scalar(255, 0, 0, 255), 2);
                }
            }

            // draw face points.
            if (displayDebugFacePoints)
            {
                for (int i = 0; i < landmarkPoints.Count; i++)
                {
                    OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, landmarkPoints [i], new Scalar(0, 255, 0, 255), 2);
                }
            }


            Texture2D texture = new Texture2D(rgbaMat.cols(), rgbaMat.rows(), TextureFormat.RGBA32, false);

            OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture);
            gameObject.transform.GetComponent <Renderer> ().material.mainTexture = texture;

            frontalFaceChecker.Dispose();
            rgbaMat.Dispose();
        }
    // Update is called once per frame
    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            Mat rgbaMat = webCamTextureToMatHelper.GetMat();

            //convert image to greyscale
            Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);


            if (isAutoResetMode || faceTracker.getPoints().Count <= 0)
            {
//                    Debug.Log ("detectFace");

                //equalize the histogram of the grayscale image
                using (Mat equalizeHistMat = new Mat()) using (MatOfRect faces = new MatOfRect()) {
                        Imgproc.equalizeHist(grayMat, equalizeHistMat);

                        cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
                                                 //                                                                                 | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                                 | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                        if (faces.rows() > 0)
                        {
//                            Debug.Log ("faces " + faces.dump ());

                            List <OpenCVForUnity.CoreModule.Rect> rectsList = faces.toList();
                            List <Point[]> pointsList = faceTracker.getPoints();

                            if (isAutoResetMode)
                            {
                                //add initial face points from MatOfRect
                                if (pointsList.Count <= 0)
                                {
                                    faceTracker.addPoints(faces);
//                                    Debug.Log ("reset faces ");
                                }
                                else
                                {
                                    for (int i = 0; i < rectsList.Count; i++)
                                    {
                                        OpenCVForUnity.CoreModule.Rect trackRect = new OpenCVForUnity.CoreModule.Rect(rectsList [i].x + rectsList [i].width / 3, rectsList [i].y + rectsList [i].height / 2, rectsList [i].width / 3, rectsList [i].height / 3);
                                        // Check whether the nose point (landmark index 67) is inside trackRect.
                                        if (i < pointsList.Count && !trackRect.contains(pointsList [i] [67]))
                                        {
                                            rectsList.RemoveAt(i);
                                            pointsList.RemoveAt(i);
//                                                                                      Debug.Log ("remove " + i);
                                            i--; // step back so the element shifted into this slot is not skipped
                                            continue;
                                        }
                                        // draw the nose-region rectangle.
                                        Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
                                    }
                                }
                            }
                            else
                            {
                                faceTracker.addPoints(faces);
                            }
                            //draw face rect
                            for (int i = 0; i < rectsList.Count; i++)
                            {
                                Imgproc.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2);
                            }
                        }
                        else
                        {
                            if (isAutoResetMode)
                            {
                                faceTracker.reset();
                            }
                        }
                    }
            }

            //track face points. If no face points are available, track() always returns false.
            if (faceTracker.track(grayMat, faceTrackerParams))
            {
                //GameObject.FindGameObjectWithTag("left hand").transform.localScale = new Vector3(0.05f, 0.05f, 50);
                //GameObject.FindGameObjectWithTag("right hand").transform.localScale = new Vector3(0.05f, 0.05f, 50);
                //facecount = 0;
                if (facerec > 15)
                {
                    GameObject.FindGameObjectWithTag("left hand").transform.localScale  = new Vector3(0.2f, 0.2f, 50);
                    GameObject.FindGameObjectWithTag("right hand").transform.localScale = new Vector3(0.2f, 0.2f, 50);
                    facecount = 0;
                }
                else
                {
                    facerec++;
                }
                //uncomment below for rectangle around face
                //faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
            }
            else
            {
                //facecount prevents the hands from flickering when face recognition momentarily fails
                if (facecount > 15)
                {
                    facerec = 0;
                    GameObject.FindGameObjectWithTag("left hand").transform.localScale  = new Vector3(0f, 0f, 0);
                    GameObject.FindGameObjectWithTag("right hand").transform.localScale = new Vector3(0f, 0f, 0);
                    facecount++;
                }
                }
                else
                {
                    facecount++;
                }
            }

            //Imgproc.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

//                Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

            Utils.fastMatToTexture2D(rgbaMat, texture);
        }

        //face tracking resets upon screen tap or space bar
        if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
        {
            faceTracker.reset();
        }

        if (Input.GetKeyDown(KeyCode.Escape))
        {
            if (SpeechRecognizer.IsRecording())
            {
                SpeechRecognizer.StopIfRecording();
                //resultText.text = "I stopped recording";
            }
            Application.Quit();
            //Application.LoadLevel ("MainActivity.class");
        }
    }
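
The facerec/facecount counters above implement a simple hysteresis: a detection state must persist for roughly 15 consecutive frames before the hands are shown or hidden. A distilled sketch of that debouncing pattern (names and threshold are illustrative, not from the original):

        // Generic frame-count hysteresis: flip visibility only after the
        // detection result has been stable for StableFrames consecutive frames.
        private int hitFrames = 0;
        private int missFrames = 0;
        private const int StableFrames = 15;

        private bool UpdateVisibility(bool detectedThisFrame, bool currentlyVisible)
        {
            if (detectedThisFrame) { hitFrames++; missFrames = 0; }
            else { missFrames++; hitFrames = 0; }

            if (hitFrames > StableFrames) return true;    // stably detected: show
            if (missFrames > StableFrames) return false;  // stably lost: hide
            return currentlyVisible;                      // otherwise keep the current state
        }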
Ejemplo n.º 23
0
        public void OnFrameMatAcquired(Mat bgraMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix)
        {
            Imgproc.cvtColor(bgraMat, grayMat, Imgproc.COLOR_BGRA2GRAY);
            Imgproc.equalizeHist(grayMat, grayMat);

            if (enableDetection && !isDetecting)
            {
                isDetecting = true;

                grayMat.copyTo(grayMat4Thread);

                System.Threading.Tasks.Task.Run(() => {
                    isThreadRunning = true;
                    DetectObject();
                    isThreadRunning = false;
                    OnDetectionDone();
                });
            }


            if (!displayCameraImage)
            {
                // fill all black.
                Imgproc.rectangle(bgraMat, new Point(0, 0), new Point(bgraMat.width(), bgraMat.height()), new Scalar(0, 0, 0, 0), -1);
            }


            Rect[] rects;
            if (!useSeparateDetection)
            {
                if (hasUpdatedDetectionResult)
                {
                    hasUpdatedDetectionResult = false;

                    lock (rectangleTracker) {
                        rectangleTracker.UpdateTrackedObjects(detectionResult.toList());
                    }
                }

                lock (rectangleTracker) {
                    rectangleTracker.GetObjects(resultObjects, true);
                }
                rects = resultObjects.ToArray();

                for (int i = 0; i < rects.Length; i++)
                {
                    //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
                    //    Debug.Log ("detected face[" + i + "] " + rects [i]);
                    //}, true);

                    Imgproc.rectangle(bgraMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 0, 255, 255), 3);
                }
            }
            else
            {
                if (hasUpdatedDetectionResult)
                {
                    hasUpdatedDetectionResult = false;

                    //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
                    //    Debug.Log("process: get rectsWhereRegions were got from detectionResult");
                    //}, true);

                    lock (rectangleTracker) {
                        rectsWhereRegions = detectionResult.toArray();
                    }

                    rects = rectsWhereRegions;
                    for (int i = 0; i < rects.Length; i++)
                    {
                        Imgproc.rectangle(bgraMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 1);
                    }
                }
                else
                {
                    //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
                    //    Debug.Log("process: get rectsWhereRegions from previous positions");
                    //}, true);

                    lock (rectangleTracker) {
                        rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects();
                    }

                    rects = rectsWhereRegions;
                    for (int i = 0; i < rects.Length; i++)
                    {
                        Imgproc.rectangle(bgraMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 255, 0, 255), 1);
                    }
                }

                detectedObjectsInRegions.Clear();
                if (rectsWhereRegions.Length > 0)
                {
                    int len = rectsWhereRegions.Length;
                    for (int i = 0; i < len; i++)
                    {
                        DetectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                    }
                }

                lock (rectangleTracker) {
                    rectangleTracker.UpdateTrackedObjects(detectedObjectsInRegions);
                    rectangleTracker.GetObjects(resultObjects, true);
                }

                rects = resultObjects.ToArray();

                for (int i = 0; i < rects.Length; i++)
                {
                    //UnityEngine.WSA.Application.InvokeOnAppThread (() => {
                    //    Debug.Log ("detected face[" + i + "] " + rects [i]);
                    //}, true);

                    Imgproc.rectangle(bgraMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 0, 255, 255), 3);
                }
            }


            UnityEngine.WSA.Application.InvokeOnAppThread(() => {
                if (!webCamTextureToMatHelper.IsPlaying())
                {
                    return;
                }

                Utils.fastMatToTexture2D(bgraMat, texture);
                bgraMat.Dispose();

                Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

                quad_renderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);

                // Position the canvas object slightly in front
                // of the real world web camera.
                Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2);
                position        *= 1.2f;

                // Rotate the canvas object so that it faces the user.
                Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

                gameObject.transform.position = position;
                gameObject.transform.rotation = rotation;
            }, false);
        }
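
DetectInRegion is called in several of these snippets but never shown. A hedged sketch of what such a helper usually does follows: run the cascade only inside a padded sub-region of the grayscale frame, then shift the hits back to full-image coordinates. The 20% padding and the minimum-size heuristic are illustrative assumptions, and the cascade field is assumed to be the classifier used by DetectObject().

        // Sketch of a region-limited detector: pad the tracked rect, clamp it
        // to the image bounds, detect inside the ROI, and offset the hits back.
        private void DetectInRegion(Mat img, Rect region, List<Rect> results)
        {
            int pad = (int)(region.width * 0.2f);
            int x = Mathf.Max(region.x - pad, 0);
            int y = Mathf.Max(region.y - pad, 0);
            int w = Mathf.Min(region.width + 2 * pad, img.cols() - x);
            int h = Mathf.Min(region.height + 2 * pad, img.rows() - y);
            Rect padded = new Rect(x, y, w, h);

            using (Mat roi = new Mat(img, padded))
            using (MatOfRect hits = new MatOfRect())
            {
                cascade.detectMultiScale(roi, hits, 1.1, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE,
                    new Size(region.width / 2, region.height / 2), new Size());

                foreach (Rect hit in hits.toArray())
                {
                    // Offset from ROI coordinates back to full-image coordinates.
                    results.Add(new Rect(hit.x + padded.x, hit.y + padded.y, hit.width, hit.height));
                }
            }
        }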
Ejemplo n.º 24
0
        private void Run()
        {
            //if true, the error log of the native OpenCV side will be displayed on the Unity Editor Console.
            Utils.setDebugMode(true);


            Mat frame = Imgcodecs.imread(scenetext01_jpg_filepath);

            #if !UNITY_WSA_10_0
            if (frame.empty())
            {
                Debug.LogError("text/scenetext01.jpg is not loaded.Please copy from “OpenCVForUnity/StreamingAssets/text/” to “Assets/StreamingAssets/” folder. ");
            }
            #endif

            Mat binaryMat = new Mat();
            Mat maskMat   = new Mat();


            List <MatOfPoint> regions = new List <MatOfPoint> ();

            ERFilter er_filter1 = Text.createERFilterNM1(trained_classifierNM1_xml_filepath, 8, 0.00015f, 0.13f, 0.2f, true, 0.1f);

            ERFilter er_filter2 = Text.createERFilterNM2(trained_classifierNM2_xml_filepath, 0.5f);


            Mat transition_p = new Mat(62, 62, CvType.CV_64FC1);
            //            string filename = "OCRHMM_transitions_table.xml";
            //            FileStorage fs(filename, FileStorage::READ);
            //            fs["transition_probabilities"] >> transition_p;
            //            fs.release();

            //Load TransitionProbabilitiesData.
            transition_p.put(0, 0, GetTransitionProbabilitiesData(OCRHMM_transitions_table_xml_filepath));

            Mat           emission_p = Mat.eye(62, 62, CvType.CV_64FC1);
            string        voc        = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
            OCRHMMDecoder decoder    = OCRHMMDecoder.create(
                OCRHMM_knn_model_data_xml_gz_filepath,
                voc, transition_p, emission_p);

            //Text Detection
            Imgproc.cvtColor(frame, frame, Imgproc.COLOR_BGR2RGB);
            Imgproc.cvtColor(frame, binaryMat, Imgproc.COLOR_RGB2GRAY);
            Imgproc.threshold(binaryMat, binaryMat, 0, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);
            Core.absdiff(binaryMat, new Scalar(255), maskMat);


            Text.detectRegions(binaryMat, er_filter1, er_filter2, regions);
            Debug.Log("regions.Count " + regions.Count);


            MatOfRect groups_rects           = new MatOfRect();
            List <OpenCVForUnity.Rect> rects = new List <OpenCVForUnity.Rect> ();
            Text.erGrouping(frame, binaryMat, regions, groups_rects);


            for (int i = 0; i < regions.Count; i++)
            {
                regions [i].Dispose();
            }
            regions.Clear();


            rects.AddRange(groups_rects.toList());

            groups_rects.Dispose();


            //Text Recognition (OCR)

            List <Mat> detections = new List <Mat> ();

            for (int i = 0; i < rects.Count; i++)
            {
                Mat group_img = new Mat();
                maskMat.submat(rects [i]).copyTo(group_img);
                Core.copyMakeBorder(group_img, group_img, 15, 15, 15, 15, Core.BORDER_CONSTANT, new Scalar(0));
                detections.Add(group_img);
            }

            Debug.Log("detections.Count " + detections.Count);


            // Visualization
            for (int i = 0; i < rects.Count; i++)
            {
                Imgproc.rectangle(frame, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0), 2);
                Imgproc.rectangle(frame, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 255, 255), 1);

                string output = decoder.run(detections [i], 0);
                if (!string.IsNullOrEmpty(output))
                {
                    Debug.Log("output " + output);
                    Imgproc.putText(frame, output, new Point(rects [i].x, rects [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(0, 0, 255), 1, Imgproc.LINE_AA, false);
                }
            }


            Texture2D texture = new Texture2D(frame.cols(), frame.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(frame, texture);

//            Texture2D texture = new Texture2D (detections [0].cols (), detections [0].rows (), TextureFormat.RGBA32, false);
//
//            Utils.matToTexture2D (detections [0], texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;


            for (int i = 0; i < detections.Count; i++)
            {
                detections [i].Dispose();
            }
            binaryMat.Dispose();
            maskMat.Dispose();

            Utils.setDebugMode(false);
        }
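
GetTransitionProbabilitiesData above stands in for the C++ FileStorage lines left in the comments. A hedged sketch follows, assuming the file is a standard OpenCV FileStorage XML whose transition_probabilities node stores the 62x62 matrix as whitespace-separated doubles inside a <data> element:

        // Sketch of a transition-table loader: pull the numbers out of the
        // FileStorage XML's <data> node and parse them into a double[].
        private double[] GetTransitionProbabilitiesData(string filePath)
        {
            var doc = new System.Xml.XmlDocument();
            doc.Load(filePath);

            System.Xml.XmlNode dataNode = doc.SelectSingleNode("//transition_probabilities/data");
            string[] tokens = dataNode.InnerText.Split(
                new char[] { ' ', '\t', '\r', '\n' },
                System.StringSplitOptions.RemoveEmptyEntries);

            double[] data = new double[tokens.Length];
            for (int i = 0; i < tokens.Length; i++)
            {
                data[i] = double.Parse(tokens[i], System.Globalization.CultureInfo.InvariantCulture);
            }
            return data;
        }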
Ejemplo n.º 25
0
        // Update is called once per frame
        void Update()
        {
            // loop play.
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                //Debug.Log ("Mat toString " + rgbMat.ToString ());


                // detect faces.
                List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect> ();
                if (useDlibFaceDetecter)
                {
                    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
                    List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                    foreach (var unityRect in result)
                    {
                        detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                    }
                }
                else
                {
                    // convert image to greyscale.
                    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);

                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect()) {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                            detectResult = faces.toList();

                            // adjust to Dlib's result.
                            foreach (OpenCVForUnity.Rect r in detectResult)
                            {
                                r.y += (int)(r.height * 0.1f);
                            }
                        }
                }

                // face tracking.
                rectangleTracker.UpdateTrackedObjects(detectResult);
                List <TrackedRect> trackedRects = new List <TrackedRect> ();
                rectangleTracker.GetObjects(trackedRects, true);

                // create noise filter.
                foreach (var openCVRect in trackedRects)
                {
                    if (openCVRect.state == TrackedState.NEW)
                    {
                        if (!lowPassFilterDict.ContainsKey(openCVRect.id))
                        {
                            lowPassFilterDict.Add(openCVRect.id, new LowPassPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
                        }
                        if (!opticalFlowFilterDict.ContainsKey(openCVRect.id))
                        {
                            opticalFlowFilterDict.Add(openCVRect.id, new OFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
                        }
                    }
                    else if (openCVRect.state == TrackedState.DELETED)
                    {
                        if (lowPassFilterDict.ContainsKey(openCVRect.id))
                        {
                            lowPassFilterDict [openCVRect.id].Dispose();
                            lowPassFilterDict.Remove(openCVRect.id);
                        }
                        if (opticalFlowFilterDict.ContainsKey(openCVRect.id))
                        {
                            opticalFlowFilterDict [openCVRect.id].Dispose();
                            opticalFlowFilterDict.Remove(openCVRect.id);
                        }
                    }
                }

                // create LUT texture.
                foreach (var openCVRect in trackedRects)
                {
                    if (openCVRect.state == TrackedState.NEW)
                    {
                        faceMaskColorCorrector.CreateLUTTex(openCVRect.id);
                    }
                    else if (openCVRect.state == TrackedState.DELETED)
                    {
                        faceMaskColorCorrector.DeleteLUTTex(openCVRect.id);
                    }
                }


                // detect face landmark points.
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
                List <List <Vector2> > landmarkPoints = new List <List <Vector2> > ();
                for (int i = 0; i < trackedRects.Count; i++)
                {
                    TrackedRect      tr   = trackedRects [i];
                    UnityEngine.Rect rect = new UnityEngine.Rect(tr.x, tr.y, tr.width, tr.height);

                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                    // apply noise filter.
                    if (enableNoiseFilter)
                    {
                        if (tr.state > TrackedState.NEW && tr.state < TrackedState.DELETED)
                        {
                            opticalFlowFilterDict [tr.id].Process(rgbMat, points, points);
                            lowPassFilterDict [tr.id].Process(rgbMat, points, points);
                        }
                    }

                    landmarkPoints.Add(points);
                }

                // face masking.
                if (faceMaskTexture != null && landmarkPoints.Count >= 1)
                {
                    // Apply face masking between detected faces and a face mask image.
                    float maskImageWidth  = faceMaskTexture.width;
                    float maskImageHeight = faceMaskTexture.height;

                    TrackedRect tr;

                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        tr = trackedRects [i];

                        if (tr.state == TrackedState.NEW)
                        {
                            meshOverlay.CreateObject(tr.id, faceMaskTexture);
                        }
                        if (tr.state < TrackedState.DELETED)
                        {
                            MaskFace(meshOverlay, tr, landmarkPoints [i], faceLandmarkPointsInMask, maskImageWidth, maskImageHeight);

                            if (enableColorCorrection)
                            {
                                CorrectFaceMaskColor(tr.id, faceMaskMat, rgbMat, faceLandmarkPointsInMask, landmarkPoints [i]);
                            }
                        }
                        else if (tr.state == TrackedState.DELETED)
                        {
                            meshOverlay.DeleteObject(tr.id);
                        }
                    }
                }
                else if (landmarkPoints.Count >= 1)     // Apply face masking between the detected faces themselves.
                {
                    float maskImageWidth  = texture.width;
                    float maskImageHeight = texture.height;

                    TrackedRect tr;

                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        tr = trackedRects [i];

                        if (tr.state == TrackedState.NEW)
                        {
                            meshOverlay.CreateObject(tr.id, texture);
                        }
                        if (tr.state < TrackedState.DELETED)
                        {
                            MaskFace(meshOverlay, tr, landmarkPoints [i], landmarkPoints [0], maskImageWidth, maskImageHeight);

                            if (enableColorCorrection)
                            {
                                CorrectFaceMaskColor(tr.id, rgbMat, rgbMat, landmarkPoints [0], landmarkPoints [i]);
                            }
                        }
                        else if (tr.state == TrackedState.DELETED)
                        {
                            meshOverlay.DeleteObject(tr.id);
                        }
                    }
                }

                // draw face rects.
                if (displayFaceRects)
                {
                    for (int i = 0; i < detectResult.Count; i++)
                    {
                        UnityEngine.Rect rect = new UnityEngine.Rect(detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 0, 0, 255), 2);
                    }

                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        UnityEngine.Rect rect = new UnityEngine.Rect(trackedRects [i].x, trackedRects [i].y, trackedRects [i].width, trackedRects [i].height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 255, 0, 255), 2);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceChecker.GetFrontalFaceAngles (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceChecker.GetFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    }
                }

                // draw face points.
                if (displayDebugFacePoints)
                {
                    for (int i = 0; i < landmarkPoints.Count; i++)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, landmarkPoints [i], new Scalar(0, 255, 0, 255), 2);
                    }
                }


                // display face mask image.
                if (faceMaskTexture != null && faceMaskMat != null)
                {
                    if (displayFaceRects)
                    {
                        OpenCVForUnityUtils.DrawFaceRect(faceMaskMat, faceRectInMask, new Scalar(255, 0, 0, 255), 2);
                    }
                    if (displayDebugFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(faceMaskMat, faceLandmarkPointsInMask, new Scalar(0, 255, 0, 255), 2);
                    }

                    float scale = (rgbMat.width() / 4f) / faceMaskMat.width();
                    float tx    = rgbMat.width() - faceMaskMat.width() * scale;
                    float ty    = 0.0f;
                    Mat   trans = new Mat(2, 3, CvType.CV_32F); // 2x3 affine matrix: [scale, 0, tx; 0, scale, ty]
                    trans.put(0, 0, scale);
                    trans.put(0, 1, 0.0f);
                    trans.put(0, 2, tx);
                    trans.put(1, 0, 0.0f);
                    trans.put(1, 1, scale);
                    trans.put(1, 2, ty);

                    Imgproc.warpAffine(faceMaskMat, rgbMat, trans, rgbMat.size(), Imgproc.INTER_LINEAR, Core.BORDER_TRANSPARENT, new Scalar(0));

                    // reload the mask image so the debug drawing does not accumulate across frames.
                    if (displayFaceRects || displayDebugFacePoints)
                    {
                        OpenCVForUnity.Utils.texture2DToMat(faceMaskTexture, faceMaskMat);
                    }
                }

//                Imgproc.putText (rgbMat, "W:" + rgbMat.width () + " H:" + rgbMat.height () + " SO:" + Screen.orientation, new Point (5, rgbMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.fastMatToTexture2D(rgbMat, texture);
            }
        }
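
        // The mask-preview overlay above shrinks faceMaskMat to a quarter of the frame width
        // and pins it to the top-right corner with a 2x3 affine matrix. A minimal hedged sketch
        // of the same pattern as a standalone helper ('PastePreview' is an illustrative name,
        // not part of OpenCVForUnity):
        void PastePreview(Mat src, Mat dst)
        {
            float scale = (dst.width() / 4f) / src.width();  // quarter-width thumbnail.
            float tx    = dst.width() - src.width() * scale; // right-aligned.
            float ty    = 0.0f;                              // top-aligned.

            using (Mat trans = new Mat(2, 3, CvType.CV_32F))
            {
                // [ scale, 0,     tx ]
                // [ 0,     scale, ty ]
                trans.put(0, 0, scale);
                trans.put(0, 1, 0.0f);
                trans.put(0, 2, tx);
                trans.put(1, 0, 0.0f);
                trans.put(1, 1, scale);
                trans.put(1, 2, ty);

                Imgproc.warpAffine(src, dst, trans, dst.size(), Imgproc.INTER_LINEAR, Core.BORDER_TRANSPARENT, new Scalar(0));
            }
        }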
Ejemplo n.º 26
0
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();


                //convert image to greyscale
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);


                if (isAutoResetMode || faceTracker.getPoints().Count <= 0)
                {
//                    Debug.Log ("detectFace");

                    //equalize the histogram to boost contrast before detection.
                    using (Mat equalizeHistMat = new Mat()) using (MatOfRect faces = new MatOfRect()) {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
                                                     | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                                     | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());



                            if (faces.rows() > 0)
                            {
//                            Debug.Log ("faces " + faces.dump ());

                                List <OpenCVForUnity.CoreModule.Rect> rectsList = faces.toList();
                                List <Point[]> pointsList = faceTracker.getPoints();

                                if (isAutoResetMode)
                                {
                                    //add initial face points from MatOfRect
                                    if (pointsList.Count <= 0)
                                    {
                                        faceTracker.addPoints(faces);
//                                    Debug.Log ("reset faces ");
                                    }
                                    else
                                    {
                                        for (int i = 0; i < rectsList.Count; i++)
                                        {
                                            OpenCVForUnity.CoreModule.Rect trackRect = new OpenCVForUnity.CoreModule.Rect(rectsList [i].x + rectsList [i].width / 3, rectsList [i].y + rectsList [i].height / 2, rectsList [i].width / 3, rectsList [i].height / 3);
                                            //Check whether the nose point (index 67) is still inside trackRect; drop the face if not.
                                            if (i < pointsList.Count && !trackRect.contains(pointsList [i] [67]))
                                            {
                                                rectsList.RemoveAt(i);
                                                pointsList.RemoveAt(i);
                                                i--;
//                                            Debug.Log ("remove " + i);
                                            }
                                            Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
                                        }
                                    }
                                }
                                else
                                {
                                    faceTracker.addPoints(faces);
                                }

                                //draw face rect
                                for (int i = 0; i < rectsList.Count; i++)
                                {
                                    Imgproc.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2);
                                }
                            }
                            else
                            {
                                if (isAutoResetMode)
                                {
                                    faceTracker.reset();

                                    rightEye.SetActive(false);
                                    leftEye.SetActive(false);
                                    head.SetActive(false);
                                    mouth.SetActive(false);
                                    axes.SetActive(false);
                                }
                            }
                        }
                }


                //track face points. If there are no face points, track() always returns false.
                if (faceTracker.track(grayMat, faceTrackerParams))
                {
                    if (isShowingFacePoints)
                    {
                        faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
                    }

                    Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);


                    Point[] points = faceTracker.getPoints() [0];


                    if (points.Length > 0)
                    {
//                        for (int i = 0; i < points.Length; i++)
//                        {
//                            Imgproc.putText(rgbaMat, "" + i, new Point(points [i].x, points [i].y), Imgproc.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar(0, 0, 255, 255), 2, Imgproc.LINE_AA, false);
//                        }


                        imagePoints.fromArray(
                            points [31], //l eye
                            points [36], //r eye
                            points [67], //nose
                            points [48], //l mouth
                            points [54]  //r mouth
//                          ,
//                          points [0],//l ear
//                          points [14]//r ear
                            );


                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                        bool isRefresh = false;

                        // Accept the pose only when the estimated depth (tvec z) is positive and plausible for the current frame scale.
                        if (tvec.get(2, 0) [0] > 0 && tvec.get(2, 0) [0] < 1200 * ((float)rgbaMat.cols() / (float)webCamTextureToMatHelper.requestedWidth))
                        {
                            isRefresh = true;

                            if (oldRvec == null)
                            {
                                oldRvec = new Mat();
                                rvec.copyTo(oldRvec);
                            }
                            if (oldTvec == null)
                            {
                                oldTvec = new Mat();
                                tvec.copyTo(oldTvec);
                            }


                            //filter Rvec Noise.
                            using (Mat absDiffRvec = new Mat()) {
                                Core.absdiff(rvec, oldRvec, absDiffRvec);

                                //              Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                using (Mat cmpRvec = new Mat()) {
                                    Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpRvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }

                            //filter Tvec Noise.
                            using (Mat absDiffTvec = new Mat()) {
                                Core.absdiff(tvec, oldTvec, absDiffTvec);

                                //              Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                using (Mat cmpTvec = new Mat()) {
                                    Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpTvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }
                        }

                        if (isRefresh)
                        {
                            if (isShowingEffects)
                            {
                                rightEye.SetActive(true);
                            }
                            if (isShowingEffects)
                            {
                                leftEye.SetActive(true);
                            }
                            if (isShowingHead)
                            {
                                head.SetActive(true);
                            }
                            if (isShowingAxes)
                            {
                                axes.SetActive(true);
                            }


                            // Mouth-open heuristic based on distances among mouth points (48, 51, 56, 57) relative to the inter-eye distance (31-36).
                            if ((Mathf.Abs((float)(points [48].x - points [56].x)) < Mathf.Abs((float)(points [31].x - points [36].x)) / 2.2 &&
                                 Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.9) ||
                                Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.7)
                            {
                                if (isShowingEffects)
                                {
                                    mouth.SetActive(true);
                                }
                            }
                            else
                            {
                                if (isShowingEffects)
                                {
                                    mouth.SetActive(false);
                                }
                            }

                            rvec.copyTo(oldRvec);
                            tvec.copyTo(oldTvec);

                            Calib3d.Rodrigues(rvec, rotM);

                            transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                            transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                            transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                            // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                            ARM = invertYM * transformationM;

                            // Apply Z-axis inverted matrix.
                            ARM = ARM * invertZM;

                            if (shouldMoveARCamera)
                            {
                                if (ARGameObject != null)
                                {
                                    ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                                    ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                                    ARGameObject.SetActive(true);
                                }
                            }
                            else
                            {
                                ARM = ARCamera.transform.localToWorldMatrix * ARM;

                                if (ARGameObject != null)
                                {
                                    ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                                    ARGameObject.SetActive(true);
                                }
                            }
                        }
                    }
                }

//                Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.fastMatToTexture2D(rgbaMat, texture);
            }

            if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
            {
                faceTracker.reset();
                if (oldRvec != null)
                {
                    oldRvec.Dispose();
                    oldRvec = null;
                }
                if (oldTvec != null)
                {
                    oldTvec.Dispose();
                    oldTvec = null;
                }

                rightEye.SetActive(false);
                leftEye.SetActive(false);
                head.SetActive(false);
                mouth.SetActive(false);
                axes.SetActive(false);
            }
        }
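
        // Example 26's Calib3d.solvePnP call depends on camMatrix and distCoeffs fields that are
        // initialized elsewhere in the class. A hedged sketch of the usual uncalibrated-webcam
        // setup, assuming the focal length is approximated by the larger image dimension and the
        // principal point sits at the image center ('BuildCamMatrix' is an illustrative name):
        Mat BuildCamMatrix(int width, int height)
        {
            double f = Mathf.Max(width, height);

            Mat camMatrix = new Mat(3, 3, CvType.CV_64FC1);
            camMatrix.put(0, 0, f);  camMatrix.put(0, 1, 0);  camMatrix.put(0, 2, width / 2.0);
            camMatrix.put(1, 0, 0);  camMatrix.put(1, 1, f);  camMatrix.put(1, 2, height / 2.0);
            camMatrix.put(2, 0, 0);  camMatrix.put(2, 1, 0);  camMatrix.put(2, 2, 1.0);

            // A zero-distortion MatOfDouble(0, 0, 0, 0) is the usual companion for distCoeffs.
            return camMatrix;
        }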
Ejemplo n.º 27
0
        private void Run()
        {
            displayFaceRectsToggle.isOn       = displayFaceRects;
            useDlibFaceDetecterToggle.isOn    = useDlibFaceDetecter;
            filterNonFrontalFacesToggle.isOn  = filterNonFrontalFaces;
            useSeamlessCloneToggle.isOn       = useSeamlessClone;
            displayDebugFacePointsToggle.isOn = displayDebugFacePoints;

            if (imgTexture == null)
            {
                imgTexture = Resources.Load("family") as Texture2D;
            }

            gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = gameObject.transform.localScale.x;
            float height = gameObject.transform.localScale.y;


            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }

            Mat rgbaMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);

            OpenCVForUnity.UnityUtils.Utils.texture2DToMat(imgTexture, rgbaMat);
            Debug.Log("rgbaMat ToString " + rgbaMat.ToString());

            if (faceLandmarkDetector == null)
            {
                faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);
            }

            FrontalFaceChecker frontalFaceChecker = new FrontalFaceChecker(width, height);

            // detect faces.
            List <Rect> detectResult = new List <Rect>();

            if (useDlibFaceDetecter)
            {
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
                List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                foreach (var unityRect in result)
                {
                    detectResult.Add(new Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                }
            }
            else
            {
                if (cascade == null)
                {
                    cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
                }
                //if (cascade.empty())
                //{
                //    Debug.LogError("cascade file is not loaded. Please copy from 'FaceTrackerExample/StreamingAssets/' to the 'Assets/StreamingAssets/' folder.");
                //}

                // convert image to greyscale.
                Mat gray = new Mat();
                Imgproc.cvtColor(rgbaMat, gray, Imgproc.COLOR_RGBA2GRAY);

                MatOfRect faces = new MatOfRect();
                Imgproc.equalizeHist(gray, gray);
                cascade.detectMultiScale(gray, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new Size(gray.cols() * 0.05, gray.cols() * 0.05), new Size());
                //Debug.Log ("faces " + faces.dump ());

                detectResult = faces.toList();

                // correct for the offset between the OpenCV face rectangle and the Dlib one.
                foreach (Rect r in detectResult)
                {
                    r.y += (int)(r.height * 0.1f);
                }

                gray.Dispose();
            }

            // detect face landmark points.
            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
            List <List <Vector2> > landmarkPoints = new List <List <Vector2> >();

            foreach (var openCVRect in detectResult)
            {
                UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);

                Debug.Log("face : " + rect);
                //OpenCVForUnityUtils.DrawFaceRect(imgMat, rect, new Scalar(255, 0, 0, 255), 2);

                List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
                //OpenCVForUnityUtils.DrawFaceLandmark(imgMat, points, new Scalar(0, 255, 0, 255), 2);
                landmarkPoints.Add(points);
            }



            // filter non frontal faces.
            if (filterNonFrontalFaces)
            {
                for (int i = 0; i < landmarkPoints.Count; i++)
                {
                    if (frontalFaceChecker.GetFrontalFaceRate(landmarkPoints[i]) < frontalFaceRateLowerLimit)
                    {
                        detectResult.RemoveAt(i);
                        landmarkPoints.RemoveAt(i);
                        i--;
                    }
                }
            }


            // swap faces.
            int[] face_nums = new int[landmarkPoints.Count];
            for (int i = 0; i < face_nums.Length; i++)
            {
                face_nums[i] = i;
            }
            // Shuffle the indices by sorting on fresh GUIDs; faces are then paired off randomly below.
            face_nums = face_nums.OrderBy(i => System.Guid.NewGuid()).ToArray();
            if (landmarkPoints.Count >= 2)
            {
                DlibFaceSwapper faceSwapper = new DlibFaceSwapper();
                faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessClone;
                faceSwapper.isShowingDebugFacePoints      = displayDebugFacePoints;

                int ann = 0, bob = 0;
                for (int i = 0; i < face_nums.Length - 1; i += 2)
                {
                    ann = face_nums[i];
                    bob = face_nums[i + 1];

                    faceSwapper.SwapFaces(rgbaMat, landmarkPoints[ann], landmarkPoints[bob], 1);
                }
                faceSwapper.Dispose();
            }

            // draw face rects.
            if (displayFaceRects)
            {
                int ann = 0, bob = 0;
                for (int i = 0; i < face_nums.Length - 1; i += 2)
                {
                    ann = face_nums[i];
                    bob = face_nums[i + 1];

                    UnityEngine.Rect rect_ann = new UnityEngine.Rect(detectResult[ann].x, detectResult[ann].y, detectResult[ann].width, detectResult[ann].height);
                    UnityEngine.Rect rect_bob = new UnityEngine.Rect(detectResult[bob].x, detectResult[bob].y, detectResult[bob].width, detectResult[bob].height);
                    Scalar           color    = new Scalar(UnityEngine.Random.Range(0, 256), UnityEngine.Random.Range(0, 256), UnityEngine.Random.Range(0, 256), 255);
                    OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect_ann, color, 2);
                    OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect_bob, color, 2);
                    //Imgproc.putText (rgbaMat, "" + i % 2, new Point (rect_ann.xMin, rect_ann.yMin - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, color, 2, Imgproc.LINE_AA, false);
                    //Imgproc.putText (rgbaMat, "" + (i % 2 + 1), new Point (rect_bob.xMin, rect_bob.yMin - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, color, 2, Imgproc.LINE_AA, false);
                }
            }

            frontalFaceChecker.Dispose();

            Texture2D texture = new Texture2D(rgbaMat.cols(), rgbaMat.rows(), TextureFormat.RGBA32, false);

            OpenCVForUnity.UnityUtils.Utils.matToTexture2D(rgbaMat, texture);
            gameObject.GetComponent <Renderer>().material.mainTexture = texture;

            rgbaMat.Dispose();
        }
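
        // The screen-fitting block at the top of Run() reads as a small pure function: pick the
        // orthographic size so a width x height quad fills the screen without cropping. A hedged
        // restatement ('FitOrthographicSize' is an illustrative name):
        static float FitOrthographicSize(float width, float height)
        {
            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                // Width is the limiting dimension; derive the half-height from the screen aspect.
                return (width * (float)Screen.height / (float)Screen.width) / 2f;
            }
            return height / 2f;
        }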
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                // detect faces.
                List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect> ();
                if (useDlibFaceDetecter)
                {
                    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
                    List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                    foreach (var unityRect in result)
                    {
                        detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                    }
                }
                else
                {
                    // convert image to greyscale.
                    Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect()) {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                            detectResult = faces.toList();

                            // adjust to Dlib's result.
                            foreach (OpenCVForUnity.Rect r in detectResult)
                            {
                                r.y += (int)(r.height * 0.1f);
                            }
                        }
                }

                // face tracking.
                if (enableTracking)
                {
                    rectangleTracker.UpdateTrackedObjects(detectResult);
                    detectResult = new List <OpenCVForUnity.Rect> ();
                    rectangleTracker.GetObjects(detectResult, true);
                }

                // detect face landmark points.
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
                List <List <Vector2> > landmarkPoints = new List <List <Vector2> > ();
                foreach (var openCVRect in detectResult)
                {
                    UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);

                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
                    landmarkPoints.Add(points);
                }

                // filter non frontal faces.
                if (filterNonFrontalFaces)
                {
                    for (int i = 0; i < landmarkPoints.Count; i++)
                    {
                        if (frontalFaceChecker.GetFrontalFaceRate(landmarkPoints [i]) < frontalFaceRateLowerLimit)
                        {
                            detectResult.RemoveAt(i);
                            landmarkPoints.RemoveAt(i);
                            i--;
                        }
                    }
                }

                // face swapping.
                if (landmarkPoints.Count >= 2)
                {
                    int ann = 0, bob = 1;
                    for (int i = 0; i < landmarkPoints.Count - 1; i += 2)
                    {
                        ann = i;
                        bob = i + 1;

                        faceSwapper.SwapFaces(rgbaMat, landmarkPoints [ann], landmarkPoints [bob], 1);
                    }
                }

                // draw face rects.
                if (displayFaceRects)
                {
                    for (int i = 0; i < detectResult.Count; i++)
                    {
                        UnityEngine.Rect rect = new UnityEngine.Rect(detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect, new Scalar(255, 0, 0, 255), 2);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceParam.getAngleOfFrontalFace (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    }
                }

                Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, colors);
            }
        }
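
        // The tracking step above follows a feed-then-read pattern. A hedged sketch of that
        // pattern as a helper, assuming RectangleTracker exposes exactly the two calls used in
        // Update() ('TrackFrame' is an illustrative name):
        List <OpenCVForUnity.Rect> TrackFrame(RectangleTracker tracker, List <OpenCVForUnity.Rect> detections)
        {
            tracker.UpdateTrackedObjects(detections); // feed this frame's raw detections.

            List <OpenCVForUnity.Rect> tracked = new List <OpenCVForUnity.Rect> ();
            tracker.GetObjects(tracked, true);        // read back the smoothed, tracked rects.
            return tracked;
        }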
Ejemplo n.º 29
0
        // Update is called once per frame
        void Update()
        {
            lock (sync) {
                while (ExecuteOnMainThread.Count > 0)
                {
                    ExecuteOnMainThread.Dequeue().Invoke();
                }
            }

            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetDownScaleMat(webCamTextureToMatHelper.GetMat());

                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
                Imgproc.equalizeHist(grayMat, grayMat);

                if (enableDetection && !isDetecting)
                {
                    isDetecting = true;

                    grayMat.copyTo(grayMat4Thread);

                    StartThread(ThreadWorker);
                }

                Rect[] rects;
                if (!useSeparateDetection)
                {
                    if (hasUpdatedDetectionResult)
                    {
                        hasUpdatedDetectionResult = false;

                        rectangleTracker.UpdateTrackedObjects(detectionResult.toList());
                    }

                    rectangleTracker.GetObjects(resultObjects, true);
                    rects = resultObjects.ToArray();

//                    rects = rectangleTracker.CreateCorrectionBySpeedOfRects ();

                    DrawRects(rects, grayMat.width(), grayMat.height());
                }
                else
                {
                    if (hasUpdatedDetectionResult)
                    {
                        hasUpdatedDetectionResult = false;

                        //Debug.Log("process: get rectsWhereRegions were got from detectionResult");
                        rectsWhereRegions = detectionResult.toArray();

//                        rects = rectsWhereRegions;
//                        for (int i = 0; i < rects.Length; i++) {
//                            Imgproc.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (0, 0, 255, 255), 1);
//                        }
                    }
                    else
                    {
                        //Debug.Log("process: get rectsWhereRegions from previous positions");
                        rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects();

//                        rects = rectsWhereRegions;
//                        for (int i = 0; i < rects.Length; i++) {
//                            Imgproc.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (0, 255, 0, 255), 1);
//                        }
                    }

                    detectedObjectsInRegions.Clear();
                    if (rectsWhereRegions.Length > 0)
                    {
                        int len = rectsWhereRegions.Length;
                        for (int i = 0; i < len; i++)
                        {
                            DetectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                        }
                    }

                    rectangleTracker.UpdateTrackedObjects(detectedObjectsInRegions);
                    rectangleTracker.GetObjects(resultObjects, true);

                    rects = resultObjects.ToArray();
                    DrawRects(rects, grayMat.width(), grayMat.height());
                }
            }

            if (webCamTextureToMatHelper.IsPlaying())
            {
                Matrix4x4 cameraToWorldMatrix = Camera.main.cameraToWorldMatrix;

                Vector3 ccCameraSpacePos = UnProjectVector(projectionMatrix, new Vector3(0.0f, 0.0f, overlayDistance));
                Vector3 tlCameraSpacePos = UnProjectVector(projectionMatrix, new Vector3(-overlayDistance, overlayDistance, overlayDistance));

                //position
                Vector3 position = cameraToWorldMatrix.MultiplyPoint3x4(ccCameraSpacePos);
                gameObject.transform.position = position;

                //scale
                Vector3 scale = new Vector3(Mathf.Abs(tlCameraSpacePos.x - ccCameraSpacePos.x) * 2, Mathf.Abs(tlCameraSpacePos.y - ccCameraSpacePos.y) * 2, 1);
                gameObject.transform.localScale = scale;

                // Rotate the canvas object so that it faces the user.
                Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));
                gameObject.transform.rotation = rotation;

                rectOverlay.UpdateOverlayTransform(gameObject.transform);
            }
        }
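
        // UnProjectVector is not defined in this snippet. In HoloLens-style samples this helper
        // typically inverts the projection analytically for a point at a known depth; a hedged
        // sketch consistent with how it is called above, assuming an axis-aligned projection
        // matrix with no skew terms:
        public static Vector3 UnProjectVector(Matrix4x4 proj, Vector3 to)
        {
            // Solve proj * from = to for x, y and z, back-substituting from the z row upward.
            Vector3 from = new Vector3(0, 0, 0);
            Vector4 axsX = proj.GetRow(0);
            Vector4 axsY = proj.GetRow(1);
            Vector4 axsZ = proj.GetRow(2);
            from.z = to.z / axsZ.z;
            from.y = (to.y - (from.z * axsY.z)) / axsY.y;
            from.x = (to.x - (from.z * axsX.z)) / axsX.x;
            return from;
        }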
Ejemplo n.º 30
0
        void Run()
        {
            Mat rgbaMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);

            OpenCVForUnity.Utils.texture2DToMat(imgTexture, rgbaMat);
            //Mat dstMat = Imgcodecs.imread(Application.dataPath + "/Resources/changer.jpg");
            //Imgproc.cvtColor(dstMat, dstMat, Imgproc.COLOR_BGR2RGB);

            FrontalFaceChecker frontalFaceChecker = new FrontalFaceChecker(imgTexture.width, imgTexture.height); //frontal-face checker over the whole texture.

            //1. Face detection: two ways to get the face rects, switched by a toggle.
            List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect>();

            if (useDlibFaceDetecter)
            {
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
                List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                foreach (UnityEngine.Rect unityRect in result)
                {
                    detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                }
            }
            else
            {
                cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);

                Mat gray = new Mat();
                Imgproc.cvtColor(rgbaMat, gray, Imgproc.COLOR_RGBA2GRAY);

                MatOfRect faces = new MatOfRect();
                Imgproc.equalizeHist(gray, gray);
                cascade.detectMultiScale(gray, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(gray.cols() * 0.05, gray.cols() * 0.05), new Size());

                detectResult = faces.toList();

                // adjust to Dlib's result.
                foreach (OpenCVForUnity.Rect r in detectResult)
                {
                    r.y += (int)(r.height * 0.1f);
                }
                gray.Dispose();
            }

            //Face 0 is copied onto face 1, so sort the list by x to make the order deterministic.
            detectResult.Sort(delegate(OpenCVForUnity.Rect x, OpenCVForUnity.Rect y)
            {
                return(x.x.CompareTo(y.x));   //ascending by x
                //return y.x.CompareTo(x.x); //descending by x
            });
            //Debug.Log("[0]" + detectResult[0].x + ", [1]" + detectResult[1].x);

            //2. Landmark localization: detect the face landmark points inside the rects from the previous step.
            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
            List <List <Vector2> > landmarkPoints = new List <List <Vector2> >();


            //TODO... detect only one face; turn the other into a UV map whose 68 points are entered by hand.

            /*
             * foreach (OpenCVForUnity.Rect openCVRect in detectResult)
             * {
             *  UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
             *  //Debug.Log("face : " + rect);
             *
             *  OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect, new Scalar(255, 0, 0, 255), 2); //draw the face rect
             *  Imgproc.circle(rgbaMat, new Point(rect.x, rect.y), 0, new Scalar(0, 255, 0, 255), 5, Imgproc.LINE_8, 0);
             *  Imgproc.putText(rgbaMat, rect.x + "x" + rect.y, new Point(rect.x + 5, rect.y -5), 1, 1, new Scalar(0, 255, 0, 255));
             *
             *  List<Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
             *  //OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2); //draw the landmarks
             *  landmarkPoints.Add(points);
             * }
             */

            //Detect the landmark points of the face in the photo.
            OpenCVForUnity.Rect openCVRect = detectResult[0];
            UnityEngine.Rect    rect       = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
            //OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect, new Scalar(255, 0, 0, 255), 2); //draw the face rect
            //Imgproc.circle(rgbaMat, new Point(rect.x, rect.y), 0, new Scalar(0, 255, 0, 255), 5, Imgproc.LINE_8, 0);
            //Imgproc.putText(rgbaMat, rect.x + "x" + rect.y, new Point(rect.x + 5, rect.y - 5), 1, 1, new Scalar(0, 255, 0, 255));
            List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect); //extract the points from the rect with the detector

            //OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2); //draw the landmarks
            landmarkPoints.Add(points);

            string log = "";

            for (int i = 0; i < points.Count; i++)
            {
                //Debug.Log("[" + i + "] " + points[i]);
                log += "new Vector2(" + ((int)points[i].x + 500) + "," + (int)points[i].y + "),\n";
                //Imgproc.circle(rgbaMat, new Point(10, 10), 0, new Scalar(255, 0, 0, 255), 4, Imgproc.LINE_8, 0); //check the origin
                //Imgproc.putText(rgbaMat, i.ToString(), new Point(landmarkPoints[0][i].x, landmarkPoints[0][i].y), 1, 1, new Scalar(0, 255, 0, 255));
                //Imgproc.circle(rgbaMat, new Point(landmarkPoints[0][i].x, landmarkPoints[0][i].y), 0, new Scalar(0, 255, 0, 255), 2, Imgproc.LINE_8, 0); //draw the 68 points
                //Imgproc.putText(rgbaMat, rect.x + "x" + rect.y, new Point(rect.x + 5, rect.y - 5), 1, 1, new Scalar(0, 255, 0, 255));
                //Imgproc.putText(rgbaMat, rect.x + "x" + rect.y, new Point(rect.x + 5, rect.y - 5), 1, 1, new Scalar(0, 255, 0, 255));

                /* * * * * * * * * * * * *
                * jaw;       // [0-16]  *
                * rightBrow; // [17-21] *
                * leftBrow;  // [22-26] *
                * nose;      // [27-35] *
                * rightEye;  // [36-41] *
                * leftEye;   // [42-47] *
                * mouth;     // [48-59] *
                * mouth2;    // [60-67] *
                * * * * * * * * * * * * */
                /*
                 * if (i >= 0 && i <= 16)
                 * {
                 *  Imgproc.circle(rgbaMat, new Point(points[i].x, points[i].y), 0, new Scalar(255, 0, 0, 255), 2, Imgproc.LINE_8, 0); //red
                 * }
                 * else if (i >= 17 && i <= 21)
                 * {
                 *  Imgproc.circle(rgbaMat, new Point(points[i].x, points[i].y), 0, new Scalar(255, 127, 0, 255), 2, Imgproc.LINE_8, 0); //orange
                 * }
                 * else if (i >= 22 && i <= 26)
                 * {
                 *  Imgproc.circle(rgbaMat, new Point(points[i].x, points[i].y), 0, new Scalar(255, 255, 0, 255), 2, Imgproc.LINE_8, 0); //yellow
                 * }
                 * else if (i >= 27 && i <= 35)
                 * {
                 *  Imgproc.circle(rgbaMat, new Point(points[i].x, points[i].y), 0, new Scalar(0, 255, 0, 255), 2, Imgproc.LINE_8, 0); //green
                 * }
                 * else if (i >= 36 && i <= 41)
                 * {
                 *  Imgproc.circle(rgbaMat, new Point(points[i].x, points[i].y), 0, new Scalar(0, 255, 170, 255), 2, Imgproc.LINE_8, 0); //cyan
                 * }
                 * else if (i >= 42 && i <= 47)
                 * {
                 *  Imgproc.circle(rgbaMat, new Point(points[i].x, points[i].y), 0, new Scalar(0, 0, 255, 255), 2, Imgproc.LINE_8, 0); //blue
                 * }
                 * else if (i >= 48 && i <= 59)
                 * {
                 *  Imgproc.circle(rgbaMat, new Point(points[i].x, points[i].y), 0, new Scalar(128, 255, 0, 255), 2, Imgproc.LINE_8, 0); //purple
                 * }
                 * else if (i >= 60 && i <= 67)
                 * {
                 *  Imgproc.circle(rgbaMat, new Point(points[i].x, points[i].y), 0, new Scalar(255, 180, 255, 255), 2, Imgproc.LINE_8, 0); //pink
                 * }
                 */
            }
            //Debug.Log(log);


            //Hand-authored landmark points for one UV map.
            List <Vector2> uvs = new List <Vector2>()
            {
                //0-7
                new Vector2(583, 210), //0
                new Vector2(584, 250), //1
                new Vector2(585, 305), //2
                new Vector2(600, 360), //3
                new Vector2(630, 410), //4
                new Vector2(670, 425), //5
                new Vector2(710, 440), //6
                new Vector2(740, 445), //7
                //8
                new Vector2(770, 450), //8 (center axis)
                //9-16
                new Vector2(800, 445), //9
                new Vector2(830, 440), //10
                new Vector2(870, 425), //11
                new Vector2(910, 410), //12
                new Vector2(940, 360), //13
                new Vector2(955, 305), //14
                new Vector2(956, 250), //15
                new Vector2(957, 210), //16
                //17-21 leftBrow
                new Vector2(655, 165), //17
                new Vector2(680, 155), //18
                new Vector2(710, 160), //19
                new Vector2(730, 170), //20
                new Vector2(750, 190), //21
                //22-26 rightBrow
                new Vector2(790, 190), //22
                new Vector2(810, 170), //23
                new Vector2(830, 160), //24
                new Vector2(860, 155), //25
                new Vector2(885, 165), //26
                //27-30 nose (vertical)
                new Vector2(770, 220), //27
                new Vector2(770, 250), //28
                new Vector2(770, 275), //29
                new Vector2(770, 300), //30
                //31-35 nose (horizontal)
                new Vector2(740, 312), //31
                new Vector2(755, 316), //32
                new Vector2(770, 320), //33
                new Vector2(785, 316), //34
                new Vector2(800, 312), //35
                //36-41 leftEye
                new Vector2(670, 215), //36
                new Vector2(690, 200), //37
                new Vector2(715, 205), //38
                new Vector2(730, 225), //39
                new Vector2(710, 230), //40
                new Vector2(690, 227), //41
                //42-47 rightEye
                new Vector2(810, 225), //42
                new Vector2(825, 205), //43
                new Vector2(850, 200), //44
                new Vector2(870, 215), //45
                new Vector2(855, 227), //46
                new Vector2(830, 230), //47
                //48-59 mouth
                new Vector2(720, 360), //48-l
                new Vector2(735, 355), //49
                new Vector2(750, 350), //50
                new Vector2(770, 352), //51+u
                new Vector2(790, 350), //52
                new Vector2(805, 355), //53
                new Vector2(820, 360), //54-r
                new Vector2(805, 375), //55
                new Vector2(790, 382), //56
                new Vector2(770, 380), //57+d
                new Vector2(750, 382), //58
                new Vector2(735, 375), //59
                //60-67 mouth2
                new Vector2(730, 365), //60-l
                new Vector2(750, 357), //61
                new Vector2(770, 354), //62+u
                new Vector2(790, 357), //63
                new Vector2(810, 365), //64-r
                new Vector2(790, 370), //65
                new Vector2(770, 375), //66+d
                new Vector2(750, 370), //67
            };

            landmarkPoints.Add(uvs);


            for (int i = 0; i < landmarkPoints[1].Count; i++)
            {
                //Imgproc.putText(rgbaMat, i.ToString(), new Point(landmarkPoints[1][i].x, landmarkPoints[1][i].y), 1, 1, new Scalar(255, 0, 0, 255));

                /* * * * * * * * * * * * *
                * jaw;       // [0-16]  *
                * rightBrow; // [17-21] *
                * leftBrow;  // [22-26] *
                * nose;      // [27-35] *
                * rightEye;  // [36-41] *
                * leftEye;   // [42-47] *
                * mouth;     // [48-59] *
                * mouth2;    // [60-67] *
                * * * * * * * * * * * * */

                /*
                 * if (i >= 0 && i <= 16)
                 * {
                 *  Imgproc.circle(rgbaMat, new Point(landmarkPoints[1][i].x, landmarkPoints[1][i].y), 0, new Scalar(255, 0, 0, 255), 5, Imgproc.LINE_8, 0); //red
                 * }
                 * else if (i >= 17 && i <= 21)
                 * {
                 *  Imgproc.circle(rgbaMat, new Point(landmarkPoints[1][i].x, landmarkPoints[1][i].y), 0, new Scalar(255, 0, 0, 255), 5, Imgproc.LINE_8, 0); //orange
                 * }
                 * else if (i >= 22 && i <= 26)
                 * {
                 *  Imgproc.circle(rgbaMat, new Point(landmarkPoints[1][i].x, landmarkPoints[1][i].y), 0, new Scalar(255, 0, 0, 255), 5, Imgproc.LINE_8, 0); //yellow
                 * }
                 * else if (i >= 27 && i <= 35)
                 * {
                 *  Imgproc.circle(rgbaMat, new Point(landmarkPoints[1][i].x, landmarkPoints[1][i].y), 0, new Scalar(255, 0, 0, 255), 5, Imgproc.LINE_8, 0); //green
                 * }
                 * else if (i >= 36 && i <= 41)
                 * {
                 *  Imgproc.circle(rgbaMat, new Point(landmarkPoints[1][i].x, landmarkPoints[1][i].y), 0, new Scalar(255, 0, 0, 255), 5, Imgproc.LINE_8, 0); //cyan
                 * }
                 * else if (i >= 42 && i <= 47)
                 * {
                 *  Imgproc.circle(rgbaMat, new Point(landmarkPoints[1][i].x, landmarkPoints[1][i].y), 0, new Scalar(255, 0, 0, 255), 5, Imgproc.LINE_8, 0); //blue
                 * }
                 * else if (i >= 48 && i <= 59)
                 * {
                 *  Imgproc.circle(rgbaMat, new Point(landmarkPoints[1][i].x, landmarkPoints[1][i].y), 0, new Scalar(255, 0, 0, 255), 5, Imgproc.LINE_8, 0); //purple
                 * }
                 * else if (i >= 60 && i <= 67)
                 * {
                 *  //Imgproc.circle(rgbaMat, new Point(landmarkPoints[1][i].x, landmarkPoints[1][i].y), 0, new Scalar(255, 0, 0, 255), 5, Imgproc.LINE_8, 0); //pink
                 * }
                 */
            }



            //Filter out non-frontal faces.
            if (filterNonFrontalFaces)
            {
                for (int i = 0; i < landmarkPoints.Count; i++)
                {
                    if (frontalFaceChecker.GetFrontalFaceRate(landmarkPoints[i]) < frontalFaceRateLowerLimit) //threshold in the range 0..1
                    {
                        detectResult.RemoveAt(i);
                        landmarkPoints.RemoveAt(i);
                        i--;
                    }
                }
            }

            //With a single face we only extract landmarks; the swap runs only when two faces are present.
            if (landmarkPoints.Count == 2)
            {
                //run the face change.
                DlibFaceChanger faceChanger = new DlibFaceChanger();
                faceChanger.isShowingDebugFacePoints = displayDebugFacePoints;
                faceChanger.SetTargetImage(rgbaMat);                                                //target Mat
                faceChanger.AddFaceChangeData(rgbaMat, landmarkPoints[0], landmarkPoints[1], 0.9f); //source Mat; copy from face 0 onto face 1
                faceChanger.ChangeFace();
                faceChanger.Dispose();
            }

            frontalFaceChecker.Dispose();

            //background image.
            Texture2D t2d = new Texture2D(rgbaMat.width(), rgbaMat.height(), TextureFormat.RGBA32, false);

            OpenCVForUnity.Utils.matToTexture2D(rgbaMat, t2d);
            Sprite sp = Sprite.Create(t2d, new UnityEngine.Rect(0, 0, t2d.width, t2d.height), Vector2.zero);

            srcImage.sprite         = sp;
            srcImage.preserveAspect = true;


            Mat       dstMat  = new Mat(rgbaMat, new OpenCVForUnity.Rect(rgbaMat.width() / 2, 0, rgbaMat.width() / 2, rgbaMat.height()));
            Texture2D dst_t2d = new Texture2D(dstMat.width(), dstMat.height(), TextureFormat.RGBA32, false);

            OpenCVForUnity.Utils.matToTexture2D(dstMat, dst_t2d);
            mesh.materials[1].mainTexture = dst_t2d;

            rgbaMat.Dispose();
        }
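
        // The 68-point layout referenced in the comment tables above maps index ranges to face
        // regions. A hedged convenience for slicing one region out of the flat landmark list
        // ('GetLandmarkGroup' is an illustrative name; start/end are inclusive):
        static List <Vector2> GetLandmarkGroup(List <Vector2> points, int start, int end)
        {
            // e.g. jaw = (0, 16), nose = (27, 35), mouth = (48, 59), mouth2 = (60, 67).
            return points.GetRange(start, end - start + 1);
        }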
Ejemplo n.º 31
0
        // Search for objects such as faces in the image using the given parameters, storing the multiple cv::Rects into 'objects'.
        // Can use Haar cascades or LBP cascades for Face Detection, or even eye, mouth, or car detection.
        // Input is temporarily shrunk to 'scaledWidth' for much faster detection, since a width of about 200 pixels is enough to find faces.
        private static void detectObjectsCustom(Mat img, CascadeClassifier cascade, out List <Rect> objects, int scaledWidth, int flags, Size minFeatureSize, float searchScaleFactor, int minNeighbors)
        {
            // If the input image is not grayscale, then convert the RGB or RGBA color image to grayscale.
            if (gray == null)
            {
                gray = new Mat();
            }

            if (img.channels() == 3)
            {
                Imgproc.cvtColor(img, gray, Imgproc.COLOR_RGB2GRAY);
            }
            else if (img.channels() == 4)
            {
                Imgproc.cvtColor(img, gray, Imgproc.COLOR_RGBA2GRAY);
            }
            else
            {
                // Access the input image directly, since it is already grayscale.
                gray = img;
            }

            // Possibly shrink the image, to run much faster.
            if (inputImg == null)
            {
                inputImg = new Mat();
            }

            float scale = img.cols() / (float)scaledWidth;

            if (img.cols() > scaledWidth)
            {
                // Shrink the image while keeping the same aspect ratio.
                int scaledHeight = (int)Mathf.Round(img.rows() / scale);
                Imgproc.resize(gray, inputImg, new Size(scaledWidth, scaledHeight));
            }
            else
            {
                // Access the input image directly, since it is already small.
                inputImg = gray;
            }

            // Standardize the brightness and contrast to improve dark images.
            if (equalizedImg == null)
            {
                equalizedImg = new Mat();
            }

            Imgproc.equalizeHist(inputImg, equalizedImg);

            // Detect objects in the small grayscale image.
            cascade.detectMultiScale(equalizedImg, matOfRectObjects, searchScaleFactor, minNeighbors, flags, minFeatureSize, new Size());
            objects = matOfRectObjects.toList();

            // Enlarge the results if the image was temporarily shrunk before detection.
            if (img.cols() > scaledWidth)
            {
                for (int i = 0; i < objects.Count; i++)
                {
                    objects[i].x      = (int)Mathf.Round(objects[i].x * scale);
                    objects[i].y      = (int)Mathf.Round(objects[i].y * scale);
                    objects[i].width  = (int)Mathf.Round(objects[i].width * scale);
                    objects[i].height = (int)Mathf.Round(objects[i].height * scale);
                }
            }

            // Make sure the object is completely within the image, in case it was on a border.
            for (int i = 0; i < objects.Count; i++)
            {
                if (objects[i].x < 0)
                {
                    objects[i].x = 0;
                }
                if (objects[i].y < 0)
                {
                    objects[i].y = 0;
                }
                if (objects[i].x + objects[i].width > img.cols())
                {
                    objects[i].x = img.cols() - objects[i].width;
                }
                if (objects[i].y + objects[i].height > img.rows())
                {
                    objects[i].y = img.rows() - objects[i].height;
                }
            }
            // Return with the detected face rectangles stored in "objects".
        }
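
        // A hedged usage sketch for detectObjectsCustom, with parameter values in line with the
        // comments above (shrink to ~200 px for speed; the detector's own empty Size() maximum is
        // applied internally). 'faceCascade' stands in for a CascadeClassifier loaded elsewhere.
        private static void DetectFacesExample(Mat img, CascadeClassifier faceCascade)
        {
            List <Rect> faces;
            detectObjectsCustom(img, faceCascade, out faces,
                                200,                           // scaledWidth
                                Objdetect.CASCADE_SCALE_IMAGE, // flags
                                new Size(20, 20),              // minFeatureSize
                                1.1f,                          // searchScaleFactor
                                2);                            // minNeighbors
        }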