Code Example #1
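
The Update() loop below drains actions queued by a detection worker thread, converts the downscaled webcam frame to an equalized grayscale image, feeds the tracked face rectangle to the Dlib landmark detector, and updates the AR head transform from the resulting landmarks.
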
        // Update is called once per frame
        void Update()
        {
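            // Drain the actions that the detection worker thread queued for the main thread.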
            lock (sync) {
                while (ExecuteOnMainThread.Count > 0)
                {
                    ExecuteOnMainThread.Dequeue().Invoke();
                }
            }

            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetDownScaleMat(webCamTextureToMatHelper.GetMat());

                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
                Imgproc.equalizeHist(grayMat, grayMat);

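                // Kick off a detection pass on a worker thread; 'detecting' keeps at most one pass in flight.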
                if (enable && !detecting)
                {
                    detecting = true;

                    grayMat.copyTo(grayMat4Thread);

                    StartThread(ThreadWorker);
                }

                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, grayMat);

                Rect[] rects;
                if (!isUsingSeparateDetection)
                {
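                    // Merged mode: push the worker thread's latest detections into the tracker
                    // and use its speed-corrected rects as the current face positions.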
                    if (didUpdateTheDetectionResult)
                    {
                        didUpdateTheDetectionResult = false;

                        rectangleTracker.UpdateTrackedObjects(detectionResult.toList());
                    }

                    rectangleTracker.GetObjects(resultObjects, true);

                    rects = rectangleTracker.CreateCorrectionBySpeedOfRects();

                    if (rects.Length > 0)
                    {
                        OpenCVForUnity.Rect rect = rects [0];

                        // Adjust to Dlib's result.
                        rect.y += (int)(rect.height * 0.1f);

                        //detect landmark points
                        List <Vector2> points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

                        UpdateARHeadTransform(points);
                    }
                }
                else
                {
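                    // Separate-detection mode: re-run the cascade only inside regions around
                    // the worker thread's latest detections or the tracker's predicted positions.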
                    if (didUpdateTheDetectionResult)
                    {
                        didUpdateTheDetectionResult = false;

                        //Debug.Log("process: get rectsWhereRegions were got from detectionResult");
                        rectsWhereRegions = detectionResult.toArray();
                    }
                    else
                    {
                        //Debug.Log("process: get rectsWhereRegions from previous positions");
                        rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects();
                    }

                    detectedObjectsInRegions.Clear();
                    if (rectsWhereRegions.Length > 0)
                    {
                        int len = rectsWhereRegions.Length;
                        for (int i = 0; i < len; i++)
                        {
                            detectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                        }
                    }

                    rectangleTracker.UpdateTrackedObjects(detectedObjectsInRegions);
                    rectangleTracker.GetObjects(resultObjects, true);

                    if (resultObjects.Count > 0)
                    {
                        OpenCVForUnity.Rect rect = resultObjects [0];

                        // Adjust to Dlib's result.
                        rect.y += (int)(rect.height * 0.1f);

                        //detect landmark points
                        List <Vector2> points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

                        UpdateARHeadTransform(points);
                    }
                }
            }
        }
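
Example #1 depends on several members the excerpt does not show: the sync lock object, the ExecuteOnMainThread queue, and the StartThread/ThreadWorker pair. Below is a minimal sketch of that hand-off pattern, assuming cascade is a CascadeClassifier, detectionResult is a MatOfRect field, and System.Collections.Generic is imported; it matches the names used above but is an illustration, not the sample's verbatim code.

        private readonly object sync = new object();
        private readonly Queue<System.Action> ExecuteOnMainThread = new Queue<System.Action>();

        // Run the given action on a thread-pool thread.
        private void StartThread(System.Action action)
        {
            System.Threading.ThreadPool.QueueUserWorkItem(_ => action());
        }

        private void ThreadWorker()
        {
            // Assumed detection step: run the cascade on the gray frame that
            // Update() copied into grayMat4Thread before starting the thread.
            MatOfRect faces = new MatOfRect();
            cascade.detectMultiScale(grayMat4Thread, faces);

            lock (sync)
            {
                // Publish the result; Update() drains this queue at the top of each frame.
                ExecuteOnMainThread.Enqueue(() =>
                {
                    detectionResult = faces;
                    didUpdateTheDetectionResult = true;
                    detecting = false;
                });
            }
        }
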
Code Example #2
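
This variant clips the frame to processingAreaRect, detects faces only inside that region (with either the Dlib detector or an OpenCV cascade), and draws landmarks into the clipped area before placing the rendering quad in front of the camera. Note that rgbaMatClipROI is a view into rgbaMat, so annotations drawn on it reappear in the blacked-out output frame.
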
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetDownScaleMat(webCamTextureToMatHelper.GetMat());

                Mat rgbaMatClipROI = new Mat(rgbaMat, processingAreaRect);

                rgbaMatClipROI.copyTo(processingAreaMat);

                // Fill the whole frame with transparent black (alpha 0).
                Imgproc.rectangle(rgbaMat, new Point(0, 0), new Point(rgbaMat.width(), rgbaMat.height()), new Scalar(0, 0, 0, 0), -1);

                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, processingAreaMat);

                // detect faces.
                List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect> ();
                if (isUsingDlibFaceDetecter)
                {
                    List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                    foreach (var unityRect in result)
                    {
                        detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                    }
                }
                else
                {
                    // convert image to greyscale.
                    Imgproc.cvtColor(processingAreaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                    Imgproc.equalizeHist(grayMat, grayMat);

                    cascade.detectMultiScale(grayMat, faces, 1.1, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(grayMat.cols() * 0.15, grayMat.cols() * 0.15), new OpenCVForUnity.Size());

                    detectResult = faces.toList();

                    // Adjust to Dlib's result.
                    foreach (OpenCVForUnity.Rect r in detectResult)
                    {
                        r.y += (int)(r.height * 0.1f);
                    }
                }

                foreach (var rect in detectResult)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

                    //draw landmark points
                    OpenCVForUnityUtils.DrawFaceLandmark(rgbaMatClipROI, points, new Scalar(0, 255, 0, 255), 2);

                    //draw face rect
                    OpenCVForUnityUtils.DrawFaceRect(rgbaMatClipROI, new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height), new Scalar(255, 0, 0, 255), 2);
                }

                Imgproc.putText(rgbaMatClipROI, "W:" + rgbaMatClipROI.width() + " H:" + rgbaMatClipROI.height() + " SO:" + Screen.orientation, new Point(5, rgbaMatClipROI.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 0, 0, 255), 1, Imgproc.LINE_AA, false);

                Imgproc.rectangle(rgbaMat, new Point(0, 0), new Point(rgbaMat.width(), rgbaMat.height()), new Scalar(255, 0, 0, 255), 2);

                // Draw the processing area rectangle.
                Imgproc.rectangle(rgbaMat, processingAreaRect.tl(), processingAreaRect.br(), new Scalar(255, 255, 0, 255), 2);

                OpenCVForUnity.Utils.fastMatToTexture2D(rgbaMat, texture);

                rgbaMatClipROI.Dispose();
            }

            if (webCamTextureToMatHelper.IsPlaying())
            {
                Matrix4x4 cameraToWorldMatrix = Camera.main.cameraToWorldMatrix;
                Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

                texture.wrapMode = TextureWrapMode.Clamp;

                quad_renderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);

                // Position the canvas object slightly in front
                // of the real world web camera.
                Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2);

                // Rotate the canvas object so that it faces the user.
                Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

                gameObject.transform.position = position;
                gameObject.transform.rotation = rotation;
            }
        }
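
Example #2 assumes a processingAreaRect field that has been computed elsewhere in the class. One plausible way to build it, a centered ROI covering a fixed fraction of the frame, is sketched below; the helper name and the 0.7 factor are illustrative assumptions, not the sample's code.

        // Hypothetical helper: a centered ROI covering 'scale' of the frame,
        // clamped inside the mat. Call it once the frame size is known, e.g.
        // in the web camera initialized callback.
        private OpenCVForUnity.Rect CreateProcessingAreaRect(Mat frame, float scale = 0.7f)
        {
            int w = (int)(frame.width() * scale);
            int h = (int)(frame.height() * scale);
            int x = (frame.width() - w) / 2;
            int y = (frame.height() - h) / 2;
            return new OpenCVForUnity.Rect(x, y, w, h);
        }
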
Code Example #3
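
The third example combines the previous two: worker-thread detection with rectangle tracking as in Example #1, plus optional debug rectangles and the camera-facing quad placement of Example #2.
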
        // Update is called once per frame
        void Update()
        {
            lock (sync) {
                while (ExecuteOnMainThread.Count > 0)
                {
                    ExecuteOnMainThread.Dequeue().Invoke();
                }
            }

            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetDownScaleMat(webCamTextureToMatHelper.GetMat());

                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
                Imgproc.equalizeHist(grayMat, grayMat);

                if (enable && !detecting)
                {
                    detecting = true;

                    grayMat.copyTo(grayMat4Thread);

                    StartThread(ThreadWorker);
                }

                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, grayMat);

                if (!isShowingWebCamImage)
                {
                    // Fill the whole frame with transparent black (alpha 0).
                    Imgproc.rectangle(rgbaMat, new Point(0, 0), new Point(rgbaMat.width(), rgbaMat.height()), new Scalar(0, 0, 0, 0), -1);
                }

                Rect[] rects;
                if (!isUsingSeparateDetection)
                {
                    if (didUpdateTheDetectionResult)
                    {
                        didUpdateTheDetectionResult = false;

                        rectangleTracker.UpdateTrackedObjects(detectionResult.toList());
                    }

                    rectangleTracker.GetObjects(resultObjects, true);

                    rects = rectangleTracker.CreateCorrectionBySpeedOfRects();

                    if (isShowingDetectedFaceRect)
                    {
                        for (int i = 0; i < rects.Length; i++)
                        {
                            //Debug.Log ("detected face[" + i + "] " + rects [i]);
                            Imgproc.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 1);
                        }
                    }

                    // Adjust to Dlib's result.
                    foreach (OpenCVForUnity.Rect r in resultObjects)
                    {
                        r.y += (int)(r.height * 0.1f);
                    }

                    foreach (var rect in resultObjects)
                    {
                        //detect landmark points
                        List <Vector2> points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

                        //draw landmark points
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);

                        if (isShowingDetectedFaceRect)
                        {
                            //draw face rect
                            OpenCVForUnityUtils.DrawFaceRect(rgbaMat, new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height), new Scalar(255, 0, 0, 255), 2);
                        }
                    }
                }
                else
                {
                    if (didUpdateTheDetectionResult)
                    {
                        didUpdateTheDetectionResult = false;

                        //Debug.Log("process: get rectsWhereRegions were got from detectionResult");
                        rectsWhereRegions = detectionResult.toArray();

                        if (isShowingDetectedFaceRect)
                        {
                            rects = rectsWhereRegions;
                            for (int i = 0; i < rects.Length; i++)
                            {
                                Imgproc.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 0, 255, 255), 1);
                            }
                        }
                    }
                    else
                    {
                        //Debug.Log("process: get rectsWhereRegions from previous positions");
                        rectsWhereRegions = rectangleTracker.CreateCorrectionBySpeedOfRects();

                        if (isShowingDetectedFaceRect)
                        {
                            rects = rectsWhereRegions;
                            for (int i = 0; i < rects.Length; i++)
                            {
                                Imgproc.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(0, 255, 0, 255), 1);
                            }
                        }
                    }

                    detectedObjectsInRegions.Clear();
                    if (rectsWhereRegions.Length > 0)
                    {
                        int len = rectsWhereRegions.Length;
                        for (int i = 0; i < len; i++)
                        {
                            detectInRegion(grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
                        }
                    }

                    rectangleTracker.UpdateTrackedObjects(detectedObjectsInRegions);
                    rectangleTracker.GetObjects(resultObjects, true);

                    if (isShowingDetectedFaceRect)
                    {
                        rects = resultObjects.ToArray();
                        for (int i = 0; i < rects.Length; i++)
                        {
                            //Debug.Log ("detected face[" + i + "] " + rects [i]);
                            Imgproc.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 1);
                        }
                    }

                    // Adjust to Dlib's result.
                    foreach (OpenCVForUnity.Rect r in resultObjects)
                    {
                        r.y += (int)(r.height * 0.1f);
                    }

                    foreach (var rect in resultObjects)
                    {
                        //detect landmark points
                        List <Vector2> points = faceLandmarkDetector.DetectLandmark(new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height));

                        //draw landmark points
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);

                        if (isShowingDetectedFaceRect)
                        {
                            //draw face rect
                            OpenCVForUnityUtils.DrawFaceRect(rgbaMat, new UnityEngine.Rect(rect.x, rect.y, rect.width, rect.height), new Scalar(255, 0, 0, 255), 2);
                        }
                    }
                }

                OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
            }

            if (webCamTextureToMatHelper.IsPlaying())
            {
                Matrix4x4 cameraToWorldMatrix = Camera.main.cameraToWorldMatrix;
                Matrix4x4 worldToCameraMatrix = cameraToWorldMatrix.inverse;

                texture.wrapMode = TextureWrapMode.Clamp;

                quad_renderer.sharedMaterial.SetMatrix("_WorldToCameraMatrix", worldToCameraMatrix);

                // Position the canvas object slightly in front
                // of the real world web camera.
                Vector3 position = cameraToWorldMatrix.GetColumn(3) - cameraToWorldMatrix.GetColumn(2);

                // Rotate the canvas object so that it faces the user.
                Quaternion rotation = Quaternion.LookRotation(-cameraToWorldMatrix.GetColumn(2), cameraToWorldMatrix.GetColumn(1));

                gameObject.transform.position = position;
                gameObject.transform.rotation = rotation;
            }
        }
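
Examples #1 and #3 call detectInRegion(grayMat, rect, detectedObjectsInRegions) to re-detect only around a previously tracked face. The excerpt does not include that helper; the sketch below shows the usual shape of such a method, with the padding factor and clamping logic as assumptions.

        // Hypothetical region-limited detection: pad the tracked rect, clamp it
        // to the image, run the cascade on that ROI only, and shift the hits
        // back into full-image coordinates.
        private void detectInRegion(Mat img, OpenCVForUnity.Rect region, List<OpenCVForUnity.Rect> detectedObjects)
        {
            int pad = (int)(region.width * 0.2f);  // assumed padding factor
            int x = Mathf.Max(region.x - pad, 0);
            int y = Mathf.Max(region.y - pad, 0);
            int w = Mathf.Min(region.width + 2 * pad, img.cols() - x);
            int h = Mathf.Min(region.height + 2 * pad, img.rows() - y);
            OpenCVForUnity.Rect searchRect = new OpenCVForUnity.Rect(x, y, w, h);

            using (Mat roi = new Mat(img, searchRect))
            using (MatOfRect hits = new MatOfRect())
            {
                cascade.detectMultiScale(roi, hits);

                foreach (OpenCVForUnity.Rect r in hits.toArray())
                {
                    r.x += searchRect.x;
                    r.y += searchRect.y;
                    detectedObjects.Add(r);
                }
            }
        }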