// Use this for initialization
        void Start()
        {
            Texture2D imgTexture = Resources.Load ("lena") as Texture2D;

            Mat imgMat = new Mat (imgTexture.height, imgTexture.width, CvType.CV_8UC4);

            Utils.texture2DToMat (imgTexture, imgMat);
            Debug.Log ("imgMat dst ToString " + imgMat.ToString ());

            //CascadeClassifier cascade = new CascadeClassifier (Utils.getFilePath ("lbpcascade_frontalface.xml"));
            CascadeClassifier cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));

            Mat grayMat = new Mat ();
            Imgproc.cvtColor (imgMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
            Imgproc.equalizeHist (grayMat, grayMat);

            MatOfRect faces = new MatOfRect ();

            if (cascade != null)
                cascade.detectMultiScale (grayMat, faces, 1.1, 2, 2,
                    new Size (20, 20), new Size ());

            OpenCVForUnity.Rect[] rects = faces.toArray ();
            for (int i = 0; i < rects.Length; i++) {
                Debug.Log ("detect faces " + rects [i]);

                Core.rectangle (imgMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
            }

            Texture2D texture = new Texture2D (imgMat.cols (), imgMat.rows (), TextureFormat.RGBA32, false);

            Utils.matToTexture2D (imgMat, texture);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
        }
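A quick aside on MatOfRect itself, since every example on this page revolves around it: it is a Mat of CV_32SC4 rows, one rectangle per row, with helpers for converting to and from managed collections. A minimal sketch, assuming the classic OpenCVForUnity namespace used above (the variable names are illustrative only):

        // Minimal MatOfRect round-trip sketch.
        MatOfRect rois = new MatOfRect ();
        rois.fromArray (new OpenCVForUnity.Rect (10, 10, 100, 100),
            new OpenCVForUnity.Rect (50, 60, 80, 80));

        OpenCVForUnity.Rect[] asArray = rois.toArray ();   // copy out as Rect[]
        List<OpenCVForUnity.Rect> asList = rois.toList (); // or as a List<Rect>
        Debug.Log ("rows " + rois.rows ());                // one row per rectangle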
Example 2
	public List<Point[]> detect (Mat im, float scaleFactor, int minNeighbours, OpenCVForUnity.Size minSize)
	{
		//convert image to greyscale
		Mat gray = null;
		if (im.channels () == 1) {
			gray = im;
		} else {
			gray = new Mat ();
			Imgproc.cvtColor (im, gray, Imgproc.COLOR_RGBA2GRAY);
		}


		using (Mat equalizeHistMat = new Mat ()) 
		using (MatOfRect faces = new MatOfRect ()) {
			
			Imgproc.equalizeHist (gray, equalizeHistMat);

			detector.detectMultiScale (equalizeHistMat, faces, scaleFactor, minNeighbours, 0
				| Objdetect.CASCADE_FIND_BIGGEST_OBJECT
				| Objdetect.CASCADE_SCALE_IMAGE, minSize, new Size ());
			
			
			if (faces.rows () < 1) {
				return new List<Point[]> ();
			}
			return convertMatOfRectToPoints (faces);
		}
				
	}
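A hedged call-site sketch for detect(); the enclosing tracker instance and the RGBA input Mat are assumptions, not part of the original snippet:

	// Hypothetical usage of detect(); 'tracker' and 'rgbaMat' are assumed to exist.
	List<Point[]> candidates = tracker.detect (rgbaMat, 1.1f, 2, new OpenCVForUnity.Size (30, 30));
	Debug.Log ("candidate point sets " + candidates.Count);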
				// Use this for initialization
				void Start ()
				{

						//initialize FaceTracker
						FaceTracker faceTracker = new FaceTracker (Utils.getFilePath ("tracker_model.json"));
						//initialize FaceTrackerParams
						FaceTrackerParams faceTrackerParams = new FaceTrackerParams ();


						gameObject.transform.localScale = new Vector3 (imgTexture.width, imgTexture.height, 1);
						Camera.main.orthographicSize = imgTexture.height / 2;
		
						Mat imgMat = new Mat (imgTexture.height, imgTexture.width, CvType.CV_8UC4);
		
						Utils.texture2DToMat (imgTexture, imgMat);
						Debug.Log ("imgMat dst ToString " + imgMat.ToString ());


						CascadeClassifier cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
						if (cascade.empty ()) {
								Debug.LogError ("cascade file is not loaded. Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
						}

						//convert image to greyscale
						Mat gray = new Mat ();
						Imgproc.cvtColor (imgMat, gray, Imgproc.COLOR_RGBA2GRAY);

		
						MatOfRect faces = new MatOfRect ();
		
						Imgproc.equalizeHist (gray, gray);
		
						cascade.detectMultiScale (gray, faces, 1.1f, 2, 0
//								                           | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
								| Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (gray.cols () * 0.05, gray.cols () * 0.05), new Size ());
		
						Debug.Log ("faces " + faces.dump ());
		
						if (faces.rows () > 0) {
								//add initial face points from MatOfRect
								faceTracker.addPoints (faces);
						}


						//track face points. If face points <= 0, this always returns false.
						if (faceTracker.track (imgMat, faceTrackerParams))
								faceTracker.draw (imgMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));


		
						Texture2D texture = new Texture2D (imgMat.cols (), imgMat.rows (), TextureFormat.RGBA32, false);
		
		
						Utils.matToTexture2D (imgMat, texture);
		
						gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
				}
Example 4
	public List<Point[]> convertMatOfRectToPoints (MatOfRect rects)
	{
		List<OpenCVForUnity.Rect> R = rects.toList ();
		
		List<Point[]> points = new List<Point[]> (R.Count);
		
		foreach (var r in R) {
			
			Vector3 scale = detector_offset * r.width;
			int n = reference.rows () / 2;
			Point[] p = new Point[n];
			for (int i = 0; i < n; i++) {
				p [i] = new Point ();
				p [i].x = scale.z * reference.get (2 * i, 0) [0] + r.x + 0.5 * r.width + scale.x;
				p [i].y = scale.z * reference.get (2 * i + 1, 0) [0] + r.y + 0.5 * r.height + scale.y;
			}
			
			points.Add (p);
		}

		return points;
	}
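Note that convertMatOfRectToPoints reads two fields the snippet never declares: reference, a 2n x 1 Mat holding interleaved (x, y) coordinates of a reference shape, and detector_offset, a Vector3 whose x/y components translate and whose z component scales those coordinates. Their assumed declarations, inferred from usage:

	// Assumed fields (inferred from the method body above; types are a best guess):
	Mat reference;           // 2n x 1 Mat of interleaved x/y reference-shape coordinates
	Vector3 detector_offset; // x/y: translation per unit of rect width; z: scale factor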
        private void detectInRegion(Mat img, Rect r, List <Rect> detectedObjectsInRegions)
        {
            Rect r0 = new Rect(new Point(), img.size());
            Rect r1 = new Rect(r.x, r.y, r.width, r.height);

            Inflate(r1, (int)((r1.width * innerParameters.coeffTrackingWindowSize) - r1.width) / 2,
                    (int)((r1.height * innerParameters.coeffTrackingWindowSize) - r1.height) / 2);
            r1 = Intersect(r0, r1);

            if ((r1.width <= 0) || (r1.height <= 0))
            {
                Debug.Log("DetectionBasedTracker::detectInRegion: Empty intersection");
                return;
            }


            int d = Math.Min(r.width, r.height);

            d = (int)Math.Round(d * innerParameters.coeffObjectSizeToTrack);


            MatOfRect tmpobjects = new MatOfRect();

            Mat img1 = new Mat(img, r1); //subimage for rectangle -- without data copying

            regionCascade.detectMultiScale(img1, tmpobjects, 1.1, 2, 0 | Objdetect.CASCADE_DO_CANNY_PRUNING | Objdetect.CASCADE_SCALE_IMAGE | Objdetect.CASCADE_FIND_BIGGEST_OBJECT, new Size(d, d), new Size());


            Rect[] tmpobjectsArray = tmpobjects.toArray();
            int    len             = tmpobjectsArray.Length;

            for (int i = 0; i < len; i++)
            {
                Rect tmp    = tmpobjectsArray [i];
                Rect curres = new Rect(new Point(tmp.x + r1.x, tmp.y + r1.y), tmp.size());
                detectedObjectsInRegions.Add(curres);
            }
        }
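Inflate and Intersect are called above but never shown. A minimal sketch of what they plausibly do, modeled on OpenCV's DetectionBasedTracker; the signatures are assumptions:

        // Assumed helper: grow r by dx/dy on each side (Rect is a reference type,
        // so mutating it in place is visible to the caller).
        private static void Inflate(Rect r, int dx, int dy)
        {
            r.x -= dx;
            r.y -= dy;
            r.width += 2 * dx;
            r.height += 2 * dy;
        }

        // Assumed helper: clip r1 to r0; a non-positive width/height signals an
        // empty intersection, which the caller above checks for.
        private static Rect Intersect(Rect r0, Rect r1)
        {
            int x1 = Math.Max(r0.x, r1.x);
            int y1 = Math.Max(r0.y, r1.y);
            int x2 = Math.Min(r0.x + r0.width, r1.x + r1.width);
            int y2 = Math.Min(r0.y + r0.height, r1.y + r1.height);
            return new Rect(x1, y1, x2 - x1, y2 - y1);
        }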
Example 6
        // Update is called once per frame
        void Update()
        {
            //Loop play
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            //Avoids the "PlayerLoop called recursively!" error on iOS; WebCamTexture is recommended there instead.
            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);

                //Debug.Log ("Mat toString " + rgbMat.ToString ());

                using (MatOfRect locations = new MatOfRect())
                    using (MatOfDouble weights = new MatOfDouble()) {
                        des.setSVMDetector(HOGDescriptor.getDefaultPeopleDetector());
                        des.detectMultiScale(rgbMat, locations, weights);

                        OpenCVForUnity.CoreModule.Rect[] rects = locations.toArray();
                        for (int i = 0; i < rects.Length; i++)
                        {
                            //Debug.Log ("detected person " + rects [i]);
                            Imgproc.rectangle(rgbMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0), 2);
                        }
                        //Debug.Log (locations.ToString ());
                        //Debug.Log (weights.ToString ());
                    }


                Utils.fastMatToTexture2D(rgbMat, texture);

                gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
            }
        }
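This Update() leans on fields (capture, des, rgbMat, texture) initialized elsewhere. A hedged sketch of the setup it implies; the clip path and texture size are placeholders. Moving setSVMDetector here, rather than calling it every frame as above, would also avoid redundant work:

        // Hypothetical Start() pairing for the HOG people-detection loop above.
        VideoCapture capture;
        HOGDescriptor des;
        Mat rgbMat;
        Texture2D texture;

        void Start()
        {
            capture = new VideoCapture();
            capture.open(Utils.getFilePath("768x576_mjpeg.mjpeg")); // placeholder clip
            des = new HOGDescriptor();
            des.setSVMDetector(HOGDescriptor.getDefaultPeopleDetector());
            rgbMat = new Mat();
            // Size must match the decoded frames; 640x480 is only a placeholder.
            texture = new Texture2D(640, 480, TextureFormat.RGBA32, false);
        }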
Example 7
    public List <Point[]> convertMatOfRectToPoints(MatOfRect rects)
    {
        List <OpenCVForUnity.Rect> R = rects.toList();

        List <Point[]> points = new List <Point[]> (R.Count);

        foreach (var r in R)
        {
            Vector3 scale = detector_offset * r.width;
            int     n     = reference.rows() / 2;
            Point[] p     = new Point[n];
            for (int i = 0; i < n; i++)
            {
                p [i]   = new Point();
                p [i].x = scale.z * reference.get(2 * i, 0) [0] + r.x + 0.5 * r.width + scale.x;
                p [i].y = scale.z * reference.get(2 * i + 1, 0) [0] + r.y + 0.5 * r.height + scale.y;
            }

            points.Add(p);
        }

        return(points);
    }
Example 8
        //
        // C++:  void cv::ximgproc::EdgeBoxes::getBoundingBoxes(Mat edge_map, Mat orientation_map, vector_Rect& boxes, Mat& scores = Mat())
        //

        /**
         * Returns array containing proposal boxes.
         *
         *     param edge_map edge image.
         *     param orientation_map orientation map.
         *     param boxes proposal boxes.
         *     param scores scores of the proposal boxes, provided as a vector of float types.
         */
        public void getBoundingBoxes(Mat edge_map, Mat orientation_map, MatOfRect boxes, Mat scores)
        {
            ThrowIfDisposed();
            if (edge_map != null)
            {
                edge_map.ThrowIfDisposed();
            }
            if (orientation_map != null)
            {
                orientation_map.ThrowIfDisposed();
            }
            if (boxes != null)
            {
                boxes.ThrowIfDisposed();
            }
            if (scores != null)
            {
                scores.ThrowIfDisposed();
            }
            Mat boxes_mat = boxes;

            ximgproc_EdgeBoxes_getBoundingBoxes_10(nativeObj, edge_map.nativeObj, orientation_map.nativeObj, boxes_mat.nativeObj, scores.nativeObj);
        }
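A hedged usage sketch for this wrapper. It assumes an EdgeBoxes instance obtained from Ximgproc.createEdgeBoxes() and precomputed edge/orientation maps (typically from a structured-edge detector), none of which appear in the snippet:

        // Hypothetical call site; edgeMap and orientationMap are assumed CV_32F inputs.
        EdgeBoxes edgeBoxes = Ximgproc.createEdgeBoxes();
        edgeBoxes.setMaxBoxes(30);

        MatOfRect boxes = new MatOfRect();
        Mat scores = new Mat();
        edgeBoxes.getBoundingBoxes(edgeMap, orientationMap, boxes, scores);

        foreach (var r in boxes.toList())
            Debug.Log("proposal " + r);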
Example 9
        //
        // C++:  void cv::motempl::segmentMotion(Mat mhi, Mat& segmask, vector_Rect& boundingRects, double timestamp, double segThresh)
        //

        //javadoc: segmentMotion(mhi, segmask, boundingRects, timestamp, segThresh)
        public static void segmentMotion(Mat mhi, Mat segmask, MatOfRect boundingRects, double timestamp, double segThresh)
        {
            if (mhi != null)
            {
                mhi.ThrowIfDisposed();
            }
            if (segmask != null)
            {
                segmask.ThrowIfDisposed();
            }
            if (boundingRects != null)
            {
                boundingRects.ThrowIfDisposed();
            }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
            Mat boundingRects_mat = boundingRects;
            optflow_Optflow_segmentMotion_10(mhi.nativeObj, segmask.nativeObj, boundingRects_mat.nativeObj, timestamp, segThresh);

            return;
#else
            return;
#endif
        }
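Judging from the native stub name (optflow_Optflow_segmentMotion_10), this wrapper lives on an Optflow class. A hedged usage sketch; the motion-history image mhi is assumed to be maintained elsewhere, e.g. via updateMotionHistory:

        // Hypothetical call site; 'mhi' is an assumed CV_32FC1 motion-history image.
        Mat segmask = new Mat();
        MatOfRect boundingRects = new MatOfRect();
        double timestamp = Time.time; // must use the same clock that built mhi
        double segThresh = 0.5;       // max drop between neighboring mhi pixels in one segment

        Optflow.segmentMotion(mhi, segmask, boundingRects, timestamp, segThresh);

        foreach (var r in boundingRects.toList())
            Debug.Log("moving region " + r);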
        private Mat get_template(CascadeClassifier clssfr, Rect area, int size)
        {
            Mat       template     = new Mat();
            Mat       mROI         = mGray.Submat(area);
            MatOfRect eyes         = new MatOfRect();
            Point     iris         = new Point();
            Rect      eye_template = new Rect();

            clssfr.DetectMultiScale(mROI, eyes, 1.15, 2, Objdetect.CascadeFindBiggestObject | Objdetect.CascadeScaleImage,
                                    new Size(30, 30), new Size());

            Rect[] eyesArray = eyes.ToArray();
            for (int i = 0; i < eyesArray.Length;)
            {
                Rect e = eyesArray[i];
                e.X = area.X + e.X;
                e.Y = area.Y + e.Y;
                Rect eye_only_rectangle = new Rect((int)e.Tl().X,
                                                   (int)(e.Tl().Y + e.Height * 0.4), (int)e.Width,
                                                   (int)(e.Height * 0.6));
                mROI = mGray.Submat(eye_only_rectangle);
                Mat vyrez = mRgba.Submat(eye_only_rectangle);

                Core.MinMaxLocResult mmG = Core.MinMaxLoc(mROI);

                Imgproc.Circle(vyrez, mmG.MinLoc, 2, new Scalar(255, 255, 255, 255), 2);
                iris.X = mmG.MinLoc.X + eye_only_rectangle.X;
                iris.Y = mmG.MinLoc.Y + eye_only_rectangle.Y;

                eye_template = new Rect((int)iris.X - size / 2, (int)iris.Y - size / 2, size, size);

                Imgproc.Rectangle(mRgba, eye_template.Tl(), eye_template.Br(), new Scalar(255, 0, 0, 255), 2);
                template = (mGray.Submat(eye_template)).Clone();
                return(template);
            }
            return(template);
        }
        private IEnumerator init()
        {
            while (true)
            {
                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                if (webCamTexture.didUpdateThisFrame)
                {
                    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

                    colors = new Color32[webCamTexture.width * webCamTexture.height];

                    rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                    grayMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

                    texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                    gameObject.transform.eulerAngles = new Vector3(0, 0, -90);
                    gameObject.transform.localScale  = new Vector3(webCamTexture.width, webCamTexture.height, 1);


                    cascade = new CascadeClassifier(Utils.getFilePath("haarcascade_frontalface_alt.xml"));
                    faces   = new MatOfRect();

                    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

                    Camera.main.orthographicSize = webCamTexture.width / 2;

                    initDone = true;

                    break;
                }
                else
                {
                    yield return(0);
                }
            }
        }
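A hedged note on wiring: a coroutine like init() is typically started from Start() after the camera begins playing; the sketch below is an assumption, not part of the original:

        // Hypothetical entry point for the init() coroutine above.
        void Start()
        {
            webCamTexture = new WebCamTexture();
            webCamTexture.Play();
            StartCoroutine(init());
        }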
        private void DetectFaces(Mat rgbaMat, out List <Rect> detectResult, bool useDlibFaceDetecter)
        {
            detectResult = new List <Rect>();

            if (useDlibFaceDetecter)
            {
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
                List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                foreach (var unityRect in result)
                {
                    detectResult.Add(new Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                }
            }
            else
            {
                // convert image to greyscale.
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                using (Mat equalizeHistMat = new Mat())
                    using (MatOfRect faces = new MatOfRect())
                    {
                        Imgproc.equalizeHist(grayMat, equalizeHistMat);

                        cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                        detectResult = faces.toList();

                        // correct the deviation of the detection result of the face rectangle of OpenCV and Dlib.
                        foreach (Rect r in detectResult)
                        {
                            r.y += (int)(r.height * 0.1f);
                        }
                    }
            }
        }
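A hedged call-site sketch for DetectFaces(); the fields it reads (faceLandmarkDetector, grayMat, cascade) and the rgbaMat argument are assumed initialized elsewhere:

            // Hypothetical usage of DetectFaces(); the drawing step is illustrative.
            List<Rect> detected;
            DetectFaces(rgbaMat, out detected, useDlibFaceDetecter);
            foreach (var r in detected)
                Imgproc.rectangle(rgbaMat, r.tl(), r.br(), new Scalar(255, 0, 0, 255), 2);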
Example 13
    // Detects faces in the given image and draws the results.
    public void Run(Mat imgMat)
    {
        Mat grayMat = new Mat();

        Imgproc.cvtColor(imgMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
        Imgproc.equalizeHist(grayMat, grayMat);


        MatOfRect faces = new MatOfRect();

        if (cascade != null)
        {
            cascade.detectMultiScale(grayMat, faces, 1.1, 2, 2,
                                     new Size(20, 20), new Size());
        }

        OpenCVForUnity.CoreModule.Rect[] rects = faces.toArray();
        for (int i = 0; i < rects.Length; i++)
        {
            Debug.Log("detect faces " + rects[i]);

            Imgproc.rectangle(imgMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0, 255), 2);
        }
    }
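Run() silently does nothing when the cascade field is null; a hedged initialization sketch, reusing the cascade path from earlier examples on this page:

    // Hypothetical setup for Run(); 'cascade' is the field the null check guards.
    CascadeClassifier cascade;

    void Start()
    {
        cascade = new CascadeClassifier(Utils.getFilePath("haarcascade_frontalface_alt.xml"));
        if (cascade.empty())
            Debug.LogError("cascade file is not loaded.");
    }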
Example 14
        //
        // C++:  void cv::ximgproc::EdgeBoxes::getBoundingBoxes(Mat edge_map, Mat orientation_map, vector_Rect& boxes)
        //

        //javadoc: EdgeBoxes::getBoundingBoxes(edge_map, orientation_map, boxes)
        public void getBoundingBoxes(Mat edge_map, Mat orientation_map, MatOfRect boxes)
        {
            ThrowIfDisposed();
            if (edge_map != null)
            {
                edge_map.ThrowIfDisposed();
            }
            if (orientation_map != null)
            {
                orientation_map.ThrowIfDisposed();
            }
            if (boxes != null)
            {
                boxes.ThrowIfDisposed();
            }
#if ((UNITY_ANDROID || UNITY_IOS || UNITY_WEBGL) && !UNITY_EDITOR) || UNITY_5 || UNITY_5_3_OR_NEWER
            Mat boxes_mat = boxes;
            ximgproc_EdgeBoxes_getBoundingBoxes_10(nativeObj, edge_map.nativeObj, orientation_map.nativeObj, boxes_mat.nativeObj);

            return;
#else
            return;
#endif
        }
Example 15
        // Update is called once per frame
        void Update()
        {
            // loop play.
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                //Debug.Log ("Mat toString " + rgbMat.ToString ());


                // detect faces.
                List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect> ();
                if (useDlibFaceDetecter)
                {
                    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
                    List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                    foreach (var unityRect in result)
                    {
                        detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                    }
                }
                else
                {
                    // convert image to greyscale.
                    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);

                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect()) {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                            detectResult = faces.toList();

                            // adjust to Dlib's result.
                            foreach (OpenCVForUnity.Rect r in detectResult)
                            {
                                r.y += (int)(r.height * 0.1f);
                            }
                        }
                }

                // face tracking.
                rectangleTracker.UpdateTrackedObjects(detectResult);
                List <TrackedRect> trackedRects = new List <TrackedRect> ();
                rectangleTracker.GetObjects(trackedRects, true);

                // create noise filter.
                foreach (var openCVRect in trackedRects)
                {
                    if (openCVRect.state == TrackedState.NEW)
                    {
                        if (!lowPassFilterDict.ContainsKey(openCVRect.id))
                        {
                            lowPassFilterDict.Add(openCVRect.id, new LowPassPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
                        }
                        if (!opticalFlowFilterDict.ContainsKey(openCVRect.id))
                        {
                            opticalFlowFilterDict.Add(openCVRect.id, new OFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
                        }
                    }
                    else if (openCVRect.state == TrackedState.DELETED)
                    {
                        if (lowPassFilterDict.ContainsKey(openCVRect.id))
                        {
                            lowPassFilterDict [openCVRect.id].Dispose();
                            lowPassFilterDict.Remove(openCVRect.id);
                        }
                        if (opticalFlowFilterDict.ContainsKey(openCVRect.id))
                        {
                            opticalFlowFilterDict [openCVRect.id].Dispose();
                            opticalFlowFilterDict.Remove(openCVRect.id);
                        }
                    }
                }

                // create LUT texture.
                foreach (var openCVRect in trackedRects)
                {
                    if (openCVRect.state == TrackedState.NEW)
                    {
                        faceMaskColorCorrector.CreateLUTTex(openCVRect.id);
                    }
                    else if (openCVRect.state == TrackedState.DELETED)
                    {
                        faceMaskColorCorrector.DeleteLUTTex(openCVRect.id);
                    }
                }


                // detect face landmark points.
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
                List <List <Vector2> > landmarkPoints = new List <List <Vector2> > ();
                for (int i = 0; i < trackedRects.Count; i++)
                {
                    TrackedRect      tr   = trackedRects [i];
                    UnityEngine.Rect rect = new UnityEngine.Rect(tr.x, tr.y, tr.width, tr.height);

                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                    // apply noise filter.
                    if (enableNoiseFilter)
                    {
                        if (tr.state > TrackedState.NEW && tr.state < TrackedState.DELETED)
                        {
                            opticalFlowFilterDict [tr.id].Process(rgbMat, points, points);
                            lowPassFilterDict [tr.id].Process(rgbMat, points, points);
                        }
                    }

                    landmarkPoints.Add(points);
                }

                // face masking.
                if (faceMaskTexture != null && landmarkPoints.Count >= 1)   // Apply face masking between detected faces and a face mask image.
                {
                    float maskImageWidth  = faceMaskTexture.width;
                    float maskImageHeight = faceMaskTexture.height;

                    TrackedRect tr;

                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        tr = trackedRects [i];

                        if (tr.state == TrackedState.NEW)
                        {
                            meshOverlay.CreateObject(tr.id, faceMaskTexture);
                        }
                        if (tr.state < TrackedState.DELETED)
                        {
                            MaskFace(meshOverlay, tr, landmarkPoints [i], faceLandmarkPointsInMask, maskImageWidth, maskImageHeight);

                            if (enableColorCorrection)
                            {
                                CorrectFaceMaskColor(tr.id, faceMaskMat, rgbMat, faceLandmarkPointsInMask, landmarkPoints [i]);
                            }
                        }
                        else if (tr.state == TrackedState.DELETED)
                        {
                            meshOverlay.DeleteObject(tr.id);
                        }
                    }
                }
                else if (landmarkPoints.Count >= 1)     // Apply face masking between detected faces.
                {
                    float maskImageWidth  = texture.width;
                    float maskImageHeight = texture.height;

                    TrackedRect tr;

                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        tr = trackedRects [i];

                        if (tr.state == TrackedState.NEW)
                        {
                            meshOverlay.CreateObject(tr.id, texture);
                        }
                        if (tr.state < TrackedState.DELETED)
                        {
                            MaskFace(meshOverlay, tr, landmarkPoints [i], landmarkPoints [0], maskImageWidth, maskImageHeight);

                            if (enableColorCorrection)
                            {
                                CorrectFaceMaskColor(tr.id, rgbMat, rgbMat, landmarkPoints [0], landmarkPoints [i]);
                            }
                        }
                        else if (tr.state == TrackedState.DELETED)
                        {
                            meshOverlay.DeleteObject(tr.id);
                        }
                    }
                }

                // draw face rects.
                if (displayFaceRects)
                {
                    for (int i = 0; i < detectResult.Count; i++)
                    {
                        UnityEngine.Rect rect = new UnityEngine.Rect(detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 0, 0, 255), 2);
                    }

                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        UnityEngine.Rect rect = new UnityEngine.Rect(trackedRects [i].x, trackedRects [i].y, trackedRects [i].width, trackedRects [i].height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 255, 0, 255), 2);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceChecker.GetFrontalFaceAngles (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceChecker.GetFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    }
                }

                // draw face points.
                if (displayDebugFacePoints)
                {
                    for (int i = 0; i < landmarkPoints.Count; i++)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, landmarkPoints [i], new Scalar(0, 255, 0, 255), 2);
                    }
                }


                // display face mask image.
                if (faceMaskTexture != null && faceMaskMat != null)
                {
                    if (displayFaceRects)
                    {
                        OpenCVForUnityUtils.DrawFaceRect(faceMaskMat, faceRectInMask, new Scalar(255, 0, 0, 255), 2);
                    }
                    if (displayDebugFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(faceMaskMat, faceLandmarkPointsInMask, new Scalar(0, 255, 0, 255), 2);
                    }

                    float scale = (rgbMat.width() / 4f) / faceMaskMat.width();
                    float tx    = rgbMat.width() - faceMaskMat.width() * scale;
                    float ty    = 0.0f;
                    Mat   trans = new Mat(2, 3, CvType.CV_32F);//1.0, 0.0, tx, 0.0, 1.0, ty);
                    trans.put(0, 0, scale);
                    trans.put(0, 1, 0.0f);
                    trans.put(0, 2, tx);
                    trans.put(1, 0, 0.0f);
                    trans.put(1, 1, scale);
                    trans.put(1, 2, ty);

                    Imgproc.warpAffine(faceMaskMat, rgbMat, trans, rgbMat.size(), Imgproc.INTER_LINEAR, Core.BORDER_TRANSPARENT, new Scalar(0));

                    if (displayFaceRects || displayDebugFacePointsToggle)
                    {
                        OpenCVForUnity.Utils.texture2DToMat(faceMaskTexture, faceMaskMat);
                    }
                }

//                Imgproc.putText (rgbMat, "W:" + rgbMat.width () + " H:" + rgbMat.height () + " SO:" + Screen.orientation, new Point (5, rgbMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.fastMatToTexture2D(rgbMat, texture);
            }
        }
				private IEnumerator waitThreadDoneCoroutine ()
				{
						while (true) {
								if (comm.done) {

										//Debug.Log("Thread done.");

										isThreadRunning = false;
										resultDetect = comm.result;

										break;
								} else {
										yield return null;
								}
						}
				}
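waitThreadDoneCoroutine polls a comm object the snippet never defines. A minimal sketch of the contract it implies; the class name and the result type are guesses:

				// Hypothetical shared state between the worker thread and the main thread.
				class ThreadComm
				{
						public volatile bool done = false; // set by the worker when it finishes
						public MatOfRect result;           // written by the worker before 'done'
				}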
Example 17
        private void Run()
        {
            meshOverlay = this.GetComponent <TrackedMeshOverlay> ();

            displayFaceRectsToggle.isOn       = displayFaceRects;
            useDlibFaceDetecterToggle.isOn    = useDlibFaceDetecter;
            enableColorCorrectionToggle.isOn  = enableColorCorrection;
            filterNonFrontalFacesToggle.isOn  = filterNonFrontalFaces;
            displayDebugFacePointsToggle.isOn = displayDebugFacePoints;

            if (imgTexture == null)
            {
                imgTexture = Resources.Load("family") as Texture2D;
            }

            gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            meshOverlay.UpdateOverlayTransform(gameObject.transform);
            meshOverlay.Reset();


            float width  = 0;
            float height = 0;

            width  = gameObject.transform.localScale.x;
            height = gameObject.transform.localScale.y;

            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }

            Mat rgbaMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);

            OpenCVForUnity.Utils.texture2DToMat(imgTexture, rgbaMat);
            Debug.Log("rgbaMat ToString " + rgbaMat.ToString());

            if (faceLandmarkDetector == null)
            {
                faceLandmarkDetector = new FaceLandmarkDetector(sp_human_face_68_dat_filepath);
            }

            faceMaskColorCorrector = faceMaskColorCorrector ?? new FaceMaskColorCorrector();
            FrontalFaceChecker frontalFaceChecker = new FrontalFaceChecker(width, height);

            // detect faces.
            List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect> ();

            if (useDlibFaceDetecter)
            {
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
                List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                foreach (var unityRect in result)
                {
                    detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                }
            }
            else
            {
                if (cascade == null)
                {
                    cascade = new CascadeClassifier(haarcascade_frontalface_alt_xml_filepath);
                }
//                if (cascade.empty ()) {
//                    Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
//                }

                // convert image to greyscale.
                Mat gray = new Mat();
                Imgproc.cvtColor(rgbaMat, gray, Imgproc.COLOR_RGBA2GRAY);

                MatOfRect faces = new MatOfRect();
                Imgproc.equalizeHist(gray, gray);
                cascade.detectMultiScale(gray, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(gray.cols() * 0.05, gray.cols() * 0.05), new Size());
                //Debug.Log ("faces " + faces.dump ());

                detectResult = faces.toList();

                // adjust to Dlib's result.
                foreach (OpenCVForUnity.Rect r in detectResult)
                {
                    r.y += (int)(r.height * 0.1f);
                }

                gray.Dispose();
            }

            // detect face landmark points.
            OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
            List <List <Vector2> > landmarkPoints = new List <List <Vector2> > ();

            foreach (var openCVRect in detectResult)
            {
                UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);

                Debug.Log("face : " + rect);
                //OpenCVForUnityUtils.DrawFaceRect(imgMat, rect, new Scalar(255, 0, 0, 255), 2);

                List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
                //OpenCVForUnityUtils.DrawFaceLandmark(imgMat, points, new Scalar(0, 255, 0, 255), 2);
                landmarkPoints.Add(points);
            }

            // mask faces.
            int[] face_nums = new int[landmarkPoints.Count];
            for (int i = 0; i < face_nums.Length; i++)
            {
                face_nums [i] = i;
            }
            face_nums = face_nums.OrderBy(i => System.Guid.NewGuid()).ToArray();

            float imageWidth      = meshOverlay.width;
            float imageHeight     = meshOverlay.height;
            float maskImageWidth  = imgTexture.width;
            float maskImageHeight = imgTexture.height;

            TrackedMesh tm;

            for (int i = 0; i < face_nums.Length; i++)
            {
                meshOverlay.CreateObject(i, imgTexture);
                tm = meshOverlay.GetObjectById(i);

                Vector3[] vertices = tm.meshFilter.mesh.vertices;
                if (vertices.Length == landmarkPoints [face_nums [i]].Count)
                {
                    for (int j = 0; j < vertices.Length; j++)
                    {
                        vertices [j].x = landmarkPoints [face_nums [i]] [j].x / imageWidth - 0.5f;
                        vertices [j].y = 0.5f - landmarkPoints [face_nums [i]] [j].y / imageHeight;
                    }
                }
                Vector2[] uv = tm.meshFilter.mesh.uv;
                if (uv.Length == landmarkPoints [face_nums [0]].Count)
                {
                    for (int jj = 0; jj < uv.Length; jj++)
                    {
                        uv [jj].x = landmarkPoints [face_nums [0]] [jj].x / maskImageWidth;
                        uv [jj].y = (maskImageHeight - landmarkPoints [face_nums [0]] [jj].y) / maskImageHeight;
                    }
                }
                meshOverlay.UpdateObject(i, vertices, null, uv);

                if (enableColorCorrection)
                {
                    faceMaskColorCorrector.CreateLUTTex(i);
                    Texture2D LUTTex = faceMaskColorCorrector.UpdateLUTTex(i, rgbaMat, rgbaMat, landmarkPoints [face_nums [0]], landmarkPoints [face_nums [i]]);
                    tm.sharedMaterial.SetTexture("_LUTTex", LUTTex);
                    tm.sharedMaterial.SetFloat("_ColorCorrection", 1f);
                }
                else
                {
                    tm.sharedMaterial.SetFloat("_ColorCorrection", 0f);
                }

                // filter non frontal faces.
                if (filterNonFrontalFaces && frontalFaceChecker.GetFrontalFaceRate(landmarkPoints [i]) < frontalFaceRateLowerLimit)
                {
                    tm.sharedMaterial.SetFloat("_Fade", 1f);
                }
                else
                {
                    tm.sharedMaterial.SetFloat("_Fade", 0.3f);
                }
            }

            // draw face rects.
            if (displayFaceRects)
            {
                int ann = face_nums[0];
                UnityEngine.Rect rect_ann = new UnityEngine.Rect(detectResult [ann].x, detectResult [ann].y, detectResult [ann].width, detectResult [ann].height);
                OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect_ann, new Scalar(255, 255, 0, 255), 2);

                int bob = 0;
                for (int i = 1; i < face_nums.Length; i++)
                {
                    bob = face_nums [i];
                    UnityEngine.Rect rect_bob = new UnityEngine.Rect(detectResult [bob].x, detectResult [bob].y, detectResult [bob].width, detectResult [bob].height);
                    OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect_bob, new Scalar(255, 0, 0, 255), 2);
                }
            }

            // draw face points.
            if (displayDebugFacePoints)
            {
                for (int i = 0; i < landmarkPoints.Count; i++)
                {
                    OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, landmarkPoints [i], new Scalar(0, 255, 0, 255), 2);
                }
            }


            Texture2D texture = new Texture2D(rgbaMat.cols(), rgbaMat.rows(), TextureFormat.RGBA32, false);

            OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture);
            gameObject.transform.GetComponent <Renderer> ().material.mainTexture = texture;

            frontalFaceChecker.Dispose();
            rgbaMat.Dispose();
        }
Example 18
        /**
         * Find groups of Extremal Regions that are organized as text blocks.
         *
         * param image Original RGB or greyscale image from which the regions were extracted.
         * param channel Single channel image from which the regions were extracted.
         * param regions Vector of ER's retrieved from the ERFilter algorithm from each channel.
         * param groups_rects The output of the algorithm, stored in this parameter as a list of rectangles.
         *
         * The omitted optional parameters (grouping method, classifier filename such as
         * samples/trained_classifier_erGrouping.xml, and minimum probability) only apply when the
         * grouping method is ERGROUPING_ORIENTATION_ANY; this overload uses the defaults.
         */
        public static void erGrouping(Mat image, Mat channel, List <MatOfPoint> regions, MatOfRect groups_rects)
        {
            if (image != null)
            {
                image.ThrowIfDisposed();
            }
            if (channel != null)
            {
                channel.ThrowIfDisposed();
            }
            if (groups_rects != null)
            {
                groups_rects.ThrowIfDisposed();
            }
            List <Mat> regions_tmplm    = new List <Mat>((regions != null) ? regions.Count : 0);
            Mat        regions_mat      = Converters.vector_vector_Point_to_Mat(regions, regions_tmplm);
            Mat        groups_rects_mat = groups_rects;

            text_Text_erGrouping_13(image.nativeObj, channel.nativeObj, regions_mat.nativeObj, groups_rects_mat.nativeObj);
        }
Example 19
        //
        // C++:  void cv::text::detectRegions(Mat image, Ptr_ERFilter er_filter1, Ptr_ERFilter er_filter2, vector_Rect& groups_rects, int method = ERGROUPING_ORIENTATION_HORIZ, String filename = String(), float minProbability = (float)0.5)
        //

        /**
         * Extracts text regions from image.
         *
         * param image Source image where text blocks needs to be extracted from.  Should be CV_8UC3 (color).
         * param er_filter1 Extremal Region Filter for the 1st stage classifier of N&amp;M algorithm CITE: Neumann12
         * param er_filter2 Extremal Region Filter for the 2nd stage classifier of N&amp;M algorithm CITE: Neumann12
         * param groups_rects Output list of rectangle blocks with text
         * param method Grouping method (see text::erGrouping_Modes). Can be one of ERGROUPING_ORIENTATION_HORIZ, ERGROUPING_ORIENTATION_ANY.
         * param filename The XML or YAML file with the classifier model (e.g. samples/trained_classifier_erGrouping.xml). Only to use when grouping method is ERGROUPING_ORIENTATION_ANY.
         * param minProbability The minimum probability for accepting a group. Only to use when grouping method is ERGROUPING_ORIENTATION_ANY.
         */
        public static void detectRegions(Mat image, ERFilter er_filter1, ERFilter er_filter2, MatOfRect groups_rects, int method, string filename, float minProbability)
        {
            if (image != null)
            {
                image.ThrowIfDisposed();
            }
            if (er_filter1 != null)
            {
                er_filter1.ThrowIfDisposed();
            }
            if (er_filter2 != null)
            {
                er_filter2.ThrowIfDisposed();
            }
            if (groups_rects != null)
            {
                groups_rects.ThrowIfDisposed();
            }
            Mat groups_rects_mat = groups_rects;

            text_Text_detectRegions_10(image.nativeObj, er_filter1.getNativeObjAddr(), er_filter2.getNativeObjAddr(), groups_rects_mat.nativeObj, method, filename, minProbability);
        }
        private void Run()
        {
            //if true, the error log of the native side OpenCV will be displayed on the Unity Editor Console.
            Utils.setDebugMode(true);


            Mat img = Imgcodecs.imread(scenetext01_jpg_filepath);

            #if !UNITY_WSA_10_0
            if (img.empty())
            {
                Debug.LogError("text/scenetext01.jpg is not loaded.Please copy from “OpenCVForUnity/StreamingAssets/text/” to “Assets/StreamingAssets/” folder. ");
            }
            #endif

            //# for visualization
            Mat vis = new Mat();
            img.copyTo(vis);
            Imgproc.cvtColor(vis, vis, Imgproc.COLOR_BGR2RGB);


            //# Extract channels to be processed individually
            List <Mat> channels = new List <Mat> ();
            Text.computeNMChannels(img, channels);

            //# Append negative channels to detect ER- (bright regions over dark background)
            int cn = channels.Count;
            for (int i = 0; i < cn; i++)
            {
                channels.Add(new Scalar(255) - channels [i]);
            }

            //# Apply the default cascade classifier to each independent channel (could be done in parallel)
            Debug.Log("Extracting Class Specific Extremal Regions from " + channels.Count + " channels ...");
            Debug.Log("    (...) this may take a while (...)");
            foreach (var channel in channels)
            {
                ERFilter er1 = Text.createERFilterNM1(trained_classifierNM1_xml_filepath, 16, 0.00015f, 0.13f, 0.2f, true, 0.1f);

                ERFilter er2 = Text.createERFilterNM2(trained_classifierNM2_xml_filepath, 0.5f);

                List <MatOfPoint> regions = new List <MatOfPoint> ();
                Text.detectRegions(channel, er1, er2, regions);

                MatOfRect matOfRects = new MatOfRect();
                Text.erGrouping(img, channel, regions, matOfRects);
//                Text.erGrouping (img, channel, regions, matOfRects, Text.ERGROUPING_ORIENTATION_ANY, Utils.getFilePath ("text/trained_classifier_erGrouping.xml"), 0.5f);

                List <OpenCVForUnity.Rect> rects = matOfRects.toList();

                //#Visualization
                foreach (var rect in rects)
                {
                    Imgproc.rectangle(vis, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(255, 0, 0), 2);
                    Imgproc.rectangle(vis, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y + rect.height), new Scalar(255, 255, 255), 1);
                }
            }

            Texture2D texture = new Texture2D(vis.cols(), vis.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(vis, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;


            Utils.setDebugMode(false);
        }
				// Update is called once per frame
				void Update ()
				{
						if (!initDone)
								return;


						if (screenOrientation != Screen.orientation) {
								screenOrientation = Screen.orientation;
								updateLayout ();
						}

						#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
				        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
						#else
						if (webCamTexture.didUpdateThisFrame) {
								#endif
								Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

								//flip to correct direction.
								if (webCamDevice.isFrontFacing) {
										if (webCamTexture.videoRotationAngle == 0) {
												Core.flip (rgbaMat, rgbaMat, 1);
										} else if (webCamTexture.videoRotationAngle == 90) {
												Core.flip (rgbaMat, rgbaMat, 0);
										}
										if (webCamTexture.videoRotationAngle == 180) {
												Core.flip (rgbaMat, rgbaMat, 0);
										} else if (webCamTexture.videoRotationAngle == 270) {
												Core.flip (rgbaMat, rgbaMat, 1);
										}
								} else {
										if (webCamTexture.videoRotationAngle == 180) {
												Core.flip (rgbaMat, rgbaMat, -1);
										} else if (webCamTexture.videoRotationAngle == 270) {
												Core.flip (rgbaMat, rgbaMat, -1);
										}
								}

								//convert image to greyscale
								Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);


								if (faceTracker.getPoints ().Count <= 0) {
										Debug.Log ("detectFace");

										//convert image to greyscale
										using (Mat equalizeHistMat = new Mat ()) 
										using (MatOfRect faces = new MatOfRect ()) {
			
												Imgproc.equalizeHist (grayMat, equalizeHistMat);
			
												cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0
//														                           | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
														| Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());
			

			
												if (faces.rows () > 0) {
														Debug.Log ("faces " + faces.dump ());
														//add initial face points from MatOfRect
														faceTracker.addPoints (faces);

														//draw face rect
														OpenCVForUnity.Rect[] rects = faces.toArray ();
														for (int i = 0; i < rects.Length; i++) {
																Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
														}
														
												}

										}

								}


								//track face points. If face points <= 0, this always returns false.
								if (faceTracker.track (grayMat, faceTrackerParams))
										faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));

								Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

			
								Utils.matToTexture2D (rgbaMat, texture, colors);
			
						}

						if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
								faceTracker.reset ();
						}
		
				}
	
				void OnDisable ()
				{
						webCamTexture.Stop ();
				}
        /// <summary>
        /// Raises the web cam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInited()
        {
            Debug.Log ("OnWebCamTextureToMatHelperInited");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();

            colors = new Color32[webCamTextureMat.cols () * webCamTextureMat.rows ()];
            texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);

            grayMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);
            cascade = new CascadeClassifier (Utils.getFilePath ("lbpcascade_frontalface.xml"));
            //cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
            if (cascade.empty ()) {
                Debug.LogError ("cascade file is not loaded. Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
            }
            faces = new MatOfRect ();

            gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);

            Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width = gameObject.transform.localScale.x;
            float height = gameObject.transform.localScale.y;

            float widthScale = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;
            if (widthScale < heightScale) {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            } else {
                Camera.main.orthographicSize = height / 2;
            }

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
        }
        private IEnumerator init()
        {
                        axes.SetActive (false);
                        head.SetActive (false);
                        rightEye.SetActive (false);
                        leftEye.SetActive (false);
                        mouth.SetActive (false);

                        if (webCamTexture != null) {
                                faceTracker.reset ();

                                webCamTexture.Stop ();
                                initDone = false;

                                rgbaMat.Dispose ();
                                grayMat.Dispose ();
                                cascade.Dispose ();
                                camMatrix.Dispose ();
                                distCoeffs.Dispose ();

                        }

                        // Checks how many and which cameras are available on the device
                        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {

                                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {

                                        Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                                        webCamDevice = WebCamTexture.devices [cameraIndex];

                                        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

                                        break;
                                }
                        }

                        if (webCamTexture == null) {
                                webCamDevice = WebCamTexture.devices [0];
                                webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
                        }

                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

                        // Starts the camera
                        webCamTexture.Play ();

                        while (true) {
                                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                                #else
                                if (webCamTexture.didUpdateThisFrame) {
                                        #endif
                                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                                        Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                                        colors = new Color32[webCamTexture.width * webCamTexture.height];

                                        rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                                        grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

                                        texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                                        cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
                                        if (cascade.empty ()) {
                                                Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
                                        }

                                        gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

                                        gameObject.transform.localEulerAngles = new Vector3 (0, 0, 0);
            //										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

            //										bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            //										float scaleX = 1;
            //										float scaleY = _videoVerticallyMirrored ? -1.0f : 1.0f;
            //										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

                                        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                                        Camera.main.orthographicSize = webCamTexture.height / 2;

                                        int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
                                        camMatrix = new Mat (3, 3, CvType.CV_64FC1);
                                        camMatrix.put (0, 0, max_d);
                                        camMatrix.put (0, 1, 0);
                                        camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
                                        camMatrix.put (1, 0, 0);
                                        camMatrix.put (1, 1, max_d);
                                        camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
                                        camMatrix.put (2, 0, 0);
                                        camMatrix.put (2, 1, 0);
                                        camMatrix.put (2, 2, 1.0f);

                                        Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
                                        double apertureWidth = 0;
                                        double apertureHeight = 0;
                                        double[] fovx = new double[1];
                                        double[] fovy = new double[1];
                                        double[] focalLength = new double[1];
                                        Point principalPoint = new Point ();
                                        double[] aspectratio = new double[1];

                                        Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

                                        Debug.Log ("imageSize " + imageSize.ToString ());
                                        Debug.Log ("apertureWidth " + apertureWidth);
                                        Debug.Log ("apertureHeight " + apertureHeight);
                                        Debug.Log ("fovx " + fovx [0]);
                                        Debug.Log ("fovy " + fovy [0]);
                                        Debug.Log ("focalLength " + focalLength [0]);
                                        Debug.Log ("principalPoint " + principalPoint.ToString ());
                                        Debug.Log ("aspectratio " + aspectratio [0]);

                                        ARCamera.fieldOfView = (float)fovy [0];

                                        Debug.Log ("camMatrix " + camMatrix.dump ());

                                        distCoeffs = new MatOfDouble (0, 0, 0, 0);
                                        Debug.Log ("distCoeffs " + distCoeffs.dump ());

                                        lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
                                        Debug.Log ("lookAt " + lookAtM.ToString ());

                                        invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));

                                        initDone = true;

                                        break;
                                } else {
                                        yield return 0;
                                }
                        }
                }
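
                // Note on the camera matrix built in init () above: it approximates a pinhole
                // camera with focal length f = max (imageWidth, imageHeight) and the principal
                // point at the image center. A minimal helper (hypothetical, not part of the
                // original sample) that builds the same 3x3 intrinsics matrix:
                private static Mat createApproxCameraMatrix (int imageWidth, int imageHeight)
                {
                        double f = Mathf.Max (imageWidth, imageHeight);
                        Mat k = Mat.eye (3, 3, CvType.CV_64FC1);
                        k.put (0, 0, f);                    // fx
                        k.put (1, 1, f);                    // fy
                        k.put (0, 2, imageWidth / 2.0);     // cx
                        k.put (1, 2, imageHeight / 2.0);    // cy
                        return k;
                }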

                // Update is called once per frame
                void Update ()
                {
                        if (!initDone)
                                return;

                        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                        #else
                        if (webCamTexture.didUpdateThisFrame) {
                                #endif

                                Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

                                //flip to correct direction.
                                if (webCamTexture.videoVerticallyMirrored) {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                } else if (webCamTexture.videoRotationAngle == 180) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 0) {

                                                } else if (webCamTexture.videoRotationAngle == 180) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                }
                                        }
                                } else {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                } else if (webCamTexture.videoRotationAngle == 180) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 0) {

                                                } else if (webCamTexture.videoRotationAngle == 180) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                }
                                        }
                                }
                                //convert image to greyscale
                                Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                                if (faceTracker.getPoints ().Count <= 0) {
                                        Debug.Log ("detectFace");

                                        //histogram-equalize a copy of the gray image and detect the biggest face
                                        using (Mat equalizeHistMat = new Mat ())
                                        using (MatOfRect faces = new MatOfRect ()) {

                                                Imgproc.equalizeHist (grayMat, equalizeHistMat);

                                                cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0
                                                        | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                                        | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());

                                                if (faces.rows () > 0) {
                                                        Debug.Log ("faces " + faces.dump ());
                                                        //add initial face points from MatOfRect
                                                        faceTracker.addPoints (faces);

                                                        //draw face rect
                                                        OpenCVForUnity.Rect[] rects = faces.toArray ();
                                                        for (int i = 0; i < rects.Length; i++) {
                                                                Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
                                                        }
                                                }

                                        }

                                }

                                //track face points. If there are no face points, track () always returns false.
                                if (faceTracker.track (grayMat, faceTrackerParams)) {
                                        if (isDrawPoints)
                                                faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));

                                        Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

                                        Point[] points = faceTracker.getPoints () [0];

                                        if (points.Length > 0) {

                                                //for (int i = 0; i < points.Length; i++) {
                                                //        Core.putText (rgbaMat, "" + i, new Point (points [i].x, points [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar (0, 0, 255, 255), 2, Core.LINE_AA, false);
                                                //}

                                                imagePoints.fromArray (
                                                        points [31], //l eye
                                                        points [36], //r eye
                                                        points [67], //nose
                                                        points [48], //l mouth
                                                        points [54]  //r mouth
                                                        //, points [1],  //l ear
                                                        //  points [13] //r ear
                                                );

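                                                //solvePnP estimates the head pose: rvec/tvec map the 3D face model
                                                //(objectPoints) onto the tracked 2D landmarks (imagePoints).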
                                                Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                                                bool isRefresh = false;

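                                                //accept the new pose only if it lies in front of the camera and within
                                                //~1200 units, scaled by the ratio of actual to requested capture width.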
                                                if (tvec.get (2, 0) [0] > 0 && tvec.get (2, 0) [0] < 1200 * ((float)webCamTexture.width / (float)width)) {

                                                        isRefresh = true;

                                                        if (oldRvec == null) {
                                                                oldRvec = new Mat ();
                                                                rvec.copyTo (oldRvec);
                                                        }
                                                        if (oldTvec == null) {
                                                                oldTvec = new Mat ();
                                                                tvec.copyTo (oldTvec);
                                                        }

                                                        //filter Rvec Noise.
                                                        using (Mat absDiffRvec = new Mat ()) {
                                                                Core.absdiff (rvec, oldRvec, absDiffRvec);

                                                                //				Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                                                using (Mat cmpRvec = new Mat ()) {
                                                                        Core.compare (absDiffRvec, new Scalar (rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);

                                                                        if (Core.countNonZero (cmpRvec) > 0)
                                                                                isRefresh = false;
                                                                }
                                                        }

                                                        //filter Tvec Noise.
                                                        using (Mat absDiffTvec = new Mat ()) {
                                                                Core.absdiff (tvec, oldTvec, absDiffTvec);

                                                                //				Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                                                using (Mat cmpTvec = new Mat ()) {
                                                                        Core.compare (absDiffTvec, new Scalar (tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);

                                                                        if (Core.countNonZero (cmpTvec) > 0)
                                                                                isRefresh = false;
                                                                }
                                                        }

                                                }

                                                if (isRefresh) {

                                                        if (!rightEye.activeSelf)
                                                                rightEye.SetActive (true);
                                                        if (!leftEye.activeSelf)
                                                                leftEye.SetActive (true);

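                                                        //mouth-open heuristic: compare the mouth width (points 48-56) and
                                                        //lip gap (points 51-57) against the inter-eye distance (points 31-36).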
                                                        if ((Mathf.Abs ((float)(points [48].x - points [56].x)) < Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.2
                                                                && Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.9)
                                                                || Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.7) {

                                                                if (!mouth.activeSelf)
                                                                        mouth.SetActive (true);

                                                        } else {
                                                                if (mouth.activeSelf)
                                                                        mouth.SetActive (false);
                                                        }

                                                        rvec.copyTo (oldRvec);
                                                        tvec.copyTo (oldTvec);

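                                                        //Rodrigues converts the 3x1 rotation vector into a 3x3 rotation matrix.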
                                                        Calib3d.Rodrigues (rvec, rotM);

                                                        transformationM.SetRow (0, new Vector4 ((float)rotM.get (0, 0) [0], (float)rotM.get (0, 1) [0], (float)rotM.get (0, 2) [0], (float)tvec.get (0, 0) [0]));
                                                        transformationM.SetRow (1, new Vector4 ((float)rotM.get (1, 0) [0], (float)rotM.get (1, 1) [0], (float)rotM.get (1, 2) [0], (float)tvec.get (1, 0) [0]));
                                                        transformationM.SetRow (2, new Vector4 ((float)rotM.get (2, 0) [0], (float)rotM.get (2, 1) [0], (float)rotM.get (2, 2) [0], (float)tvec.get (2, 0) [0]));
                                                        transformationM.SetRow (3, new Vector4 (0, 0, 0, 1));

                                                        modelViewMtrx = lookAtM * transformationM * invertZM;

                                                        ARCamera.worldToCameraMatrix = modelViewMtrx;

                                                        //				Debug.Log ("modelViewMtrx " + modelViewMtrx.ToString());
                                                }
                                        }
                                }

                                Utils.matToTexture2D (rgbaMat, texture, colors);

                        }

                        if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
                                faceTracker.reset ();
                                if (oldRvec != null) {
                                        oldRvec.Dispose ();
                                        oldRvec = null;
                                }
                                if (oldTvec != null) {
                                        oldTvec.Dispose ();
                                        oldTvec = null;
                                }

                                ARCamera.ResetWorldToCameraMatrix ();

                                rightEye.SetActive (false);
                                leftEye.SetActive (false);
                                mouth.SetActive (false);
                        }

                }

                void OnDisable ()
                {
                        webCamTexture.Stop ();
                }

                private Matrix4x4 getLookAtMatrix (Vector3 pos, Vector3 target, Vector3 up)
                {

                        Vector3 z = Vector3.Normalize (pos - target);
                        Vector3 x = Vector3.Normalize (Vector3.Cross (up, z));
                        Vector3 y = Vector3.Normalize (Vector3.Cross (z, x));

                        Matrix4x4 result = new Matrix4x4 ();
                        result.SetRow (0, new Vector4 (x.x, x.y, x.z, -(Vector3.Dot (pos, x))));
                        result.SetRow (1, new Vector4 (y.x, y.y, y.z, -(Vector3.Dot (pos, y))));
                        result.SetRow (2, new Vector4 (z.x, z.y, z.z, -(Vector3.Dot (pos, z))));
                        result.SetRow (3, new Vector4 (0, 0, 0, 1));

                        return result;
                }
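
                // getLookAtMatrix builds a standard right-handed look-at view matrix; combined
                // with invertZM (which flips the Z axis) it adapts OpenCV's right-handed camera
                // pose to Unity's left-handed convention. A quick sanity check (hypothetical,
                // not part of the original sample) for the pose used in init ():
                private void sanityCheckLookAt ()
                {
                        // camera at the origin, looking down +Z, with -Y as up:
                        Matrix4x4 m = getLookAtMatrix (Vector3.zero, new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
                        // expected rows: (1,0,0,0), (0,-1,0,0), (0,0,-1,0), (0,0,0,1)
                        Debug.Log ("lookAt sanity " + m.ToString ());
                }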

                void OnGUI ()
                {
                        float screenScale = Screen.height / 240.0f;
                        Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
                        GUI.matrix = scaledMatrix;

                        GUILayout.BeginVertical ();
                        if (GUILayout.Button ("back")) {
                                Application.LoadLevel ("FaceTrackerSample");
                        }
                        if (GUILayout.Button ("change camera")) {
                                isFrontFacing = !isFrontFacing;
                                StartCoroutine (init ());
                        }

                        if (GUILayout.Button ("drawPoints")) {
                                if (isDrawPoints) {
                                        isDrawPoints = false;
                                } else {
                                        isDrawPoints = true;
                                }
                        }
                        if (GUILayout.Button ("axes")) {
                                if (axes.activeSelf) {
                                        axes.SetActive (false);
                                } else {
                                        axes.SetActive (true);
                                }
                        }
                        if (GUILayout.Button ("head")) {
                                if (head.activeSelf) {
                                        head.SetActive (false);
                                } else {
                                        head.SetActive (true);
                                }
                        }

                        GUILayout.EndVertical ();
                }

            }
Example n. 24
            // Update is called once per frame
            void Update()
            {
            if (!initDone)
                return;

            if (screenOrientation != Screen.orientation)
            {
                screenOrientation = Screen.orientation;
                updateLayout();
            }

            #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
            #else
                if (webCamTexture.didUpdateThisFrame)
                {
            #endif
                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);
                // flip to correct direction.
                if (webCamDevice.isFrontFacing)
                {
                    if (webCamTexture.videoRotationAngle == 0)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                    else if (webCamTexture.videoRotationAngle == 90)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    else if (webCamTexture.videoRotationAngle == 180)
                    {
                        Core.flip(rgbaMat, rgbaMat, 0);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, 1);
                    }
                }
                else
                {
                    if (webCamTexture.videoRotationAngle == 180)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                    else if (webCamTexture.videoRotationAngle == 270)
                    {
                        Core.flip(rgbaMat, rgbaMat, -1);
                    }
                }
                // convert image to grayscale
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                if (faceTracker.getPoints().Count <= 0)
                {
                    Debug.Log("detectFace");

                    // histogram-equalize a copy of the gray image and detect the biggest face
                    using (Mat equalizeHistMat = new Mat())
                    using (MatOfRect faces = new MatOfRect())
                    {
                        Imgproc.equalizeHist(grayMat, equalizeHistMat);

                        cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
                                | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                        if (faces.rows() > 0)
                        {
                            Debug.Log("--------------------------faces");
                            Debug.Log("faces " + faces.dump());
                            Debug.Log("--------------------------faces");
                            // MatOfRectから顔の初期座標を追加
                            faceTracker.addPoints(faces);

                            // draw face rect
                            OpenCVForUnity.Rect[] rects = faces.toArray();
                            for (int i = 0; i < rects.Length; i++)
                            {
                                Core.rectangle(rgbaMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0, 255), 2);
                            }
                        }
                    }
                }

                // track face points. If there are no face points, track() always returns false.
                if (faceTracker.track(grayMat, faceTrackerParams))
                {
                    if (isDrawPoints)
                        faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));

                    //Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);

                    Point[] points = faceTracker.getPoints()[0];

                    if (points.Length > 0)
                    {
                        imagePoints.fromArray(
                            points[31], //l eye
                            points[36], //r eye
                            points[67], // nose
                            points[48], //l mouth
                            points[54]  //r mouth
                        );

                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                        bool isRefresh = false;

                        if (tvec.get(2, 0)[0] > 0 && tvec.get(2, 0)[0] < 1200 * ((float)webCamTexture.width / (float)width))
                        {

                            isRefresh = true;

                            if (oldRvec == null)
                            {
                                oldRvec = new Mat();
                                rvec.copyTo(oldRvec);
                            }
                            if (oldTvec == null)
                            {
                                oldTvec = new Mat();
                                tvec.copyTo(oldTvec);
                            }

                            // filter Rvec noise.
                            using (Mat absDiffRvec = new Mat())
                            {
                                Core.absdiff(rvec, oldRvec, absDiffRvec);

                                using (Mat cmpRvec = new Mat())
                                {
                                    Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpRvec) > 0)
                                        isRefresh = false;
                                }
                            }

                            // filter Tvec noise.
                            using (Mat absDiffTvec = new Mat())
                            {
                                Core.absdiff(tvec, oldTvec, absDiffTvec);
                                using (Mat cmpTvec = new Mat())
                                {
                                    Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpTvec) > 0)
                                        isRefresh = false;
                                }
                            }
                        }

                        if (isRefresh)
                        {
                            if (!rightEye.activeSelf)
                                rightEye.SetActive (true);
                            if (!leftEye.activeSelf)
                                leftEye.SetActive (true);

                            rvec.copyTo(oldRvec);
                            tvec.copyTo(oldTvec);

                            Calib3d.Rodrigues(rvec, rotM);

                            transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0)[0], (float)rotM.get(0, 1)[0], (float)rotM.get(0, 2)[0], (float)tvec.get(0, 0)[0]));
                            transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0)[0], (float)rotM.get(1, 1)[0], (float)rotM.get(1, 2)[0], (float)tvec.get(1, 0)[0]));
                            transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0)[0], (float)rotM.get(2, 1)[0], (float)rotM.get(2, 2)[0], (float)tvec.get(2, 0)[0]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                            modelViewMtrx = lookAtM * transformationM * invertZM;
                            ARCamera.worldToCameraMatrix = modelViewMtrx;
                        }
                    }
                }
                Utils.matToTexture2D(rgbaMat, texture, colors);
            }

            if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
            {
                faceTracker.reset();
                if (oldRvec != null)
                {
                    oldRvec.Dispose();
                    oldRvec = null;
                }
                if (oldTvec != null)
                {
                    oldTvec.Dispose();
                    oldTvec = null;
                }
                ARCamera.ResetWorldToCameraMatrix();

                rightEye.SetActive (false);
                leftEye.SetActive (false);

            }
            }

            void OnDisable()
            {
            webCamTexture.Stop();
            }
				// Update is called once per frame
				void Update ()
				{

						if (webCamTextureToMatHelper.isPlaying () && webCamTextureToMatHelper.didUpdateThisFrame ()) {
				
								Mat rgbaMat = webCamTextureToMatHelper.GetMat ();

								//convert image to greyscale
								Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
										
										
								if (faceTracker.getPoints ().Count <= 0) {
										Debug.Log ("detectFace");
											
										//histogram-equalize a copy of the gray image and run the cascade
										using (Mat equalizeHistMat = new Mat ()) 
										using (MatOfRect faces = new MatOfRect ()) {
												
												Imgproc.equalizeHist (grayMat, equalizeHistMat);
												
												cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0
												//														                           | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
														| Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());
												
												
												
												if (faces.rows () > 0) {
														Debug.Log ("faces " + faces.dump ());
														//add initial face points from MatOfRect
														faceTracker.addPoints (faces);
													
														//draw face rect
														OpenCVForUnity.Rect[] rects = faces.toArray ();
														for (int i = 0; i < rects.Length; i++) {
																#if OPENCV_3
														Imgproc.rectangle(rgbaMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0, 255), 2);
																#else
																Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
																#endif
														}
													
												}
												
										}
											
								}
										
										
								//track face points. If there are no face points, track () always returns false.
								if (faceTracker.track (grayMat, faceTrackerParams))
										faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));
										
								#if OPENCV_3
										Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
								#else
								Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);
								#endif
										
										
//								Core.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

								Utils.matToTexture2D (rgbaMat, texture, colors);
										
						}
									
						if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
								faceTracker.reset ();
						}
					
				}
Example n. 26
		public void addPoints (MatOfRect rects)
		{
				points.AddRange (detector.convertMatOfRectToPoints (rects));

		}
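
		// convertMatOfRectToPoints lives in the detector class and is not shown here.
		// A minimal sketch of what such a conversion might look like, assuming the
		// tracker is seeded with the four corners of each detected rect (the real
		// FaceTracker implementation may seed a full landmark template instead):
		public List<Point[]> convertMatOfRectToPoints (MatOfRect rects)
		{
				List<Point[]> points = new List<Point[]> ();
				foreach (OpenCVForUnity.Rect r in rects.toArray ()) {
						points.Add (new Point[] {
								new Point (r.x, r.y),                      // top-left
								new Point (r.x + r.width, r.y),            // top-right
								new Point (r.x + r.width, r.y + r.height), // bottom-right
								new Point (r.x, r.y + r.height)            // bottom-left
						});
				}
				return points;
		}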
				private IEnumerator init ()
				{

						if (webCamTexture != null) {
								webCamTexture.Stop ();
								initDone = false;

								rgbaMat.Dispose ();
								grayMat.Dispose ();
						}

						// Checks how many and which cameras are available on the device
						for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {


								if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {


										Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

										webCamDevice = WebCamTexture.devices [cameraIndex];

										webCamTexture = new WebCamTexture (webCamDevice.name, width, height);


										break;
								}


						}

						if (webCamTexture == null) {
								webCamDevice = WebCamTexture.devices [0];
								webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
						}

						Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);


						// Starts the camera
						webCamTexture.Play ();

						while (true) {
								//If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
				if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
								if (webCamTexture.didUpdateThisFrame) {
#endif

										Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
										Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);


										colors = new Color32[webCamTexture.width * webCamTexture.height];

										rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
										grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

										texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

										gameObject.transform.eulerAngles = new Vector3 (0, 0, 0);
#if (UNITY_ANDROID || UNITY_IPHONE || UNITY_WP_8_1) && !UNITY_EDITOR
					gameObject.transform.eulerAngles = new Vector3 (0, 0, -90);
#endif
										//										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);


										gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

										//										bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
										//										float scaleX = 1;
										//										float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
										//										if (webCamTexture.videoRotationAngle == 270)
										//												scaleY = -1.0f;
										//										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);



										cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
										regionCascade = new CascadeClassifier (Utils.getFilePath ("lbpcascade_frontalface.xml"));

										gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

#if (UNITY_ANDROID || UNITY_IPHONE || UNITY_WP_8_1) && !UNITY_EDITOR
						                Camera.main.orthographicSize = webCamTexture.width / 2;
#else
										Camera.main.orthographicSize = webCamTexture.height / 2;
#endif


										initThread ();

										initDone = true;

										break;
								} else {
										yield return 0;
								}
						}
				}

				// Update is called once per frame
				void Update ()
				{
						if (!initDone)
								return;




#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
				if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
						if (webCamTexture.didUpdateThisFrame) {
#endif

								Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

								if (webCamTexture.videoVerticallyMirrored) {
										if (webCamDevice.isFrontFacing) {
												if (webCamTexture.videoRotationAngle == 0) {
														Core.flip (rgbaMat, rgbaMat, 1);
												} else if (webCamTexture.videoRotationAngle == 90) {
														Core.flip (rgbaMat, rgbaMat, 0);
												} else if (webCamTexture.videoRotationAngle == 270) {
														Core.flip (rgbaMat, rgbaMat, 1);
												}
										} else {
												if (webCamTexture.videoRotationAngle == 90) {

												} else if (webCamTexture.videoRotationAngle == 270) {
														Core.flip (rgbaMat, rgbaMat, -1);
												}
										}
								} else {
										if (webCamDevice.isFrontFacing) {
												if (webCamTexture.videoRotationAngle == 0) {
														Core.flip (rgbaMat, rgbaMat, 1);
												} else if (webCamTexture.videoRotationAngle == 90) {
														Core.flip (rgbaMat, rgbaMat, 0);
												} else if (webCamTexture.videoRotationAngle == 270) {
														Core.flip (rgbaMat, rgbaMat, 1);
												}
										} else {
												if (webCamTexture.videoRotationAngle == 90) {

												} else if (webCamTexture.videoRotationAngle == 270) {
														Core.flip (rgbaMat, rgbaMat, -1);
												}
										}
								}


								Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
								Imgproc.equalizeHist (grayMat, grayMat);





								didUpdateThisFrame = true;

								#if UNITY_WSA
								#if NETFX_CORE
					ThreadWorker();
								#endif
								#else
								ThreadWorker ();
								#endif

								OpenCVForUnity.Rect[] rects;

								if (resultDetect != null) {
										//Debug.Log("DetectionBasedTracker::process: get _rectsWhereRegions were got from resultDetect");
										rectsWhereRegions = resultDetect.toArray ();

										rects = rectsWhereRegions;
										for (int i = 0; i < rects.Length; i++) {
												Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (0, 0, 255, 255), 2);
										}



										resultDetect = null;
								} else {
										//Debug.Log("DetectionBasedTracker::process: get _rectsWhereRegions from previous positions");
										rectsWhereRegions = new Rect[trackedObjects.Count];

										for (int i = 0; i < trackedObjects.Count; i++) {
												int n = trackedObjects [i].lastPositions.Count;
												//if (n > 0) UnityEngine.Debug.LogError("n > 0 is false");

												Rect r = trackedObjects [i].lastPositions [n - 1];
												if (r.area () == 0) {
														Debug.Log ("DetectionBasedTracker::process: ERROR: ATTENTION: strange algorithm's behavior: trackedObjects[i].rect() is empty");
														continue;
												}

												//correction by speed of rectangle
												if (n > 1) {
														Point center = centerRect (r);
														Point center_prev = centerRect (trackedObjects [i].lastPositions [n - 2]);
														Point shift = new Point ((center.x - center_prev.x) * innerParameters.coeffObjectSpeedUsingInPrediction,
																(center.y - center_prev.y) * innerParameters.coeffObjectSpeedUsingInPrediction);

														r.x += (int)Math.Round (shift.x);
														r.y += (int)Math.Round (shift.y);
												}
												rectsWhereRegions [i] = r;
										}

										rects = rectsWhereRegions;
										for (int i = 0; i < rects.Length; i++) {
												Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (0, 255, 0, 255), 2);
										}
								}

								if (rectsWhereRegions.Length > 0) {
										detectedObjectsInRegions.Clear ();

										int len = rectsWhereRegions.Length;
										for (int i = 0; i < len; i++) {
												detectInRegion (grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
										}
								}

								updateTrackedObjects (detectedObjectsInRegions);

								getObjects (resultObjects);


								rects = resultObjects.ToArray ();
								for (int i = 0; i < rects.Length; i++) {
										//Debug.Log ("detect faces " + rects [i]);

										Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
								}

								Utils.matToTexture2D (rgbaMat, texture, colors);

						}

				}

				private void initThread ()
				{
						grayMat4Thread = new Mat ();

				}
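
				// Only the UWP (NETFX_CORE) variant of ThreadWorker survives in this snippet.
				// A minimal non-UWP sketch, assuming the same fields (isThreadRunning,
				// didUpdateThisFrame, grayMat4Thread, cascade, resultDetect) and
				// System.Threading being available:
				private void ThreadWorkerSketch ()
				{
						if (isThreadRunning || !didUpdateThisFrame)
								return;
						isThreadRunning = true;
						didUpdateThisFrame = false;

						// copy the frame so the worker never touches the Mat the main thread writes
						grayMat.copyTo (grayMat4Thread);

						System.Threading.ThreadPool.QueueUserWorkItem (_ => {
								MatOfRect faces = new MatOfRect ();
								if (cascade != null)
										cascade.detectMultiScale (grayMat4Thread, faces, 1.1, 2, Objdetect.CASCADE_SCALE_IMAGE,
												new Size (grayMat4Thread.height () * 0.2, grayMat4Thread.height () * 0.2), new Size ());
								isThreadRunning = false;
								resultDetect = faces; // read back on the main thread in Update ()
						});
				}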


#if UNITY_WSA
#if NETFX_CORE
                private async void ThreadWorker()
                {
                    //Debug.Log("Thread working...");

                    if (isThreadRunning) return;
                    if (!didUpdateThisFrame) return;
                    isThreadRunning = true;
                    didUpdateThisFrame = false;

                    grayMat.copyTo(grayMat4Thread);

                    MatOfRect faces = new MatOfRect();

                    await Task.Run(() =>
                    {

                        if (cascade != null)
							// detect on the thread-local copy, not on grayMat, which the main thread keeps writing
							cascade.detectMultiScale(grayMat4Thread, faces, 1.1, 2, Objdetect.CASCADE_SCALE_IMAGE, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                                  new Size(grayMat4Thread.height() * 0.2, grayMat4Thread.height() * 0.2), new Size());
                    });

                    //Debug.Log("Thread done.");

                    isThreadRunning = false;
                    resultDetect = faces;
                }
        public Mat OnCameraFrame(CameraBridgeViewBase.ICvCameraViewFrame inputFrame)
        {
            mRgba = inputFrame.Rgba();
            mGray = inputFrame.Gray();

            if (mAbsoluteFaceSize == 0)
            {
                int height = mGray.Rows();
                if (Math.Round(height * mRelativeFaceSize) > 0)
                {
                    mAbsoluteFaceSize = (int)Math.Round(height * mRelativeFaceSize);
                }
            }

            if (mZoomWindow == null || mZoomWindow2 == null)
            {
                CreateAuxiliaryMats();
            }

            MatOfRect faces = new MatOfRect();

            if (FACE != null)
            {
                FACE.DetectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                                      new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
            }

            Rect[] facesArray = faces.ToArray();
            for (int i = 0; i < facesArray.Length; i++)
            {
                Imgproc.Rectangle(mRgba, facesArray[i].Tl(), facesArray[i].Br(), FACE_RECT_COLOR, 3);
                xCenter = (facesArray[i].X + facesArray[i].Width + facesArray[i].X) / 2;
                yCenter = (facesArray[i].Y + facesArray[i].Y + facesArray[i].Height) / 2;
                Point center = new Point(xCenter, yCenter);

                Imgproc.Circle(mRgba, center, 10, new Scalar(255, 0, 0, 255), 3);

                /* --- NOT NEEDED
                 * Imgproc.putText(mRgba, "[" + center.X + "," + center.Y + "]",
                 *      new Point(center.X + 20, center.Y + 20),
                 *      Core.FONT_HERSHEY_SIMPLEX, 0.7, new Scalar(255, 255, 255, 255));
                 */

                Rect r = facesArray[i];

                // compute the eye area
                Rect eyearea = new Rect(r.X + r.Width / 8,
                                        (int)(r.Y + (r.Height / 4.5)), r.Width - 2 * r.Width / 8, (int)(r.Height / 3.0));

                // split it
                Rect eyearea_right = new Rect(r.X + r.Width / 16, (int)(r.Y + (r.Height / 4.5)),
                                              (r.Width - 2 * r.Width / 16) / 2, (int)(r.Height / 3.0));

                Rect eyearea_left = new Rect(r.X + r.Width / 16 + (r.Width - 2 * r.Width / 16) / 2,
                                             (int)(r.Y + (r.Height / 4.5)), (r.Width - 2 * r.Width / 16) / 2, (int)(r.Height / 3.0));

                // draw the area - mGray is working grayscale mat, if you want to see area in rgb preview,
                // change mGray to mRgba
                Imgproc.Rectangle(mRgba, eyearea_left.Tl(), eyearea_left.Br(), new Scalar(255, 0, 0, 255), 2);
                Imgproc.Rectangle(mRgba, eyearea_right.Tl(), eyearea_right.Br(), new Scalar(255, 0, 0, 255), 2);

                if (learn_frames < 5)
                {
                    teplateR = get_template(EYE, eyearea_right, 24);
                    teplateL = get_template(EYE, eyearea_left, 24);
                    learn_frames++;
                }
                else
                {
                    // Learning finished, use the new templates for template matching
                    match_eye(eyearea_right, teplateR, method);
                    match_eye(eyearea_left, teplateL, method);
                }

                // cut eye areas and put them to zoom windows
                Imgproc.Resize(mRgba.Submat(eyearea_left), mZoomWindow2, mZoomWindow2.Size());
                Imgproc.Resize(mRgba.Submat(eyearea_right), mZoomWindow, mZoomWindow.Size());
            }

            /*
             * mRgba = inputFrame.Rgba();
             * Log.Info(TAG, "called OnCameraFrame");
             *
             * if (mIsColorSelected)
             * {
             *  mDetector.Process(mRgba);
             *  List<MatOfPoint> contours = mDetector.Contours;
             *  Log.Error(TAG, "Contours count: " + contours.Count);
             *  Imgproc.DrawContours(mRgba, contours, -1, CONTOUR_COLOR);
             *
             *  Mat colorLabel = mRgba.Submat(4, 68, 4, 68);
             *  colorLabel.SetTo(mBlobColorRgba);
             *
             *  Mat spectrumLabel = mRgba.Submat(4, 4 + mSpectrum.Rows(), 70, 70 + mSpectrum.Cols());
             *  mSpectrum.CopyTo(spectrumLabel);
             * }
             */

            return(mRgba);
        }
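
        // Worked example for the eye regions above (hypothetical numbers): for a
        // detected face rect x=100, y=100, width=160, height=160 the code yields
        //   eyearea       = (120, 135, 120, 53)  // x + w/8,  y + h/4.5, w - 2*w/8,        h/3
        //   eyearea_right = (110, 135,  70, 53)  // x + w/16, y + h/4.5, (w - 2*w/16) / 2, h/3
        //   eyearea_left  = (180, 135,  70, 53)  // starts at the right edge of eyearea_right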
Example n. 29
				// Update is called once per frame
				void Update ()
				{

						if (webCamTextureToMatHelper.isPlaying () && webCamTextureToMatHelper.didUpdateThisFrame ()) {
				
								Mat rgbaMat = webCamTextureToMatHelper.GetMat ();


								//convert image to greyscale
								Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
										
										
								if (faceTracker.getPoints ().Count <= 0) {
										Debug.Log ("detectFace");
											
										//histogram-equalize a copy of the gray image and detect the biggest face
										using (Mat equalizeHistMat = new Mat ()) 
										using (MatOfRect faces = new MatOfRect ()) {
												
												Imgproc.equalizeHist (grayMat, equalizeHistMat);
												
												cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0
														| Objdetect.CASCADE_FIND_BIGGEST_OBJECT
														| Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());
												
												
												
												if (faces.rows () > 0) {
														Debug.Log ("faces " + faces.dump ());
														//add initial face points from MatOfRect
														faceTracker.addPoints (faces);
													
														//draw face rect
														OpenCVForUnity.Rect[] rects = faces.toArray ();
														for (int i = 0; i < rects.Length; i++) {
																#if OPENCV_3
														Imgproc.rectangle(rgbaMat, new Point(rects[i].x, rects[i].y), new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 0, 0, 255), 2);
																#else
																Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
																#endif
														}
												}
												
										}
											
								}
										
										
								//track face points. If there are no face points, track () always returns false.
								if (faceTracker.track (grayMat, faceTrackerParams)) {
										if (isDrawPoints)
												faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));
											
										#if OPENCV_3
											Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
										#else
										Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);
										#endif
											
											
										Point[] points = faceTracker.getPoints () [0];
											
											
										if (points.Length > 0) {
												
												//												for (int i = 0; i < points.Length; i++) {
												//#if OPENCV_3
												//                                                  Imgproc.putText(rgbaMat, "" + i, new Point(points[i].x, points[i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar(0, 0, 255, 255), 2, Core.LINE_AA, false);
												//#else
												//                                                  Core.putText (rgbaMat, "" + i, new Point (points [i].x, points [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar (0, 0, 255, 255), 2, Core.LINE_AA, false);
												//#endif
												//												}
												
												
												imagePoints.fromArray (
													points [31],//l eye
													points [36],//r eye
													points [67],//nose
													points [48],//l mouth
													points [54] //r mouth
													//							,
													//											points [1],//l ear
													//											points [13]//r ear
												);
												
												
												Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
												
												bool isRefresh = false;
												
												if (tvec.get (2, 0) [0] > 0 && tvec.get (2, 0) [0] < 1200 * ((float)rgbaMat.cols () / (float)webCamTextureToMatHelper.requestWidth)) {
													
														isRefresh = true;
													
														if (oldRvec == null) {
																oldRvec = new Mat ();
																rvec.copyTo (oldRvec);
														}
														if (oldTvec == null) {
																oldTvec = new Mat ();
																tvec.copyTo (oldTvec);
														}
													
													
														//filter Rvec Noise.
														using (Mat absDiffRvec = new Mat ()) {
																Core.absdiff (rvec, oldRvec, absDiffRvec);
														
																//				Debug.Log ("absDiffRvec " + absDiffRvec.dump());
														
																using (Mat cmpRvec = new Mat ()) {
																		Core.compare (absDiffRvec, new Scalar (rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);
															
																		if (Core.countNonZero (cmpRvec) > 0)
																				isRefresh = false;
																}
														}
													
													
													
														//filter Tvec Noise.
														using (Mat absDiffTvec = new Mat ()) {
																Core.absdiff (tvec, oldTvec, absDiffTvec);
														
																//				Debug.Log ("absDiffRvec " + absDiffRvec.dump());
														
																using (Mat cmpTvec = new Mat ()) {
																		Core.compare (absDiffTvec, new Scalar (tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);
															
																		if (Core.countNonZero (cmpTvec) > 0)
																				isRefresh = false;
																}
														}
													
													
													
												}
												
												if (isRefresh) {
													
														if (!rightEye.activeSelf)
																rightEye.SetActive (true);
														if (!leftEye.activeSelf)
																leftEye.SetActive (true);
													
													
														if ((Mathf.Abs ((float)(points [48].x - points [56].x)) < Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.2 
																&& Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.9)
																|| Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.7) {
														
																if (!mouth.activeSelf)
																		mouth.SetActive (true);
														
														} else {
																if (mouth.activeSelf)
																		mouth.SetActive (false);
														}
													
													
													
														rvec.copyTo (oldRvec);
														tvec.copyTo (oldTvec);
													
														Calib3d.Rodrigues (rvec, rotM);
													
														transformationM.SetRow (0, new Vector4 ((float)rotM.get (0, 0) [0], (float)rotM.get (0, 1) [0], (float)rotM.get (0, 2) [0], (float)tvec.get (0, 0) [0]));
														transformationM.SetRow (1, new Vector4 ((float)rotM.get (1, 0) [0], (float)rotM.get (1, 1) [0], (float)rotM.get (1, 2) [0], (float)tvec.get (1, 0) [0]));
														transformationM.SetRow (2, new Vector4 ((float)rotM.get (2, 0) [0], (float)rotM.get (2, 1) [0], (float)rotM.get (2, 2) [0], (float)tvec.get (2, 0) [0]));
														transformationM.SetRow (3, new Vector4 (0, 0, 0, 1));
													
														modelViewMtrx = lookAtM * transformationM * invertZM;
													
														ARCamera.worldToCameraMatrix = modelViewMtrx;
													
													
														//				Debug.Log ("modelViewMtrx " + modelViewMtrx.ToString());
												}
										}
								}
										
//								Core.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);
										
								Utils.matToTexture2D (rgbaMat, texture, colors);
										
						}
									
						if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
								faceTracker.reset ();
								if (oldRvec != null) {
										oldRvec.Dispose ();
										oldRvec = null;
								}
								if (oldTvec != null) {
										oldTvec.Dispose ();
										oldTvec = null;
								}
										
								ARCamera.ResetWorldToCameraMatrix ();
										
								rightEye.SetActive (false);
								leftEye.SetActive (false);
								mouth.SetActive (false);
						}
					
				}
Example n. 30
        /// <summary>
        /// Raises the web cam texture to mat helper inited event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInited()
        {
            Debug.Log("OnWebCamTextureToMatHelperInited");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetDownScaleMat(webCamTextureToMatHelper.GetMat());

            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
            Debug.Log("webCamTextureMat.width " + webCamTextureMat.width() + " webCamTextureMat.height " + webCamTextureMat.height());


            processingAreaRect = new OpenCVForUnity.Rect((int)(webCamTextureMat.cols() * (outsideClippingRatio.x - clippingOffset.x)), (int)(webCamTextureMat.rows() * (outsideClippingRatio.y + clippingOffset.y)),
                                                         (int)(webCamTextureMat.cols() * (1f - outsideClippingRatio.x * 2)), (int)(webCamTextureMat.rows() * (1f - outsideClippingRatio.y * 2)));
            processingAreaRect = processingAreaRect.intersect(new OpenCVForUnity.Rect(0, 0, webCamTextureMat.cols(), webCamTextureMat.rows()));

            Debug.Log("webCamTextureMat.width " + webCamTextureMat.width() + " webCamTextureMat.height " + webCamTextureMat.height());
            Debug.Log("processingAreaRect.x " + processingAreaRect.x + " processingAreaRect.y " + processingAreaRect.y + " processingAreaRect.width " + processingAreaRect.width + " processingAreaRect.height " + processingAreaRect.height);


            processingAreaMat = new Mat(processingAreaRect.height, processingAreaRect.width, CvType.CV_8UC4);

            grayMat = new Mat(processingAreaMat.rows(), processingAreaMat.cols(), CvType.CV_8UC1);

            faces = new MatOfRect();


            quad_renderer = gameObject.GetComponent <Renderer> () as Renderer;
            quad_renderer.sharedMaterial.SetTexture("_MainTex", texture);
            quad_renderer.sharedMaterial.SetVector("_VignetteOffset", new Vector4(clippingOffset.x, clippingOffset.y));

            //This value is obtained from PhotoCapture's TryGetProjectionMatrix() method. I do not know whether this method is good.
            //Please see the discussion in this thread: https://forums.hololens.com/discussion/782/live-stream-of-locatable-camera-webcam-in-unity
            Matrix4x4 projectionMatrix = Matrix4x4.identity;

            projectionMatrix.m00 = 2.31029f;
            projectionMatrix.m01 = 0.00000f;
            projectionMatrix.m02 = 0.09614f;
            projectionMatrix.m03 = 0.00000f;
            projectionMatrix.m10 = 0.00000f;
            projectionMatrix.m11 = 4.10427f;
            projectionMatrix.m12 = -0.06231f;
            projectionMatrix.m13 = 0.00000f;
            projectionMatrix.m20 = 0.00000f;
            projectionMatrix.m21 = 0.00000f;
            projectionMatrix.m22 = -1.00000f;
            projectionMatrix.m23 = 0.00000f;
            projectionMatrix.m30 = 0.00000f;
            projectionMatrix.m31 = 0.00000f;
            projectionMatrix.m32 = -1.00000f;
            projectionMatrix.m33 = 0.00000f;
            quad_renderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
            quad_renderer.sharedMaterial.SetFloat("_VignetteScale", vignetteScale);


            float halfOfVerticalFov = Mathf.Atan(1.0f / projectionMatrix.m11);
            float aspectRatio       = (1.0f / Mathf.Tan(halfOfVerticalFov)) / projectionMatrix.m00;

            Debug.Log("halfOfVerticalFov " + halfOfVerticalFov);
            Debug.Log("aspectRatio " + aspectRatio);
        }
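        For reference, the two values computed above fall straight out of the pinhole projection matrix: m11 = 1/tan(fovY/2) and aspect = m11/m00. A minimal sketch of the same recovery, using the hard-coded HoloLens values from the matrix above:

        // Sketch: recovering camera parameters from the projection matrix above.
        // The constants are the hard-coded m00/m11 values; everything else is Unity math.
        float m00 = 2.31029f;  // horizontal focal term
        float m11 = 4.10427f;  // vertical focal term

        float halfFovY = Mathf.Atan(1.0f / m11);           // half vertical FOV in radians
        float fovYDeg  = 2.0f * halfFovY * Mathf.Rad2Deg;  // full vertical FOV in degrees
        float aspect   = m11 / m00;                        // width / height

        Debug.Log("fovY(deg) " + fovYDeg + " aspect " + aspect);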
        // Update is called once per frame
        void Update()
        {
            // loop playback.
            if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
            {
                capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
            }

            if (capture.grab())
            {
                capture.retrieve(rgbMat, 0);

                Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
                //Debug.Log ("Mat toString " + rgbMat.ToString ());


                // detect faces.
                List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect> ();
                if (useDlibFaceDetecter)
                {
                    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
                    List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                    foreach (var unityRect in result)
                    {
                        detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                    }
                }
                else
                {
                    // convert image to greyscale.
                    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);

                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect()) {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                            detectResult = faces.toList();

                            // adjust to Dlib's result.
                            foreach (OpenCVForUnity.Rect r in detectResult)
                            {
                                r.y += (int)(r.height * 0.1f);
                            }
                        }
                }


                // face tracking.
                rectangleTracker.UpdateTrackedObjects(detectResult);
                List <TrackedRect> trackedRects = new List <TrackedRect> ();
                rectangleTracker.GetObjects(trackedRects, true);

                // detect face landmark points.
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
                List <List <Vector2> > landmarkPoints = new List <List <Vector2> > ();
                for (int i = 0; i < trackedRects.Count; i++)
                {
                    TrackedRect      tr   = trackedRects [i];
                    UnityEngine.Rect rect = new UnityEngine.Rect(tr.x, tr.y, tr.width, tr.height);

                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
                    landmarkPoints.Add(points);
                }

                // face masking.
                if (faceMaskTexture != null && landmarkPoints.Count >= 1)
                {
                    OpenCVForUnity.Utils.texture2DToMat(faceMaskTexture, faceMaskMat);

                    float imageWidth      = meshOverlay.width;
                    float imageHeight     = meshOverlay.height;
                    float maskImageWidth  = faceMaskTexture.width;
                    float maskImageHeight = faceMaskTexture.height;

                    TrackedRect tr;
                    TrackedMesh tm;
                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        tr = trackedRects [i];

                        if (tr.state == TrackedState.NEW)
                        {
                            meshOverlay.CreateObject(tr.id, faceMaskTexture);
                        }
                        if (tr.state < TrackedState.DELETED)
                        {
                            tm = meshOverlay.GetObjectById(tr.id);

                            Vector3[] vertices = tm.meshFilter.mesh.vertices;
                            if (vertices.Length == landmarkPoints [i].Count)
                            {
                                for (int j = 0; j < vertices.Length; j++)
                                {
                                    vertices [j].x = landmarkPoints [i] [j].x / imageWidth - 0.5f;
                                    vertices [j].y = 0.5f - landmarkPoints [i] [j].y / imageHeight;
                                }
                            }
                            Vector2[] uv = tm.meshFilter.mesh.uv;
                            if (uv.Length == faceLandmarkPointsInMask.Count)
                            {
                                for (int jj = 0; jj < uv.Length; jj++)
                                {
                                    uv [jj].x = faceLandmarkPointsInMask [jj].x / maskImageWidth;
                                    uv [jj].y = (maskImageHeight - faceLandmarkPointsInMask [jj].y) / maskImageHeight;
                                }
                            }
                            meshOverlay.UpdateObject(tr.id, vertices, null, uv);

                            if (tr.numFramesNotDetected > 3)
                            {
                                tm.material.SetFloat(shader_FadeID, 1f);
                            }
                            else if (tr.numFramesNotDetected > 0 && tr.numFramesNotDetected <= 3)
                            {
                                tm.material.SetFloat(shader_FadeID, 0.3f + (0.7f / 4f) * tr.numFramesNotDetected);
                            }
                            else
                            {
                                tm.material.SetFloat(shader_FadeID, 0.3f);
                            }

                            // filter non frontal faces.
                            if (filterNonFrontalFaces && frontalFaceChecker.GetFrontalFaceRate(landmarkPoints [i]) < frontalFaceRateLowerLimit)
                            {
                                tm.material.SetFloat(shader_FadeID, 1f);
                            }
                        }
                        else if (tr.state == TrackedState.DELETED)
                        {
                            meshOverlay.DeleteObject(tr.id);
                        }
                    }
                }
                else if (landmarkPoints.Count >= 1)
                {
                    float imageWidth      = meshOverlay.width;
                    float imageHeight     = meshOverlay.height;
                    float maskImageWidth  = texture.width;
                    float maskImageHeight = texture.height;

                    TrackedRect tr;
                    TrackedMesh tm;
                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        tr = trackedRects [i];

                        if (tr.state == TrackedState.NEW)
                        {
                            meshOverlay.CreateObject(tr.id, texture);
                        }
                        if (tr.state < TrackedState.DELETED)
                        {
                            tm = meshOverlay.GetObjectById(tr.id);

                            Vector3[] vertices = tm.meshFilter.mesh.vertices;
                            if (vertices.Length == landmarkPoints [i].Count)
                            {
                                for (int j = 0; j < vertices.Length; j++)
                                {
                                    vertices [j].x = landmarkPoints[i][j].x / imageWidth - 0.5f;
                                    vertices [j].y = 0.5f - landmarkPoints[i][j].y / imageHeight;
                                }
                            }
                            Vector2[] uv = tm.meshFilter.mesh.uv;
                            if (uv.Length == landmarkPoints [0].Count)
                            {
                                for (int jj = 0; jj < uv.Length; jj++)
                                {
                                    uv [jj].x = landmarkPoints[0][jj].x / maskImageWidth;
                                    uv [jj].y = (maskImageHeight - landmarkPoints[0][jj].y) / maskImageHeight;
                                }
                            }
                            meshOverlay.UpdateObject(tr.id, vertices, null, uv);

                            if (tr.numFramesNotDetected > 3)
                            {
                                tm.material.SetFloat(shader_FadeID, 1f);
                            }
                            else if (tr.numFramesNotDetected > 0 && tr.numFramesNotDetected <= 3)
                            {
                                tm.material.SetFloat(shader_FadeID, 0.3f + (0.7f / 4f) * tr.numFramesNotDetected);
                            }
                            else
                            {
                                tm.material.SetFloat(shader_FadeID, 0.3f);
                            }

                            // filter non frontal faces.
                            if (filterNonFrontalFaces && frontalFaceChecker.GetFrontalFaceRate(landmarkPoints [i]) < frontalFaceRateLowerLimit)
                            {
                                tm.material.SetFloat(shader_FadeID, 1f);
                            }
                        }
                        else if (tr.state == TrackedState.DELETED)
                        {
                            meshOverlay.DeleteObject(tr.id);
                        }
                    }
                }

                // draw face rects.
                if (displayFaceRects)
                {
                    for (int i = 0; i < detectResult.Count; i++)
                    {
                        UnityEngine.Rect rect = new UnityEngine.Rect(detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 0, 0, 255), 2);
                    }

                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        UnityEngine.Rect rect = new UnityEngine.Rect(trackedRects [i].x, trackedRects [i].y, trackedRects [i].width, trackedRects [i].height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 255, 0, 255), 2);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceChecker.GetFrontalFaceAngles (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceChecker.GetFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    }
                }

                // draw face points.
                if (displayDebugFacePoints)
                {
                    for (int i = 0; i < landmarkPoints.Count; i++)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, landmarkPoints [i], new Scalar(0, 255, 0, 255), 2);
                    }
                }


                // display face mask image.
                if (faceMaskTexture != null && faceMaskMat != null)
                {
                    if (displayFaceRects)
                    {
                        OpenCVForUnityUtils.DrawFaceRect(faceMaskMat, faceRectInMask, new Scalar(255, 0, 0, 255), 2);
                    }
                    if (displayDebugFacePoints)
                    {
                        OpenCVForUnityUtils.DrawFaceLandmark(faceMaskMat, faceLandmarkPointsInMask, new Scalar(0, 255, 0, 255), 2);
                    }

                    float scale = (rgbMat.width() / 4f) / faceMaskMat.width();
                    float tx    = rgbMat.width() - faceMaskMat.width() * scale;
                    float ty    = 0.0f;
                    Mat   trans = new Mat(2, 3, CvType.CV_32F);//1.0, 0.0, tx, 0.0, 1.0, ty);
                    trans.put(0, 0, scale);
                    trans.put(0, 1, 0.0f);
                    trans.put(0, 2, tx);
                    trans.put(1, 0, 0.0f);
                    trans.put(1, 1, scale);
                    trans.put(1, 2, ty);

                    Imgproc.warpAffine(faceMaskMat, rgbMat, trans, rgbMat.size(), Imgproc.INTER_LINEAR, Core.BORDER_TRANSPARENT, new Scalar(0));
                }

                Imgproc.putText(rgbMat, "W:" + rgbMat.width() + " H:" + rgbMat.height() + " SO:" + Screen.orientation, new Point(5, rgbMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbMat, texture);
            }
        }
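        The 2x3 matrix built near the end of Update is a plain similarity transform [s, 0, tx; 0, s, ty], which warpAffine uses to scale the mask preview to a quarter of the frame width and pin it to the top-right corner. A hedged sketch of that placement as a reusable helper (the helper name is mine, not from the sample):

        // Hypothetical helper: draws 'src' into the top-right corner of 'dst' at 1/4 of
        // dst's width. BORDER_TRANSPARENT leaves dst untouched outside the warped area.
        static void DrawPictureInPicture(Mat src, Mat dst)
        {
            float scale = (dst.width() / 4f) / src.width();
            float tx = dst.width() - src.width() * scale;  // right-aligned
            float ty = 0.0f;                               // top-aligned

            using (Mat trans = new Mat(2, 3, CvType.CV_32F))
            {
                trans.put(0, 0, scale); trans.put(0, 1, 0.0f);  trans.put(0, 2, tx);
                trans.put(1, 0, 0.0f);  trans.put(1, 1, scale); trans.put(1, 2, ty);

                Imgproc.warpAffine(src, dst, trans, dst.size(),
                                   Imgproc.INTER_LINEAR, Core.BORDER_TRANSPARENT, new Scalar(0));
            }
        }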
Example n. 32
        // Update is called once per frame
        void Update()
        {
            if (sourceToMatHelper.IsPlaying() && sourceToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbMat = sourceToMatHelper.GetMat();

                // detect faces.
                List <Rect> detectResult = new List <Rect>();
                if (useDlibFaceDetecter)
                {
                    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
                    List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                    foreach (var unityRect in result)
                    {
                        detectResult.Add(new Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                    }
                }
                else
                {
                    // convert image to greyscale.
                    Imgproc.cvtColor(rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);

                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect())
                        {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                            detectResult = faces.toList();

                            // correct the offset between the face rectangles returned by OpenCV and Dlib.
                            foreach (Rect r in detectResult)
                            {
                                r.y += (int)(r.height * 0.1f);
                            }
                        }
                }

                // face tracking.
                List <TrackedRect> trackedRects = new List <TrackedRect>();
                rectangleTracker.UpdateTrackedObjects(detectResult);
                rectangleTracker.GetObjects(trackedRects, true);

                // create noise filter.
                foreach (var openCVRect in trackedRects)
                {
                    if (openCVRect.state == TrackedState.NEW)
                    {
                        if (!lowPassFilterDict.ContainsKey(openCVRect.id))
                        {
                            lowPassFilterDict.Add(openCVRect.id, new LowPassPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
                        }
                        if (!opticalFlowFilterDict.ContainsKey(openCVRect.id))
                        {
                            opticalFlowFilterDict.Add(openCVRect.id, new OFPointsFilter((int)faceLandmarkDetector.GetShapePredictorNumParts()));
                        }
                    }
                    else if (openCVRect.state == TrackedState.DELETED)
                    {
                        if (lowPassFilterDict.ContainsKey(openCVRect.id))
                        {
                            lowPassFilterDict[openCVRect.id].Dispose();
                            lowPassFilterDict.Remove(openCVRect.id);
                        }
                        if (opticalFlowFilterDict.ContainsKey(openCVRect.id))
                        {
                            opticalFlowFilterDict[openCVRect.id].Dispose();
                            opticalFlowFilterDict.Remove(openCVRect.id);
                        }
                    }
                }

                // detect face landmark points.
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbMat);
                List <List <Vector2> > landmarkPoints = new List <List <Vector2> >();
                foreach (var openCVRect in trackedRects)
                {
                    if (openCVRect.state > TrackedState.NEW_DISPLAYED && openCVRect.state < TrackedState.NEW_HIDED)
                    {
                        UnityEngine.Rect rect   = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
                        List <Vector2>   points = faceLandmarkDetector.DetectLandmark(rect);

                        // apply noise filter.
                        if (enableNoiseFilter)
                        {
                            opticalFlowFilterDict[openCVRect.id].Process(rgbMat, points, points);
                            lowPassFilterDict[openCVRect.id].Process(rgbMat, points, points);
                        }

                        landmarkPoints.Add(points);
                    }
                }

                // filter non frontal faces.
                if (filterNonFrontalFaces)
                {
                    for (int i = 0; i < landmarkPoints.Count; i++)
                    {
                        if (frontalFaceChecker.GetFrontalFaceRate(landmarkPoints[i]) < frontalFaceRateLowerLimit)
                        {
                            trackedRects.RemoveAt(i);
                            landmarkPoints.RemoveAt(i);
                            i--;
                        }
                    }
                }

                // face swapping.
                if (landmarkPoints.Count >= 2)
                {
                    int ann = 0, bob = 1;
                    for (int i = 0; i < landmarkPoints.Count - 1; i += 2)
                    {
                        ann = i;
                        bob = i + 1;

                        faceSwapper.SwapFaces(rgbMat, landmarkPoints[ann], landmarkPoints[bob], 1);
                    }
                }

                // draw face rects.
                if (displayFaceRects)
                {
                    for (int i = 0; i < trackedRects.Count; i++)
                    {
                        Rect             openCVRect = trackedRects[i];
                        UnityEngine.Rect rect       = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbMat, rect, new Scalar(255, 0, 0, 255), 2);
                        //Imgproc.putText (rgbMat, " " + frontalFaceChecker.GetFrontalFaceAngles (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                        //Imgproc.putText (rgbMat, " " + frontalFaceChecker.GetFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    }
                }

                //Imgproc.putText (rgbMat, "W:" + rgbMat.width () + " H:" + rgbMat.height () + " SO:" + Screen.orientation, new Point (5, rgbMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.UnityUtils.Utils.fastMatToTexture2D(rgbMat, texture);
            }
        }
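        The lowPassFilterDict/opticalFlowFilterDict bookkeeping above follows one lifecycle rule: allocate a filter when a rect enters in state NEW, dispose and remove it when the rect reaches DELETED, so filters never leak as faces come and go. A generic sketch of that pattern, assuming only the TrackedRect fields (id, state) used above (requires using System and System.Collections.Generic):

        // Sketch of the per-tracked-object resource lifecycle used above.
        // 'makeFilter' is a hypothetical factory for the concrete filter type.
        static void SyncFilters<T>(Dictionary<int, T> filters, List<TrackedRect> rects,
                                   Func<T> makeFilter) where T : IDisposable
        {
            foreach (var r in rects)
            {
                if (r.state == TrackedState.NEW && !filters.ContainsKey(r.id))
                {
                    filters.Add(r.id, makeFilter());          // new track: allocate
                }
                else if (r.state == TrackedState.DELETED && filters.ContainsKey(r.id))
                {
                    filters[r.id].Dispose();                  // dead track: release
                    filters.Remove(r.id);
                }
            }
        }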
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                Mat   downScaleRgbaMat = null;
                float DOWNSCALE_RATIO  = 1.0f;
                if (enableDownScale)
                {
                    downScaleRgbaMat = imageOptimizationHelper.GetDownScaleMat(rgbaMat);
                    DOWNSCALE_RATIO  = imageOptimizationHelper.downscaleRatio;
                }
                else
                {
                    downScaleRgbaMat = rgbaMat;
                    DOWNSCALE_RATIO  = 1.0f;
                }


                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, downScaleRgbaMat);

                // Detect faces on the resized image
                if (!enableSkipFrame || !imageOptimizationHelper.IsCurrentFrameSkipped())
                {
                    //detect face rects
                    if (useOpenCVFaceDetector)
                    {
                        // convert image to greyscale.
                        Imgproc.cvtColor(downScaleRgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                        using (Mat equalizeHistMat = new Mat())
                            using (MatOfRect faces = new MatOfRect()) {
                                Imgproc.equalizeHist(grayMat, equalizeHistMat);

                                cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                                List <OpenCVForUnity.Rect> opencvDetectResult = faces.toList();

                                // adjust to Dlib's result.
                                detectionResult.Clear();
                                foreach (var opencvRect in opencvDetectResult)
                                {
                                    detectionResult.Add(new UnityEngine.Rect((float)opencvRect.x, (float)opencvRect.y + (float)(opencvRect.height * 0.1f), (float)opencvRect.width, (float)opencvRect.height));
                                }
                            }
                    }
                    else
                    {
                        detectionResult = faceLandmarkDetector.Detect();
                    }
                }


                foreach (var rect in detectionResult)
                {
                    //detect landmark points
                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);

                    List <Vector2> originalPoints = new List <Vector2> (points.Count);
                    foreach (var point in points)
                    {
                        originalPoints.Add(new Vector2(point.x * DOWNSCALE_RATIO, point.y * DOWNSCALE_RATIO));
                    }

                    //draw landmark points
                    OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, originalPoints, new Scalar(0, 255, 0, 255), 2);

                    UnityEngine.Rect originalRect = new UnityEngine.Rect(rect.x * DOWNSCALE_RATIO, rect.y * DOWNSCALE_RATIO, rect.width * DOWNSCALE_RATIO, rect.height * DOWNSCALE_RATIO);
                    //draw face rect
                    OpenCVForUnityUtils.DrawFaceRect(rgbaMat, originalRect, new Scalar(255, 0, 0, 255), 2);
                }

                //Imgproc.putText (rgbaMat, "Original:(" + rgbaMat.width () + "," + rgbaMat.height () + ") DownScale:(" + downScaleRgbaMat.width () + "," + downScaleRgbaMat.height () + ") FrameSkipping: " + imageOptimizationHelper.frameSkippingRatio, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.fastMatToTexture2D(rgbaMat, texture);
            }
        }
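        Because detection runs on the downscaled image, every rect and landmark has to be multiplied by DOWNSCALE_RATIO before it is drawn on the full-resolution rgbaMat, which is exactly what the loop above does inline. The same mapping as a small helper (the name is mine):

        // Hypothetical helper: maps points detected on a downscaled image back to the
        // original resolution, mirroring the DOWNSCALE_RATIO logic above.
        static List<Vector2> ToOriginalScale(List<Vector2> points, float downscaleRatio)
        {
            List<Vector2> original = new List<Vector2>(points.Count);
            foreach (var p in points)
            {
                original.Add(new Vector2(p.x * downscaleRatio, p.y * downscaleRatio));
            }
            return original;
        }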
Example n. 34
        /**
         * Extracts text regions from image.
         *
         * param image Source image from which text blocks need to be extracted. Should be CV_8UC3 (color).
         * param er_filter1 Extremal Region Filter for the 1st stage classifier of N&M algorithm CITE: Neumann12
         * param er_filter2 Extremal Region Filter for the 2nd stage classifier of N&M algorithm CITE: Neumann12
         * param groups_rects Output list of rectangle blocks with text
         */
        public static void detectRegions(Mat image, ERFilter er_filter1, ERFilter er_filter2, MatOfRect groups_rects)
        {
            if (image != null)
            {
                image.ThrowIfDisposed();
            }
            if (er_filter1 != null)
            {
                er_filter1.ThrowIfDisposed();
            }
            if (er_filter2 != null)
            {
                er_filter2.ThrowIfDisposed();
            }
            if (groups_rects != null)
            {
                groups_rects.ThrowIfDisposed();
            }
            Mat groups_rects_mat = groups_rects;

            text_Text_detectRegions_13(image.nativeObj, er_filter1.getNativeObjAddr(), er_filter2.getNativeObjAddr(), groups_rects_mat.nativeObj);
        }
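        A hedged usage sketch for detectRegions: the docstring above requires a CV_8UC3 input, so an RGBA frame must be converted first. Creating the two ERFilter stages is omitted because it depends on the trained N&M classifier files; this also assumes the wrapper lives on the usual Text class of the text module.

        // Minimal sketch, assuming er_filter1/er_filter2 were created elsewhere from
        // the N&M classifier files. detectRegions expects a 3-channel color Mat.
        Mat bgrMat = new Mat();
        Imgproc.cvtColor(rgbaMat, bgrMat, Imgproc.COLOR_RGBA2BGR);

        using (MatOfRect groupRects = new MatOfRect())
        {
            Text.detectRegions(bgrMat, er_filter1, er_filter2, groupRects);

            // draw each detected text block back onto the RGBA frame
            foreach (OpenCVForUnity.Rect r in groupRects.toArray())
            {
                Imgproc.rectangle(rgbaMat, new Point(r.x, r.y),
                                  new Point(r.x + r.width, r.y + r.height),
                                  new Scalar(0, 255, 0, 255), 2);
            }
        }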
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();


                //convert image to greyscale
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);


                if (isAutoResetMode || faceTracker.getPoints().Count <= 0)
                {
//                    Debug.Log ("detectFace");

                    //convert image to greyscale
                    using (Mat equalizeHistMat = new Mat()) using (MatOfRect faces = new MatOfRect()) {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
                                                     | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                                     | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());



                            if (faces.rows() > 0)
                            {
//                            Debug.Log ("faces " + faces.dump ());

                                List <OpenCVForUnity.CoreModule.Rect> rectsList = faces.toList();
                                List <Point[]> pointsList = faceTracker.getPoints();

                                if (isAutoResetMode)
                                {
                                    //add initial face points from MatOfRect
                                    if (pointsList.Count <= 0)
                                    {
                                        faceTracker.addPoints(faces);
//                                    Debug.Log ("reset faces ");
                                    }
                                    else
                                    {
                                        for (int i = 0; i < rectsList.Count; i++)
                                        {
                                            OpenCVForUnity.CoreModule.Rect trackRect = new OpenCVForUnity.CoreModule.Rect(rectsList [i].x + rectsList [i].width / 3, rectsList [i].y + rectsList [i].height / 2, rectsList [i].width / 3, rectsList [i].height / 3);
                                            //Check whether the nose point is contained in trackRect.
                                            if (i < pointsList.Count && !trackRect.contains(pointsList [i] [67]))
                                            {
                                                rectsList.RemoveAt(i);
                                                pointsList.RemoveAt(i);
//                                            Debug.Log ("remove " + i);
                                            }
                                            Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
                                        }
                                    }
                                }
                                else
                                {
                                    faceTracker.addPoints(faces);
                                }

                                //draw face rect
                                for (int i = 0; i < rectsList.Count; i++)
                                {
                                    Imgproc.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2);
                                }
                            }
                            else
                            {
                                if (isAutoResetMode)
                                {
                                    faceTracker.reset();

                                    rightEye.SetActive(false);
                                    leftEye.SetActive(false);
                                    head.SetActive(false);
                                    mouth.SetActive(false);
                                    axes.SetActive(false);
                                }
                            }
                        }
                }


                //track face points. If there are no face points, track() always returns false.
                if (faceTracker.track(grayMat, faceTrackerParams))
                {
                    if (isShowingFacePoints)
                    {
                        faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
                    }

                    Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);


                    Point[] points = faceTracker.getPoints() [0];


                    if (points.Length > 0)
                    {
//                        for (int i = 0; i < points.Length; i++)
//                        {
//                            Imgproc.putText(rgbaMat, "" + i, new Point(points [i].x, points [i].y), Imgproc.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar(0, 0, 255, 255), 2, Imgproc.LINE_AA, false);
//                        }


                        imagePoints.fromArray(
                            points [31], //l eye
                            points [36], //r eye
                            points [67], //nose
                            points [48], //l mouth
                            points [54]  //r mouth
//                          ,
//                          points [0],//l ear
//                          points [14]//r ear
                            );


                        Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                        bool isRefresh = false;

                        if (tvec.get(2, 0) [0] > 0 && tvec.get(2, 0) [0] < 1200 * ((float)rgbaMat.cols() / (float)webCamTextureToMatHelper.requestedWidth))
                        {
                            isRefresh = true;

                            if (oldRvec == null)
                            {
                                oldRvec = new Mat();
                                rvec.copyTo(oldRvec);
                            }
                            if (oldTvec == null)
                            {
                                oldTvec = new Mat();
                                tvec.copyTo(oldTvec);
                            }


                            //filter Rvec Noise.
                            using (Mat absDiffRvec = new Mat()) {
                                Core.absdiff(rvec, oldRvec, absDiffRvec);

                                //              Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                using (Mat cmpRvec = new Mat()) {
                                    Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpRvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }

                            //filter Tvec Noise.
                            using (Mat absDiffTvec = new Mat()) {
                                Core.absdiff(tvec, oldTvec, absDiffTvec);

                                //              Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                using (Mat cmpTvec = new Mat()) {
                                    Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);

                                    if (Core.countNonZero(cmpTvec) > 0)
                                    {
                                        isRefresh = false;
                                    }
                                }
                            }
                        }

                        if (isRefresh)
                        {
                            if (isShowingEffects)
                            {
                                rightEye.SetActive(true);
                            }
                            if (isShowingEffects)
                            {
                                leftEye.SetActive(true);
                            }
                            if (isShowingHead)
                            {
                                head.SetActive(true);
                            }
                            if (isShowingAxes)
                            {
                                axes.SetActive(true);
                            }


                            if ((Mathf.Abs((float)(points [48].x - points [56].x)) < Mathf.Abs((float)(points [31].x - points [36].x)) / 2.2 &&
                                 Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.9) ||
                                Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.7)
                            {
                                if (isShowingEffects)
                                {
                                    mouth.SetActive(true);
                                }
                            }
                            else
                            {
                                if (isShowingEffects)
                                {
                                    mouth.SetActive(false);
                                }
                            }

                            rvec.copyTo(oldRvec);
                            tvec.copyTo(oldTvec);

                            Calib3d.Rodrigues(rvec, rotM);

                            transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
                            transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
                            transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
                            transformationM.SetRow(3, new Vector4(0, 0, 0, 1));

                            // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                            ARM = invertYM * transformationM;

                            // Apply Z-axis inverted matrix.
                            ARM = ARM * invertZM;

                            if (shouldMoveARCamera)
                            {
                                if (ARGameObject != null)
                                {
                                    ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                                    ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
                                    ARGameObject.SetActive(true);
                                }
                            }
                            else
                            {
                                ARM = ARCamera.transform.localToWorldMatrix * ARM;

                                if (ARGameObject != null)
                                {
                                    ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
                                    ARGameObject.SetActive(true);
                                }
                            }
                        }
                    }
                }

//                Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.fastMatToTexture2D(rgbaMat, texture);
            }

            if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
            {
                faceTracker.reset();
                if (oldRvec != null)
                {
                    oldRvec.Dispose();
                    oldRvec = null;
                }
                if (oldTvec != null)
                {
                    oldTvec.Dispose();
                    oldTvec = null;
                }

                rightEye.SetActive(false);
                leftEye.SetActive(false);
                head.SetActive(false);
                mouth.SetActive(false);
                axes.SetActive(false);
            }
        }
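        The invertYM and invertZM matrices used in the pose step above are declared outside this excerpt; in these samples they are normally plain axis-flip scale matrices that convert between OpenCV's right-handed, Y-down coordinates and Unity's left-handed, Y-up ones. A sketch of the usual construction (inferred, since the declarations are not shown here):

        // Sketch: coordinate-system conversion matrices assumed by the AR pose code above.
        Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity,
                                           new Vector3(1, -1, 1));  // flip Y axis
        Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity,
                                           new Vector3(1, 1, -1));  // flip Z axis

        // ARM = invertYM * transformationM maps the OpenCV pose into Unity's handedness,
        // and post-multiplying by invertZM flips the model's Z axis, as in Update above.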
Example n. 36
 public void addPoints(MatOfRect rects)
 {
     points.AddRange(detector.convertMatOfRectToPoints(rects));
 }
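        addPoints is how detection results seed the tracker: a MatOfRect from the cascade is converted into initial landmark guesses whenever the tracker has no current points. A short sketch of the call site, matching the Update loops elsewhere in this document:

        // Sketch: seeding the face tracker from a cascade detection result.
        using (MatOfRect faces = new MatOfRect())
        {
            cascade.detectMultiScale(grayMat, faces, 1.1, 2, Objdetect.CASCADE_SCALE_IMAGE,
                                     new Size(grayMat.cols() * 0.15, grayMat.cols() * 0.15), new Size());

            if (faces.rows() > 0 && faceTracker.getPoints().Count <= 0)
            {
                faceTracker.addPoints(faces);  // convert rects to initial model points
            }
        }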
        // Update is called once per frame
        void Update()
        {
            if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
            {
                Mat rgbaMat = webCamTextureToMatHelper.GetMat();

                // detect faces.
                List <OpenCVForUnity.Rect> detectResult = new List <OpenCVForUnity.Rect> ();
                if (useDlibFaceDetecter)
                {
                    OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
                    List <UnityEngine.Rect> result = faceLandmarkDetector.Detect();

                    foreach (var unityRect in result)
                    {
                        detectResult.Add(new OpenCVForUnity.Rect((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
                    }
                }
                else
                {
                    // convert image to greyscale.
                    Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect()) {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                            detectResult = faces.toList();

                            // adjust to Dlib's result.
                            foreach (OpenCVForUnity.Rect r in detectResult)
                            {
                                r.y += (int)(r.height * 0.1f);
                            }
                        }
                }

                // face tracking.
                if (enableTracking)
                {
                    rectangleTracker.UpdateTrackedObjects(detectResult);
                    detectResult = new List <OpenCVForUnity.Rect> ();
                    rectangleTracker.GetObjects(detectResult, true);
                }

                // detect face landmark points.
                OpenCVForUnityUtils.SetImage(faceLandmarkDetector, rgbaMat);
                List <List <Vector2> > landmarkPoints = new List <List <Vector2> > ();
                foreach (var openCVRect in detectResult)
                {
                    UnityEngine.Rect rect = new UnityEngine.Rect(openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);

                    List <Vector2> points = faceLandmarkDetector.DetectLandmark(rect);
                    landmarkPoints.Add(points);
                }

                // filter non frontal faces.
                if (filterNonFrontalFaces)
                {
                    for (int i = 0; i < landmarkPoints.Count; i++)
                    {
                        if (frontalFaceChecker.GetFrontalFaceRate(landmarkPoints [i]) < frontalFaceRateLowerLimit)
                        {
                            detectResult.RemoveAt(i);
                            landmarkPoints.RemoveAt(i);
                            i--;
                        }
                    }
                }

                // face swapping.
                if (landmarkPoints.Count >= 2)
                {
                    int ann = 0, bob = 1;
                    for (int i = 0; i < landmarkPoints.Count - 1; i += 2)
                    {
                        ann = i;
                        bob = i + 1;

                        faceSwapper.SwapFaces(rgbaMat, landmarkPoints [ann], landmarkPoints [bob], 1);
                    }
                }

                // draw face rects.
                if (displayFaceRects)
                {
                    for (int i = 0; i < detectResult.Count; i++)
                    {
                        UnityEngine.Rect rect = new UnityEngine.Rect(detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
                        OpenCVForUnityUtils.DrawFaceRect(rgbaMat, rect, new Scalar(255, 0, 0, 255), 2);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceParam.getAngleOfFrontalFace (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                        //Imgproc.putText (rgbaMat, " " + frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
                    }
                }

                Imgproc.putText(rgbaMat, "W:" + rgbaMat.width() + " H:" + rgbaMat.height() + " SO:" + Screen.orientation, new Point(5, rgbaMat.rows() - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

                OpenCVForUnity.Utils.matToTexture2D(rgbaMat, texture, colors);
            }
        }
				private IEnumerator init ()
				{
						if (webCamTexture != null) {
								webCamTexture.Stop ();
								initDone = false;
				
								rgbaMat.Dispose ();
								grayMat.Dispose ();
						}

						// Checks how many and which cameras are available on the device
						for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {

								if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing) {
										Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

										webCamDevice = WebCamTexture.devices [cameraIndex];
										webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
					
										break;
								}
						}
			
						if (webCamTexture == null) {
								webCamDevice = WebCamTexture.devices [0];
								webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
						}
			
						Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
			
			
						// Starts the camera
						webCamTexture.Play ();

						while (true) {
								//If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
								#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
				                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
								#else
								if (webCamTexture.didUpdateThisFrame) {
										#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2                                    
										while (webCamTexture.width <= 16) {
												webCamTexture.GetPixels32 ();
												yield return new WaitForEndOfFrame ();
										} 
										#endif
								#endif

										Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
										Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);


										colors = new Color32[webCamTexture.width * webCamTexture.height];
										rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
										grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
										texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

										gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

										updateLayout ();
					
										cascade = new CascadeClassifier (Utils.getFilePath ("lbpcascade_frontalface.xml"));
										//cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
										faces = new MatOfRect ();

										screenOrientation = Screen.orientation;
										initDone = true;
					
										break;
								} else {
										yield return 0;
								}
						}
				}
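				updateLayout is called above but not shown in this excerpt; in these samples it typically just sizes the display quad and the orthographic camera to the webcam texture, the same way OnWebCamTextureToMatHelperInited does further down. A hedged reconstruction along those lines:

				// Hypothetical reconstruction of updateLayout(), based on the camera-fitting
				// code in OnWebCamTextureToMatHelperInited below.
				private void updateLayout ()
				{
						gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

						float width = webCamTexture.width;
						float height = webCamTexture.height;

						float widthScale = (float)Screen.width / width;
						float heightScale = (float)Screen.height / height;
						if (widthScale < heightScale) {
								Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
						} else {
								Camera.main.orthographicSize = height / 2;
						}
				}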
				private void _ThreadWorker (System.Object o)
				{
						threadComm com = o as threadComm;

						MatOfRect faces = new MatOfRect ();
						if (cascade != null)
								// detect on the thread-local grayMat4Thread copy so the worker does not race the main thread
								cascade.detectMultiScale (grayMat4Thread, faces, 1.1, 2, Objdetect.CASCADE_SCALE_IMAGE, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                    new Size (grayMat4Thread.height () * 0.2, grayMat4Thread.height () * 0.2), new Size ());

						com.result = faces;
						com.done = true;
				}
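				A sketch of how _ThreadWorker is typically driven: the comm object is queued onto the thread pool, and the main thread polls its done flag before consuming the result. Only the result/done fields are taken from the worker above; the parameterless constructor and the dispatch pattern are assumptions.

				// Hedged sketch: dispatching the detection worker and polling for its result.
				threadComm com = new threadComm ();  // assumes a parameterless constructor
				System.Threading.ThreadPool.QueueUserWorkItem (_ThreadWorker, com);

				// ...later, on the main thread (e.g. in Update()):
				if (com.done) {
						MatOfRect faces = com.result;  // consume the detection result
						com.done = false;              // allow the next dispatch
				}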
    // Update is called once per frame
    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            Mat rgbaMat = webCamTextureToMatHelper.GetMat();

            //convert image to greyscale
            Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);


            if (isAutoResetMode || faceTracker.getPoints().Count <= 0)
            {
//                    Debug.Log ("detectFace");

                //convert image to greyscale
                using (Mat equalizeHistMat = new Mat()) using (MatOfRect faces = new MatOfRect()) {
                        Imgproc.equalizeHist(grayMat, equalizeHistMat);

                        cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
                                                 //                                                                                 | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                                 | Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                        if (faces.rows() > 0)
                        {
//                            Debug.Log ("faces " + faces.dump ());

                            List <OpenCVForUnity.CoreModule.Rect> rectsList = faces.toList();
                            List <Point[]> pointsList = faceTracker.getPoints();

                            if (isAutoResetMode)
                            {
                                //add initial face points from MatOfRect
                                if (pointsList.Count <= 0)
                                {
                                    faceTracker.addPoints(faces);
//                                    Debug.Log ("reset faces ");
                                }
                                else
                                {
                                    for (int i = 0; i < rectsList.Count; i++)
                                    {
                                        OpenCVForUnity.CoreModule.Rect trackRect = new OpenCVForUnity.CoreModule.Rect(rectsList [i].x + rectsList [i].width / 3, rectsList [i].y + rectsList [i].height / 2, rectsList [i].width / 3, rectsList [i].height / 3);
                                        //Check whether the nose point is contained in trackRect.
                                        if (i < pointsList.Count && !trackRect.contains(pointsList [i] [67]))
                                        {
                                            rectsList.RemoveAt(i);
                                            pointsList.RemoveAt(i);
//                                                                                      Debug.Log ("remove " + i);
                                        }
                                        //draw the nose-region tracking rectangle (remove this call to hide it)
                                        Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
                                    }
                                }
                            }
                            else
                            {
                                faceTracker.addPoints(faces);
                            }
                            //draw face rect
                            for (int i = 0; i < rectsList.Count; i++)
                            {
                                Imgproc.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2);
                            }
                        }
                        else
                        {
                            if (isAutoResetMode)
                            {
                                faceTracker.reset();
                            }
                        }
                    }
            }

            //track face points. If there are no face points, track() always returns false.
            if (faceTracker.track(grayMat, faceTrackerParams))
            {
                //GameObject.FindGameObjectWithTag("left hand").transform.localScale = new Vector3(0.05f, 0.05f, 50);
                //GameObject.FindGameObjectWithTag("right hand").transform.localScale = new Vector3(0.05f, 0.05f, 50);
                //facecount = 0;
                if (facerec > 15)
                {
                    GameObject.FindGameObjectWithTag("left hand").transform.localScale  = new Vector3(0.2f, 0.2f, 50);
                    GameObject.FindGameObjectWithTag("right hand").transform.localScale = new Vector3(0.2f, 0.2f, 50);
                    facecount = 0;
                }
                else
                {
                    facerec++;
                }
                //uncomment below for rectangle around face
                //faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
            }
            else
            {
                //facecount prevents the hands from flickering when face detection is intermittent
                if (facecount > 15)
                {
                    facerec = 0;
                    GameObject.FindGameObjectWithTag("left hand").transform.localScale  = new Vector3(0f, 0f, 0);
                    GameObject.FindGameObjectWithTag("right hand").transform.localScale = new Vector3(0f, 0f, 0);                    facecount++;
                }
                else
                {
                    facecount++;
                }
            }

            //Imgproc.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

//                Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

            Utils.fastMatToTexture2D(rgbaMat, texture);
        }

        //face tracking resets on screen tap or space bar
        if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
        {
            faceTracker.reset();
        }

        if (Input.GetKeyDown(KeyCode.Escape))
        {
            if (SpeechRecognizer.IsRecording())
            {
                SpeechRecognizer.StopIfRecording();
                //resultText.text = "I stopped recording";
            }
            Application.Quit();
            //Application.LoadLevel ("MainActivity.class");
        }
    }
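    The facerec/facecount pair above implements a simple hysteresis: the hand objects only appear after roughly 15 consecutive frames with a tracked face and only disappear after roughly 15 consecutive frames without one, which suppresses flicker from intermittent detections. The same idea as a tiny helper class (names are mine):

    // Hypothetical debounce helper capturing the facerec/facecount hysteresis above:
    // the visible state only flips after 'threshold' consecutive agreeing frames.
    class FrameDebouncer
    {
        readonly int threshold;
        int hits, misses;

        public bool Visible { get; private set; }

        public FrameDebouncer(int threshold) { this.threshold = threshold; }

        public void Tick(bool detectedThisFrame)
        {
            if (detectedThisFrame)
            {
                misses = 0;
                if (++hits > threshold) Visible = true;    // show after N stable frames
            }
            else
            {
                hits = 0;
                if (++misses > threshold) Visible = false; // hide after N missed frames
            }
        }
    }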
        /// <summary>
        /// Raises the web cam texture to mat helper inited event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInited()
        {
            Debug.Log ("OnWebCamTextureToMatHelperInited");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();

            texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
            Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width = webCamTextureMat.width ();
            float height = webCamTextureMat.height ();

            float widthScale = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;
            if (widthScale < heightScale) {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            } else {
                Camera.main.orthographicSize = height / 2;
            }

            grayMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);

            faces = new MatOfRect ();
        }
        private IEnumerator init()
        {
            if (webCamTexture != null) {
                faceTracker.reset ();
                webCamTexture.Stop ();
                initDone = false;

                rgbaMat.Dispose ();
                grayMat.Dispose ();
            }

            // Checks how many and which cameras are available on the device
            for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {

                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {

                    Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                    webCamDevice = WebCamTexture.devices [cameraIndex];

                    webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

                    break;
                }
            }

            if (webCamTexture == null) {
                webCamDevice = WebCamTexture.devices [0];
                webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
            }

            Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

            // Starts the camera
            webCamTexture.Play ();

            while (true) {
                //If you want to use webCamTexture.width and webCamTexture.height on iOS, you have to wait until webCamTexture.didUpdateThisFrame == 1; otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                #else
                if (webCamTexture.didUpdateThisFrame) {
                #endif
                    Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                    Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);

                    colors = new Color32[webCamTexture.width * webCamTexture.height];

                    rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                    grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

                    texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                    cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
                    if (cascade.empty ()) {
                        Debug.LogError ("cascade file is not loaded. Please copy it from “FaceTrackerSample/StreamingAssets/” to the “Assets/StreamingAssets/” folder.");
                    }

                    gameObject.transform.localEulerAngles = new Vector3 (0, 0, 0);
                    //gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

                    gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

                    //bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
                    //float scaleX = 1;
                    //float scaleY = _videoVerticallyMirrored ? -1.0f : 1.0f;
                    //gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

                    gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                    Camera.main.orthographicSize = webCamTexture.height / 2;

                    initDone = true;

                    break;
                } else {
                    yield return 0;
                }
            }
        }

        // Update is called once per frame
        void Update ()
        {
            if (!initDone)
                return;

            #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
            if (webCamTexture.width > 16 && webCamTexture.height > 16) {
            #else
            if (webCamTexture.didUpdateThisFrame) {
            #endif
                Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

                //flip to correct direction.
                if (webCamTexture.videoVerticallyMirrored) {
                    if (webCamDevice.isFrontFacing) {
                        if (webCamTexture.videoRotationAngle == 0) {
                            Core.flip (rgbaMat, rgbaMat, -1);
                        } else if (webCamTexture.videoRotationAngle == 180) {
                            Core.flip (rgbaMat, rgbaMat, 0);
                        }
                    } else {
                        if (webCamTexture.videoRotationAngle == 0) {

                        } else if (webCamTexture.videoRotationAngle == 180) {
                            Core.flip (rgbaMat, rgbaMat, 1);
                        }
                    }
                } else {
                    if (webCamDevice.isFrontFacing) {
                        if (webCamTexture.videoRotationAngle == 0) {
                            Core.flip (rgbaMat, rgbaMat, 1);
                        } else if (webCamTexture.videoRotationAngle == 180) {
                            Core.flip (rgbaMat, rgbaMat, 0);
                        }
                    } else {
                        if (webCamTexture.videoRotationAngle == 0) {

                        } else if (webCamTexture.videoRotationAngle == 180) {
                            Core.flip (rgbaMat, rgbaMat, -1);
                        }
                    }
                }

                //convert image to greyscale
                Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                if (faceTracker.getPoints ().Count <= 0) {
                    Debug.Log ("detectFace");

                    //equalize the histogram before detection
                    using (Mat equalizeHistMat = new Mat ())
                    using (MatOfRect faces = new MatOfRect ()) {

                        Imgproc.equalizeHist (grayMat, equalizeHistMat);

                        cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0
                            //| Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                            | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());

                        if (faces.rows () > 0) {
                            Debug.Log ("faces " + faces.dump ());
                            //add initial face points from MatOfRect
                            faceTracker.addPoints (faces);

                            //draw face rect
                            OpenCVForUnity.Rect[] rects = faces.toArray ();
                            for (int i = 0; i < rects.Length; i++) {
                                Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
                            }
                        }
                    }
                }

                //track face points. If there are no face points yet, track () always returns false.
                if (faceTracker.track (grayMat, faceTrackerParams))
                    faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));

                Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

                Utils.matToTexture2D (rgbaMat, texture, colors);
            }

            if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
                faceTracker.reset ();
            }
        }

        void OnDisable ()
        {
            webCamTexture.Stop ();
        }

        void OnGUI ()
        {
            float screenScale = Screen.height / 240.0f;
            Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
            GUI.matrix = scaledMatrix;

            GUILayout.BeginVertical ();
            if (GUILayout.Button ("back")) {
                Application.LoadLevel ("FaceTrackerSample");
            }
            if (GUILayout.Button ("change camera")) {
                isFrontFacing = !isFrontFacing;
                StartCoroutine (init ());
            }

            GUILayout.EndVertical ();
        }

            }
        }
        private void ThreadWorker()
        {
            if (isThreadRunning) return;

            Debug.Log("Thread Start");

            isThreadRunning = true;

            threadComm.shouldDetectInMultiThread = false;
            didUpdateTheDetectionResult = false;

            tokenSource_ = new CancellationTokenSource();

            task_ = Task.Factory.StartNew(
                (o) =>
                {
                    ThreadComm comm = o as ThreadComm;

                    while(true)
                    {
                        tokenSource_.Token.ThrowIfCancellationRequested();

                        if(!comm.shouldDetectInMultiThread) continue;
          
                        lock (thisLock)
                        {
            

                            MatOfRect faces = new MatOfRect();
                            if (cascade != null)
                                cascade.detectMultiScale(grayMat4Thread, faces, 1.1, 2, Objdetect.CASCADE_SCALE_IMAGE, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                                    new Size(grayMat4Thread.height() * 0.2, grayMat4Thread.height() * 0.2), new Size());

                            resultDetect = faces;
                        }
                        comm.shouldDetectInMultiThread = false;

                        didUpdateTheDetectionResult = true;
                    }

                }
                , threadComm
                , tokenSource_.Token
            ).ContinueWith(t =>
            {
                tokenSource_.Dispose();
                tokenSource_ = null;

                isThreadRunning = false;
            });

        }
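        // How a caller would hand work to ThreadWorker: prepare the grayscale frame under
        // the shared lock, then raise the flag the task polls. A minimal sketch, assuming
        // it runs on the main thread once per frame; rgbaMat is assumed to hold the
        // current camera frame.
        private void RequestDetectionAsync()
        {
            lock (thisLock)
            {
                Imgproc.cvtColor(rgbaMat, grayMat4Thread, Imgproc.COLOR_RGBA2GRAY);
                Imgproc.equalizeHist(grayMat4Thread, grayMat4Thread);
            }
            threadComm.shouldDetectInMultiThread = true;
        }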
        private void Run()
        {
            //if true, error logs from the native side of OpenCV will be displayed in the Unity Editor console.
            Utils.setDebugMode(true);


            Mat frame = Imgcodecs.imread(scenetext01_jpg_filepath);

            #if !UNITY_WSA_10_0
            if (frame.empty())
            {
                Debug.LogError("text/scenetext01.jpg is not loaded. Please copy it from “OpenCVForUnity/StreamingAssets/text/” to the “Assets/StreamingAssets/” folder.");
            }
            #endif

            Mat binaryMat = new Mat();
            Mat maskMat   = new Mat();


            List <MatOfPoint> regions = new List <MatOfPoint> ();

            ERFilter er_filter1 = Text.createERFilterNM1(trained_classifierNM1_xml_filepath, 8, 0.00015f, 0.13f, 0.2f, true, 0.1f);

            ERFilter er_filter2 = Text.createERFilterNM2(trained_classifierNM2_xml_filepath, 0.5f);


            Mat transition_p = new Mat(62, 62, CvType.CV_64FC1);
            //            string filename = "OCRHMM_transitions_table.xml";
            //            FileStorage fs(filename, FileStorage::READ);
            //            fs["transition_probabilities"] >> transition_p;
            //            fs.release();

            //Load TransitionProbabilitiesData.
            transition_p.put(0, 0, GetTransitionProbabilitiesData(OCRHMM_transitions_table_xml_filepath));

            Mat           emission_p = Mat.eye(62, 62, CvType.CV_64FC1);
            string        voc        = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789";
            OCRHMMDecoder decoder    = OCRHMMDecoder.create(
                OCRHMM_knn_model_data_xml_gz_filepath,
                voc, transition_p, emission_p);

            //Text Detection
            Imgproc.cvtColor(frame, frame, Imgproc.COLOR_BGR2RGB);
            Imgproc.cvtColor(frame, binaryMat, Imgproc.COLOR_RGB2GRAY);
            Imgproc.threshold(binaryMat, binaryMat, 0, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);
            Core.absdiff(binaryMat, new Scalar(255), maskMat);


            Text.detectRegions(binaryMat, er_filter1, er_filter2, regions);
            Debug.Log("regions.Count " + regions.Count);


            MatOfRect groups_rects           = new MatOfRect();
            List <OpenCVForUnity.Rect> rects = new List <OpenCVForUnity.Rect> ();
            Text.erGrouping(frame, binaryMat, regions, groups_rects);


            for (int i = 0; i < regions.Count; i++)
            {
                regions [i].Dispose();
            }
            regions.Clear();


            rects.AddRange(groups_rects.toList());

            groups_rects.Dispose();


            //Text Recognition (OCR)

            List <Mat> detections = new List <Mat> ();

            for (int i = 0; i < rects.Count; i++)
            {
                Mat group_img = new Mat();
                maskMat.submat(rects [i]).copyTo(group_img);
                Core.copyMakeBorder(group_img, group_img, 15, 15, 15, 15, Core.BORDER_CONSTANT, new Scalar(0));
                detections.Add(group_img);
            }

            Debug.Log("detections.Count " + detections.Count);


            //#Visualization
            for (int i = 0; i < rects.Count; i++)
            {
                Imgproc.rectangle(frame, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0), 2);
                Imgproc.rectangle(frame, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 255, 255), 1);

                string output = decoder.run(detections [i], 0);
                if (!string.IsNullOrEmpty(output))
                {
                    Debug.Log("output " + output);
                    Imgproc.putText(frame, output, new Point(rects [i].x, rects [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(0, 0, 255), 1, Imgproc.LINE_AA, false);
                }
            }


            Texture2D texture = new Texture2D(frame.cols(), frame.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(frame, texture);

//            Texture2D texture = new Texture2D (detections [0].cols (), detections [0].rows (), TextureFormat.RGBA32, false);
//
//            Utils.matToTexture2D (detections [0], texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;


            for (int i = 0; i < detections.Count; i++)
            {
                detections [i].Dispose();
            }
            binaryMat.Dispose();
            maskMat.Dispose();

            Utils.setDebugMode(false);
        }
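        // GetTransitionProbabilitiesData is referenced above but not defined in this
        // snippet. A minimal sketch of one plausible implementation, assuming the file is
        // an OpenCV FileStorage XML whose <data> element holds the 62x62 matrix as
        // whitespace-separated doubles.
        private double[] GetTransitionProbabilitiesData(string filePath)
        {
            var doc = new System.Xml.XmlDocument();
            doc.Load(filePath);

            // FileStorage XML matrices keep their values inside a <data> element.
            string raw = doc.GetElementsByTagName("data")[0].InnerText;
            string[] tokens = raw.Split(new char[] { ' ', '\t', '\r', '\n' }, System.StringSplitOptions.RemoveEmptyEntries);

            double[] data = new double[tokens.Length];
            for (int i = 0; i < tokens.Length; i++)
            {
                data[i] = double.Parse(tokens[i], System.Globalization.CultureInfo.InvariantCulture);
            }
            return data;
        }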
                IEnumerator ThreadCoroutine(System.Object o)
                {
                    ThreadComm comm = o as ThreadComm;


                    while (true)
                    {
                        while (!comm.shouldDetectInMultiThread) { yield return null; }

                        lock (thisLock)
                        {
                            MatOfRect faces = new MatOfRect();
                            if (cascade != null)
                                cascade.detectMultiScale(grayMat4Thread, faces, 1.1, 2, Objdetect.CASCADE_SCALE_IMAGE, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                                    new Size(grayMat4Thread.height() * 0.2, grayMat4Thread.height() * 0.2), new Size());

                            resultDetect = faces;
                        }
                        comm.shouldDetectInMultiThread = false;

                        didUpdateTheDetectionResult = true;
                    }
                }
Example n. 46
        private IEnumerator init()
        {
            if (webCamTexture != null)
            {
                faceTracker.reset();
                webCamTexture.Stop();
                initDone = false;

                rgbaMat.Dispose();
                grayMat.Dispose();
            }

            // Checks how many and which cameras are available on the device
            for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
            {
                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing)
                {
                    Debug.Log(cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                    webCamDevice = WebCamTexture.devices [cameraIndex];

                    webCamTexture = new WebCamTexture(webCamDevice.name, width, height);

                    break;
                }
            }

            if (webCamTexture == null)
            {
                webCamDevice  = WebCamTexture.devices [0];
                webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            }

            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);



            // Starts the camera
            webCamTexture.Play();


            while (true)
            {
                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16)
                {
                #else
                if (webCamTexture.didUpdateThisFrame)
                {
                #endif
                    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                    Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);

                    colors = new Color32[webCamTexture.width * webCamTexture.height];

                    rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                    grayMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

                    texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);


                    cascade = new CascadeClassifier(Utils.getFilePath("haarcascade_frontalface_alt.xml"));
                    if (cascade.empty())
                    {
                        Debug.LogError("cascade file is not loaded. Please copy it from “FaceTrackerSample/StreamingAssets/” to the “Assets/StreamingAssets/” folder.");
                    }

                    gameObject.transform.localEulerAngles = new Vector3(0, 0, 0);
//					gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);


                    gameObject.transform.localScale = new Vector3(webCamTexture.width, webCamTexture.height, 1);


//										bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
//										float scaleX = 1;
//										float scaleY = _videoVerticallyMirrored ? -1.0f : 1.0f;
//										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);


                    gameObject.GetComponent <Renderer> ().material.mainTexture = texture;

                    Camera.main.orthographicSize = webCamTexture.height / 2;

                    initDone = true;

                    break;
                }
                else
                {
                    yield return(0);
                }
            }
        }

        // Update is called once per frame
        void Update()
        {
            if (!initDone)
            {
                return;
            }
            #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
            if (webCamTexture.width > 16 && webCamTexture.height > 16)
            {
            #else
            if (webCamTexture.didUpdateThisFrame)
            {
            #endif
                Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

                //flip to correct direction.
                if (webCamTexture.videoVerticallyMirrored)
                {
                    if (webCamDevice.isFrontFacing)
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                            Core.flip(rgbaMat, rgbaMat, -1);
                        }
                        else if (webCamTexture.videoRotationAngle == 180)
                        {
                            Core.flip(rgbaMat, rgbaMat, 0);
                        }
                    }
                    else
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                        }
                        else if (webCamTexture.videoRotationAngle == 180)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                    }
                }
                else
                {
                    if (webCamDevice.isFrontFacing)
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                            Core.flip(rgbaMat, rgbaMat, 1);
                        }
                        else if (webCamTexture.videoRotationAngle == 180)
                        {
                            Core.flip(rgbaMat, rgbaMat, 0);
                        }
                    }
                    else
                    {
                        if (webCamTexture.videoRotationAngle == 0)
                        {
                        }
                        else if (webCamTexture.videoRotationAngle == 180)
                        {
                            Core.flip(rgbaMat, rgbaMat, -1);
                        }
                    }
                }

                //convert image to greyscale
                Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);


                if (faceTracker.getPoints().Count <= 0)
                {
                    Debug.Log("detectFace");

                    //equalize the histogram before detection
                    using (Mat equalizeHistMat = new Mat())
                        using (MatOfRect faces = new MatOfRect()) {
                            Imgproc.equalizeHist(grayMat, equalizeHistMat);

                            cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
//														                           | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                                     | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());



                            if (faces.rows() > 0)
                            {
                                Debug.Log("faces " + faces.dump());
                                //add initial face points from MatOfRect
                                faceTracker.addPoints(faces);

                                //draw face rect
                                OpenCVForUnity.Rect[] rects = faces.toArray();
                                for (int i = 0; i < rects.Length; i++)
                                {
                                    Core.rectangle(rgbaMat, new Point(rects [i].x, rects [i].y), new Point(rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar(255, 0, 0, 255), 2);
                                }
                            }
                        }
                }


                //track face points. If there are no face points yet, track() always returns false.
                if (faceTracker.track(grayMat, faceTrackerParams))
                {
                    faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
                }

                Core.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Core.LINE_AA, false);


                Utils.matToTexture2D(rgbaMat, texture, colors);
            }

            if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
            {
                faceTracker.reset();
            }
        }

        void OnDisable()
        {
            webCamTexture.Stop();
        }

        void OnGUI()
        {
            float     screenScale  = Screen.height / 240.0f;
            Matrix4x4 scaledMatrix = Matrix4x4.Scale(new Vector3(screenScale, screenScale, screenScale));

            GUI.matrix = scaledMatrix;


            GUILayout.BeginVertical();
            if (GUILayout.Button("back"))
            {
                Application.LoadLevel("FaceTrackerSample");
            }
            if (GUILayout.Button("change camera"))
            {
                isFrontFacing = !isFrontFacing;
                StartCoroutine(init());
            }

            GUILayout.EndVertical();
        }
    }
}
				private void _ThreadWorker (System.Object o)
				{
						ThreadComm comm = o as ThreadComm;


						while (!shouldStopThread) {
								if (!comm.shouldDetectInMultiThread)
										continue;


								lock (thisLock) {
										MatOfRect faces = new MatOfRect ();
										if (cascade != null)
												cascade.detectMultiScale (grayMat4Thread, faces, 1.1, 2, Objdetect.CASCADE_SCALE_IMAGE, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                                    new Size (grayMat4Thread.height () * 0.2, grayMat4Thread.height () * 0.2), new Size ());

										resultDetect = faces;
								}
								comm.shouldDetectInMultiThread = false;

								didUpdateTheDetectionResult = true;
						}

						isThreadRunning = false;
				}
Example n. 48
        /// <summary>
        /// Raises the web cam texture to mat helper initialized event.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized()
        {
            Debug.Log("OnWebCamTextureToMatHelperInitialized");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

            #if NETFX_CORE && !DISABLE_HOLOLENSCAMSTREAM_API
            // HololensCameraStream always returns image data in BGRA format.
            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.BGRA32, false);
            #else
            texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
            #endif

            texture.wrapMode = TextureWrapMode.Clamp;

            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            quad_renderer = gameObject.GetComponent <Renderer> () as Renderer;
            quad_renderer.sharedMaterial.SetTexture("_MainTex", texture);
            quad_renderer.sharedMaterial.SetVector("_VignetteOffset", new Vector4(0, 0));

            Matrix4x4 projectionMatrix;
            #if NETFX_CORE && !DISABLE_HOLOLENSCAMSTREAM_API
            projectionMatrix = webCamTextureToMatHelper.GetProjectionMatrix();
            quad_renderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
            #else
            //This value was obtained from PhotoCapture's TryGetProjectionMatrix() method. I do not know whether this approach is good.
            //Please see the discussion in this thread: https://forums.hololens.com/discussion/782/live-stream-of-locatable-camera-webcam-in-unity
            projectionMatrix     = Matrix4x4.identity;
            projectionMatrix.m00 = 2.31029f;
            projectionMatrix.m01 = 0.00000f;
            projectionMatrix.m02 = 0.09614f;
            projectionMatrix.m03 = 0.00000f;
            projectionMatrix.m10 = 0.00000f;
            projectionMatrix.m11 = 4.10427f;
            projectionMatrix.m12 = -0.06231f;
            projectionMatrix.m13 = 0.00000f;
            projectionMatrix.m20 = 0.00000f;
            projectionMatrix.m21 = 0.00000f;
            projectionMatrix.m22 = -1.00000f;
            projectionMatrix.m23 = 0.00000f;
            projectionMatrix.m30 = 0.00000f;
            projectionMatrix.m31 = 0.00000f;
            projectionMatrix.m32 = -1.00000f;
            projectionMatrix.m33 = 0.00000f;
            quad_renderer.sharedMaterial.SetMatrix("_CameraProjectionMatrix", projectionMatrix);
            #endif

            quad_renderer.sharedMaterial.SetFloat("_VignetteScale", 0.0f);


            grayMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC1);
            cascade = new CascadeClassifier();
            cascade.load(Utils.getFilePath("lbpcascade_frontalface.xml"));
//            cascade.load (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));

            // The "empty" method does not work on the UWP platform.
            //            if (cascade.empty ()) {
            //                Debug.LogError ("cascade file is not loaded.Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            //            }

            grayMat4Thread = new Mat();
            cascade4Thread = new CascadeClassifier();
            cascade4Thread.load(Utils.getFilePath("haarcascade_frontalface_alt.xml"));

            // The "empty" method does not work on the UWP platform.
            //            if (cascade4Thread.empty ()) {
            //                Debug.LogError ("cascade file is not loaded.Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            //            }

            detectionResult = new MatOfRect();
        }
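        /// <summary>
        /// The matching teardown is not shown in this snippet; a minimal sketch, assuming
        /// the helper raises a disposed event and that the worker task started by
        /// ThreadWorker is cancelled through its CancellationTokenSource (tokenSource_
        /// is assumed to be the field used there).
        /// </summary>
        public void OnWebCamTextureToMatHelperDisposed()
        {
            Debug.Log("OnWebCamTextureToMatHelperDisposed");

            if (tokenSource_ != null)
                tokenSource_.Cancel();

            if (grayMat != null)
                grayMat.Dispose();
            if (grayMat4Thread != null)
                grayMat4Thread.Dispose();
            if (detectionResult != null)
                detectionResult.Dispose();
        }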
				private void detectInRegion (Mat img, Rect r, List<Rect> detectedObjectsInRegions)
				{
						Rect r0 = new Rect (new Point (), img.size ());
						Rect r1 = new Rect (r.x, r.y, r.width, r.height);
						Inflate (r1, (int)((r1.width * innerParameters.coeffTrackingWindowSize) - r1.width) / 2,
            (int)((r1.height * innerParameters.coeffTrackingWindowSize) - r1.height) / 2);
						r1 = Intersect (r0, r1);

						if ((r1.width <= 0) || (r1.height <= 0)) {
								Debug.Log ("DetectionBasedTracker::detectInRegion: Empty intersection");
								return;
						}


						int d = Math.Min (r.width, r.height);
						d = (int)Math.Round (d * innerParameters.coeffObjectSizeToTrack);


						MatOfRect tmpobjects = new MatOfRect ();

						Mat img1 = new Mat (img, r1);//subimage for rectangle -- without data copying

						regionCascade.detectMultiScale (img1, tmpobjects, parameters.scaleFactor, parameters.minNeighbors, 0 | Objdetect.CASCADE_DO_CANNY_PRUNING | Objdetect.CASCADE_SCALE_IMAGE | Objdetect.CASCADE_FIND_BIGGEST_OBJECT, new Size (d, d), new Size ());


						Rect[] tmpobjectsArray = tmpobjects.toArray ();
						int len = tmpobjectsArray.Length;
						for (int i = 0; i < len; i++) {
								Rect tmp = tmpobjectsArray [i];
								Rect curres = new Rect (new Point (tmp.x + r1.x, tmp.y + r1.y), tmp.size ());
								detectedObjectsInRegions.Add (curres);
						}
				}
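				// Inflate and Intersect are used above but not shown; minimal sketches of the
				// usual DetectionBasedTracker-style helpers, assuming Inflate grows a rect in
				// place and Intersect clips one rect against another.
				private static void Inflate (Rect r, int dx, int dy)
				{
						r.x -= dx;
						r.y -= dy;
						r.width += 2 * dx;
						r.height += 2 * dy;
				}

				private static Rect Intersect (Rect a, Rect b)
				{
						int x1 = Math.Max (a.x, b.x);
						int y1 = Math.Max (a.y, b.y);
						int x2 = Math.Min (a.x + a.width, b.x + b.width);
						int y2 = Math.Min (a.y + a.height, b.y + b.height);

						if (x2 <= x1 || y2 <= y1)
								return new Rect ();

						return new Rect (x1, y1, x2 - x1, y2 - y1);
				}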
Example n. 50
        /// <summary>
        /// Postprocess the specified frame, outs and net.
        /// </summary>
        /// <param name="frame">Frame.</param>
        /// <param name="outs">Outs.</param>
        /// <param name="net">Net.</param>
        private void postprocess(Mat frame, List <Mat> outs, Net net)
        {
            string outLayerType = outBlobTypes[0];


            List <int>   classIdsList    = new List <int>();
            List <float> confidencesList = new List <float>();
            List <OpenCVForUnity.CoreModule.Rect> boxesList = new List <OpenCVForUnity.CoreModule.Rect>();

            if (net.getLayer(new DictValue(0)).outputNameToIndex("im_info") != -1)
            {
                // Faster-RCNN or R-FCN
                // Network produces output blob with a shape 1x1xNx7 where N is a number of
                // detections and an every detection is a vector of values
                // [batchId, classId, confidence, left, top, right, bottom]

                if (outs.Count == 1)
                {
                    outs[0] = outs[0].reshape(1, (int)outs[0].total() / 7);

                    //Debug.Log ("outs[i].ToString() " + outs [0].ToString ());

                    float[] data = new float[7];

                    for (int i = 0; i < outs[0].rows(); i++)
                    {
                        outs[0].get(i, 0, data);

                        float confidence = data[2];

                        if (confidence > confThreshold)
                        {
                            int class_id = (int)(data[1]);

                            int left   = (int)(data[3] * frame.cols());
                            int top    = (int)(data[4] * frame.rows());
                            int right  = (int)(data[5] * frame.cols());
                            int bottom = (int)(data[6] * frame.rows());
                            int width  = right - left + 1;
                            int height = bottom - top + 1;

                            classIdsList.Add((int)(class_id) - 0);
                            confidencesList.Add((float)confidence);
                            boxesList.Add(new OpenCVForUnity.CoreModule.Rect(left, top, width, height));
                        }
                    }
                }
            }
            else if (outLayerType == "DetectionOutput")
            {
                // Network produces output blob with a shape 1x1xNx7 where N is a number of
                // detections and an every detection is a vector of values
                // [batchId, classId, confidence, left, top, right, bottom]

                if (outs.Count == 1)
                {
                    outs[0] = outs[0].reshape(1, (int)outs[0].total() / 7);

                    //Debug.Log ("outs[i].ToString() " + outs [0].ToString ());

                    float[] data = new float[7];

                    for (int i = 0; i < outs[0].rows(); i++)
                    {
                        outs[0].get(i, 0, data);

                        float confidence = data[2];

                        if (confidence > confThreshold)
                        {
                            int class_id = (int)(data[1]);

                            int left   = (int)(data[3] * frame.cols());
                            int top    = (int)(data[4] * frame.rows());
                            int right  = (int)(data[5] * frame.cols());
                            int bottom = (int)(data[6] * frame.rows());
                            int width  = right - left + 1;
                            int height = bottom - top + 1;

                            classIdsList.Add((int)(class_id) - 0);
                            confidencesList.Add((float)confidence);
                            boxesList.Add(new OpenCVForUnity.CoreModule.Rect(left, top, width, height));
                        }
                    }
                }
            }
            else if (outLayerType == "Region")
            {
                for (int i = 0; i < outs.Count; ++i)
                {
                    // Network produces output blob with a shape NxC where N is a number of
                    // detected objects and C is a number of classes + 4 where the first 4
                    // numbers are [center_x, center_y, width, height]

                    //Debug.Log ("outs[i].ToString() "+outs[i].ToString());

                    float[] positionData   = new float[5];
                    float[] confidenceData = new float[outs[i].cols() - 5];

                    for (int p = 0; p < outs[i].rows(); p++)
                    {
                        outs[i].get(p, 0, positionData);

                        outs[i].get(p, 5, confidenceData);

                        int   maxIdx     = confidenceData.Select((val, idx) => new { V = val, I = idx }).Aggregate((max, working) => (max.V > working.V) ? max : working).I;
                        float confidence = confidenceData[maxIdx];

                        if (confidence > confThreshold)
                        {
                            int centerX = (int)(positionData[0] * frame.cols());
                            int centerY = (int)(positionData[1] * frame.rows());
                            int width   = (int)(positionData[2] * frame.cols());
                            int height  = (int)(positionData[3] * frame.rows());
                            int left    = centerX - width / 2;
                            int top     = centerY - height / 2;

                            classIdsList.Add(maxIdx);
                            confidencesList.Add((float)confidence);
                            boxesList.Add(new OpenCVForUnity.CoreModule.Rect(left, top, width, height));
                        }
                    }
                }
            }
            else
            {
                Debug.Log("Unknown output layer type: " + outLayerType);
            }


            MatOfRect boxes = new MatOfRect();

            boxes.fromList(boxesList);

            MatOfFloat confidences = new MatOfFloat();

            confidences.fromList(confidencesList);


            MatOfInt indices = new MatOfInt();

            Dnn.NMSBoxes(boxes, confidences, confThreshold, nmsThreshold, indices);

            //Debug.Log ("indices.dump () "+indices.dump ());
            //Debug.Log ("indices.ToString () "+indices.ToString());

            for (int i = 0; i < indices.total(); ++i)
            {
                int idx = (int)indices.get(i, 0)[0];
                OpenCVForUnity.CoreModule.Rect box = boxesList[idx];
                drawPred(classIdsList[idx], confidencesList[idx], box.x, box.y,
                         box.x + box.width, box.y + box.height, frame);
            }

            indices.Dispose();
            boxes.Dispose();
            confidences.Dispose();
        }
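        // drawPred is called above but not defined in this snippet; a minimal sketch,
        // assuming a hypothetical classNames list (loaded elsewhere) supplies the labels
        // and drawing uses the same Imgproc calls as the rest of this sample.
        private void drawPred(int classId, float conf, int left, int top, int right, int bottom, Mat frame)
        {
            Imgproc.rectangle(frame, new Point(left, top), new Point(right, bottom), new Scalar(0, 255, 0), 2);

            string label = conf.ToString("F4");
            if (classNames != null && classNames.Count > classId)  // classNames: hypothetical List<string> field
                label = classNames[classId] + ": " + label;

            int[] baseLine = new int[1];
            Size labelSize = Imgproc.getTextSize(label, Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, 1, baseLine);

            // Keep the label inside the image and give it a filled background.
            top = Mathf.Max(top, (int)labelSize.height);
            Imgproc.rectangle(frame, new Point(left, top - labelSize.height),
                new Point(left + labelSize.width, top + baseLine[0]), new Scalar(255, 255, 255), Core.FILLED);
            Imgproc.putText(frame, label, new Point(left, top), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(0, 0, 0));
        }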
        // Update is called once per frame
        void Update()
        {
            //Loop play
            if (capture.get (Videoio.CAP_PROP_POS_FRAMES) >= capture.get (Videoio.CAP_PROP_FRAME_COUNT))
                capture.set (Videoio.CAP_PROP_POS_FRAMES, 0);

            //the "PlayerLoop called recursively!" error can occur on iOS; WebCamTexture is recommended instead.
            if (capture.grab ()) {

                capture.retrieve (rgbMat, 0);

                Imgproc.cvtColor (rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);

                //Debug.Log ("Mat toString " + rgbMat.ToString ());

                using (HOGDescriptor des = new HOGDescriptor ())
                using (MatOfRect locations = new MatOfRect ())
                using (MatOfDouble weights = new MatOfDouble ()) {
                    des.setSVMDetector (HOGDescriptor.getDefaultPeopleDetector ());
                    des.detectMultiScale (rgbMat, locations, weights);

                    OpenCVForUnity.Rect[] rects = locations.toArray ();
                    for (int i = 0; i < rects.Length; i++) {
                        //Debug.Log ("detected person " + rects [i]);
                        Imgproc.rectangle (rgbMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0), 2);
                    }
                    //Debug.Log (locations.ToString ());
                    //Debug.Log (weights.ToString ());
                }

                Utils.matToTexture2D (rgbMat, texture);

                gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            }
        }
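        // Cleanup for this VideoCapture-based sample is not shown; a minimal sketch,
        // assuming capture and rgbMat are the fields used in Update above.
        void OnDestroy()
        {
            if (capture != null)
                capture.release();
            if (rgbMat != null)
                rgbMat.Dispose();
        }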
Example n. 52
        private void Run()
        {
            gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = 0;
            float height = 0;

            width  = gameObject.transform.localScale.x;
            height = gameObject.transform.localScale.y;

            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }


            //initialize FaceTracker
            FaceTracker faceTracker = new FaceTracker(tracker_model_json_filepath);
            //initialize FaceTrackerParams
            FaceTrackerParams faceTrackerParams = new FaceTrackerParams();


            Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);

            Utils.texture2DToMat(imgTexture, imgMat);
            Debug.Log("imgMat dst ToString " + imgMat.ToString());


            CascadeClassifier cascade = new CascadeClassifier();

            cascade.load(haarcascade_frontalface_alt_xml_filepath);
            //if (cascade.empty())
            //{
            //    Debug.LogError("cascade file is not loaded.Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            //}

            //convert image to greyscale
            Mat gray = new Mat();

            Imgproc.cvtColor(imgMat, gray, Imgproc.COLOR_RGBA2GRAY);


            MatOfRect faces = new MatOfRect();

            Imgproc.equalizeHist(gray, gray);

            cascade.detectMultiScale(gray, faces, 1.1f, 2, 0
                                     // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                     | Objdetect.CASCADE_SCALE_IMAGE, new Size(gray.cols() * 0.05, gray.cols() * 0.05), new Size());

            Debug.Log("faces " + faces.dump());

            if (faces.rows() > 0)
            {
                //add initial face points from MatOfRect
                faceTracker.addPoints(faces);
            }


            //track face points.if face points <= 0, always return false.
            if (faceTracker.track(imgMat, faceTrackerParams))
            {
                faceTracker.draw(imgMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
            }



            Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(imgMat, texture);

            gameObject.GetComponent <Renderer>().material.mainTexture = texture;

            cascade.Dispose();
        }
				private IEnumerator init ()
				{
						if (webCamTexture != null) {
								webCamTexture.Stop ();
								initDone = false;
				
								rgbaMat.Dispose ();
								grayMat.Dispose ();
						}

						// Checks how many and which cameras are available on the device
						for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
				
				
								if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {
					
					
										Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

										webCamDevice = WebCamTexture.devices [cameraIndex];
					
										webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
					
					
										break;
								}
				
				
						}
			
						if (webCamTexture == null) {
								webCamDevice = WebCamTexture.devices [0];
								webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
						}
			
						Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
			
			
						// Starts the camera
						webCamTexture.Play ();

						while (true) {
								//If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
								#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
				if (webCamTexture.width > 16 && webCamTexture.height > 16) {
								#else
								if (webCamTexture.didUpdateThisFrame) {
										#endif

										Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
										Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);


										colors = new Color32[webCamTexture.width * webCamTexture.height];
					
										rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
										grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
					
										texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);
					
										gameObject.transform.eulerAngles = new Vector3 (0, 0, 0);
										#if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
					gameObject.transform.eulerAngles = new Vector3 (0, 0, -90);
										#endif
//										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);


										gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

//										bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
//										float scaleX = 1;
//										float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
//										if (webCamTexture.videoRotationAngle == 270)
//												scaleY = -1.0f;
//										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

					
					
										cascade = new CascadeClassifier (Utils.getFilePath ("lbpcascade_frontalface.xml"));
										//cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));

				
										faces = new MatOfRect ();

										gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

										#if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
						                Camera.main.orthographicSize = webCamTexture.width / 2;
										#else
										Camera.main.orthographicSize = webCamTexture.height / 2;
										#endif
					
										initDone = true;
					
										break;
								} else {
										yield return 0;
								}
						}
				}
	
				// Update is called once per frame
				void Update ()
				{
						if (!initDone)
								return;
	
						#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
				if (webCamTexture.width > 16 && webCamTexture.height > 16) {
						#else
						if (webCamTexture.didUpdateThisFrame) {
								#endif

								Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

								if (webCamTexture.videoVerticallyMirrored) {
										if (webCamDevice.isFrontFacing) {
												if (webCamTexture.videoRotationAngle == 0) {
														Core.flip (rgbaMat, rgbaMat, 1);
												} else if (webCamTexture.videoRotationAngle == 90) {
														Core.flip (rgbaMat, rgbaMat, 0);
												} else if (webCamTexture.videoRotationAngle == 270) {
														Core.flip (rgbaMat, rgbaMat, 1);
												}
										} else {
												if (webCamTexture.videoRotationAngle == 90) {
														
												} else if (webCamTexture.videoRotationAngle == 270) {
														Core.flip (rgbaMat, rgbaMat, -1);
												}
										}
								} else {
										if (webCamDevice.isFrontFacing) {
												if (webCamTexture.videoRotationAngle == 0) {
														Core.flip (rgbaMat, rgbaMat, 1);
												} else if (webCamTexture.videoRotationAngle == 90) {
														Core.flip (rgbaMat, rgbaMat, 0);
												} else if (webCamTexture.videoRotationAngle == 270) {
														Core.flip (rgbaMat, rgbaMat, 1);
												}
										} else {
												if (webCamTexture.videoRotationAngle == 90) {
														
												} else if (webCamTexture.videoRotationAngle == 270) {
														Core.flip (rgbaMat, rgbaMat, -1);
												}
										}
								}


								Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
								Imgproc.equalizeHist (grayMat, grayMat);

								
								if (cascade != null)
										cascade.detectMultiScale (grayMat, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
					                          new Size (webCamTexture.height * 0.2, webCamTexture.height * 0.2), new Size ());

		
								OpenCVForUnity.Rect[] rects = faces.toArray ();
								for (int i = 0; i < rects.Length; i++) {
										//				Debug.Log ("detect faces " + rects [i]);
			
										Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
								}

								Utils.matToTexture2D (rgbaMat, texture, colors);

						}
				}

				void OnDisable ()
				{
						webCamTexture.Stop ();
				}

				void OnGUI ()
				{
						float screenScale = Screen.width / 240.0f;
						Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
						GUI.matrix = scaledMatrix;
		
		
						GUILayout.BeginVertical ();
						if (GUILayout.Button ("back")) {
								Application.LoadLevel ("OpenCVForUnitySample");
						}
						if (GUILayout.Button ("change camera")) {
								isFrontFacing = !isFrontFacing;
								StartCoroutine (init ());
						}
		
						GUILayout.EndVertical ();
				}
		}
}