Example #1
        private void Run()
        {
            //set 3d face object points.
            objectPoints = new MatOfPoint3f(new Point3(-31, 72, 86), //l eye
                                            new Point3(31, 72, 86),  //r eye
                                            new Point3(0, 40, 114),  //nose
                                            new Point3(-20, 15, 90), //l mouth
                                            new Point3(20, 15, 90)   //r mouth
//                                          ,
//                                          new Point3 (-70, 60, -9), //l ear
//                                          new Point3 (70, 60, -9)   //r ear
                                            );
            imagePoints = new MatOfPoint2f();
            rvec        = new Mat();
            tvec        = new Mat();
            rotM        = new Mat(3, 3, CvType.CV_64FC1);

            //initialize FaceTracker
            faceTracker = new FaceTracker(tracker_model_json_filepath);
            //initialize FaceTrackerParams
            faceTrackerParams = new FaceTrackerParams();

            cascade = new CascadeClassifier();
            cascade.load(haarcascade_frontalface_alt_xml_filepath);
            //if (cascade.empty())
            //{
            //    Debug.LogError("cascade file is not loaded. Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            //}



            webCamTextureToMatHelper.Initialize();
        }
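The fields prepared in Run() above (objectPoints, imagePoints, rvec, tvec, and the 3x3 rotM) are the usual inputs and outputs of OpenCV's pose solver. A minimal sketch of that later step, assuming OpenCVForUnity's Calib3d bindings and a camMatrix (Mat) / distCoeffs (MatOfDouble) pair built elsewhere from the camera intrinsics; neither is shown in this example:

            // Sketch only: camMatrix and distCoeffs are assumed to be built
            // from the camera intrinsics elsewhere. imagePoints must hold the
            // tracked 2D landmarks in the same order as objectPoints.
            Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
            // Convert the rotation vector into the 3x3 matrix allocated above.
            Calib3d.Rodrigues(rvec, rotM);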
        // Use this for initialization
        void Start()
        {
            //set 3d face object points.
            objectPoints = new MatOfPoint3f(new Point3(-31, 72, 86),            //l eye
                                            new Point3(31, 72, 86),             //r eye
                                            new Point3(0, 40, 114),             //nose
                                            new Point3(-20, 15, 90),            //l mouth
                                            new Point3(20, 15, 90)              //r mouth
//                                          ,
//                                          new Point3 (-70, 60, -9),           //l ear
//                                          new Point3 (70, 60, -9)             //r ear
                                            );
            imagePoints = new MatOfPoint2f();
            rvec        = new Mat();
            tvec        = new Mat();
            rotM        = new Mat(3, 3, CvType.CV_64FC1);

            //initialize FaceTracker
            faceTracker = new FaceTracker(Utils.getFilePath("tracker_model.json"));
            //initialize FaceTrackerParams
            faceTrackerParams = new FaceTrackerParams();

            webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper> ();
            webCamTextureToMatHelper.Init();

            autoResetModeToggle.isOn = autoResetMode;
        }
Example #3
        private void Run()
        {
            //set 3d face object points.
            objectPoints = new MatOfPoint3f(new Point3(-31, 72, 86), //l eye
                                            new Point3(31, 72, 86),  //r eye
                                            new Point3(0, 40, 114),  //nose
                                            new Point3(-20, 15, 90), //l mouth
                                            new Point3(20, 15, 90),  //r mouth
                                            new Point3(-70, 60, -9), //l ear
                                            new Point3(70, 60, -9)   //r ear
                                            );
            imagePoints = new MatOfPoint2f();
            rvec        = new Mat();
            tvec        = new Mat();
            rotM        = new Mat(3, 3, CvType.CV_64FC1);

            //initialize FaceTracker
            faceTracker = new FaceTracker(tracker_model_json_filepath);
            //initialize FaceTrackerParams
            faceTrackerParams = new FaceTrackerParams();

            cascade = new CascadeClassifier();
            cascade.load(haarcascade_frontalface_alt_xml_filepath);
            //if (cascade.empty())
            //{
            //  Debug.LogError("cascade file is not loaded. Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            //}


#if UNITY_ANDROID && !UNITY_EDITOR
            // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
            webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
            webCamTextureToMatHelper.Initialize();
        }
Example #4
        // Use this for initialization
        void Start()
        {
            //initialize FaceTracker
            faceTracker = new FaceTracker(Utils.getFilePath("tracker_model.json"));
            //initialize FaceTrackerParams
            faceTrackerParams = new FaceTrackerParams();

            StartCoroutine(init());
        }
Example #5
        // Use this for initialization
        void Start()
        {
            // initialize FaceTracker
            faceTracker = new FaceTracker(Utils.getFilePath("tracker_model.json"));
            // initialize FaceTrackerParams
            faceTrackerParams = new FaceTrackerParams();

            StartCoroutine(init());
        }
        // Use this for initialization
        void Start ()
        {
            //initialize FaceTracker
            FaceTracker faceTracker = new FaceTracker (Utils.getFilePath ("tracker_model.json"));
            //initialize FaceTrackerParams
            FaceTrackerParams faceTrackerParams = new FaceTrackerParams ();

            gameObject.transform.localScale = new Vector3 (imgTexture.width, imgTexture.height, 1);
            Camera.main.orthographicSize = imgTexture.height / 2;

            Mat imgMat = new Mat (imgTexture.height, imgTexture.width, CvType.CV_8UC4);

            Utils.texture2DToMat (imgTexture, imgMat);
            Debug.Log ("imgMat dst ToString " + imgMat.ToString ());

            CascadeClassifier cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
            if (cascade.empty ()) {
                Debug.LogError ("cascade file is not loaded. Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }

            //convert image to greyscale
            Mat gray = new Mat ();
            Imgproc.cvtColor (imgMat, gray, Imgproc.COLOR_RGBA2GRAY);

            MatOfRect faces = new MatOfRect ();

            Imgproc.equalizeHist (gray, gray);

            cascade.detectMultiScale (gray, faces, 1.1f, 2, 0
//                                    | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                      | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (gray.cols () * 0.05, gray.cols () * 0.05), new Size ());

            Debug.Log ("faces " + faces.dump ());

            if (faces.rows () > 0) {
                //add initial face points from MatOfRect
                faceTracker.addPoints (faces);
            }

            //track face points. If no points have been added, track () always returns false.
            if (faceTracker.track (imgMat, faceTrackerParams))
                faceTracker.draw (imgMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));

            Texture2D texture = new Texture2D (imgMat.cols (), imgMat.rows (), TextureFormat.RGBA32, false);

            Utils.matToTexture2D (imgMat, texture);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
        }
        // Use this for initialization
        void Start ()
        {
            //initialize FaceTracker
            faceTracker = new FaceTracker (Utils.getFilePath ("tracker_model.json"));
            //initialize FaceTrackerParams
            faceTrackerParams = new FaceTrackerParams ();

            webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper> ();
            webCamTextureToMatHelper.Init (OnWebCamTextureToMatHelperInited, OnWebCamTextureToMatHelperDisposed);
        }
        // Use this for initialization
        void Start()
        {
            //initialize FaceTracker
            faceTracker = new FaceTracker(Utils.getFilePath("tracker_model.json"));
            //initialize FaceTrackerParams
            faceTrackerParams = new FaceTrackerParams();

            webCamTextureToMatHelper = gameObject.GetComponent <WebCamTextureToMatHelper> ();
            webCamTextureToMatHelper.Init();
        }
Example #9
        private void Run()
        {
            //initialize FaceTracker
            faceTracker = new FaceTracker(tracker_model_json_filepath);
            //initialize FaceTrackerParams
            faceTrackerParams = new FaceTrackerParams();

            cascade = new CascadeClassifier();
            cascade.load(haarcascade_frontalface_alt_xml_filepath);
            //if (cascade.empty())
            //{
            //    Debug.LogError("cascade file is not loaded. Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            //}

            webCamTextureToMatHelper.Initialize();
        }
    public void Run()
    {
        //anim = GetComponent<Animator>();

        //initialize FaceTracker
        faceTracker = new FaceTracker(tracker_model_json_filepath);
        //initialize FaceTrackerParams
        faceTrackerParams = new FaceTrackerParams();

        cascade = new CascadeClassifier();
        cascade.load(haarcascade_frontalface_alt_xml_filepath);
        //if (cascade.empty())
        //{
        //    Debug.LogError("cascade file is not loaded. Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
        //}

        #if UNITY_ANDROID && !UNITY_EDITOR
        // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
        webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
        #endif
        webCamTextureToMatHelper.Initialize();
        if (SpeechRecognizer.ExistsOnDevice())
        {
            resultText.text = "I am running run";
            SpeechRecognizerListener listener = GameObject.FindObjectOfType <SpeechRecognizerListener>();
            listener.onAuthorizationStatusFetched.AddListener(OnAuthorizationStatusFetched);
            listener.onAvailabilityChanged.AddListener(OnAvailabilityChange);
            listener.onErrorDuringRecording.AddListener(OnError);
            listener.onErrorOnStartRecording.AddListener(OnError);
            listener.onFinalResults.AddListener(OnFinalResult);
            listener.onPartialResults.AddListener(OnPartialResult);
            listener.onEndOfSpeech.AddListener(OnEndOfSpeech);
            //startRecordingButton.enabled = false;
            SpeechRecognizer.RequestAccess();
            SpeechRecognizer.StartRecording(true);
            resultText.text = "Say something :-)";
        }
        else
        {
            resultText.text = "Sorry, but this device doesn't support speech recognition";
            Debug.Log("Next Command is crossfade from run function");
            //GameObject.FindGameObjectWithTag("twohand)").GetComponent<Animator>().CrossFade("V", -1);
            //startRecordingButton.enabled = false;
        }
    }
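The Run() above wires several speech-recognition handlers that this snippet does not define. A hypothetical sketch of one of them, assuming the listener events deliver the recognized text as a plain string (check the plugin's actual event signatures before relying on this):

        // Hypothetical handler; the string parameter is an assumption,
        // inferred from the AddListener wiring above.
        public void OnPartialResult(string result)
        {
            resultText.text = result; // show interim recognition output
        }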
        private void Run()
        {
            //initialize FaceTracker
            faceTracker = new FaceTracker(tracker_model_json_filepath);
            //initialize FaceTrackerParams
            faceTrackerParams = new FaceTrackerParams();

            cascade = new CascadeClassifier();
            cascade.load(haarcascade_frontalface_alt_xml_filepath);
            //if (cascade.empty())
            //{
            //    Debug.LogError("cascade file is not loaded. Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            //}

            #if UNITY_ANDROID && !UNITY_EDITOR
            // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
            webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
            #endif
            webCamTextureToMatHelper.Initialize();
        }
Example #12
    public bool track(Mat im, FaceTrackerParams p)
    {
        if (points.Count <= 0)
        {
            return(false);
        }


        //convert image to greyscale
        Mat gray = null;

        if (im.channels() == 1)
        {
            gray = im;
        }
        else
        {
            gray = new Mat();
            Imgproc.cvtColor(im, gray, Imgproc.COLOR_RGBA2GRAY);
        }

        //initialise
        //if (!tracking)
        //    points = detector.detect (gray, p.scaleFactor, p.minNeighbours, p.minSize);

        for (int i = 0; i < points.Count; i++)
        {
            if (points [i].Length != smodel.npts())
            {
                return(false);
            }

            //fit
            for (int level = 0; level < p.ssize.Count; level++)
            {
                points [i] = fit(gray, points [i], p.ssize [level], p.robust, p.itol, p.ftol);
            }
        }

        return(true);
    }
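For reference, a typical call site for track(), mirroring the usage in the Start() examples elsewhere on this page: seed the tracker with one Haar detection, then call track() and draw() on each frame.

        // faces is the MatOfRect returned by CascadeClassifier.detectMultiScale().
        if (faces.rows() > 0)
            faceTracker.addPoints(faces);
        // track() returns false until points have been added.
        if (faceTracker.track(imgMat, faceTrackerParams))
            faceTracker.draw(imgMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));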
Example #13
        private void Run()
        {
            gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
            Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width  = 0;
            float height = 0;

            width  = gameObject.transform.localScale.x;
            height = gameObject.transform.localScale.y;

            float widthScale  = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;

            if (widthScale < heightScale)
            {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
            }
            else
            {
                Camera.main.orthographicSize = height / 2;
            }


            //initialize FaceTracker
            FaceTracker faceTracker = new FaceTracker(tracker_model_json_filepath);
            //initialize FaceTrackerParams
            FaceTrackerParams faceTrackerParams = new FaceTrackerParams();


            Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);

            Utils.texture2DToMat(imgTexture, imgMat);
            Debug.Log("imgMat dst ToString " + imgMat.ToString());


            CascadeClassifier cascade = new CascadeClassifier();

            cascade.load(haarcascade_frontalface_alt_xml_filepath);
            //if (cascade.empty())
            //{
            //    Debug.LogError("cascade file is not loaded. Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            //}

            //convert image to greyscale
            Mat gray = new Mat();

            Imgproc.cvtColor(imgMat, gray, Imgproc.COLOR_RGBA2GRAY);


            MatOfRect faces = new MatOfRect();

            Imgproc.equalizeHist(gray, gray);

            cascade.detectMultiScale(gray, faces, 1.1f, 2, 0
                                     // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                     | Objdetect.CASCADE_SCALE_IMAGE, new Size(gray.cols() * 0.05, gray.cols() * 0.05), new Size());

            Debug.Log("faces " + faces.dump());

            if (faces.rows() > 0)
            {
                //add initial face points from MatOfRect
                faceTracker.addPoints(faces);
            }


            //track face points. If no points have been added, track() always returns false.
            if (faceTracker.track(imgMat, faceTrackerParams))
            {
                faceTracker.draw(imgMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
            }



            Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);

            Utils.matToTexture2D(imgMat, texture);

            gameObject.GetComponent <Renderer>().material.mainTexture = texture;

            cascade.Dispose();
        }
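One caveat: this Run() disposes the CascadeClassifier but leaves the temporary Mats to the garbage collector. Since OpenCVForUnity Mats wrap native buffers, a long-running scene would normally release them explicitly as well, for example:

            // Safe once the pixels have been copied into the Texture2D.
            imgMat.Dispose();
            gray.Dispose();
            faces.Dispose();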
        // Use this for initialization
        void Start()
        {
            //initialize FaceTracker
            faceTracker = new FaceTracker (Utils.getFilePath ("tracker_model.json"));
            //initialize FaceTrackerParams
            faceTrackerParams = new FaceTrackerParams ();

            StartCoroutine (init ());
        }
Example #15
        public bool track (Mat im, FaceTrackerParams p)
        {
            if (points.Count <= 0)
                return false;

            //convert image to greyscale
            Mat gray = null;
            if (im.channels () == 1) {
                gray = im;
            } else {
                gray = new Mat ();
                Imgproc.cvtColor (im, gray, Imgproc.COLOR_RGBA2GRAY);
            }

            //initialise
            //if (!tracking)
            //    points = detector.detect (gray, p.scaleFactor, p.minNeighbours, p.minSize);

            for (int i = 0; i < points.Count; i++) {
                if (points [i].Length != smodel.npts ())
                    return false;

                //fit
                for (int level = 0; level < p.ssize.Count; level++) {
                    points [i] = fit (gray, points [i], p.ssize [level], p.robust, p.itol, p.ftol);
                }
            }

            return true;
        }
Example #16
        // Use this for initialization
        void Start()
        {
            //initialize FaceTracker
            FaceTracker faceTracker = new FaceTracker(Utils.getFilePath("tracker_model.json"));
            //initialize FaceTrackerParams
            FaceTrackerParams faceTrackerParams = new FaceTrackerParams();


            gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
            Camera.main.orthographicSize    = imgTexture.height / 2;

            Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);

            Utils.texture2DToMat(imgTexture, imgMat);
            Debug.Log("imgMat dst ToString " + imgMat.ToString());


            CascadeClassifier cascade = new CascadeClassifier(Utils.getFilePath("haarcascade_frontalface_alt.xml"));

            if (cascade.empty())
            {
                Debug.LogError("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
            }

            //convert image to greyscale
            Mat gray = new Mat();

            Imgproc.cvtColor(imgMat, gray, Imgproc.COLOR_RGBA2GRAY);


            MatOfRect faces = new MatOfRect();

            Imgproc.equalizeHist(gray, gray);

            cascade.detectMultiScale(gray, faces, 1.1f, 2, 0
                                     // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                     | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(gray.cols() * 0.05, gray.cols() * 0.05), new Size());

            Debug.Log("faces " + faces.dump());

            if (faces.rows() > 0)
            {
                //add initial face points from MatOfRect
                faceTracker.addPoints(faces);
            }


            //track face points. If no points have been added, track() always returns false.
            if (faceTracker.track(imgMat, faceTrackerParams))
            {
                faceTracker.draw(imgMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
            }



            Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);


            Utils.matToTexture2D(imgMat, texture);

            gameObject.GetComponent <Renderer> ().material.mainTexture = texture;
        }