Code example #1: JSBinding-style getter (JSVCall) that returns WebCamDevice.name to the JavaScript side.
// fields

// properties
    static void WebCamDevice_name(JSVCall vc)
    {
        UnityEngine.WebCamDevice _this = (UnityEngine.WebCamDevice)vc.csObj;
        var result = _this.name;

        JSApi.setStringS((int)JSApi.SetType.Rval, result);
    }
Code example #2: Lua binding constructor wrapper (LuaScriptMgr) that creates a WebCamDevice and pushes it onto the Lua stack.
	static int _CreateWebCamDevice(IntPtr L)
	{
		LuaScriptMgr.CheckArgsCount(L, 0);
		WebCamDevice obj = new WebCamDevice();
		LuaScriptMgr.PushValue(L, obj);
		return 1;
	}
Code example #3: JSBinding-style getter that returns WebCamDevice.isFrontFacing to the JavaScript side.
    static void WebCamDevice_isFrontFacing(JSVCall vc)
    {
        UnityEngine.WebCamDevice _this = (UnityEngine.WebCamDevice)vc.csObj;
        var result = _this.isFrontFacing;

        JSApi.setBooleanS((int)JSApi.SetType.Rval, (System.Boolean)(result));
    }
Code example #4: generated Lua constructor wrapper that adds DEBUG-only Profiler.BeginSample/EndSample calls around the construction.
 static public int constructor(IntPtr l)
 {
     try {
                     #if DEBUG
         var    method     = System.Reflection.MethodBase.GetCurrentMethod();
         string methodName = GetMethodName(method);
                     #if UNITY_5_5_OR_NEWER
         UnityEngine.Profiling.Profiler.BeginSample(methodName);
                     #else
         Profiler.BeginSample(methodName);
                     #endif
                     #endif
         UnityEngine.WebCamDevice o;
         o = new UnityEngine.WebCamDevice();
         pushValue(l, true);
         pushValue(l, o);
         return(2);
     }
     catch (Exception e) {
         return(error(l, e));
     }
             #if DEBUG
     finally {
                     #if UNITY_5_5_OR_NEWER
         UnityEngine.Profiling.Profiler.EndSample();
                     #else
         Profiler.EndSample();
                     #endif
     }
             #endif
 }
Code example #5: Start() method that creates a WebCamTexture and assigns it to the renderer's main texture.
    // Starts the default camera and assigns the texture to the current renderer
    void Start()
    {
        webCamDevice = new WebCamDevice();

        webcamTexture = new WebCamTexture(webCamDevice.name,800,600,1);
        renderer.material.mainTexture = webcamTexture;
        webcamTexture.Play();
    }
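The snippet above relies on the legacy renderer shortcut (removed in Unity 5) and on a default-constructed WebCamDevice whose name field is empty. A minimal sketch of the same idea against a current Unity API, not taken from the original project (device index, resolution, and frame rate are illustrative), could look like this:

using UnityEngine;

public class WebCamStarterSketch : MonoBehaviour
{
    WebCamTexture webcamTexture;

    void Start()
    {
        // Enumerate the physical cameras instead of default-constructing the struct,
        // whose name field would be empty.
        if (WebCamTexture.devices.Length == 0) {
            Debug.LogWarning("No webcam devices found.");
            return;
        }

        WebCamDevice device = WebCamTexture.devices[0];
        webcamTexture = new WebCamTexture(device.name, 800, 600, 30);

        // GetComponent<Renderer>() replaces the legacy 'renderer' shortcut.
        GetComponent<Renderer>().material.mainTexture = webcamTexture;
        webcamTexture.Play();
    }
}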
Code example #6: generated Lua constructor wrapper that pushes only the new object (one return value).
 public static int constructor(IntPtr l)
 {
     try {
         UnityEngine.WebCamDevice o;
         o=new UnityEngine.WebCamDevice();
         pushValue(l,o);
         return 1;
     }
     catch(Exception e) {
         LuaDLL.luaL_error(l, e.ToString());
         return 0;
     }
 }
Code example #7: generated Lua constructor wrapper that pushes a success flag plus the new object (two return values).
 static public int constructor(IntPtr l)
 {
     try {
         UnityEngine.WebCamDevice o;
         o = new UnityEngine.WebCamDevice();
         pushValue(l, true);
         pushValue(l, o);
         return(2);
     }
     catch (Exception e) {
         return(error(l, e));
     }
 }
Code example #8: generated Lua constructor wrapper that reports failures through LuaDLL.luaL_error.
 static public int constructor(IntPtr l)
 {
     try {
         UnityEngine.WebCamDevice o;
         o = new UnityEngine.WebCamDevice();
         pushValue(l, o);
         return(1);
     }
     catch (Exception e) {
         LuaDLL.luaL_error(l, e.ToString());
         return(0);
     }
 }
Code example #9: the same two-return constructor pattern as example #7, with slightly different formatting.
 public static int constructor(IntPtr l)
 {
     try {
         UnityEngine.WebCamDevice o;
         o=new UnityEngine.WebCamDevice();
         pushValue(l,true);
         pushValue(l,o);
         return 2;
     }
     catch(Exception e) {
         return error(l,e);
     }
 }
Code example #10: helper that takes the first available camera device and streams it into the attached renderer's material.
File: WebcamHelper.cs    Project: RELO4D3D/sdk-dist
    protected void Start()
    {
        WebCamDevice[] devices = WebCamTexture.devices;

        if (devices.Length == 0)
        {
            return;
        }

        m_Device = devices[0];

        m_Texture = new WebCamTexture(m_Device.name, m_DesiredWidth, m_DesiredHeight, m_DesiredFrameRate);
        this.gameObject.renderer.material.SetTexture("_MainTex", m_Texture);

        m_Texture.Play();
    }
Code example #11: OpenCVForUnity face-tracker sample; the coroutine picks the requested front- or back-facing camera and allocates the OpenCV Mats once the texture starts updating.
				private IEnumerator init ()
				{

						if (webCamTexture != null) {
								faceTracker.reset ();
								webCamTexture.Stop ();
								initDone = false;
				
								rgbaMat.Dispose ();
								grayMat.Dispose ();
						}

						// Checks how many and which cameras are available on the device
						for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
				
				
								if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing) {
					
					
										Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

										webCamDevice = WebCamTexture.devices [cameraIndex];
									
										webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
		
										break;
								}
				
				
						}
			
						if (webCamTexture == null) {
								webCamDevice = WebCamTexture.devices [0];
								webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

						}
			
						Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
			

			
						// Starts the camera
						webCamTexture.Play ();


						while (true) {
								//If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
								#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
				                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
								#else
								if (webCamTexture.didUpdateThisFrame) {
										#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2                                    
						while (webCamTexture.width <= 16) {
							webCamTexture.GetPixels32 ();
							yield return new WaitForEndOfFrame ();
						} 
										#endif
										#endif
										Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
										Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);
				
										colors = new Color32[webCamTexture.width * webCamTexture.height];
				
										rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
										grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
				
										texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

										gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

										updateLayout ();

										cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
										if (cascade.empty ()) {
												Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
										}

										screenOrientation = Screen.orientation;
										initDone = true;
				
										break;
								} else {
										yield return 0;
								}
						}
				}
Code example #12: OpenCVForUnity optical-flow sample; camera and Mat initialization coroutine.
        private IEnumerator init()
        {
            if (webCamTexture != null) {
                                webCamTexture.Stop ();
                                initDone = false;

                                rgbaMat.Dispose ();

                                matOpFlowThis.Dispose ();
                                matOpFlowPrev.Dispose ();
                                MOPcorners.Dispose ();
                                mMOP2fptsThis.Dispose ();
                                mMOP2fptsPrev.Dispose ();
                                mMOP2fptsSafe.Dispose ();
                                mMOBStatus.Dispose ();
                                mMOFerr.Dispose ();
                        }

                        // Checks how many and which cameras are available on the device
                        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {

                                if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing) {

                                        Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                                        webCamDevice = WebCamTexture.devices [cameraIndex];

                                        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

                                        break;
                                }

                        }

                        if (webCamTexture == null) {
                                webCamDevice = WebCamTexture.devices [0];
                                webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
                        }

                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

                        // Starts the camera
                        webCamTexture.Play ();
                        while (true) {
                                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                                #else
                                if (webCamTexture.didUpdateThisFrame) {
                                        #if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
                                        while (webCamTexture.width <= 16) {
                                                webCamTexture.GetPixels32 ();
                                                yield return new WaitForEndOfFrame ();
                                        }
                                        #endif
                                #endif

                                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                                        Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                                        colors = new Color32[webCamTexture.width * webCamTexture.height];

                                        rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

                                        matOpFlowThis = new Mat ();
                                        matOpFlowPrev = new Mat ();
                                        MOPcorners = new MatOfPoint ();
                                        mMOP2fptsThis = new MatOfPoint2f ();
                                        mMOP2fptsPrev = new MatOfPoint2f ();
                                        mMOP2fptsSafe = new MatOfPoint2f ();
                                        mMOBStatus = new MatOfByte ();
                                        mMOFerr = new MatOfFloat ();

                                        texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                                        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                                        updateLayout ();

                                        screenOrientation = Screen.orientation;
                                        initDone = true;

                                        break;
                                } else {
                                        yield return 0;
                                }
                        }
                }
Code example #13: OpenCVForUnity optical-flow sample; initialization, per-frame Lucas-Kanade point tracking in Update(), and a small on-screen GUI.
        private IEnumerator init()
        {
            if (webCamTexture != null) {
                                webCamTexture.Stop ();
                                initDone = false;

                                rgbaMat.Dispose ();

                                matOpFlowThis.Dispose ();
                                matOpFlowPrev.Dispose ();
                                MOPcorners.Dispose ();
                                mMOP2fptsThis.Dispose ();
                                mMOP2fptsPrev.Dispose ();
                                mMOP2fptsSafe.Dispose ();
                                mMOBStatus.Dispose ();
                                mMOFerr.Dispose ();
                        }

                        // Checks how many and which cameras are available on the device
                        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {

                                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {

                                        Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                                        webCamDevice = WebCamTexture.devices [cameraIndex];

                                        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

                                        break;
                                }

                        }

                        if (webCamTexture == null) {
                                webCamDevice = WebCamTexture.devices [0];
                                webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
                        }

                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

                        // Starts the camera
                        webCamTexture.Play ();
                        while (true) {
                                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                                #else
                                if (webCamTexture.didUpdateThisFrame) {
                                        #endif

                                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                                        Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                                        colors = new Color32[webCamTexture.width * webCamTexture.height];

                                        rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

                                        matOpFlowThis = new Mat ();
                                        matOpFlowPrev = new Mat ();
                                        MOPcorners = new MatOfPoint ();
                                        mMOP2fptsThis = new MatOfPoint2f ();
                                        mMOP2fptsPrev = new MatOfPoint2f ();
                                        mMOP2fptsSafe = new MatOfPoint2f ();
                                        mMOBStatus = new MatOfByte ();
                                        mMOFerr = new MatOfFloat ();

                                        texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                                        gameObject.transform.eulerAngles = new Vector3 (0, 0, 0);
                                        #if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
                    gameObject.transform.eulerAngles = new Vector3 (0, 0, -90);
                                        #endif
            //										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

                                        gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

            //										bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            //										float scaleX = 1;
            //										float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
            //										if (webCamTexture.videoRotationAngle == 270)
            //												scaleY = -1.0f;
            //										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

                                        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                                        #if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
                                        Camera.main.orthographicSize = webCamTexture.width / 2;
                                        #else
                                        Camera.main.orthographicSize = webCamTexture.height / 2;
                                        #endif

                                        initDone = true;

                                        break;
                                } else {
                                        yield return 0;
                                }
                        }
                }

                // Update is called once per frame
                void Update ()
                {
                        if (!initDone)
                                return;

                        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                        #else
                        if (webCamTexture.didUpdateThisFrame) {
                                #endif

                                Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

                                if (webCamTexture.videoVerticallyMirrored) {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                } else if (webCamTexture.videoRotationAngle == 90) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 90) {

                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                }
                                        }
                                } else {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                } else if (webCamTexture.videoRotationAngle == 90) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 90) {

                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                }
                                        }
                                }

                                if (mMOP2fptsPrev.rows () == 0) {

                                        // first time through the loop so we need prev and this mats
                                        // plus prev points
                                        // get this mat
                                        Imgproc.cvtColor (rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                                        // copy that to prev mat
                                        matOpFlowThis.copyTo (matOpFlowPrev);

                                        // get prev corners
                                        Imgproc.goodFeaturesToTrack (matOpFlowPrev, MOPcorners, iGFFTMax, 0.05, 20);
                                        mMOP2fptsPrev.fromArray (MOPcorners.toArray ());

                                        // get safe copy of this corners
                                        mMOP2fptsPrev.copyTo (mMOP2fptsSafe);
                                } else {
                                        // we've been through before so
                                        // this mat is valid. Copy it to prev mat
                                        matOpFlowThis.copyTo (matOpFlowPrev);

                                        // get this mat
                                        Imgproc.cvtColor (rgbaMat, matOpFlowThis, Imgproc.COLOR_RGBA2GRAY);

                                        // get the corners for this mat
                                        Imgproc.goodFeaturesToTrack (matOpFlowThis, MOPcorners, iGFFTMax, 0.05, 20);
                                        mMOP2fptsThis.fromArray (MOPcorners.toArray ());

                                        // retrieve the corners from the prev mat
                                        // (saves calculating them again)
                                        mMOP2fptsSafe.copyTo (mMOP2fptsPrev);

                                        // and save this corners for next time through

                                        mMOP2fptsThis.copyTo (mMOP2fptsSafe);
                                }

                                /*
            Parameters:
            prevImg first 8-bit input image
            nextImg second input image
            prevPts vector of 2D points for which the flow needs to be found; point coordinates must be single-precision floating-point numbers.
            nextPts output vector of 2D points (with single-precision floating-point coordinates) containing the calculated new positions of input features in the second image; when OPTFLOW_USE_INITIAL_FLOW flag is passed, the vector must have the same size as in the input.
            status output status vector (of unsigned chars); each element of the vector is set to 1 if the flow for the corresponding features has been found, otherwise, it is set to 0.
            err output vector of errors; each element of the vector is set to an error for the corresponding feature, type of the error measure can be set in flags parameter; if the flow wasn't found then the error is not defined (use the status parameter to find such cases).
            */
                                Video.calcOpticalFlowPyrLK (matOpFlowPrev, matOpFlowThis, mMOP2fptsPrev, mMOP2fptsThis, mMOBStatus, mMOFerr);

                                if (!mMOBStatus.empty ()) {
                                        List<Point> cornersPrev = mMOP2fptsPrev.toList ();
                                        List<Point> cornersThis = mMOP2fptsThis.toList ();
                                        List<byte> byteStatus = mMOBStatus.toList ();

                                        int x = 0;
                                        int y = byteStatus.Count - 1;

                                        for (x = 0; x < y; x++) {
                                                if (byteStatus [x] == 1) {
                                                        Point pt = cornersThis [x];
                                                        Point pt2 = cornersPrev [x];

                                                        Core.circle (rgbaMat, pt, 5, colorRed, iLineThickness - 1);

                                                        Core.line (rgbaMat, pt, pt2, colorRed, iLineThickness);
                                                }
                                        }
                                }

                                Utils.matToTexture2D (rgbaMat, texture, colors);

                                gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                        }

                }

                void OnDisable ()
                {
                        webCamTexture.Stop ();
                }

                void OnGUI ()
                {
                        float screenScale = Screen.width / 240.0f;
                        Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
                        GUI.matrix = scaledMatrix;

                        GUILayout.BeginVertical ();
                        if (GUILayout.Button ("back")) {
                                Application.LoadLevel ("OpenCVForUnitySample");
                        }
                        if (GUILayout.Button ("change camera")) {
                                isFrontFacing = !isFrontFacing;
                                StartCoroutine (init ());
                        }

                        GUILayout.EndVertical ();
                }
            }
        }
Code example #14: camera setup for a QR-code scanner; requests webcam authorization, picks a back-facing device, and scales the preview plane to the screen.
        protected virtual void Start()
        {
            GlobalState.Instance.SceneToSwitchTo = Config.Scenes.None;

            // Keep a static reference to this class to be able to display toast messages from other components (namely QrCodeCollection.cs).
            CameraScriptInstance = this;

            _coin = GetComponent<AudioSource>();

            _qrCodeCollection = new QrCodeCollection();

            // Reset the global state current question and coin every time we restart the camera scene.
            GlobalState.Instance.Reset();

            Application.RequestUserAuthorization(UserAuthorization.WebCam);
            if (Application.HasUserAuthorization(UserAuthorization.WebCam))
            {
                Debug.Log("access to webcam granted!");
                Debug.Log("#WebCamDevices: " + WebCamTexture.devices.GetLength(0));

                if (GlobalState.Instance.WebCamTexture == null)
                {
                    // Find a backfacing camera device.
                    foreach (WebCamDevice device in WebCamTexture.devices)
                    {
                        Debug.Log("WebCamDevice: " + device.name);
                        Debug.Log("FrontFacing? " + device.isFrontFacing);
                        if (!device.isFrontFacing)
                        {
                            _backFacing = device;
                        }
                    }

                    // Try to obtain a 1024x768 texture from the webcam.
                    _webcamTexture = new WebCamTexture(_backFacing.name, 1024, 768);
                    _webcamTexture.Play();

                    // The device might not support the requested resolution, so we try again with a lower one.
                    if (_webcamTexture.width != 1024 || _webcamTexture.height != 768)
                    {
                        _webcamTexture.Stop();
                        _webcamTexture = new WebCamTexture(_backFacing.name, 640, 480);
                        _webcamTexture.Play();
                    }

                    // Keep a global reference to the WebCamTexture to speed up scene initialization next time.
                    GlobalState.Instance.WebCamTexture = _webcamTexture;
                }
                else
                {
                    _webcamTexture = GlobalState.Instance.WebCamTexture;
                    _webcamTexture.Play();
                }

                GetComponent<Renderer>().material.SetTexture("_MainTex", _webcamTexture);
                Debug.Log(string.Format("Actual camera dimens: {0}x{1}", _webcamTexture.width, _webcamTexture.height));

                GlobalState.Instance.CamWidth = _webcamTexture.width;
                GlobalState.Instance.CamHeight = _webcamTexture.height;
                float camRatio = (float) _webcamTexture.width/_webcamTexture.height;
                float screenRatio = (float) Screen.width/Screen.height;

                // Scale plane so it fills the screen while keeping the camera's aspect ratio.
                // If the camera's aspect ratio differs from the screen's,
                // one side will match exactly and the other side will be larger than the screen's dimension.
                var idealHeight = 0.7f;
                if (screenRatio > camRatio)
                {
                    gameObject.transform.localScale = new Vector3(screenRatio*idealHeight, 1,
                        screenRatio*idealHeight/camRatio);
                }
                else
                {
                    gameObject.transform.localScale = new Vector3(camRatio*idealHeight, 1, idealHeight);
                }

                GlobalState.Instance.PlaneWidth = gameObject.transform.localScale.x*10;
                GlobalState.Instance.PlaneHeight = gameObject.transform.localScale.z*10;

                _qrCodeThread = new Thread(DecodeQr);
                _qrCodeThread.Start();
            }
            else
            {
                Debug.LogError("No User Authorization for Camera Device.");
            }

            // Check win condition: if the user has already collected all the coins, show a toast.
            if (GlobalState.Instance.AllQuestions.questions.Length == GlobalState.Instance.CollectedCoinCount())
            {
                SetToastToShow(StringResources.WinToastMessage, ToastLengthLong);
            }
        }
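One detail worth noting in example #14: Application.RequestUserAuthorization is asynchronous, so checking HasUserAuthorization on the very next line can report a denial on mobile before the user has answered the permission dialog. A minimal sketch (not part of the original project) of the yield-based pattern Unity documents:

using System.Collections;
using UnityEngine;

public class WebCamAuthorizationSketch : MonoBehaviour
{
    IEnumerator Start()
    {
        // Wait for the permission dialog to be answered before querying the result.
        yield return Application.RequestUserAuthorization(UserAuthorization.WebCam);

        if (Application.HasUserAuthorization(UserAuthorization.WebCam)) {
            Debug.Log("Webcam access granted; devices: " + WebCamTexture.devices.Length);
        } else {
            Debug.LogError("Webcam access was not granted.");
        }
    }
}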
Code example #15: OpenCVForUnity detection-based tracker sample; camera initialization plus threaded cascade face detection.
				private IEnumerator init ()
				{

						if (webCamTexture != null) {
								webCamTexture.Stop ();
								initDone = false;

								rgbaMat.Dispose ();
								grayMat.Dispose ();
						}

						// Checks how many and which cameras are available on the device
						for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {


								if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {


										Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

										webCamDevice = WebCamTexture.devices [cameraIndex];

										webCamTexture = new WebCamTexture (webCamDevice.name, width, height);


										break;
								}


						}

						if (webCamTexture == null) {
								webCamDevice = WebCamTexture.devices [0];
								webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
						}

						Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);


						// Starts the camera
						webCamTexture.Play ();

						while (true) {
								//If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
				if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
								if (webCamTexture.didUpdateThisFrame) {
#endif

										Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
										Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);


										colors = new Color32[webCamTexture.width * webCamTexture.height];

										rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
										grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

										texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

										gameObject.transform.eulerAngles = new Vector3 (0, 0, 0);
#if (UNITY_ANDROID || UNITY_IPHONE || UNITY_WP_8_1) && !UNITY_EDITOR
					gameObject.transform.eulerAngles = new Vector3 (0, 0, -90);
#endif
										//										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);


										gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

										//										bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
										//										float scaleX = 1;
										//										float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
										//										if (webCamTexture.videoRotationAngle == 270)
										//												scaleY = -1.0f;
										//										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);



										cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
										regionCascade = new CascadeClassifier (Utils.getFilePath ("lbpcascade_frontalface.xml"));

										gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

#if (UNITY_ANDROID || UNITY_IPHONE || UNITY_WP_8_1) && !UNITY_EDITOR
						                Camera.main.orthographicSize = webCamTexture.width / 2;
#else
										Camera.main.orthographicSize = webCamTexture.height / 2;
#endif


										initThread ();

										initDone = true;

										break;
								} else {
										yield return 0;
								}
						}
				}

				// Update is called once per frame
				void Update ()
				{
						if (!initDone)
								return;




#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
				if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
						if (webCamTexture.didUpdateThisFrame) {
#endif

								Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

								if (webCamTexture.videoVerticallyMirrored) {
										if (webCamDevice.isFrontFacing) {
												if (webCamTexture.videoRotationAngle == 0) {
														Core.flip (rgbaMat, rgbaMat, 1);
												} else if (webCamTexture.videoRotationAngle == 90) {
														Core.flip (rgbaMat, rgbaMat, 0);
												} else if (webCamTexture.videoRotationAngle == 270) {
														Core.flip (rgbaMat, rgbaMat, 1);
												}
										} else {
												if (webCamTexture.videoRotationAngle == 90) {

												} else if (webCamTexture.videoRotationAngle == 270) {
														Core.flip (rgbaMat, rgbaMat, -1);
												}
										}
								} else {
										if (webCamDevice.isFrontFacing) {
												if (webCamTexture.videoRotationAngle == 0) {
														Core.flip (rgbaMat, rgbaMat, 1);
												} else if (webCamTexture.videoRotationAngle == 90) {
														Core.flip (rgbaMat, rgbaMat, 0);
												} else if (webCamTexture.videoRotationAngle == 270) {
														Core.flip (rgbaMat, rgbaMat, 1);
												}
										} else {
												if (webCamTexture.videoRotationAngle == 90) {

												} else if (webCamTexture.videoRotationAngle == 270) {
														Core.flip (rgbaMat, rgbaMat, -1);
												}
										}
								}


								Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
								Imgproc.equalizeHist (grayMat, grayMat);





								didUpdateThisFrame = true;

								#if UNITY_WSA
								#if NETFX_CORE
					ThreadWorker();
								#endif
								#else
								ThreadWorker ();
								#endif

								OpenCVForUnity.Rect[] rects;

								if (resultDetect != null) {
										//Debug.Log("DetectionBasedTracker::process: get _rectsWhereRegions were got from resultDetect");
										rectsWhereRegions = resultDetect.toArray ();


										rects = resultDetect.toArray ();
										for (int i = 0; i < rects.Length; i++) {
												Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (0, 0, 255, 255), 2);
										}



										resultDetect = null;
								} else {
										//Debug.Log("DetectionBasedTracker::process: get _rectsWhereRegions from previous positions");
										rectsWhereRegions = new Rect[trackedObjects.Count];

										for (int i = 0; i < trackedObjects.Count; i++) {
												int n = trackedObjects [i].lastPositions.Count;
												//if (n > 0) UnityEngine.Debug.LogError("n > 0 is false");

												Rect r = trackedObjects [i].lastPositions [n - 1];
												if (r.area () == 0) {
														Debug.Log ("DetectionBasedTracker::process: ERROR: ATTENTION: strange algorithm's behavior: trackedObjects[i].rect() is empty");
														continue;
												}

												//correction by speed of rectangle
												if (n > 1) {
														Point center = centerRect (r);
														Point center_prev = centerRect (trackedObjects [i].lastPositions [n - 2]);
														Point shift = new Point ((center.x - center_prev.x) * innerParameters.coeffObjectSpeedUsingInPrediction,
                        (center.y - center_prev.y) * innerParameters.coeffObjectSpeedUsingInPrediction);

														r.x += (int)Math.Round (shift.x);
														r.y += (int)Math.Round (shift.y);
												}
												rectsWhereRegions [i] = r;
										}

										rects = rectsWhereRegions;
										for (int i = 0; i < rects.Length; i++) {
												Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (0, 255, 0, 255), 2);
										}
								}

								if (rectsWhereRegions.Length > 0) {
										detectedObjectsInRegions.Clear ();

										int len = rectsWhereRegions.Length;
										for (int i = 0; i < len; i++) {
												detectInRegion (grayMat, rectsWhereRegions [i], detectedObjectsInRegions);
										}
								}

								updateTrackedObjects (detectedObjectsInRegions);

								getObjects (resultObjects);


								rects = resultObjects.ToArray ();
								for (int i = 0; i < rects.Length; i++) {
										//Debug.Log ("detect faces " + rects [i]);

										Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
								}

								Utils.matToTexture2D (rgbaMat, texture, colors);

						}

				}

				private void initThread ()
				{
						grayMat4Thread = new Mat ();

				}


#if UNITY_WSA
#if NETFX_CORE
                private async void ThreadWorker()
                {
                    //Debug.Log("Thread working...");

                    if (isThreadRunning) return;
                    if (!didUpdateThisFrame) return;
                    isThreadRunning = true;
                    didUpdateThisFrame = false;

                    grayMat.copyTo(grayMat4Thread);

                    MatOfRect faces = new MatOfRect();

                    await Task.Run(() =>
                    {

                        if (cascade != null)
							cascade.detectMultiScale(grayMat, faces, 1.1, 2, Objdetect.CASCADE_SCALE_IMAGE, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                                  new Size(grayMat4Thread.height() * 0.2, grayMat4Thread.height() * 0.2), new Size());
                    });

                    //Debug.Log("Thread done.");

                    isThreadRunning = false;
                    resultDetect = faces;
                }
Code example #16: reusable initialization coroutine that honors an explicit device name (requestDeviceName) and fires an OnInited callback once the texture is ready.
				/// <summary>
				/// Init this instance.
				/// </summary>
				private IEnumerator init ()
				{
						if (initDone)
								Dispose ();

						if (!String.IsNullOrEmpty (requestDeviceName)) {
//			Debug.Log ("deviceName is "+requestDeviceName);
								webCamTexture = new WebCamTexture (requestDeviceName, requestWidth, requestHeight);
						} else {
//			Debug.Log ("deviceName is null");
								// Checks how many and which cameras are available on the device
								for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
			
										if (WebCamTexture.devices [cameraIndex].isFrontFacing == requestIsFrontFacing) {
				
//					Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
				
												webCamDevice = WebCamTexture.devices [cameraIndex];
				
												webCamTexture = new WebCamTexture (webCamDevice.name, requestWidth, requestHeight);
				
												break;
										}
			
			
								}
						}
		
						if (webCamTexture == null) {
//			Debug.Log ("webCamTexture is null");

								webCamDevice = WebCamTexture.devices [0];
								webCamTexture = new WebCamTexture (webCamDevice.name, requestWidth, requestHeight);
						}
	
//		Debug.Log ("name " + webCamTexture.name + " width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
		
		
		
						// Starts the camera
						webCamTexture.Play ();
		
		
						while (true) {
								//If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
								#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
			if (webCamTexture.width > 16 && webCamTexture.height > 16) {
								#else
								if (webCamTexture.didUpdateThisFrame) {
										#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2                                    
					while (webCamTexture.width <= 16) {
						webCamTexture.GetPixels32 ();
						yield return new WaitForEndOfFrame ();
					} 
										#endif
										#endif
					
										Debug.Log ("name " + webCamTexture.name + " width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
										Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

										colors = new Color32[webCamTexture.width * webCamTexture.height];
										rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

//				Debug.Log ("Screen.orientation " + Screen.orientation);
										screenOrientation = Screen.orientation;
//					screenOrientation = ScreenOrientation.PortraitUpsideDown;

										#if !UNITY_EDITOR && !UNITY_STANDALONE 
										if (screenOrientation == ScreenOrientation.Portrait || screenOrientation == ScreenOrientation.PortraitUpsideDown) {
												rotatedRgbaMat = new Mat (webCamTexture.width, webCamTexture.height, CvType.CV_8UC4);
										}
#endif

//				webCamTexture.Stop ();

										initDone = true;

										if (OnInited != null)
												OnInited ();


										break;
								} else {
										yield return 0;
								}
						}
				}
Code example #17: constructor of a wrapper type that prefixes the wrapped device's name with "Remote".
 public RemoteWebCamDevice(WebCamDevice device)
 {
     this.name = "Remote " + device.name;
     this.internalName = device.name;
     this.device = device;
 }
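Example #17 shows only the constructor. As a rough sketch (the surrounding class shape and the WrapAll helper are assumptions, not code from the original project), such a wrapper could be created for every camera Unity reports:

using System.Collections.Generic;
using UnityEngine;

// Hypothetical shape of the wrapper type; only the constructor in example #17 is known.
public class RemoteWebCamDevice
{
    public string name;
    public string internalName;
    public WebCamDevice device;

    public RemoteWebCamDevice(WebCamDevice device)
    {
        this.name = "Remote " + device.name;
        this.internalName = device.name;
        this.device = device;
    }

    // Wraps every camera reported on the local machine.
    public static List<RemoteWebCamDevice> WrapAll()
    {
        var result = new List<RemoteWebCamDevice>();
        foreach (WebCamDevice device in WebCamTexture.devices) {
            result.Add(new RemoteWebCamDevice(device));
        }
        return result;
    }
}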
Code example #18: OpenCVForUnity marker-based AR sample; camera initialization, pinhole camera matrix construction, and Unity camera FOV adjustment.
				/// <summary>
				/// Init this instance.
				/// </summary>
				private IEnumerator init ()
				{
						if (webCamTexture != null) {
								webCamTexture.Stop ();
								initDone = false;
				
								rgbaMat.Dispose ();
						}

						// Checks how many and which cameras are available on the device
						for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
				
				
								if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing) {
					
					
										Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

										webCamDevice = WebCamTexture.devices [cameraIndex];
										
										webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
					
					
										break;
								}
				
				
						}

						if (webCamTexture == null) {
								webCamDevice = WebCamTexture.devices [0];
								webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
						}
			
						Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
			
			
			
						// Starts the camera
						webCamTexture.Play ();

						while (true) {
								//If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
								#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
				if (webCamTexture.width > 16 && webCamTexture.height > 16) {
								#else
								if (webCamTexture.didUpdateThisFrame) {
										#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2                                    
										while (webCamTexture.width <= 16) {
												webCamTexture.GetPixels32 ();
												yield return new WaitForEndOfFrame ();
										} 
										#endif
										#endif
										Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
										Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

					
										colors = new Color32[webCamTexture.width * webCamTexture.height];
					
										rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
					
										texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);
					
										gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

										updateLayout ();


										//set cameraparam
										int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
										camMatrix = new Mat (3, 3, CvType.CV_64FC1);
										camMatrix.put (0, 0, max_d);
										camMatrix.put (0, 1, 0);
										camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
										camMatrix.put (1, 0, 0);
										camMatrix.put (1, 1, max_d);
										camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
										camMatrix.put (2, 0, 0);
										camMatrix.put (2, 1, 0);
										camMatrix.put (2, 2, 1.0f);
										Debug.Log ("camMatrix " + camMatrix.dump ());

										distCoeffs = new MatOfDouble (0, 0, 0, 0);
										Debug.Log ("distCoeffs " + distCoeffs.dump ());

										//calibration camera
										Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
										double apertureWidth = 0;
										double apertureHeight = 0;
										double[] fovx = new double[1];
										double[] fovy = new double[1];
										double[] focalLength = new double[1];
										Point principalPoint = new Point ();
										double[] aspectratio = new double[1];


										Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

										Debug.Log ("imageSize " + imageSize.ToString ());
										Debug.Log ("apertureWidth " + apertureWidth);
										Debug.Log ("apertureHeight " + apertureHeight);
										Debug.Log ("fovx " + fovx [0]);
										Debug.Log ("fovy " + fovy [0]);
										Debug.Log ("focalLength " + focalLength [0]);
										Debug.Log ("principalPoint " + principalPoint.ToString ());
										Debug.Log ("aspectratio " + aspectratio [0]);

										//Adjust Unity Camera FOV
										for (int i = 0; i < ARCamera.Length; i++) {
												ARCamera [i].fieldOfView = (float)fovy [0];
										}
										

										
					
										markerDetector = new MarkerDetector (camMatrix, distCoeffs, markerDesign);


										//Marker Coordinate Initial Matrix
										lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
										Debug.Log ("lookAt " + lookAtM.ToString ());

										//OpenGL to Unity Coordinate System Convert Matrix
										//http://docs.unity3d.com/ScriptReference/Camera-worldToCameraMatrix.html that camera space matches OpenGL convention: camera's forward is the negative Z axis. This is different from Unity's convention, where forward is the positive Z axis.
										invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
										Debug.Log ("invertZM " + invertZM.ToString ());


										screenOrientation = Screen.orientation;
										initDone = true;
					
										break;
								} else {
										yield return 0;
								}
						}
				}
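For reference, the fieldOfView value that example #18 reads back through Calib3d.calibrationMatrixValues follows from the standard pinhole model: with focal length max_d (in pixels) and image height rows, the vertical FOV is 2 * atan(rows / (2 * max_d)). A small stand-alone sketch of that relationship (not part of the sample):

using UnityEngine;

public static class PinholeFovSketch
{
    // Vertical field of view, in degrees, of a pinhole camera with focal length
    // focalLengthPx (in pixels) and an image imageHeightPx pixels tall.
    public static float VerticalFovDegrees(float focalLengthPx, float imageHeightPx)
    {
        return 2f * Mathf.Atan(imageHeightPx / (2f * focalLengthPx)) * Mathf.Rad2Deg;
    }
}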
Code example #19: OpenCVForUnity sample that composites thresholded and Canny-edge images onto a striped background (a comic-style filter); initialization, per-frame processing, and GUI.
		private IEnumerator init ()
		{

				if (webCamTexture != null) {
						webCamTexture.Stop ();
						initDone = false;

						rgbaMat.Dispose ();
						grayMat.Dispose ();
						lineMat.Dispose ();
						maskMat.Dispose ();

						bgMat.Dispose ();
				}
		
				// Checks how many and which cameras are available on the device
				for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
			

						if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {

				
								Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
								
								webCamDevice = WebCamTexture.devices [cameraIndex];

								webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
	
								break;
						}

			
				}
		
				if (webCamTexture == null) {
						webCamDevice = WebCamTexture.devices [0];
						webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
				}

				Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

		
				// Starts the camera
				webCamTexture.Play ();
		
		
				

				while (true) {

						//If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
						#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
			if (webCamTexture.width > 16 && webCamTexture.height > 16) {
						#else
						if (webCamTexture.didUpdateThisFrame) {
								#endif

								Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
								Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);


								colors = new Color32[webCamTexture.width * webCamTexture.height];
				
								rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
								grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
								lineMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
								maskMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

								//create a striped background.
								bgMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1, new Scalar (255));
								for (int i = 0; i < bgMat.rows ()*2.5f; i=i+4) {
										Core.line (bgMat, new Point (0, 0 + i), new Point (bgMat.cols (), -bgMat.cols () + i), new Scalar (0), 1);
								}
				
								dstMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
				
								grayPixels = new byte[grayMat.cols () * grayMat.rows () * grayMat.channels ()];
								maskPixels = new byte[maskMat.cols () * maskMat.rows () * maskMat.channels ()];
				
								texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);
				
								gameObject.transform.eulerAngles = new Vector3 (0, 0, 0);
								#if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
				gameObject.transform.eulerAngles = new Vector3 (0, 0, -90);
								#endif
//								gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

								gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

//								bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
//								float scaleX = 1;
//								float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
//								if (webCamTexture.videoRotationAngle == 270)
//										scaleY = -1.0f;
//								gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);


								gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

								#if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
					            Camera.main.orthographicSize = (((float)Screen.height/(float)Screen.width) * (float)webCamTexture.height) / 2.0f;
								#else
								Camera.main.orthographicSize = webCamTexture.height / 2;
								#endif

								initDone = true;

								break;
						} else {
								yield return 0;
						}
				}
		}
	
		// Update is called once per frame
		void Update ()
		{

				if (!initDone)
						return;

				#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
			if (webCamTexture.width > 16 && webCamTexture.height > 16) {
				#else
				if (webCamTexture.didUpdateThisFrame) {
						#endif
		
						Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

						if (webCamTexture.videoVerticallyMirrored) {
								if (webCamDevice.isFrontFacing) {
										if (webCamTexture.videoRotationAngle == 0) {
												Core.flip (rgbaMat, rgbaMat, 1);
										} else if (webCamTexture.videoRotationAngle == 90) {
												Core.flip (rgbaMat, rgbaMat, 0);
										} else if (webCamTexture.videoRotationAngle == 270) {
												Core.flip (rgbaMat, rgbaMat, 1);
										}
								} else {
										if (webCamTexture.videoRotationAngle == 90) {
								
										} else if (webCamTexture.videoRotationAngle == 270) {
												Core.flip (rgbaMat, rgbaMat, -1);
										}
								}
						} else {
								if (webCamDevice.isFrontFacing) {
										if (webCamTexture.videoRotationAngle == 0) {
												Core.flip (rgbaMat, rgbaMat, 1);
										} else if (webCamTexture.videoRotationAngle == 90) {
												Core.flip (rgbaMat, rgbaMat, 0);
										} else if (webCamTexture.videoRotationAngle == 270) {
												Core.flip (rgbaMat, rgbaMat, 1);
										}
								} else {
										if (webCamTexture.videoRotationAngle == 90) {
								
										} else if (webCamTexture.videoRotationAngle == 270) {
												Core.flip (rgbaMat, rgbaMat, -1);
										}
								}
						}


						Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

//						Utils.webCamTextureToMat (webCamTexture, grayMat, colors);

				
						bgMat.copyTo (dstMat);


						Imgproc.GaussianBlur (grayMat, lineMat, new Size (3, 3), 0);
				



						grayMat.get (0, 0, grayPixels);

						for (int i = 0; i < grayPixels.Length; i++) {

								maskPixels [i] = 0;
			
								if (grayPixels [i] < 70) {
										grayPixels [i] = 0;

										maskPixels [i] = 1;
								} else if (70 <= grayPixels [i] && grayPixels [i] < 120) {
										grayPixels [i] = 100;

								
								} else {
										grayPixels [i] = 255;

										maskPixels [i] = 1;
								}
						}
		
						grayMat.put (0, 0, grayPixels);
	
						maskMat.put (0, 0, maskPixels);

						grayMat.copyTo (dstMat, maskMat);




				
						// Detect edges, invert them so they render black, and composite them
						// onto the destination image using the edge response as a mask.
						Imgproc.Canny (lineMat, lineMat, 20, 120);
		
						lineMat.copyTo (maskMat);
		
						Core.bitwise_not (lineMat, lineMat);

						lineMat.copyTo (dstMat, maskMat);


//		Imgproc.cvtColor(dstMat,rgbaMat,Imgproc.COLOR_GRAY2RGBA);
//				Utils.matToTexture2D (rgbaMat, texture);

						Utils.matToTexture2D (dstMat, texture, colors);
		
				

				}
		}
	
		void OnDisable ()
		{
				webCamTexture.Stop ();
		}
	
		void OnGUI ()
		{
				float screenScale = Screen.width / 240.0f;
				Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
				GUI.matrix = scaledMatrix;
		
		
				GUILayout.BeginVertical ();
				if (GUILayout.Button ("back")) {
						Application.LoadLevel ("OpenCVForUnitySample");
				}
				if (GUILayout.Button ("change camera")) {
						isFrontFacing = !isFrontFacing;
						StartCoroutine (init ());
				}
		
		
				GUILayout.EndVertical ();
		}
}
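The composition in this sample relies on Mat.copyTo with a mask, which copies only the pixels where the mask is non-zero. A tiny standalone sketch of that behaviour (illustrative only, using the same OpenCVForUnity types as above):

// Sketch: masked copyTo, as used above to composite the posterized tones and the edge lines.
Mat src = new Mat (2, 2, CvType.CV_8UC1, new Scalar (255));
Mat dst = Mat.zeros (2, 2, CvType.CV_8UC1);
Mat mask = Mat.zeros (2, 2, CvType.CV_8UC1);
mask.put (0, 0, new byte[]{ 1 });      // enable only the top-left pixel
src.copyTo (dst, mask);                // dst is now 255 at (0,0) and 0 elsewhere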
コード例 #20
0
 static public int get_isFrontFacing(IntPtr l)
 {
     UnityEngine.WebCamDevice o = (UnityEngine.WebCamDevice)checkSelf(l);
     pushValue(l, o.isFrontFacing);
     return(1);
 }
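The getter above only exposes isFrontFacing. As a minimal companion sketch (not part of the original snippet), the same pattern could expose WebCamDevice.name as well, assuming this binding layer's pushValue has a string overload alongside the bool overload used above:

 // Hypothetical companion getter, mirroring get_isFrontFacing above;
 // assumes pushValue accepts a string in this binding layer.
 static public int get_name(IntPtr l)
 {
     UnityEngine.WebCamDevice o = (UnityEngine.WebCamDevice)checkSelf(l);
     pushValue(l, o.name);
     return(1);
 }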
コード例 #21
0
        private IEnumerator init()
        {
            if (webCamTexture != null) {
                                webCamTexture.Stop ();
                                initDone = false;

                                rgbaMat.Dispose ();
                        }

                        // Checks how many and which cameras are available on the device
                        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {

                                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {

                                        Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                                        webCamDevice = WebCamTexture.devices [cameraIndex];

                                        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

                                        break;
                                }

                        }

                        if (webCamTexture == null) {
                                webCamDevice = WebCamTexture.devices [0];
                                webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
                        }

                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

                        // Starts the camera
                        webCamTexture.Play ();

                        while (true) {
                                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                                #else
                                if (webCamTexture.didUpdateThisFrame) {
                                        #endif

                                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                                        Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                                        colors = new Color32[webCamTexture.width * webCamTexture.height];

                                        rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

                                        texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                                        gameObject.transform.eulerAngles = new Vector3 (0, 0, 0);
                                        #if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
                    gameObject.transform.eulerAngles = new Vector3 (0, 0, -90);
                                        #endif

            //										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

                                        gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

            //										bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            //										float scaleX = 1;
            //										float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
            //										if (webCamTexture.videoRotationAngle == 270)
            //												scaleY = -1.0f;
            //										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

                                        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                                        #if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
                                        Camera.main.orthographicSize = (((float)Screen.height/(float)Screen.width) * (float)webCamTexture.height) / 2.0f;
                                        #else
                                        Camera.main.orthographicSize = webCamTexture.height / 2;
                                        #endif

                                        initDone = true;

                                        break;
                                } else {
                                        yield return 0;
                                }
                        }
                }

                // Update is called once per frame
                void Update ()
                {
                        if (!initDone)
                                return;

                        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                        #else
                        if (webCamTexture.didUpdateThisFrame) {
                                #endif

                                Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

                                if (webCamTexture.videoVerticallyMirrored) {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                } else if (webCamTexture.videoRotationAngle == 90) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 90) {

                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                }
                                        }
                                } else {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                } else if (webCamTexture.videoRotationAngle == 90) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 90) {

                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                }
                                        }
                                }

                                Utils.matToTexture2D (rgbaMat, texture, colors);

                                gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                        }

                }

                void OnDisable ()
                {
                        webCamTexture.Stop ();
                }

                void OnGUI ()
                {
                        float screenScale = Screen.width / 240.0f;
                        Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
                        GUI.matrix = scaledMatrix;

                        GUILayout.BeginVertical ();
                        if (GUILayout.Button ("back")) {
                                Application.LoadLevel ("OpenCVForUnitySample");
                        }
                        if (GUILayout.Button ("change camera")) {
                                isFrontFacing = !isFrontFacing;
                                StartCoroutine (init ());
                        }

                        GUILayout.EndVertical ();
                }
            }
        }
コード例 #22
0
    // Use this for initialization
    void Start()
    {
        GameObject timestreamGo = GameObject.Find("TimeStream");
        tss = timestreamGo.GetComponent<TimeStreamScript>();

        counter = PlayerPrefs.GetInt("videoInterval");
        audioCounter = PlayerPrefs.GetInt("audioInterval");

        centerLabelStyle = new GUIStyle(skin.label);
        centerLabelStyle.alignment = TextAnchor.MiddleCenter;

        //		Debug.Log(PlayerPrefs.GetString("captureVideo"));
        if (IsTrue(PlayerPrefs.GetString("captureVideo"))) {
            Debug.Log("captureVideo on");
            webCamDevice = new WebCamDevice();

            webcamTexture = new WebCamTexture(webCamDevice.name,1024,768,1);
        //		renderer.material.mainTexture = webcamTexture;
            webcamTexture.Play();
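            // Caution: on some platforms WebCamTexture reports a placeholder 16x16 size until the
            // first frame has been delivered (see the didUpdateThisFrame checks in the other samples
            // on this page), so the width/height read below may not yet be the final resolution.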

            cubeTexture = new Texture2D(webcamTexture.width, webcamTexture.height, TextureFormat.RGB24, false);
            photoCube.renderer.material.mainTexture = cubeTexture;

            videoCube.renderer.material.mainTexture = webcamTexture;

            Color[] cols = webcamTexture.GetPixels();
            cubeTexture.SetPixels(cols);	//(Texture2D)Instantiate(webcamTexture);
            cubeTexture.Apply();
            StartCoroutine(DecrementCounter());
        }

        if (IsTrue(PlayerPrefs.GetString("captureAudio"))) {
            Debug.Log("captureAudio on");
            if (audioCounter > 0) {
                StartCoroutine(DecrementAudioCounter());
            } else {
                StartCoroutine(DecrementAudioCounter2());
            }
        }

        if (IsTrue(PlayerPrefs.GetString("captureAccel"))) {
            Debug.Log("captureAccel on");
            StartCoroutine(AccelSampleCounter());
            StartCoroutine(AccelStoreCounter());
        }

        DateTime dt = DateTime.Now;
        //		sessionInfo = SetupEventHandlerScript.session+"\n"+SetupEventHandlerScript.location+"\n"+dt.ToString(@"d/M/yyyy HH:mm");	//.ToString();
        sessionInfo = PlayerPrefs.GetString("session")+"\n"+PlayerPrefs.GetString("location")+"\n"+dt.ToString(@"d/M/yyyy HH:mm");	//.ToString();
    }
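The snapshot step in Start() (GetPixels from the webcam texture, SetPixels into cubeTexture, then Apply) is self-contained and could be factored into a small helper. A sketch, assuming the same webcamTexture and cubeTexture fields as in the snippet above:

    // Sketch: copies the current webcam frame into the photo cube's texture,
    // reusing the GetPixels/SetPixels/Apply sequence from Start() above.
    private void CaptureStill()
    {
        Color[] cols = webcamTexture.GetPixels();
        cubeTexture.SetPixels(cols);
        cubeTexture.Apply();
    }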
コード例 #23
0
        private IEnumerator init()
        {
            axes.SetActive (false);
                        head.SetActive (false);
                        rightEye.SetActive (false);
                        leftEye.SetActive (false);
                        mouth.SetActive (false);

                        if (webCamTexture != null) {
                                faceTracker.reset ();

                                webCamTexture.Stop ();
                                initDone = false;

                                rgbaMat.Dispose ();
                                grayMat.Dispose ();
                                cascade.Dispose ();
                                camMatrix.Dispose ();
                                distCoeffs.Dispose ();

                        }

                        // Checks how many and which cameras are available on the device
                        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {

                                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {

                                        Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                                        webCamDevice = WebCamTexture.devices [cameraIndex];

                                        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

                                        break;
                                }
                        }

                        if (webCamTexture == null) {
                                webCamDevice = WebCamTexture.devices [0];
                                webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
                        }

                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

                        // Starts the camera
                        webCamTexture.Play ();

                        while (true) {
                                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                                #else
                                if (webCamTexture.didUpdateThisFrame) {
                                        #endif
                                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                                        Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                                        colors = new Color32[webCamTexture.width * webCamTexture.height];

                                        rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                                        grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

                                        texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                                        cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
                                        if (cascade.empty ()) {
                                                Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
                                        }

                                        gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

                                        gameObject.transform.localEulerAngles = new Vector3 (0, 0, 0);
            //										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

            //										bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            //										float scaleX = 1;
            //										float scaleY = _videoVerticallyMirrored ? -1.0f : 1.0f;
            //										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

                                        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                                        Camera.main.orthographicSize = webCamTexture.height / 2;

                                        int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
                                        camMatrix = new Mat (3, 3, CvType.CV_64FC1);
                                        camMatrix.put (0, 0, max_d);
                                        camMatrix.put (0, 1, 0);
                                        camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
                                        camMatrix.put (1, 0, 0);
                                        camMatrix.put (1, 1, max_d);
                                        camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
                                        camMatrix.put (2, 0, 0);
                                        camMatrix.put (2, 1, 0);
                                        camMatrix.put (2, 2, 1.0f);

                                        Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
                                        double apertureWidth = 0;
                                        double apertureHeight = 0;
                                        double[] fovx = new double[1];
                                        double[] fovy = new double[1];
                                        double[] focalLength = new double[1];
                                        Point principalPoint = new Point ();
                                        double[] aspectratio = new double[1];

                                        Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

                                        Debug.Log ("imageSize " + imageSize.ToString ());
                                        Debug.Log ("apertureWidth " + apertureWidth);
                                        Debug.Log ("apertureHeight " + apertureHeight);
                                        Debug.Log ("fovx " + fovx [0]);
                                        Debug.Log ("fovy " + fovy [0]);
                                        Debug.Log ("focalLength " + focalLength [0]);
                                        Debug.Log ("principalPoint " + principalPoint.ToString ());
                                        Debug.Log ("aspectratio " + aspectratio [0]);

                                        ARCamera.fieldOfView = (float)fovy [0];

                                        Debug.Log ("camMatrix " + camMatrix.dump ());

                                        distCoeffs = new MatOfDouble (0, 0, 0, 0);
                                        Debug.Log ("distCoeffs " + distCoeffs.dump ());

                                        lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
                                        Debug.Log ("lookAt " + lookAtM.ToString ());

                                        invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));

                                        initDone = true;

                                        break;
                                } else {
                                        yield return 0;
                                }
                        }
                }

                // Update is called once per frame
                void Update ()
                {
                        if (!initDone)
                                return;

                        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                        #else
                        if (webCamTexture.didUpdateThisFrame) {
                                #endif

                                Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

                                //flip to correct direction.
                                if (webCamTexture.videoVerticallyMirrored) {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                } else if (webCamTexture.videoRotationAngle == 180) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 0) {

                                                } else if (webCamTexture.videoRotationAngle == 180) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                }
                                        }
                                } else {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                } else if (webCamTexture.videoRotationAngle == 180) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 0) {

                                                } else if (webCamTexture.videoRotationAngle == 180) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                }
                                        }
                                }
                                //convert image to greyscale
                                Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                                if (faceTracker.getPoints ().Count <= 0) {
                                        Debug.Log ("detectFace");

                                        //convert image to greyscale
                                        using (Mat equalizeHistMat = new Mat ())
                                        using (MatOfRect faces = new MatOfRect ()) {

                                                Imgproc.equalizeHist (grayMat, equalizeHistMat);

                                                cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0
                                                        | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                                        | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());

                                                if (faces.rows () > 0) {
                                                        Debug.Log ("faces " + faces.dump ());
                                                        //add initial face points from MatOfRect
                                                        faceTracker.addPoints (faces);

                                                        //draw face rect
                                                        OpenCVForUnity.Rect[] rects = faces.toArray ();
                                                        for (int i = 0; i < rects.Length; i++) {
                                                                Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
                                                        }
                                                }

                                        }

                                }

                                //track face points.if face points <= 0, always return false.
                                if (faceTracker.track (grayMat, faceTrackerParams)) {
                                        if (isDrawPoints)
                                                faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));

                                        Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

                                        Point[] points = faceTracker.getPoints () [0];

                                        if (points.Length > 0) {

            //												for (int i = 0; i < points.Length; i++) {
            //														Core.putText (rgbaMat, "" + i, new Point (points [i].x, points [i].y), Core.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar (0, 0, 255, 255), 2, Core.LINE_AA, false);
            //												}

                                                imagePoints.fromArray (
                        points [31],//l eye
                        points [36],//r eye
                        points [67],//nose
                        points [48],//l mouth
                        points [54] //r mouth
            //							,
            //											points [1],//l ear
            //											points [13]//r ear
                                                );

                                                Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                                                bool isRefresh = false;

                                                if (tvec.get (2, 0) [0] > 0 && tvec.get (2, 0) [0] < 1200 * ((float)webCamTexture.width / (float)width)) {

                                                        isRefresh = true;

                                                        if (oldRvec == null) {
                                                                oldRvec = new Mat ();
                                                                rvec.copyTo (oldRvec);
                                                        }
                                                        if (oldTvec == null) {
                                                                oldTvec = new Mat ();
                                                                tvec.copyTo (oldTvec);
                                                        }

                                                        //filter Rvec Noise.
                                                        using (Mat absDiffRvec = new Mat ()) {
                                                                Core.absdiff (rvec, oldRvec, absDiffRvec);

                                                                //				Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                                                using (Mat cmpRvec = new Mat ()) {
                                                                        Core.compare (absDiffRvec, new Scalar (rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);

                                                                        if (Core.countNonZero (cmpRvec) > 0)
                                                                                isRefresh = false;
                                                                }
                                                        }

                                                        //filter Tvec Noise.
                                                        using (Mat absDiffTvec = new Mat ()) {
                                                                Core.absdiff (tvec, oldTvec, absDiffTvec);

                                                                //				Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                                                using (Mat cmpTvec = new Mat ()) {
                                                                        Core.compare (absDiffTvec, new Scalar (tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);

                                                                        if (Core.countNonZero (cmpTvec) > 0)
                                                                                isRefresh = false;
                                                                }
                                                        }

                                                }

                                                if (isRefresh) {

                                                        if (!rightEye.activeSelf)
                                                                rightEye.SetActive (true);
                                                        if (!leftEye.activeSelf)
                                                                leftEye.SetActive (true);

                                                        if ((Mathf.Abs ((float)(points [48].x - points [56].x)) < Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.2
                                                                && Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.9)
                                                                || Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.7) {

                                                                if (!mouth.activeSelf)
                                                                        mouth.SetActive (true);

                                                        } else {
                                                                if (mouth.activeSelf)
                                                                        mouth.SetActive (false);
                                                        }

                                                        rvec.copyTo (oldRvec);
                                                        tvec.copyTo (oldTvec);

                                                        Calib3d.Rodrigues (rvec, rotM);

                                                        transformationM .SetRow (0, new Vector4 ((float)rotM.get (0, 0) [0], (float)rotM.get (0, 1) [0], (float)rotM.get (0, 2) [0], (float)tvec.get (0, 0) [0]));
                                                        transformationM.SetRow (1, new Vector4 ((float)rotM.get (1, 0) [0], (float)rotM.get (1, 1) [0], (float)rotM.get (1, 2) [0], (float)tvec.get (1, 0) [0]));
                                                        transformationM.SetRow (2, new Vector4 ((float)rotM.get (2, 0) [0], (float)rotM.get (2, 1) [0], (float)rotM.get (2, 2) [0], (float)tvec.get (2, 0) [0]));
                                                        transformationM.SetRow (3, new Vector4 (0, 0, 0, 1));

                                                        modelViewMtrx = lookAtM * transformationM * invertZM;

                                                        ARCamera.worldToCameraMatrix = modelViewMtrx;

                                                        //				Debug.Log ("modelViewMtrx " + modelViewMtrx.ToString());
                                                }
                                        }
                                }

                                Utils.matToTexture2D (rgbaMat, texture, colors);

                        }

                        if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
                                faceTracker.reset ();
                                if (oldRvec != null) {
                                        oldRvec.Dispose ();
                                        oldRvec = null;
                                }
                                if (oldTvec != null) {
                                        oldTvec.Dispose ();
                                        oldTvec = null;
                                }

                                ARCamera.ResetWorldToCameraMatrix ();

                                rightEye.SetActive (false);
                                leftEye.SetActive (false);
                                mouth.SetActive (false);
                        }

                }

                void OnDisable ()
                {
                        webCamTexture.Stop ();
                }

                private Matrix4x4 getLookAtMatrix (Vector3 pos, Vector3 target, Vector3 up)
                {

                        Vector3 z = Vector3.Normalize (pos - target);
                        Vector3 x = Vector3.Normalize (Vector3.Cross (up, z));
                        Vector3 y = Vector3.Normalize (Vector3.Cross (z, x));

                        Matrix4x4 result = new Matrix4x4 ();
                        result.SetRow (0, new Vector4 (x.x, x.y, x.z, -(Vector3.Dot (pos, x))));
                        result.SetRow (1, new Vector4 (y.x, y.y, y.z, -(Vector3.Dot (pos, y))));
                        result.SetRow (2, new Vector4 (z.x, z.y, z.z, -(Vector3.Dot (pos, z))));
                        result.SetRow (3, new Vector4 (0, 0, 0, 1));

                        return result;
                }

                void OnGUI ()
                {
                        float screenScale = Screen.height / 240.0f;
                        Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
                        GUI.matrix = scaledMatrix;

                        GUILayout.BeginVertical ();
                        if (GUILayout.Button ("back")) {
                                Application.LoadLevel ("FaceTrackerSample");
                        }
                        if (GUILayout.Button ("change camera")) {
                                isFrontFacing = !isFrontFacing;
                                StartCoroutine (init ());
                        }

                        if (GUILayout.Button ("drawPoints")) {
                                if (isDrawPoints) {
                                        isDrawPoints = false;
                                } else {
                                        isDrawPoints = true;
                                }
                        }
                        if (GUILayout.Button ("axes")) {
                                if (axes.activeSelf) {
                                        axes.SetActive (false);
                                } else {
                                        axes.SetActive (true);
                                }
                        }
                        if (GUILayout.Button ("head")) {
                                if (head.activeSelf) {
                                        head.SetActive (false);
                                } else {
                                        head.SetActive (true);
                                }
                        }

                        GUILayout.EndVertical ();
                }

            }
コード例 #24
0
ファイル: Eye.cs プロジェクト: muripoLife/EyesAuthentication
        // Starts the initialization process
        private IEnumerator init()
        {
            rightEye.SetActive (false);
            leftEye.SetActive (false);

            Debug.Log("---------------------------------------------------------------Eye");
            Debug.Log(leftEye.transform.localPosition);
            Debug.Log(rightEye.transform.localPosition);
            Debug.Log("---------------------------------------------------------------Eye");

            if (webCamTexture != null)
            {
                faceTracker.reset();

                webCamTexture.Stop();
                initDone = false;

                rgbaMat.Dispose();
                grayMat.Dispose();
                cascade.Dispose();
                camMatrix.Dispose();
                distCoeffs.Dispose();
            }

            // Checks which cameras are available on the device
            for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
            {
                if (WebCamTexture.devices[cameraIndex].isFrontFacing == shouldUseFrontFacing)
                {
                    Debug.Log(cameraIndex + " name " + WebCamTexture.devices[cameraIndex].name + " isFrontFacing " + WebCamTexture.devices[cameraIndex].isFrontFacing);
                    webCamDevice = WebCamTexture.devices[cameraIndex];
                    webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
                    break;
                }
            }

            if (webCamTexture == null)
            {
                webCamDevice = WebCamTexture.devices[0];
                webCamTexture = new WebCamTexture(webCamDevice.name, width, height);
            }

            Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

            // Starts the camera
            webCamTexture.Play();
            while (true)
            {
                // On iOS, if you want to use webcamTexture.width and webcamTexture.height, you have to wait until webcamTexture.didUpdateThisFrame == 1; otherwise these two values will be equal to 16.
                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                if (webCamTexture.width > 16 && webCamTexture.height > 16)
                {
                #else
                    if (webCamTexture.didUpdateThisFrame)
                    {
                    #if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
                        while (webCamTexture.width <= 16)
                        {
                        webCamTexture.GetPixels32 ();
                        yield return new WaitForEndOfFrame ();
                        }
                    #endif
                #endif
                    Debug.Log("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                    Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                    colors = new Color32[webCamTexture.width * webCamTexture.height];

                    rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                    grayMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

                    texture = new Texture2D(webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                    gameObject.GetComponent<Renderer>().material.mainTexture = texture;

                    updateLayout();

                    cascade = new CascadeClassifier(Utils.getFilePath("haarcascade_frontalface_alt.xml"));
                    if (cascade.empty())
                    {
                        Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
                    }

                    int max_d = Mathf.Max(rgbaMat.rows(), rgbaMat.cols());
                    camMatrix = new Mat(3, 3, CvType.CV_64FC1);
                    camMatrix.put(0, 0, max_d);
                    camMatrix.put(0, 1, 0);
                    camMatrix.put(0, 2, rgbaMat.cols() / 2.0f);
                    camMatrix.put(1, 0, 0);
                    camMatrix.put(1, 1, max_d);
                    camMatrix.put(1, 2, rgbaMat.rows() / 2.0f);
                    camMatrix.put(2, 0, 0);
                    camMatrix.put(2, 1, 0);
                    camMatrix.put(2, 2, 1.0f);

                    Size imageSize = new Size(rgbaMat.cols(), rgbaMat.rows());
                    double apertureWidth = 0;
                    double apertureHeight = 0;
                    double[] fovx = new double[1];
                    double[] fovy = new double[1];
                    double[] focalLength = new double[1];
                    Point principalPoint = new Point(); // principal point
                    double[] aspectratio = new double[1];

                    Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

                    Debug.Log("imageSize " + imageSize.ToString());
                    Debug.Log("apertureWidth " + apertureWidth);
                    Debug.Log("apertureHeight " + apertureHeight);
                    Debug.Log("fovx " + fovx[0]);
                    Debug.Log("fovy " + fovy[0]);
                    Debug.Log("focalLength " + focalLength[0]);
                    Debug.Log("--------------------------principalPoint");
                    Debug.Log("principalPoint " + principalPoint.ToString());
                    Debug.Log("--------------------------principalPoint");

                    Debug.Log("aspectratio " + aspectratio[0]);

                    ARCamera.fieldOfView = (float)fovy[0];

                    Debug.Log("camMatrix " + camMatrix.dump());

                    distCoeffs = new MatOfDouble(0, 0, 0, 0);
                    Debug.Log("distCoeffs " + distCoeffs.dump());

                    lookAtM = getLookAtMatrix(new Vector3(0, 0, 0), new Vector3(0, 0, 1), new Vector3(0, -1, 0));
                    Debug.Log("lookAt " + lookAtM.ToString());

                    invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));

                    screenOrientation = Screen.orientation;
                    initDone = true;
                    break;
                    }
                    else
                    {
                        yield return 0;
                    }
                }
            }
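With the camMatrix built above (fx = fy = max_d and the principal point at the image centre), the fovy value reported by calibrationMatrixValues should reduce to the standard pinhole relation fovy = 2·atan(h / (2·fy)). A small sketch of that relation (not part of the original sample):

            // Sketch: expected vertical field of view (in degrees) for a pinhole camera
            // whose focal length is max(imageWidth, imageHeight), as in the camMatrix above.
            static float EstimateFovyDegrees(int imageWidth, int imageHeight)
            {
                float fy = Mathf.Max(imageWidth, imageHeight);   // corresponds to max_d
                return 2.0f * Mathf.Atan(imageHeight / (2.0f * fy)) * Mathf.Rad2Deg;
            }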
コード例 #25
0
				/// <summary>
				/// Init this instance.
				/// </summary>
				private IEnumerator init ()
				{
						if (webCamTexture != null) {
								webCamTexture.Stop ();
								initDone = false;
				
								rgbaMat.Dispose ();
						}

						// Checks how many and which cameras are available on the device
						for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
				
				
								if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {
					
					
										Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

										webCamDevice = WebCamTexture.devices [cameraIndex];
										
										webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
					
					
										break;
								}
				
				
						}

						if (webCamTexture == null) {
								webCamDevice = WebCamTexture.devices [0];
								webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
						}
			
						Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
			
			
			
						// Starts the camera
						webCamTexture.Play ();

						while (true) {
								//If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
								#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
				if (webCamTexture.width > 16 && webCamTexture.height > 16) {
								#else
								if (webCamTexture.didUpdateThisFrame) {
										#endif
										Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
										Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

					
										colors = new Color32[webCamTexture.width * webCamTexture.height];
					
										rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
					
										texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);
					

					
										gameObject.transform.localEulerAngles = new Vector3 (0, 0, 0);
//										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);
					

										gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

//										bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
//										float scaleX = 1;
//										float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
//										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

					
										gameObject.GetComponent<Renderer> ().material.mainTexture = texture;


										Camera.main.orthographicSize = webCamTexture.height / 2;





										//set cameraparam
										int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
										camMatrix = new Mat (3, 3, CvType.CV_64FC1);
										camMatrix.put (0, 0, max_d);
										camMatrix.put (0, 1, 0);
										camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
										camMatrix.put (1, 0, 0);
										camMatrix.put (1, 1, max_d);
										camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
										camMatrix.put (2, 0, 0);
										camMatrix.put (2, 1, 0);
										camMatrix.put (2, 2, 1.0f);
										Debug.Log ("camMatrix " + camMatrix.dump ());

										distCoeffs = new MatOfDouble (0, 0, 0, 0);
										Debug.Log ("distCoeffs " + distCoeffs.dump ());

										//calibration camera
										Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
										double apertureWidth = 0;
										double apertureHeight = 0;
										double[] fovx = new double[1];
										double[] fovy = new double[1];
										double[] focalLength = new double[1];
										Point principalPoint = new Point ();
										double[] aspectratio = new double[1];


										Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

										Debug.Log ("imageSize " + imageSize.ToString ());
										Debug.Log ("apertureWidth " + apertureWidth);
										Debug.Log ("apertureHeight " + apertureHeight);
										Debug.Log ("fovx " + fovx [0]);
										Debug.Log ("fovy " + fovy [0]);
										Debug.Log ("focalLength " + focalLength [0]);
										Debug.Log ("principalPoint " + principalPoint.ToString ());
										Debug.Log ("aspectratio " + aspectratio [0]);

										//Adjust Unity Camera FOV
										for (int i = 0; i < ARCamera.Length; i++) {
												ARCamera [i].fieldOfView = (float)fovy [0];
										}
										

										
					
										markerDetector = new MarkerDetector (camMatrix, distCoeffs);


										//Marker Coordinate Initial Matrix
										lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
										Debug.Log ("lookAt " + lookAtM.ToString ());

										//OpenGL to Unity Coordinate System Convert Matrix
										//http://docs.unity3d.com/ScriptReference/Camera-worldToCameraMatrix.html that camera space matches OpenGL convention: camera's forward is the negative Z axis. This is different from Unity's convention, where forward is the positive Z axis.
										invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
										Debug.Log ("invertZM " + invertZM.ToString ());


					
										initDone = true;
					
										break;
								} else {
										yield return 0;
								}
						}
				}
		
				// Update is called once per frame
				void Update ()
				{
						if (!initDone)
								return;

						#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
				if (webCamTexture.width > 16 && webCamTexture.height > 16) {
						#else
						if (webCamTexture.didUpdateThisFrame) {
								#endif
								
				
								Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

								//flip to correct direction.
								if (webCamTexture.videoVerticallyMirrored) {
										if (webCamDevice.isFrontFacing) {
												if (webCamTexture.videoRotationAngle == 0) {
														Core.flip (rgbaMat, rgbaMat, -1);
												} else if (webCamTexture.videoRotationAngle == 180) {
														Core.flip (rgbaMat, rgbaMat, 0);
												}
										} else {
												if (webCamTexture.videoRotationAngle == 0) {
									
												} else if (webCamTexture.videoRotationAngle == 180) {
														Core.flip (rgbaMat, rgbaMat, 1);
												}
										}
								} else {
										if (webCamDevice.isFrontFacing) {
												if (webCamTexture.videoRotationAngle == 0) {
														Core.flip (rgbaMat, rgbaMat, 1);
												} else if (webCamTexture.videoRotationAngle == 180) {
														Core.flip (rgbaMat, rgbaMat, 0);
												}
										} else {
												if (webCamTexture.videoRotationAngle == 0) {
									
												} else if (webCamTexture.videoRotationAngle == 180) {
														Core.flip (rgbaMat, rgbaMat, -1);
												}
										}
								}
				
								markerDetector.processFrame (rgbaMat, 1);

								//Debug.Log ("markerDetector.getTransformations ().Count " + markerDetector.getTransformations ().Count);


								for (int i = 0; i < ARCamera.Length; i++) {
										ARCamera [i].gameObject.SetActive (false);
								}

								int markerCount = markerDetector.getTransformations ().Count;
								for (int i = 0; i < markerCount; i++) {
										if (i > ARCamera.Length - 1)
												break;
									
										ARCamera [i].gameObject.SetActive (true);

										//Marker to Camera Coordinate System Convert Matrix
										transformationM = markerDetector.getTransformations () [i];
										//Debug.Log ("transformationM " + transformationM.ToString ());
				
										worldToCameraM = lookAtM * transformationM * invertZM;
										//Debug.Log ("worldToCameraM " + worldToCameraM.ToString ());
				
										ARCamera [i].worldToCameraMatrix = worldToCameraM;
								}
							
				
								Utils.matToTexture2D (rgbaMat, texture, colors);
				
								gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
				
				
						}
			
				}
		
				void OnDisable ()
				{
						webCamTexture.Stop ();
				}

				/// <summary>
				/// Gets the look at matrix.
				/// </summary>
				/// <returns>The look at matrix.</returns>
				/// <param name="pos">Position.</param>
				/// <param name="target">Target.</param>
				/// <param name="up">Up.</param>
				private Matrix4x4 getLookAtMatrix (Vector3 pos, Vector3 target, Vector3 up)
				{
			
						Vector3 z = Vector3.Normalize (pos - target);
						Vector3 x = Vector3.Normalize (Vector3.Cross (up, z));
						Vector3 y = Vector3.Normalize (Vector3.Cross (z, x));
			
						Matrix4x4 result = new Matrix4x4 ();
						result.SetRow (0, new Vector4 (x.x, x.y, x.z, -(Vector3.Dot (pos, x))));
						result.SetRow (1, new Vector4 (y.x, y.y, y.z, -(Vector3.Dot (pos, y))));
						result.SetRow (2, new Vector4 (z.x, z.y, z.z, -(Vector3.Dot (pos, z))));
						result.SetRow (3, new Vector4 (0, 0, 0, 1));
			
						return result;
				}

				void OnGUI ()
				{
						float screenScale = Screen.height / 240.0f;
						Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
						GUI.matrix = scaledMatrix;
			
			
						GUILayout.BeginVertical ();
			
						if (GUILayout.Button ("back")) {
								Application.LoadLevel ("MarkerBasedARSample");
						}
						if (GUILayout.Button ("change camera")) {
								isFrontFacing = !isFrontFacing;
								StartCoroutine (init ());
						}
			
						GUILayout.EndVertical ();
				}
		}
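Since each row of the matrix built by getLookAtMatrix stores an axis together with -dot(pos, axis), the result maps the camera position itself to the view-space origin. A quick sanity-check sketch (assuming a getLookAtMatrix identical to the one above):

		// Sketch: a view matrix built by getLookAtMatrix should send the camera
		// position to the origin of view space (up to floating-point error).
		void SanityCheckLookAt ()
		{
				Vector3 camPos = new Vector3 (1f, 2f, 3f);
				Matrix4x4 view = getLookAtMatrix (camPos, Vector3.zero, Vector3.up);
				Debug.Log (view.MultiplyPoint3x4 (camPos)); // expected: (0.0, 0.0, 0.0)
		}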
コード例 #26
0
        private IEnumerator init()
        {
            if (webCamTexture != null) {
                                webCamTexture.Stop ();
                                initDone = false;

                                rgbaMat.Dispose ();
                                hsvMat.Dispose ();
                                if (roiHistMat != null)
                                        roiHistMat.Dispose ();
                                roiPointList.Clear ();
                        }

                        // Checks how many and which cameras are available on the device
                        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {

                                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {

                                        Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                                        webCamDevice = WebCamTexture.devices [cameraIndex];

                                        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

                                        break;
                                }

                        }

                        if (webCamTexture == null) {
                                webCamDevice = WebCamTexture.devices [0];
                                webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
                        }

                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

                        // Starts the camera
                        webCamTexture.Play ();

                        while (true) {
                                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                                #else
                                if (webCamTexture.didUpdateThisFrame) {
                                        #endif

                                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                                        Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                                        colors = new Color32[webCamTexture.width * webCamTexture.height];

                                        rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                                        hsvMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC3);

                                        texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                                        gameObject.transform.eulerAngles = new Vector3 (0, 0, 0);
                                        #if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
                    gameObject.transform.eulerAngles = new Vector3 (0, 0, -90);
                                        #endif

            //										gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

                                        gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

            //										bool videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            //										float scaleX = 1;
            //										float scaleY = videoVerticallyMirrored ? -1.0f : 1.0f;
            //										if (webCamTexture.videoRotationAngle == 270)
            //												scaleY = -1.0f;
            //										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

                                        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                                        #if (UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR
                                        Camera.main.orthographicSize = (((float)Screen.height/(float)Screen.width) * (float)webCamTexture.height) / 2.0f;
            #else
                                        Camera.main.orthographicSize = webCamTexture.height / 2;
                                        #endif

                                        initDone = true;

                                        break;
                                } else {
                                        yield return 0;
                                }
                        }

                }

                // Update is called once per frame
                void Update ()
                {
                        if (!initDone)
                                return;

                        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                        #else
                        if (webCamTexture.didUpdateThisFrame) {
                                #endif

                                Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

                                if (webCamTexture.videoVerticallyMirrored) {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                } else if (webCamTexture.videoRotationAngle == 90) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 90) {

                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                }
                                        }
                                } else {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                } else if (webCamTexture.videoRotationAngle == 90) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 90) {

                                                } else if (webCamTexture.videoRotationAngle == 270) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                }
                                        }
                                }

                                Imgproc.cvtColor (rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
                                Imgproc.cvtColor (hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);

                                Point[] points = roiPointList.ToArray ();

                                if (roiPointList.Count == 4) {

                                        using (Mat backProj = new Mat ()) {
                                                Imgproc.calcBackProject (new List<Mat> (new Mat[]{hsvMat}), new MatOfInt (0), roiHistMat, backProj, new MatOfFloat (0, 180), 1.0);

                                                RotatedRect r = Video.CamShift (backProj, roiRect, termination);
                                                r.points (points);
                                        }

                                        #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                            //Touch
                            int touchCount = Input.touchCount;
                            if (touchCount == 1)
                            {

                                if(Input.GetTouch(0).phase == TouchPhase.Ended){

                                    roiPointList.Clear ();
                                }

                            }
                                        #else
                                        if (Input.GetMouseButtonUp (0)) {
                                                roiPointList.Clear ();
                                        }
            #endif
                                }

                                if (roiPointList.Count < 4) {

                                        #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                            //Touch
                            int touchCount = Input.touchCount;
                            if (touchCount == 1)
                            {
                                Touch t = Input.GetTouch(0);
                                if(t.phase == TouchPhase.Ended){
                                    roiPointList.Add (convertScreenPoint (new Point (t.position.x, t.position.y), gameObject, Camera.main));
            //									Debug.Log ("touch X " + t.position.x);
            //									Debug.Log ("touch Y " + t.position.y);

                                    if (!(new OpenCVForUnity.Rect (0, 0, hsvMat.width (), hsvMat.height ()).contains (roiPointList [roiPointList.Count - 1]))) {
                                        roiPointList.RemoveAt (roiPointList.Count - 1);
                                    }
                                }

                            }
            #else
                                        //Mouse
                                        if (Input.GetMouseButtonUp (0)) {

                                                roiPointList.Add (convertScreenPoint (new Point (Input.mousePosition.x, Input.mousePosition.y), gameObject, Camera.main));
            //												Debug.Log ("mouse X " + Input.mousePosition.x);
            //												Debug.Log ("mouse Y " + Input.mousePosition.y);

                                                if (!(new OpenCVForUnity.Rect (0, 0, hsvMat.width (), hsvMat.height ()).contains (roiPointList [roiPointList.Count - 1]))) {
                                                        roiPointList.RemoveAt (roiPointList.Count - 1);
                                                }
                                        }
            #endif

                                        if (roiPointList.Count == 4) {

                                                using (MatOfPoint roiPointMat = new MatOfPoint (roiPointList.ToArray ())) {
                                                        roiRect = Imgproc.boundingRect (roiPointMat);
                                                }

                                                if (roiHistMat != null) {
                                                        roiHistMat.Dispose ();
                                                        roiHistMat = null;
                                                }
                                                roiHistMat = new Mat ();

                                                using (Mat roiHSVMat = new Mat(hsvMat, roiRect))
                                                using (Mat maskMat = new Mat ()) {

                                                        Imgproc.calcHist (new List<Mat> (new Mat[]{roiHSVMat}), new MatOfInt (0), maskMat, roiHistMat, new MatOfInt (16), new MatOfFloat (0, 180));
                                                        Core.normalize (roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);

            //														Debug.Log ("roiHist " + roiHistMat.ToString ());
                                                }
                                        }
                                }

                                if (points.Length < 4) {

                                        for (int i = 0; i < points.Length; i++) {
                                                Core.circle (rgbaMat, points [i], 6, new Scalar (0, 0, 255, 255), 2);
                                        }

                                } else {

                                        for (int i = 0; i < 4; i++) {
                                                Core.line (rgbaMat, points [i], points [(i + 1) % 4], new Scalar (255, 0, 0, 255), 2);
                                        }

                                        Core.rectangle (rgbaMat, roiRect.tl (), roiRect.br (), new Scalar (0, 255, 0, 255), 2);
                                }

                                Core.putText (rgbaMat, "PLEASE TOUCH 4 POINTS", new Point (5, 25), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

                                Utils.matToTexture2D (rgbaMat, texture, colors);

                                gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                        }

                }

                void OnDisable ()
                {
                        webCamTexture.Stop ();
                }

                void OnGUI ()
                {
                        float screenScale = Screen.width / 240.0f;
                        Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
                        GUI.matrix = scaledMatrix;

                        GUILayout.BeginVertical ();
                        if (GUILayout.Button ("back")) {
                                Application.LoadLevel ("OpenCVForUnitySample");
                        }
                        if (GUILayout.Button ("change camera")) {
                                isFrontFacing = !isFrontFacing;
                                StartCoroutine (init ());
                        }

                        GUILayout.EndVertical ();
                }

                /// <summary>
                /// Converts the screen point.
                /// </summary>
                /// <returns>The screen point.</returns>
                /// <param name="screenPoint">Screen point.</param>
                /// <param name="quad">Quad.</param>
                /// <param name="cam">Cam.</param>
                static Point convertScreenPoint (Point screenPoint, GameObject quad, Camera cam)
                {
                        #if ((UNITY_ANDROID || UNITY_IOS) && !UNITY_EDITOR)
                    Vector2 tl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.y / 2, quad.transform.localPosition.y + quad.transform.localScale.x / 2, quad.transform.localPosition.z));
                    Vector2 tr = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.y / 2, quad.transform.localPosition.y - quad.transform.localScale.x / 2, quad.transform.localPosition.z));
                    Vector2 br = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.y / 2, quad.transform.localPosition.y - quad.transform.localScale.x / 2, quad.transform.localPosition.z));
                    Vector2 bl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.y / 2, quad.transform.localPosition.y + quad.transform.localScale.x / 2, quad.transform.localPosition.z));
            #else
                        Vector2 tl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.x / 2, quad.transform.localPosition.y + quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                        Vector2 tr = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.x / 2, quad.transform.localPosition.y + quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                        Vector2 br = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x + quad.transform.localScale.x / 2, quad.transform.localPosition.y - quad.transform.localScale.y / 2, quad.transform.localPosition.z));
                        Vector2 bl = cam.WorldToScreenPoint (new Vector3 (quad.transform.localPosition.x - quad.transform.localScale.x / 2, quad.transform.localPosition.y - quad.transform.localScale.y / 2, quad.transform.localPosition.z));
            #endif

                        Mat srcRectMat = new Mat (4, 1, CvType.CV_32FC2);
                        Mat dstRectMat = new Mat (4, 1, CvType.CV_32FC2);

                        srcRectMat.put (0, 0, tl.x, tl.y, tr.x, tr.y, br.x, br.y, bl.x, bl.y);
                        dstRectMat.put (0, 0, 0.0, 0.0, quad.transform.localScale.x, 0.0, quad.transform.localScale.x, quad.transform.localScale.y, 0.0, quad.transform.localScale.y);

                        Mat perspectiveTransform = Imgproc.getPerspectiveTransform (srcRectMat, dstRectMat);

            //						Debug.Log ("srcRectMat " + srcRectMat.dump ());
            //						Debug.Log ("dstRectMat " + dstRectMat.dump ());
            //						Debug.Log ("perspectiveTransform " + perspectiveTransform.dump ());

                        MatOfPoint2f srcPointMat = new MatOfPoint2f (screenPoint);
                        MatOfPoint2f dstPointMat = new MatOfPoint2f ();

                        Core.perspectiveTransform (srcPointMat, dstPointMat, perspectiveTransform);

            //						Debug.Log ("srcPointMat " + srcPointMat.dump ());
            //						Debug.Log ("dstPointMat " + dstPointMat.dump ());

                        return dstPointMat.toArray () [0];
                }
            }
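
For reference, a minimal sketch of the CamShift core used in the sample above, with the ROI histogram built once and then back-projected every frame. It assumes the same OpenCV for Unity API and that hsvMat, roiRect, roiHistMat and termination are prepared exactly as in the sample; it is an illustration, not code from the original project.

                // Build the hue histogram of the selected ROI once (names follow the sample above).
                using (Mat roiHSVMat = new Mat (hsvMat, roiRect))
                using (Mat maskMat = new Mat ()) {
                        Imgproc.calcHist (new List<Mat> (new Mat[]{ roiHSVMat }), new MatOfInt (0), maskMat, roiHistMat, new MatOfInt (16), new MatOfFloat (0, 180));
                        Core.normalize (roiHistMat, roiHistMat, 0, 255, Core.NORM_MINMAX);
                }

                // Every frame: back-project the histogram and let CamShift update the rotated tracking window.
                using (Mat backProj = new Mat ()) {
                        Imgproc.calcBackProject (new List<Mat> (new Mat[]{ hsvMat }), new MatOfInt (0), roiHistMat, backProj, new MatOfFloat (0, 180), 1.0);
                        RotatedRect trackedRegion = Video.CamShift (backProj, roiRect, termination);

                        Point[] corners = new Point[4];
                        trackedRegion.points (corners); // the four corners of the tracked region, ready to draw with Core.line
                }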
Code example #27
0
        private IEnumerator init()
        {
            if (webCamTexture != null) {
                                faceTracker.reset ();
                                webCamTexture.Stop ();
                                initDone = false;

                                rgbaMat.Dispose ();
                                grayMat.Dispose ();
                        }

                        // Checks how many and which cameras are available on the device
                        for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {

                                if (WebCamTexture.devices [cameraIndex].isFrontFacing == isFrontFacing) {

                                        Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

                                        webCamDevice = WebCamTexture.devices [cameraIndex];

                                        webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

                                        break;
                                }

                        }

                        if (webCamTexture == null) {
                                webCamDevice = WebCamTexture.devices [0];
                                webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

                        }

                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

                        // Starts the camera
                        webCamTexture.Play ();

                        while (true) {
                                //If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
                                #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                                #else
                                if (webCamTexture.didUpdateThisFrame) {
                                        #endif
                                        Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
                                        Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);

                                        colors = new Color32[webCamTexture.width * webCamTexture.height];

                                        rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
                                        grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

                                        texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

                                        cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
                                        if (cascade.empty ()) {
                                                Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
                                        }

                                        gameObject.transform.localEulerAngles = new Vector3 (0, 0, 0);
            //					gameObject.transform.rotation = gameObject.transform.rotation * Quaternion.AngleAxis (webCamTexture.videoRotationAngle, Vector3.back);

                                        gameObject.transform.localScale = new Vector3 (webCamTexture.width, webCamTexture.height, 1);

            //										bool _videoVerticallyMirrored = webCamTexture.videoVerticallyMirrored;
            //										float scaleX = 1;
            //										float scaleY = _videoVerticallyMirrored ? -1.0f : 1.0f;
            //										gameObject.transform.localScale = new Vector3 (scaleX * gameObject.transform.localScale.x, scaleY * gameObject.transform.localScale.y, 1);

                                        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

                                        Camera.main.orthographicSize = webCamTexture.height / 2;

                                        initDone = true;

                                        break;
                                } else {
                                        yield return 0;
                                }
                        }
                }

                // Update is called once per frame
                void Update ()
                {
                        if (!initDone)
                                return;
                        #if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
                        if (webCamTexture.width > 16 && webCamTexture.height > 16) {
                        #else
                        if (webCamTexture.didUpdateThisFrame) {
                                #endif
                                Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

                                //flip the image so that it is displayed in the correct orientation.
                                if (webCamTexture.videoVerticallyMirrored) {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                } else if (webCamTexture.videoRotationAngle == 180) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 0) {

                                                } else if (webCamTexture.videoRotationAngle == 180) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                }
                                        }
                                } else {
                                        if (webCamDevice.isFrontFacing) {
                                                if (webCamTexture.videoRotationAngle == 0) {
                                                        Core.flip (rgbaMat, rgbaMat, 1);
                                                } else if (webCamTexture.videoRotationAngle == 180) {
                                                        Core.flip (rgbaMat, rgbaMat, 0);
                                                }
                                        } else {
                                                if (webCamTexture.videoRotationAngle == 0) {

                                                } else if (webCamTexture.videoRotationAngle == 180) {
                                                        Core.flip (rgbaMat, rgbaMat, -1);
                                                }
                                        }
                                }

                                //convert image to greyscale
                                Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                                if (faceTracker.getPoints ().Count <= 0) {
                                        Debug.Log ("detectFace");

                                        //equalize the histogram and run face detection
                                        using (Mat equalizeHistMat = new Mat ())
                                        using (MatOfRect faces = new MatOfRect ()) {

                                                Imgproc.equalizeHist (grayMat, equalizeHistMat);

                                                cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0
            //														                           | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                                                        | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());

                                                if (faces.rows () > 0) {
                                                        Debug.Log ("faces " + faces.dump ());
                                                        //add initial face points from MatOfRect
                                                        faceTracker.addPoints (faces);

                                                        //draw face rect
                                                        OpenCVForUnity.Rect[] rects = faces.toArray ();
                                                        for (int i = 0; i < rects.Length; i++) {
                                                                Core.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y), new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
                                                        }

                                                }

                                        }

                                }

                                //track the face points; if no points have been added yet, track () always returns false.
                                if (faceTracker.track (grayMat, faceTrackerParams))
                                        faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));

                                Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

                                Utils.matToTexture2D (rgbaMat, texture, colors);

                        }

                        if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
                                faceTracker.reset ();
                        }

                }

                void OnDisable ()
                {
                        webCamTexture.Stop ();
                }

                void OnGUI ()
                {
                        float screenScale = Screen.height / 240.0f;
                        Matrix4x4 scaledMatrix = Matrix4x4.Scale (new Vector3 (screenScale, screenScale, screenScale));
                        GUI.matrix = scaledMatrix;

                        GUILayout.BeginVertical ();
                        if (GUILayout.Button ("back")) {
                                Application.LoadLevel ("FaceTrackerSample");
                        }
                        if (GUILayout.Button ("change camera")) {
                                isFrontFacing = !isFrontFacing;
                                StartCoroutine (init ());
                        }

                        GUILayout.EndVertical ();
                }

            }
        }
Code example #28
0
 static public int get_name(IntPtr l)
 {
     UnityEngine.WebCamDevice o = (UnityEngine.WebCamDevice)checkSelf(l);
     pushValue(l, o.name);
     return(1);
 }
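
The isFrontFacing field follows the same binding pattern; a hypothetical companion getter written in the same style as the get_name function above (sketch only, not taken from the original project):

 static public int get_isFrontFacing(IntPtr l)
 {
     UnityEngine.WebCamDevice o = (UnityEngine.WebCamDevice)checkSelf(l);
     pushValue(l, o.isFrontFacing); // push the bool onto the Lua stack
     return(1);
 }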
Code example #29
0
File: webCamScript.cs  Project: keviniseborn/Air-Raid
    void OnGUI()
    {
        //FPS COUNTER
        /*
        if (timerFrames < DateTime.Now)
        {
            framesPerSecond = frameNum;
            frameNum = 0;
            timerFrames = DateTime.Now + TimeSpan.FromSeconds(1);
        }
        ++frameNum;

        GUILayout.Label(string.Format("Frames per second: {0}", framesPerSecond));
        */

        //if the application has authorization to use the webcam
        if (Application.HasUserAuthorization(UserAuthorization.WebCam))
        {
            //if there are camera devices available
            if (WebCamTexture.devices.Length > 0)
            {

                //use the first camera in the list (hard-coded to index 0, typically the back-facing camera)
                device = WebCamTexture.devices[0];

                    //if cam active is set to false
                    if(camActive == false)
                    {

                        // stop playing if mTexture is present
                        if (null != mTexture)
                        {
                            if (mTexture.isPlaying)
                            {
                                mTexture.Stop();
                            }
                        }

                        // destroy the old texture
                        if (null != mTexture)
                        {
                            UnityEngine.Object.DestroyImmediate(mTexture, true);
                        }

                        // use the device name for the texture
                        mTexture = new WebCamTexture(device.name);

                        //assign camera material to new texture
                        cameraMaterial.mainTexture = mTexture;

                        // start playing
                        mTexture.Play();
                        //set camera to active
                        camActive = true;
                    }
            }

        }
        else
        {
            //if no authorization, write message to screen
            GUILayout.Label("Pending WebCam Authorization...");
        }
    }
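
The check above only reads the current authorization state; on platforms that require explicit permission, webcam access is usually requested beforehand in a coroutine. A minimal sketch using the standard Unity API (hypothetical helper, not code from the Air-Raid project):

using System.Collections;
using UnityEngine;

public class WebCamAuthorizationExample : MonoBehaviour
{
    // Ask the user for webcam access before anything starts polling HasUserAuthorization.
    IEnumerator Start()
    {
        yield return Application.RequestUserAuthorization(UserAuthorization.WebCam);

        if (Application.HasUserAuthorization(UserAuthorization.WebCam))
        {
            Debug.Log("WebCam access authorized");
        }
        else
        {
            Debug.Log("WebCam access denied");
        }
    }
}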
Code example #30
0
File: CameraInput.cs  Project: WishPotato/MED6Game
 /// <summary>
 /// Select the target device based on the requested facing direction
 /// </summary>
 /// <param name="isFrontFacing">Should the device be forward facing?</param>
 private void SelectCamera(bool isFrontFacing)
 {
     #if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN || UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX
     foreach (WebCamDevice d in devices)
     {
       if (d.isFrontFacing == isFrontFacing)
       {
             device = d;
             return;
       }
     }
     #endif
 }
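
A hypothetical call site for the helper above: fill the devices field it iterates over, pick a camera by facing direction, and start streaming from it. The cameraTexture field and the WebCamTexture setup are illustrative additions, not part of the original CameraInput.cs.

 private WebCamTexture cameraTexture; // illustrative field, not in the original file

 void Start()
 {
     devices = WebCamTexture.devices;   // fill the field SelectCamera iterates over
     SelectCamera(false);               // on desktop builds this picks the first non-front-facing camera
     cameraTexture = new WebCamTexture(device.name);
     cameraTexture.Play();              // begin streaming frames into the texture
 }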
Code example #31
0
		private IEnumerator init ()
		{

				if (webCamTexture != null) {
						webCamTexture.Stop ();
						initDone = false;

						rgbaMat.Dispose ();
						grayMat.Dispose ();
						lineMat.Dispose ();
						maskMat.Dispose ();

						bgMat.Dispose ();
				}
		
				// Checks how many and which cameras are available on the device
				for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
			

						if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing) {

				
								Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
								
								webCamDevice = WebCamTexture.devices [cameraIndex];

								webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
	
								break;
						}

			
				}
		
				if (webCamTexture == null) {
						webCamDevice = WebCamTexture.devices [0];
						webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
				}

				Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);

		
				// Starts the camera
				webCamTexture.Play ();
		
		
				

				while (true) {

						//If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
						#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
			if (webCamTexture.width > 16 && webCamTexture.height > 16) {
						#else
						if (webCamTexture.didUpdateThisFrame) {
								#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2                                    
					while (webCamTexture.width <= 16) {
						webCamTexture.GetPixels32 ();
						yield return new WaitForEndOfFrame ();
					} 
								#endif
								#endif

								Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
								Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);


								colors = new Color32[webCamTexture.width * webCamTexture.height];
				
								rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
								grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
								lineMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
								maskMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);

								//create a striped background.
								bgMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1, new Scalar (255));
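								//draw a black diagonal line every 4 pixels so the white background becomes a striped pattern (the loop runs past rows*2.5 so the stripes extend across the full frame).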
								for (int i = 0; i < bgMat.rows ()*2.5f; i=i+4) {
										Core.line (bgMat, new Point (0, 0 + i), new Point (bgMat.cols (), -bgMat.cols () + i), new Scalar (0), 1);
								}
				
								dstMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
				
								grayPixels = new byte[grayMat.cols () * grayMat.rows () * grayMat.channels ()];
								maskPixels = new byte[maskMat.cols () * maskMat.rows () * maskMat.channels ()];
				
								texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

								gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

								updateLayout ();

								screenOrientation = Screen.orientation;
								initDone = true;

								break;
						} else {
								yield return 0;
						}
				}
		}
Code example #32
0
				private IEnumerator init ()
				{
						axes.SetActive (false);
						head.SetActive (false);
						rightEye.SetActive (false);
						leftEye.SetActive (false);
						mouth.SetActive (false);
			

						if (webCamTexture != null) {
								faceTracker.reset ();
								
								webCamTexture.Stop ();
								initDone = false;
				
								rgbaMat.Dispose ();
								grayMat.Dispose ();
								cascade.Dispose ();
								camMatrix.Dispose ();
								distCoeffs.Dispose ();

						}
			
						// Checks how many and which cameras are available on the device
						for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
				
								if (WebCamTexture.devices [cameraIndex].isFrontFacing == shouldUseFrontFacing) {
					
										Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

										webCamDevice = WebCamTexture.devices [cameraIndex];

										webCamTexture = new WebCamTexture (webCamDevice.name, width, height);

										break;
								}
						}
			
						if (webCamTexture == null) {
								webCamDevice = WebCamTexture.devices [0];
								webCamTexture = new WebCamTexture (webCamDevice.name, width, height);
						}
			
						Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
			
			
			
						// Starts the camera
						webCamTexture.Play ();


						while (true) {
								//If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
								#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
				                if (webCamTexture.width > 16 && webCamTexture.height > 16) {
								#else
								if (webCamTexture.didUpdateThisFrame) {
										#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2                                    
										while (webCamTexture.width <= 16) {
												webCamTexture.GetPixels32 ();
												yield return new WaitForEndOfFrame ();
										} 
										#endif
								#endif
										Debug.Log ("width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
										Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);
					
										colors = new Color32[webCamTexture.width * webCamTexture.height];
					
										rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
										grayMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC1);
					
										texture = new Texture2D (webCamTexture.width, webCamTexture.height, TextureFormat.RGBA32, false);

										gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

										updateLayout ();

										cascade = new CascadeClassifier (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
										if (cascade.empty ()) {
												Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
										}


										int max_d = Mathf.Max (rgbaMat.rows (), rgbaMat.cols ());
										camMatrix = new Mat (3, 3, CvType.CV_64FC1);
										camMatrix.put (0, 0, max_d);
										camMatrix.put (0, 1, 0);
										camMatrix.put (0, 2, rgbaMat.cols () / 2.0f);
										camMatrix.put (1, 0, 0);
										camMatrix.put (1, 1, max_d);
										camMatrix.put (1, 2, rgbaMat.rows () / 2.0f);
										camMatrix.put (2, 0, 0);
										camMatrix.put (2, 1, 0);
										camMatrix.put (2, 2, 1.0f);
					
										Size imageSize = new Size (rgbaMat.cols (), rgbaMat.rows ());
										double apertureWidth = 0;
										double apertureHeight = 0;
										double[] fovx = new double[1];
										double[] fovy = new double[1];
										double[] focalLength = new double[1];
										Point principalPoint = new Point ();
										double[] aspectratio = new double[1];
					
					
					
					
										Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
					
										Debug.Log ("imageSize " + imageSize.ToString ());
										Debug.Log ("apertureWidth " + apertureWidth);
										Debug.Log ("apertureHeight " + apertureHeight);
										Debug.Log ("fovx " + fovx [0]);
										Debug.Log ("fovy " + fovy [0]);
										Debug.Log ("focalLength " + focalLength [0]);
										Debug.Log ("principalPoint " + principalPoint.ToString ());
										Debug.Log ("aspectratio " + aspectratio [0]);
					
					
										ARCamera.fieldOfView = (float)fovy [0];
					
										Debug.Log ("camMatrix " + camMatrix.dump ());
					
					
										distCoeffs = new MatOfDouble (0, 0, 0, 0);
										Debug.Log ("distCoeffs " + distCoeffs.dump ());
					
					
					
										lookAtM = getLookAtMatrix (new Vector3 (0, 0, 0), new Vector3 (0, 0, 1), new Vector3 (0, -1, 0));
										Debug.Log ("lookAt " + lookAtM.ToString ());
					
										invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));


										screenOrientation = Screen.orientation;
										initDone = true;
					
										break;
								} else {
										yield return 0;
								}
						}
				}
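
Calib3d.calibrationMatrixValues derives the field of view from the pinhole camera model; with the matrix built above (fx = fy = max_d and the principal point at the image centre), the vertical FOV assigned to ARCamera could equivalently be computed directly. A sketch of the underlying relation, not code from the sample:

						// Vertical field of view of a pinhole camera: fovy = 2 * atan(h / (2 * fy)), in degrees.
						float fy = Mathf.Max (webCamTexture.height, webCamTexture.width); // max_d in the sample
						float fovyDeg = 2.0f * Mathf.Atan (webCamTexture.height / (2.0f * fy)) * Mathf.Rad2Deg;
						ARCamera.fieldOfView = fovyDeg; // matches the value reported by calibrationMatrixValues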