private void RefreshActionUnits()
        {
            // Lazily build the ActionUnits array on the first call: ask the native
            // tracker how many action units exist, then fetch each unit's name and
            // "used" flag once. Subsequent calls skip this branch.
            if (ActionUnitCount == 0)
            {
                ActionUnitCount = VisageTrackerNative._getActionUnitCount();
                ActionUnits     = new ActionUnitData[ActionUnitCount];
                for (int i = 0; i < ActionUnitCount; i++)
                {
                    // Name comes back as a native ANSI string pointer.
                    var namePtr = VisageTrackerNative._getActionUnitName(i);
                    ActionUnits[i] = new ActionUnitData(
                        i,
                        Marshal.PtrToStringAnsi(namePtr),
                        VisageTrackerNative._getActionUnitUsed(i));
                }
            }

            // Pull the current per-unit values from the native tracker and copy
            // them into the cached ActionUnitData entries.
            if (ActionUnitCount > 0)
            {
                float[] currentValues = new float[ActionUnitCount];
                VisageTrackerNative._getActionUnitValues(currentValues);
                for (int i = 0; i < ActionUnitCount; i++)
                {
                    ActionUnits[i].Value = currentValues[i];
                }
            }
        }
        /** Unity lifecycle hook: releases the camera when this component is destroyed.
         *
         * On Android the camera is owned by the Java plugin activity, so it is
         * closed through the activity; on all other platforms the native plugin
         * closes it directly.
         */
        void OnDestroy()
        {
#if UNITY_ANDROID && !UNITY_EDITOR
            this.androidCameraActivity.Call("closeCamera");
#else
            VisageTrackerNative._closeCamera();
#endif
        }
        /** Stops the camera and marks tracking as inactive.
         *
         * Same platform split as OnDestroy: the Android plugin activity closes the
         * camera on Android, the native plugin everywhere else. isTracking is
         * cleared so Update() stops grabbing frames.
         */
        public void StopCamera()
        {
#if UNITY_ANDROID && !UNITY_EDITOR
            this.androidCameraActivity.Call("closeCamera");
#else
            VisageTrackerNative._closeCamera();
#endif
            isTracking = false;
        }
// NOTE(review): removed stray extraction artifact ("Example #4" / "0") — it was not valid C# and not part of the original file.
        /** Fetches 2D feature-point positions from the native tracker.
         *
         * `points` is a flat list of (group, index) pairs identifying which
         * feature points to query. Returns a flat array of 2D positions,
         * two floats per requested point, as filled in by the plugin.
         */
        float[] GetFeaturePoints(int[] points)
        {
            int count = points.Length / 2;

            // Split the interleaved (group, index) pairs into two parallel arrays,
            // which is the layout the native call expects.
            int[] groups  = new int[count];
            int[] indices = new int[count];
            int src = 0;
            for (int i = 0; i < count; i++)
            {
                groups[i]  = points[src++];
                indices[i] = points[src++];
            }

            float[] positions = new float[count * 2];
            VisageTrackerNative._getFeaturePoints2D(count, groups, indices, positions);
            return(positions);
        }
        /** Returns the current device orientation as the tracker's integer code
         * (0 = portrait, 1 = landscape-right, 2 = portrait-upside-down,
         * 3 = landscape-left — per the mapping used below).
         */
        int getDeviceOrientation()
        {
            int devOrientation;

            // Device orientation is obtained in AndroidCameraPlugin, so on Android we
            // only need to detect whether orientation changed, which shows up as a
            // swap of the reported camera frame dimensions.
#if UNITY_ANDROID && !UNITY_EDITOR
            int previousWidth  = ImageWidth;
            int previousHeight = ImageHeight;
            VisageTrackerNative._getCameraInfo(out Focus, out ImageWidth, out ImageHeight);

            bool dimensionsValid   = ImageWidth != 0 && ImageHeight != 0 && previousWidth != 0 && previousHeight != 0;
            bool dimensionsChanged = previousWidth != ImageWidth || previousHeight != ImageHeight;
            if (dimensionsChanged && dimensionsValid)
            {
                // Dimensions flipped: toggle between the two orientation states.
                devOrientation = (Orientation == 1) ? 0:1;
            }
            else
            {
                devOrientation = Orientation;
            }
#else
            // On non-Android platforms map Unity's DeviceOrientation directly.
            switch (Input.deviceOrientation)
            {
            case DeviceOrientation.Portrait:
                devOrientation = 0;
                break;
            case DeviceOrientation.PortraitUpsideDown:
                devOrientation = 2;
                break;
            case DeviceOrientation.LandscapeLeft:
                devOrientation = 3;
                break;
            case DeviceOrientation.LandscapeRight:
                devOrientation = 1;
                break;
            case DeviceOrientation.FaceUp:
                // Flat on a table: keep whatever orientation we last had.
                devOrientation = Orientation;
                break;
            default:
                devOrientation = 0;
                break;
            }
#endif
            return(devOrientation);
        }
        /** Opens the capture camera on the requested device.
         *
         * If width and height are -1, values will be set internally by the plugin.
         * On Android the open happens asynchronously on the UI thread via the Java
         * plugin activity; elsewhere the native plugin opens the camera directly.
         */
        void OpenCamera(int orientation, int currDevice, int width, int height, int mirrored)
        {
#if UNITY_ANDROID && !UNITY_EDITOR
            // Already running on the same device — nothing to do.
            if (device == currDevice && AppStarted)
            {
                return;
            }
            //camera needs to be opened on main thread
            this.androidCameraActivity.Call("runOnUiThread", new AndroidJavaRunnable(() => {
                this.androidCameraActivity.Call("closeCamera");
                this.androidCameraActivity.Call("GrabFromCamera", width, height, currDevice);
            }));
            AppStarted = true;
#else
            VisageTrackerNative._openCamera(orientation, currDevice, width, height, mirrored);
#endif
        }
        /** This method initializes the tracker.
         *
         * Loads the given tracker configuration and license into the native plugin.
         * Returns false (without initializing) when running the iOS build target
         * inside the Unity editor, where the native plugin is unavailable; returns
         * true otherwise. On Android it also loads the native vision library,
         * unpacks bundled assets, and caches the current Java activity for later
         * camera calls.
         */
        bool InitializeTracker(string config, string license)
        {
            Debug.Log("VisageTracker: Initializing tracker with config: '" + config + "'");

#if (UNITY_IPHONE) && UNITY_EDITOR
            return(false);
#endif
#if UNITY_ANDROID && !UNITY_EDITOR
            // initialize visage vision
            VisageTrackerNative._loadVisageVision();
            Unzip();

            unity = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
            this.androidCameraActivity = unity.GetStatic <AndroidJavaObject>("currentActivity");
#endif
            // initialize tracker
            VisageTrackerNative._initTracker(config, license);
            return(true);
        }
        /** Refreshes the video-frame texture with the latest camera frame.
         *
         * Creates the Frame texture lazily (power-of-two dimensions derived from the
         * camera image size), then each call pushes/pulls the current frame data
         * through the platform-specific plugin path. No-op while not tracking.
         */
        public void RefreshImage()
        {
#if (UNITY_IPHONE) && UNITY_EDITOR
            return;
#endif
            // create texture
            if (Frame == null && isTracking)
            {
                // Round the camera dimensions up to the next power of two, as
                // required for the texture backing store.
                TexWidth  = Convert.ToInt32(Math.Pow(2.0, Math.Ceiling(Math.Log(ImageWidth) / Math.Log(2.0))));
                TexHeight = Convert.ToInt32(Math.Pow(2.0, Math.Ceiling(Math.Log(ImageHeight) / Math.Log(2.0))));
#if UNITY_ANDROID && !UNITY_EDITOR
                Frame = new Texture2D(TexWidth, TexHeight, TextureFormat.RGB24, false);
#else
                Frame = new Texture2D(TexWidth, TexHeight, TextureFormat.RGBA32, false);
#endif

#if UNITY_STANDALONE_WIN || UNITY_EDITOR_WIN
                // "pin" the pixel array in memory, so we can pass direct pointer to it's data to the plugin,
                // without costly marshaling of array of structures.
                // NOTE(review): the GCHandle allocated here is never freed in this
                // block — confirm it is released elsewhere (e.g. OnDestroy).
                texturePixels       = ((Texture2D)Frame).GetPixels32(0);
                texturePixelsHandle = GCHandle.Alloc(texturePixels, GCHandleType.Pinned);
#endif
            }
            if (Frame != null && isTracking)
            {
#if UNITY_STANDALONE_WIN || UNITY_EDITOR_WIN
                // send memory address of textures' pixel data to VisageTrackerUnityPlugin
                VisageTrackerNative._setFrameData(texturePixelsHandle.AddrOfPinnedObject());
                ((Texture2D)Frame).SetPixels32(texturePixels, 0);
                ((Texture2D)Frame).Apply();
#elif UNITY_IPHONE || UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX || UNITY_ANDROID
                if (SystemInfo.graphicsDeviceVersion.StartsWith("Metal"))
                {
                    VisageTrackerNative._bindTextureMetal(Frame.GetNativeTexturePtr());
                }
                else
                {
                    // NOTE(review): casting an IntPtr texture handle to int truncates
                    // on 64-bit — presumably the GL texture name fits in 32 bits, but
                    // verify against the plugin's expected parameter type.
                    VisageTrackerNative._bindTexture((int)Frame.GetNativeTexturePtr());
                }
#endif
            }
        }
        /** Reads camera focus and image dimensions from the native plugin and,
         * when enabled, sets the main camera's vertical field of view to match
         * the physical camera.
         */
        void GetCameraInfo()
        {
#if (UNITY_IPHONE) && UNITY_EDITOR
            return;
#endif
            VisageTrackerNative._getCameraInfo(out Focus, out ImageWidth, out ImageHeight);

            // Guard on ALL platforms (previously Android-only): before the first
            // frame arrives the plugin can report 0x0, and dividing by a zero
            // ImageHeight below would produce an Infinity/NaN field of view.
            if (ImageWidth == 0 || ImageHeight == 0)
            {
                return;
            }

            if (setCameraFieldOfView)
            {
                // set camera field of view from the camera's focal length:
                // fov = 2 * atan(half-height-in-focal-units / focus)
                float aspect = ImageWidth / (float)ImageHeight;
                float yRange = (ImageWidth > ImageHeight) ? 1.0f : 1.0f / aspect;
                Camera.main.fieldOfView = Mathf.Rad2Deg * 2.0f * Mathf.Atan(yRange / Focus);
            }
        }
        /** This method is called every frame.
         *
         * It fetches the tracking data from the tracker and transforms controlled objects accordingly.
         * It also fetches vertex, triangle and texture coordinate data to generate 3D face model from the tracker.
         * And lastly it refreshes the video frame texture with the new frame data.
         *
         */
        private void Update()
        {
            // Allow quitting with the back/escape button.
            if (Input.GetKeyDown(KeyCode.Escape))
            {
                Application.Quit();
            }
#if (UNITY_IPHONE) && UNITY_EDITOR
            // no tracking on ios while in editor
            return;
#endif
            // update tracker status, translation and rotation
            int trackStatus;

            if (isTracking)
            {
                //currentOrientation = getDeviceOrientation();

                //// check if orientation or camera device changed
                //if (currentOrientation != Orientation || currentDevice != device)
                //{
                //	OpenCamera(currentOrientation, currentDevice, defaultCameraWidth, defaultCameraHeight, isMirrored);
                //	Orientation = currentOrientation;
                //	device = currentDevice;
                //	Frame = null;
                //}

                // grab new frame and start face tracking on it
                VisageTrackerNative._grabFrame();
                trackStatus = VisageTrackerNative._track();
                // Fetch head pose (translation + rotation) for the tracked face.
                VisageTrackerNative._get3DData(out Translation.x, out Translation.y, out Translation.z, out Rotation.x, out Rotation.y, out Rotation.z);

                TrackerStatus = (TrackStatus)trackStatus;
                //	isTracking = TrackerStatus != 0;
            }

            // exit if no tracking
            if (TrackerStatus == TrackStatus.Off)
            {
                return;
            }

            // set correct camera field of view
            GetCameraInfo();

#if UNITY_ANDROID && !UNITY_EDITOR
            //waiting to get information about frame width and height
            if (ImageWidth == 0 || ImageHeight == 0)
            {
                return;
            }
#endif

            // update gaze direction (plugin reports radians; convert to degrees)
            float[] gazeDirection = new float[2];
            VisageTrackerNative._getGazeDirection(gazeDirection);
            GazeDirection = new Vector2(gazeDirection[0] * Mathf.Rad2Deg, gazeDirection[1] * Mathf.Rad2Deg);

            // get image
            //RefreshImage();

            // get action units
            if (ActionUnitsEnabled)
            {
                RefreshActionUnits();
            }
        }