Beispiel #1
0
    /** Initializes the Visage tracker with the given configuration and license.
     *
     * config  - path to the tracker configuration file, forwarded to the native plugin.
     * license - license data/path, forwarded to the native plugin.
     * Returns false when running an iOS/Android build target inside the Unity editor
     * (the mobile native plugin is unavailable there), true otherwise.
     */
    bool InitializeTracker(string config, string license)
    {
        Debug.Log("Visage Tracker: Initializing tracker with config: '" + config + "'");

#if (UNITY_IPHONE || UNITY_ANDROID) && UNITY_EDITOR
        // Mobile native plugins cannot run inside the editor; bail out early.
        return(false);
#endif

#if UNITY_ANDROID
        // Plain unlit shader for the Android camera view (presumably because Android
        // frames arrive without alpha — TODO confirm against the RefreshImage variants).
        Shader shader = Shader.Find("Unlit/Texture");
        CameraViewMaterial.shader = shader;

        // initialize visage vision
        VisageTrackerNative._loadVisageVision();
        Unzip();

        // Cache the current Android activity so the Java camera plugin can be driven from C#.
        AndroidJavaClass unity = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
        this.androidCameraActivity = unity.GetStatic <AndroidJavaObject>("currentActivity");
#else
        // Non-Android targets display the frame through the custom BGRA texture shader.
        Shader shader = Shader.Find("Custom/BGRATex");
        CameraViewMaterial.shader = shader;
#endif
        // initialize tracker
        VisageTrackerNative._initTracker(config, license);

        return(true);
    }
Beispiel #2
0
    /** Lazily creates the video-frame texture and binds it to the native tracker
     * every frame while tracking reports a face.
     */
    void RefreshImage()
    {
        // Allocate the texture once, after tracking has started.
        if (texture == null && isTracking)
        {
            // Round the camera image dimensions up to the next power of two.
            TexWidth  = Convert.ToInt32(Math.Pow(2.0, Math.Ceiling(Math.Log(ImageWidth) / Math.Log(2.0))));
            TexHeight = Convert.ToInt32(Math.Pow(2.0, Math.Ceiling(Math.Log(ImageHeight) / Math.Log(2.0))));

            texture = new Texture2D(TexWidth, TexHeight, TextureFormat.RGBA32, false);

            // Clear the texture to opaque black until the first real frame arrives.
            var pixels = texture.GetPixels32();
            for (var p = 0; p < pixels.Length; p++)
            {
                pixels[p] = Color.black;
            }

            texture.SetPixels32(pixels);
            texture.Apply(false);
        }

        // Only bind while we have a texture, are tracking, and a face is detected.
        if (texture == null || !isTracking || TrackerStatus == 0)
        {
            return;
        }

        CameraViewMaterial.SetTexture("_MainTex", texture);

        // Metal takes the raw texture handle; other graphics APIs take an integer id.
        var nativeTexture = texture.GetNativeTexturePtr();
        if (SystemInfo.graphicsDeviceVersion.StartsWith("Metal"))
        {
            VisageTrackerNative._bindTextureMetal(nativeTexture);
        }
        else
        {
            VisageTrackerNative._bindTexture((int)nativeTexture);
        }
    }
Beispiel #3
0
    /** Builds the action-unit descriptor table on first call, then copies the
     * current action-unit values out of the native tracker.
     */
    private void RefreshActionUnits()
    {
        // First call: ask the tracker how many action units it exposes and describe each one.
        if (ActionUnitCount == 0)
        {
            ActionUnitCount = VisageTrackerNative._getActionUnitCount();
            ActionUnits     = new ActionUnitData[ActionUnitCount];
            for (int au = 0; au < ActionUnitCount; au++)
            {
                // The native side returns an ANSI C string; marshal it into a managed string.
                string auName = Marshal.PtrToStringAnsi(VisageTrackerNative._getActionUnitName(au));
                bool   auUsed = VisageTrackerNative._getActionUnitUsed(au);
                ActionUnits[au] = new ActionUnitData(au, auName, auUsed);
            }
        }

        if (ActionUnitCount <= 0)
        {
            return;
        }

        // Fetch all current values in one native call, then distribute them to the descriptors.
        float[] current = new float[ActionUnitCount];
        VisageTrackerNative._getActionUnitValues(current);
        for (int au = 0; au < ActionUnitCount; au++)
        {
            ActionUnits[au].Value = current[au];
        }
    }
Beispiel #4
0
    /** Unity callback invoked when this component is destroyed; releases the camera. */
    void OnDestroy()
    {
#if UNITY_ANDROID
        // On Android the camera is owned by the Java activity plugin.
        this.androidCameraActivity.Call("closeCamera");
#else
        // Elsewhere the native tracker plugin owns the camera.
        VisageTrackerNative._closeCamera();
#endif
    }
Beispiel #5
0
    /** Creates the camera-frame texture on demand and feeds/binds frame data each frame.
     *
     * Windows: the managed pixel array is pinned and its address handed to the
     * plugin, which writes frame data directly into it.
     * iOS/macOS/Android: the texture's native handle is bound so the plugin can
     * update it on the GPU side.
     */
    void RefreshImage()
    {
        // create texture
        if (texture == null && isTracking)
        {
            // Round the camera image dimensions up to the next power of two.
            TexWidth  = Convert.ToInt32(Math.Pow(2.0, Math.Ceiling(Math.Log(ImageWidth) / Math.Log(2.0))));
            TexHeight = Convert.ToInt32(Math.Pow(2.0, Math.Ceiling(Math.Log(ImageHeight) / Math.Log(2.0))));
#if UNITY_ANDROID
            // RGB24: Android frames are delivered without an alpha channel.
            texture = new Texture2D(TexWidth, TexHeight, TextureFormat.RGB24, false);
#else
            texture = new Texture2D(TexWidth, TexHeight, TextureFormat.RGBA32, false);
#endif

            // Clear to opaque black until the first real frame arrives.
            var cols = texture.GetPixels32();
            for (var i = 0; i < cols.Length; i++)
            {
                cols[i] = UnityEngine.Color.black;
            }

            texture.SetPixels32(cols);
            texture.Apply(false);



#if UNITY_STANDALONE_WIN
            // "pin" the pixel array in memory, so we can pass direct pointer to it's data to the plugin,
            // without costly marshaling of array of structures.
            // NOTE(review): texturePixelsHandle is never Free()d and is re-allocated whenever the
            // texture is recreated — looks like a pinned-handle leak; confirm and free the old handle.
            texturePixels       = texture.GetPixels32(0);
            texturePixelsHandle = GCHandle.Alloc(texturePixels, GCHandleType.Pinned);
#endif
        }

        if (texture != null && isTracking && TrackerStatus != 0)
        {
#if UNITY_STANDALONE_WIN
            CameraViewMaterial.SetTexture("_MainTex", texture);

            // send memory address of textures' pixel data to VisageTrackerUnityPlugin
            VisageTrackerNative._setFrameData(texturePixelsHandle.AddrOfPinnedObject());
            texture.SetPixels32(texturePixels, 0);
            texture.Apply();
#elif UNITY_IPHONE || UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX || UNITY_ANDROID
            CameraViewMaterial.SetTexture("_MainTex", texture);


            // Metal takes the raw texture handle; other graphics APIs take an integer id.
            if (SystemInfo.graphicsDeviceVersion.StartsWith("Metal"))
            {
                VisageTrackerNative._bindTextureMetal(texture.GetNativeTexturePtr());
            }
            else
            {
                VisageTrackerNative._bindTexture((int)texture.GetNativeTexturePtr());
            }
#endif
        }
    }
Beispiel #6
0
    /** Initializes the Visage tracker with the given configuration and license.
     *
     * Returns false when running a mobile build target inside the Unity editor
     * (the native plugin is unavailable there), true otherwise.
     */
    public bool InitializeTracker(string config, string license)
    {
        Debug.Log("Visage Tracker: Initializing tracker with config: '" + config + "'");

                #if (UNITY_IPHONE || UNITY_ANDROID) && UNITY_EDITOR
        // Mobile native plugins cannot run inside the editor; bail out early.
        return(false);
                #endif

        // Display camera frames through the custom BGRA texture shader.
        Shader shader = Shader.Find("Custom/BGRATex");
        CameraViewMaterial.shader = shader;

        // initialize tracker
        VisageTrackerNative._initTracker(config, license);
        return(true);
    }
Beispiel #7
0
    /** Queries 2D feature-point positions from the native tracker.
     *
     * points - interleaved pairs [group0, index0, group1, index1, ...].
     * Returns the positions as interleaved pairs, two floats per point.
     */
    float[] GetFeaturePoints(int[] points)
    {
        int count = points.Length / 2;

        // De-interleave the group/index pairs into two parallel arrays for the native call.
        int[] groupIds = new int[count];
        int[] pointIds = new int[count];
        for (int p = 0; p < count; p++)
        {
            groupIds[p] = points[2 * p];
            pointIds[p] = points[2 * p + 1];
        }

        float[] result = new float[2 * count];
        VisageTrackerNative._getFeaturePoints2D(count, groupIds, pointIds, result);
        return result;
    }
Beispiel #8
0
    // returns current device orientation
    /** Orientation codes (from the mapping below): 0 portrait, 1 landscape-right,
     * 2 portrait upside-down, 3 landscape-left.
     */
    int GetDeviceOrientation()
    {
        int result;

        //Device orientation is obtained in AndroidCameraPlugin so we only need information about whether orientation is changed
#if UNITY_ANDROID
        int prevWidth  = ImageWidth;
        int prevHeight = ImageHeight;
        VisageTrackerNative._getCameraInfo(out Focus, out ImageWidth, out ImageHeight);

        // A change of the reported frame dimensions (with all of them non-zero)
        // signals that the orientation flipped between portrait and landscape.
        bool sizeChanged = prevWidth != ImageWidth || prevHeight != ImageHeight;
        bool sizesValid  = ImageWidth != 0 && ImageHeight != 0 && prevWidth != 0 && prevHeight != 0;
        if (sizeChanged && sizesValid)
        {
            result = (Orientation == 1) ? 0 : 1;
        }
        else
        {
            result = Orientation;
        }
#else
        switch (Input.deviceOrientation)
        {
        case DeviceOrientation.Portrait:
            result = 0;
            break;
        case DeviceOrientation.PortraitUpsideDown:
            result = 2;
            break;
        case DeviceOrientation.LandscapeLeft:
            result = 3;
            break;
        case DeviceOrientation.LandscapeRight:
            result = 1;
            break;
        case DeviceOrientation.FaceUp:
            // Face-up gives no usable heading; keep the last known orientation.
            result = Orientation;
            break;
        default:
            result = 0;
            break;
        }
#endif

        return result;
    }
Beispiel #9
0
    /** Opens the camera identified by currDevice.
     *
     * On Android the camera is driven by the Java activity plugin with a
     * hard-coded 320x240 frame size, and the orientation parameter is not
     * forwarded; on other platforms the native plugin opens the device directly.
     */
    void OpenCamera(int orientation, int currDevice)
    {
#if UNITY_ANDROID
        // Already streaming from this device — nothing to do.
        if (device == currDevice && AppStarted)
        {
            return;
        }
        this.androidCameraActivity.Call("closeCamera");
        // Fixed capture resolution for the Android camera plugin.
        ImWidth  = 320;
        ImHeight = 240;

        this.androidCameraActivity.Call("GrabFromCamera", ImWidth, ImHeight, currDevice);
        AppStarted = true;
#else
        VisageTrackerNative._openCamera(orientation, currDevice);
#endif
    }
Beispiel #10
0
 // if width and height are -1, values will be set internally
 /** Opens the camera identified by currDevice with the requested resolution.
  *
  * Android: delegates to the Java camera plugin; the orientation and mirrored
  * parameters are not forwarded there.
  * Other platforms: the native plugin opens the device directly.
  */
 void OpenCamera(int orientation, int currDevice, int width, int height, int mirrored)
 {
             #if UNITY_ANDROID
     // Already streaming from this device — nothing to do.
     if (device == currDevice && AppStarted)
     {
         return;
     }
     //camera needs to be opened on main thread
     this.androidCameraActivity.Call("runOnUiThread", new AndroidJavaRunnable(() => {
         this.androidCameraActivity.Call("closeCamera");
         this.androidCameraActivity.Call("GrabFromCamera", width, height, currDevice);
     }));
     // NOTE(review): AppStarted is set before the UI-thread runnable actually executes —
     // confirm callers tolerate the camera not being open yet at this point.
     AppStarted = true;
             #else
     VisageTrackerNative._openCamera(orientation, currDevice, width, height, mirrored);
             #endif
 }
Beispiel #11
0
    /** Reads focal length and frame size from the native plugin and sets the
     * main camera's vertical field of view to match.
     *
     * No-op for mobile targets inside the editor, and on Android while the
     * plugin still reports a zero frame size.
     */
    void GetCameraInfo()
    {
#if (UNITY_IPHONE || UNITY_ANDROID) && UNITY_EDITOR
        // Mobile native plugin is unavailable inside the editor.
        return;
#endif

        VisageTrackerNative._getCameraInfo(out Focus, out ImageWidth, out ImageHeight);
#if UNITY_ANDROID
        // The Android plugin reports 0x0 until valid camera info is available.
        if (ImageWidth == 0 || ImageHeight == 0)
        {
            return;
        }
#endif
        // set camera field of view
        // Vertical FOV from the focal length: fov = 2 * atan(yRange / Focus).
        // NOTE(review): assumes Focus and ImageHeight are non-zero on non-Android paths — confirm.
        float aspect = ImageWidth / (float)ImageHeight;
        float yRange = (ImageWidth > ImageHeight) ? 1.0f : 1.0f / aspect;
        Camera.main.fieldOfView = Mathf.Rad2Deg * 2.0f * Mathf.Atan(yRange / Focus);
    }
Beispiel #12
0
    /** Creates the video-frame texture on demand and feeds/binds frame data every frame.
     *
     * Windows: the managed pixel array is pinned and its address handed to the
     * plugin, which writes frame data directly into it.
     * iOS/macOS/Android: the texture's native handle is bound so the plugin can
     * update it on the GPU side.
     * No-op for mobile build targets inside the editor.
     */
    void RefreshImage()
    {
#if (UNITY_IPHONE || UNITY_ANDROID) && UNITY_EDITOR
        // Mobile native plugin is unavailable inside the editor.
        return;
#endif

        // create texture
        if (Frame == null && isTracking)
        {
            // Round the camera image dimensions up to the next power of two.
            TexWidth  = Convert.ToInt32(Math.Pow(2.0, Math.Ceiling(Math.Log(ImageWidth) / Math.Log(2.0))));
            TexHeight = Convert.ToInt32(Math.Pow(2.0, Math.Ceiling(Math.Log(ImageHeight) / Math.Log(2.0))));
#if UNITY_ANDROID
            // RGB24: Android frames are delivered without an alpha channel.
            Frame = new Texture2D(TexWidth, TexHeight, TextureFormat.RGB24, false);
#else
            Frame = new Texture2D(TexWidth, TexHeight, TextureFormat.RGBA32, false);
#endif

#if UNITY_STANDALONE_WIN
            // "pin" the pixel array in memory, so we can pass direct pointer to it's data to the plugin,
            // without costly marshaling of array of structures.
            // NOTE(review): texturePixelsHandle is never Free()d and is re-allocated whenever the
            // texture is recreated — looks like a pinned-handle leak; confirm and free the old handle.
            texturePixels       = ((Texture2D)Frame).GetPixels32(0);
            texturePixelsHandle = GCHandle.Alloc(texturePixels, GCHandleType.Pinned);
#endif
        }
        if (Frame != null && isTracking)
        {
#if UNITY_STANDALONE_WIN
            // send memory address of textures' pixel data to VisageTrackerUnityPlugin
            VisageTrackerNative._setFrameData(texturePixelsHandle.AddrOfPinnedObject());
            ((Texture2D)Frame).SetPixels32(texturePixels, 0);
            ((Texture2D)Frame).Apply();
#elif UNITY_IPHONE || UNITY_EDITOR_OSX || UNITY_STANDALONE_OSX || UNITY_ANDROID
            // Metal takes the raw texture handle; other graphics APIs take an integer id.
            if (SystemInfo.graphicsDeviceVersion.StartsWith("Metal"))
            {
                VisageTrackerNative._bindTextureMetal(Frame.GetNativeTexturePtr());
            }
            else
            {
                VisageTrackerNative._bindTexture((int)Frame.GetNativeTexturePtr());
            }
#endif
        }
    }
Beispiel #13
0
    /** Initializes the Visage tracker with the given configuration and license.
     *
     * On Android this first loads the native vision library, runs Unzip()
     * (presumably unpacking tracker data shipped in the APK — confirm), and
     * caches the current Android activity for later camera calls.
     * Returns false when running a mobile build target inside the Unity editor.
     */
    bool InitializeTracker(string config, string license)
    {
        Debug.Log("Visage Tracker: Initializing tracker with config: '" + config + "'");

                #if (UNITY_IPHONE || UNITY_ANDROID) && UNITY_EDITOR
        // Mobile native plugins cannot run inside the editor; bail out early.
        return(false);
                #endif

                #if UNITY_ANDROID
        // initialize visage vision
        VisageTrackerNative._loadVisageVision();
        Unzip();

        // Cache the current Android activity so the Java camera plugin can be driven from C#.
        unity = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
        this.androidCameraActivity = unity.GetStatic <AndroidJavaObject>("currentActivity");
        #endif
        // initialize tracker
        VisageTrackerNative._initTracker(config, license);
        return(true);
    }
 /** Per-frame emotion polling.
  *
  * While tracking is active and the tracker reports a face, fetches the raw
  * emotion probabilities from the native plugin, truncates them to three
  * decimal places, and runs analyzeData() on fresh data unless a wave
  * cooldown is in effect.
  */
 // Update is called once per frame
 void Update()
 {
     if (tracking)
     {
         if (gameObject.GetComponent <Tracker>().TrackerStatus != 0)
         {
             ///////////////////////////////////////////////////////////////////////////////////////////////////////
             // Emotions are acquired here! Be extremely careful about making any changes to these two lines!
             // The entire emotion system depends on these working successfully.
             IntPtr nums = VisageTrackerNative._getEmotions();
             // Due to cross language type mangling, we have to marshal the result into something we can use.
             // Copies exactly 6 floats — the plugin's fixed emotion-probability count.
             Marshal.Copy(nums, probs, 0, 6);
             ///////////////////////////////////////////////////////////////////////////////////////////////////////
             FileManagement.dump(probs);
             if (probs[0] != -9999) // -9999 is the "error" code from the plugin.
             {
                 for (int i = 0; i < probs.Length; i++)
                 {
                     // Truncate to three places for easier display
                     probs[i] = (float)Math.Truncate(1000 * probs[i]) / 1000;
                 }
                 haveRecentData = true;
             }
             else
             {
                 // Plugin signalled an error: discard this frame and reset the affective state.
                 haveRecentData = false;
                 currentState   = AffectiveStates.None;
             }
         }
         else
         {
             // No face this frame — nothing to analyze.
             haveRecentData = false;
         }
         // If we have gotten recent data, and are not waiting for the next wave, analyse it.
         if (haveRecentData && !waitForWave)
         {
             analyzeData();
         }
     }
 }
Beispiel #15
0
    /** This method is called every frame.
     *
     * It fetches the tracking data from the tracker and transforms controlled objects accordingly.
     * It also fetches vertex, triangle and texture coordinate data to generate 3D face model from the tracker.
     * And lastly it refreshes the video frame texture with the new frame data.
     *
     */
    void Update()
    {
        // Allow quitting the application with the Escape (Android back) key.
        if (Input.GetKeyDown(KeyCode.Escape))
        {
            Application.Quit();
        }

#if (UNITY_IPHONE || UNITY_ANDROID) && UNITY_EDITOR
        // no tracking on ios while in editor
        return;
#endif


        // update tracker status, translation and rotation
        int trackStatus;

        if (isTracking)
        {
            currentOrientation = getDeviceOrientation();

            // check if orientation or camera device changed
            if (currentOrientation != Orientation || currentDevice != device)
            {
                // Reopen the camera and drop the frame texture so it is recreated at the new size.
                OpenCamera(currentOrientation, currentDevice, defaultCameraWidth, defaultCameraHeight, isMirrored);
                Orientation = currentOrientation;
                device      = currentDevice;
                Frame       = null;
            }

            // grab new frame and start face tracking on it
            VisageTrackerNative._grabFrame();
            trackStatus = VisageTrackerNative._track();
            VisageTrackerNative._get3DData(out Translation.x, out Translation.y, out Translation.z, out Rotation.x, out Rotation.y, out Rotation.z);


            TrackerStatus = (TrackStatus)trackStatus;
            //	isTracking = TrackerStatus != 0;
        }

        // exit if no tracking
        if (TrackerStatus == TrackStatus.Off)
        {
            return;
        }

        // set correct camera field of view
        GetCameraInfo();
#if UNITY_ANDROID
        //waiting to get information about frame width and height
        if (ImageWidth == 0 || ImageHeight == 0)
        {
            return;
        }
#endif

        // update gaze direction
        // Native values are in radians; convert to degrees for the public GazeDirection.
        float[] gazeDirection = new float[2];
        VisageTrackerNative._getGazeDirection(gazeDirection);
        GazeDirection = new Vector2(gazeDirection [0] * Mathf.Rad2Deg, gazeDirection [1] * Mathf.Rad2Deg);

        // get image
        RefreshImage();

        // get action units
        if (ActionUnitsEnabled)
        {
            RefreshActionUnits();
        }
    }
Beispiel #16
0
 /** Releases the camera held by the native tracker plugin. */
 public void CloseCamera() => VisageTrackerNative._closeCamera();
Beispiel #17
0
 // if width and height are -1, values will be set internally
 /** Opens the given camera device with the requested orientation, size and mirroring. */
 void OpenCamera(int orientation, int currDevice, int width, int height, int mirrored)
     => VisageTrackerNative._openCamera(orientation, currDevice, width, height, mirrored);
Beispiel #18
0
    /** Per-frame driver: grabs a camera frame, runs one native face-tracking step,
     * and applies the resulting head pose and face-model data to the scene.
     *
     * While the tracker reports a face (TrackerStatus != 0) the translation and
     * rotation are applied to this transform and to every ControllableObject, the
     * camera FOV is derived from the reported focal length, and the face model
     * (vertices, triangles, texture coordinates) is copied out of the plugin.
     * When no face is tracked, frames are still fed to the tracker so it can
     * (re)acquire one. The video texture is refreshed every frame either way.
     */
    void Update()
    {
#if (UNITY_IPHONE || UNITY_ANDROID) && UNITY_EDITOR
        // no tracking on ios while in editor
        return;
#endif
        if (TrackerStatus != 0)
        {
            // find device orientation
            currentOrientation = GetDeviceOrientation();

            // check if orientation or camera device changed
            if (currentOrientation != Orientation || currentDevice != device)
            {
                // Reopen the camera and drop the texture so it is recreated at the new size.
                OpenCamera(currentOrientation, currentDevice);
                Orientation = currentOrientation;
                device      = currentDevice;
                texture     = null;
            }

            // grab current frame and start face tracking
            VisageTrackerNative._grabFrame();
            TrackerStatus = VisageTrackerNative._track();

            // update tracker status and translation and rotation
            VisageTrackerNative._get3DData(out Translation.x, out Translation.y, out Translation.z, out Rotation.x, out Rotation.y, out Rotation.z);

            for (int i = 0; i < ControllableObjects.Length; i++)
            {
                ControllableObjects[i].transform.position = startingPositions[i] + Translation;
                ControllableObjects[i].transform.rotation = Quaternion.Euler(startingRotations[i] + Rotation);
            }

            transform.position = Translation;
            transform.rotation = Quaternion.Euler(Rotation);

            // Derive the camera's vertical field of view from the reported focal length.
            VisageTrackerNative._getCameraInfo(out Focus, out ImageWidth, out ImageHeight);
            float aspect = ImageWidth / (float)ImageHeight;
            float yRange = (ImageWidth > ImageHeight) ? 1.0f : 1.0f / aspect;
            Camera.main.fieldOfView = Mathf.Rad2Deg * 2.0f * Mathf.Atan(yRange / Focus);

            VisageTrackerNative._getFaceModel(out VertexNumber, vertices, out TriangleNumber, triangles, texCoords);

            // vertices: repack the flat native array into Vector3s
            if (Vertices.Length != VertexNumber)
            {
                Vertices = new Vector3[VertexNumber];
            }
            for (int i = 0; i < VertexNumber; i++)
            {
                Vertices [i] = new Vector3(vertices [i * 3 + 0], vertices [i * 3 + 1], vertices [i * 3 + 2]);
            }

            // triangles
            if (Triangles.Length != TriangleNumber)
            {
                Triangles = new int[TriangleNumber * 3];
            }
            for (int i = 0; i < TriangleNumber * 3; i++)
            {
                Triangles [i] = triangles [i];
            }

            // tex coords come from the static model rather than from the tracker output
            if (TexCoords.Length != VertexNumber)
            {
                TexCoords = new Vector2[VertexNumber];
            }
            for (int i = 0; i < VertexNumber; i++)
            {
                TexCoords[i] = new Vector2(modelTexCoords[i].x, modelTexCoords[i].y);         //new Vector2 (texCoords [i * 2 + 0], texCoords [i * 2 + 1]);
            }
        }
        else
        {
            // Not tracking yet: keep feeding frames so the tracker can (re)acquire a face.
            VisageTrackerNative._grabFrame();

            TrackerStatus = VisageTrackerNative._track();
        }

        RefreshImage();

        // create mesh
        // Cache the mesh reference instead of hitting the MeshFilter.mesh property repeatedly.
        Mesh faceMesh = meshFilter.mesh;
        faceMesh.Clear();
        if (currentEffect == FaceEffect.Tiger)
        {
            // NOTE(review): this offset is applied every frame; positions are re-set each
            // tracked frame above, but accumulate while tracking is lost — confirm intended.
            for (int i = 0; i < ControllableObjects.Length; i++)
            {
                ControllableObjects[i].transform.position -= new Vector3(0, 0, 10000);
            }

            faceMesh.vertices  = Vertices;
            faceMesh.triangles = Triangles;
            faceMesh.uv        = TexCoords;
            faceMesh.uv2       = TexCoords;
            faceMesh.RecalculateNormals();
            faceMesh.RecalculateBounds();
        }
    }
Beispiel #19
0
    /** Per-frame driver for this tracker.
     *
     * Stays idle while masterTrackingController has a different tracker selected.
     * When tracking: reopens the camera on orientation/device changes, runs one
     * native tracking step, applies the head pose to this transform, updates the
     * camera field of view, copies out the face model and action-unit values,
     * then refreshes the video texture.
     */
    void Update()
    {
                #if (UNITY_IPHONE || UNITY_ANDROID) && UNITY_EDITOR
        // no tracking on ios while in editor
        return;
                #endif

        // Another tracker is currently active — this one stays idle.
        if (masterTrackingController.CurrentTracker != 0)
        {
            return;
        }

        if (isTracking)
        {
            // find device orientation
            currentOrientation = GetDeviceOrientation();

            // check if orientation or camera device changed
            if (currentOrientation != Orientation || currentDevice != device)
            {
                // Reopen the camera and drop the texture so it is recreated at the new size.
                OpenCamera(currentOrientation, currentDevice, defaultCameraWidth, defaultCameraHeight, isMirrored);
                Orientation = currentOrientation;
                device      = currentDevice;
                texture     = null;
            }

            // grab current frame and start face tracking
            VisageTrackerNative._grabFrame();

            TrackerStatus = VisageTrackerNative._track();

            // update tracker status and translation and rotation
            VisageTrackerNative._get3DData(out Translation.x, out Translation.y, out Translation.z, out Rotation.x, out Rotation.y, out Rotation.z);

            this.transform.position = Translation;
            this.transform.rotation = Quaternion.Euler(Rotation);

            // Derive the camera's vertical field of view from the reported focal length.
            VisageTrackerNative._getCameraInfo(out Focus, out ImageWidth, out ImageHeight);

            float aspect = ImageWidth / (float)ImageHeight;

            float yRange = (ImageWidth > ImageHeight) ? 1.0f : 1.0f / aspect;

            Camera.main.fieldOfView = Mathf.Rad2Deg * 2.0f * Mathf.Atan(yRange / Focus);

            VisageTrackerNative._getFaceModel(out VertexNumber, vertices, out TriangleNumber, triangles, texCoords);

            // vertices: repack the flat native array into Vector3s
            if (Vertices.Length != VertexNumber)
            {
                Vertices = new Vector3[VertexNumber];
            }

            for (int i = 0; i < VertexNumber; i++)
            {
                Vertices [i] = new Vector3(vertices [i * 3 + 0], vertices [i * 3 + 1], vertices [i * 3 + 2]);
            }

            // triangles
            if (Triangles.Length != TriangleNumber)
            {
                Triangles = new int[TriangleNumber * 3];
            }

            for (int i = 0; i < TriangleNumber * 3; i++)
            {
                Triangles [i] = triangles [i];
            }

            // tex coords come from the static model rather than from the tracker output
            if (TexCoords.Length != VertexNumber)
            {
                TexCoords = new Vector2[VertexNumber];
            }

            for (int i = 0; i < VertexNumber; i++)
            {
                TexCoords[i] = new Vector2(modelTexCoords[i].x, modelTexCoords[i].y);                 //new Vector2 (texCoords [i * 2 + 0], texCoords [i * 2 + 1]);
            }

            // action unit values
            VisageTrackerNative._getActionUnitValues(values);
            ActionUnitValues = values;
        }

        RefreshImage();

        // Mesh is cleared every frame; geometry is presumably rebuilt elsewhere — confirm.
        meshFilter.mesh.Clear();
    }
 // Release camera on exit. Make sure you do this, or else Unity usually crashes when you start the game again!
 /** Unity callback on application shutdown; frees the native camera handle. */
 void OnApplicationQuit() => VisageTrackerNative._freeCamera();