Beispiel #1
0
    /// <summary>
    /// Runs once per frame: grabs a camera frame, runs face tracking, applies
    /// the resulting head translation/rotation to this transform and all
    /// controllable objects, rebuilds the tracked face model data, refreshes
    /// the video texture and (for the tiger effect) rebuilds the face mesh.
    /// </summary>
    void Update()
    {
#if (UNITY_IPHONE || UNITY_ANDROID) && UNITY_EDITOR
        // no tracking on ios while in editor
        return;
#endif
        if (TrackerStatus != 0)
        {
            // find device orientation
            currentOrientation = GetDeviceOrientation();

            // reopen the camera whenever orientation or camera device changed
            if (currentOrientation != Orientation || currentDevice != device)
            {
                OpenCamera(currentOrientation, currentDevice);
                Orientation = currentOrientation;
                device      = currentDevice;
                texture     = null; // force the video texture to be recreated
            }

            // grab current frame and start face tracking
            VisageTrackerNative._grabFrame();
            TrackerStatus = VisageTrackerNative._track();

            // update head translation and rotation from the tracker
            VisageTrackerNative._get3DData(out Translation.x, out Translation.y, out Translation.z, out Rotation.x, out Rotation.y, out Rotation.z);

            // move every controllable object relative to its starting pose
            for (int i = 0; i < ControllableObjects.Length; i++)
            {
                ControllableObjects[i].transform.position = startingPositions[i] + Translation;
                ControllableObjects[i].transform.rotation = Quaternion.Euler(startingRotations[i] + Rotation);
            }

            transform.position = Translation;
            transform.rotation = Quaternion.Euler(Rotation);

            VisageTrackerNative._getCameraInfo(out Focus, out ImageWidth, out ImageHeight);

            // match the Unity camera field of view to the physical camera focus
            float aspect = ImageWidth / (float)ImageHeight;
            float yRange = (ImageWidth > ImageHeight) ? 1.0f : 1.0f / aspect;
            Camera.main.fieldOfView = Mathf.Rad2Deg * 2.0f * Mathf.Atan(yRange / Focus);

            VisageTrackerNative._getFaceModel(out VertexNumber, vertices, out TriangleNumber, triangles, texCoords);

            // vertices (3 floats per vertex in the flat native array)
            if (Vertices.Length != VertexNumber)
            {
                Vertices = new Vector3[VertexNumber];
            }

            for (int i = 0; i < VertexNumber; i++)
            {
                Vertices[i] = new Vector3(vertices[i * 3 + 0], vertices[i * 3 + 1], vertices[i * 3 + 2]);
            }

            // triangles: 3 indices per triangle, so compare against the full index
            // count — the original compared Length to TriangleNumber, which never
            // matches after the first allocation and reallocated every frame
            if (Triangles.Length != TriangleNumber * 3)
            {
                Triangles = new int[TriangleNumber * 3];
            }

            for (int i = 0; i < TriangleNumber * 3; i++)
            {
                Triangles[i] = triangles[i];
            }

            // tex coords (taken from the static model coordinates, not the
            // per-frame texCoords array returned by the tracker)
            if (TexCoords.Length != VertexNumber)
            {
                TexCoords = new Vector2[VertexNumber];
            }

            for (int i = 0; i < VertexNumber; i++)
            {
                TexCoords[i] = new Vector2(modelTexCoords[i].x, modelTexCoords[i].y);
            }
        }
        else
        {
            // not tracking yet: keep feeding frames until the tracker locks on
            VisageTrackerNative._grabFrame();
            TrackerStatus = VisageTrackerNative._track();
        }

        RefreshImage();

        // rebuild the face mesh; cache the instance instead of re-reading the
        // meshFilter.mesh property for every assignment
        Mesh mesh = meshFilter.mesh;
        mesh.Clear();
        if (currentEffect == FaceEffect.Tiger)
        {
            // NOTE(review): subtracts 10000 on z every frame the effect is active —
            // presumably pushes the objects out of view; confirm intended
            for (int i = 0; i < ControllableObjects.Length; i++)
            {
                ControllableObjects[i].transform.position -= new Vector3(0, 0, 10000);
            }

            mesh.vertices  = Vertices;
            mesh.triangles = Triangles;
            mesh.uv        = TexCoords;
            mesh.uv2       = TexCoords;
            mesh.RecalculateNormals();
            mesh.RecalculateBounds();
        }
    }
Beispiel #2
0
    /// <summary>
    /// Per-frame update: while tracking, grabs a camera frame, runs the face
    /// tracker, applies head translation/rotation to this transform, rebuilds
    /// the face model data and reads action unit values; always refreshes the
    /// video texture afterwards. Does nothing when another tracker is active.
    /// </summary>
    void Update()
    {
#if (UNITY_IPHONE || UNITY_ANDROID) && UNITY_EDITOR
        // no tracking on ios while in editor
        return;
#endif

        // another tracker owns the pipeline; skip this one entirely
        if (masterTrackingController.CurrentTracker != 0)
        {
            return;
        }

        if (isTracking)
        {
            // find device orientation
            currentOrientation = GetDeviceOrientation();

            // reopen the camera whenever orientation or camera device changed
            if (currentOrientation != Orientation || currentDevice != device)
            {
                OpenCamera(currentOrientation, currentDevice, defaultCameraWidth, defaultCameraHeight, isMirrored);
                Orientation = currentOrientation;
                device      = currentDevice;
                texture     = null; // force the video texture to be recreated
            }

            // grab current frame and start face tracking
            VisageTrackerNative._grabFrame();
            TrackerStatus = VisageTrackerNative._track();

            // update head translation and rotation from the tracker
            VisageTrackerNative._get3DData(out Translation.x, out Translation.y, out Translation.z, out Rotation.x, out Rotation.y, out Rotation.z);

            this.transform.position = Translation;
            this.transform.rotation = Quaternion.Euler(Rotation);

            VisageTrackerNative._getCameraInfo(out Focus, out ImageWidth, out ImageHeight);

            // match the Unity camera field of view to the physical camera focus
            float aspect = ImageWidth / (float)ImageHeight;
            float yRange = (ImageWidth > ImageHeight) ? 1.0f : 1.0f / aspect;
            Camera.main.fieldOfView = Mathf.Rad2Deg * 2.0f * Mathf.Atan(yRange / Focus);

            VisageTrackerNative._getFaceModel(out VertexNumber, vertices, out TriangleNumber, triangles, texCoords);

            // vertices (3 floats per vertex in the flat native array)
            if (Vertices.Length != VertexNumber)
            {
                Vertices = new Vector3[VertexNumber];
            }

            for (int i = 0; i < VertexNumber; i++)
            {
                Vertices[i] = new Vector3(vertices[i * 3 + 0], vertices[i * 3 + 1], vertices[i * 3 + 2]);
            }

            // triangles: 3 indices per triangle, so compare against the full index
            // count — the original compared Length to TriangleNumber, which never
            // matches after the first allocation and reallocated every frame
            if (Triangles.Length != TriangleNumber * 3)
            {
                Triangles = new int[TriangleNumber * 3];
            }

            for (int i = 0; i < TriangleNumber * 3; i++)
            {
                Triangles[i] = triangles[i];
            }

            // tex coords (taken from the static model coordinates, not the
            // per-frame texCoords array returned by the tracker)
            if (TexCoords.Length != VertexNumber)
            {
                TexCoords = new Vector2[VertexNumber];
            }

            for (int i = 0; i < VertexNumber; i++)
            {
                TexCoords[i] = new Vector2(modelTexCoords[i].x, modelTexCoords[i].y);
            }

            // action unit values
            VisageTrackerNative._getActionUnitValues(values);
            ActionUnitValues = values;
        }

        RefreshImage();

        // no face mesh is rendered in this variant; just drop last frame's geometry
        meshFilter.mesh.Clear();
    }
Beispiel #3
0
    /** Per-frame driver of the tracking pipeline.
     *
     * Fetches the tracking data from the native tracker, transforms this
     * object according to the tracked head pose, updates the gaze direction,
     * refreshes the video frame texture, and (when enabled) refreshes the
     * action unit values.
     */
    void Update()
    {
        // allow quitting via the escape / back key
        if (Input.GetKeyDown(KeyCode.Escape))
        {
            Application.Quit();
        }

#if (UNITY_IPHONE || UNITY_ANDROID) && UNITY_EDITOR
        // no tracking on ios while in editor
        return;
#endif

        if (isTracking)
        {
            currentOrientation = getDeviceOrientation();

            // reopen the camera when either orientation or camera device changed
            bool cameraChanged = currentOrientation != Orientation || currentDevice != device;
            if (cameraChanged)
            {
                OpenCamera(currentOrientation, currentDevice, defaultCameraWidth, defaultCameraHeight, isMirrored);
                Orientation = currentOrientation;
                device      = currentDevice;
                Frame       = null;
            }

            // grab a new frame, track on it and read the resulting head pose
            VisageTrackerNative._grabFrame();
            int status = VisageTrackerNative._track();
            VisageTrackerNative._get3DData(out Translation.x, out Translation.y, out Translation.z, out Rotation.x, out Rotation.y, out Rotation.z);

            TrackerStatus = (TrackStatus)status;
        }

        // nothing more to do while the tracker is off
        if (TrackerStatus == TrackStatus.Off)
        {
            return;
        }

        // set correct camera field of view
        GetCameraInfo();
#if UNITY_ANDROID
        // frame dimensions not known yet; try again next frame
        if (ImageWidth == 0 || ImageHeight == 0)
        {
            return;
        }
#endif

        // update gaze direction, converting the native values to degrees
        float[] gaze = new float[2];
        VisageTrackerNative._getGazeDirection(gaze);
        GazeDirection = new Vector2(gaze[0] * Mathf.Rad2Deg, gaze[1] * Mathf.Rad2Deg);

        // get image
        RefreshImage();

        // get action units
        if (ActionUnitsEnabled)
        {
            RefreshActionUnits();
        }
    }