Example #1
    // Returns the current device orientation as an integer code (0 = portrait, 1 = landscape right, 2 = portrait upside down, 3 = landscape left)
    int GetDeviceOrientation()
    {
        int devOrientation;

        // Device orientation is obtained in AndroidCameraPlugin, so here we only need to detect whether the orientation has changed
#if UNITY_ANDROID
        int oldWidth  = ImageWidth;
        int oldHeight = ImageHeight;
        VisageTrackerNative._getCameraInfo(out Focus, out ImageWidth, out ImageHeight);

        if ((oldWidth != ImageWidth || oldHeight != ImageHeight) && ImageWidth != 0 && ImageHeight != 0 && oldWidth != 0 && oldHeight != 0)
        {
            devOrientation = (Orientation == 1) ? 0 : 1;
        }
        else
        {
            devOrientation = Orientation;
        }
#else
        if (Input.deviceOrientation == DeviceOrientation.Portrait)
        {
            devOrientation = 0;
        }
        else if (Input.deviceOrientation == DeviceOrientation.PortraitUpsideDown)
        {
            devOrientation = 2;
        }
        else if (Input.deviceOrientation == DeviceOrientation.LandscapeLeft)
        {
            devOrientation = 3;
        }
        else if (Input.deviceOrientation == DeviceOrientation.LandscapeRight)
        {
            devOrientation = 1;
        }
        else if (Input.deviceOrientation == DeviceOrientation.FaceUp)
        {
            devOrientation = Orientation;
        }
        else
        {
            devOrientation = 0;
        }
#endif

        return devOrientation;
    }
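
For reference, the integer codes returned above correspond to Unity's DeviceOrientation values (0 = Portrait, 1 = LandscapeRight, 2 = PortraitUpsideDown, 3 = LandscapeLeft). A minimal sketch of a hypothetical helper (OrientationCodes is not part of the sample or of the visage|SDK API) that converts a code back to the enum, e.g. for logging:

    using UnityEngine;

    // Hypothetical helper: maps the integer codes used in GetDeviceOrientation()
    // back to Unity's DeviceOrientation enum for debugging/logging.
    public static class OrientationCodes
    {
        public static DeviceOrientation ToDeviceOrientation(int code)
        {
            switch (code)
            {
                case 0: return DeviceOrientation.Portrait;
                case 1: return DeviceOrientation.LandscapeRight;
                case 2: return DeviceOrientation.PortraitUpsideDown;
                case 3: return DeviceOrientation.LandscapeLeft;
                default: return DeviceOrientation.Unknown;
            }
        }
    }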
Example #2
    void GetCameraInfo()
    {
#if (UNITY_IPHONE || UNITY_ANDROID) && UNITY_EDITOR
        // camera info comes from the native plugin, which is not available while running in the Editor
        return;
#endif

        VisageTrackerNative._getCameraInfo(out Focus, out ImageWidth, out ImageHeight);
#if UNITY_ANDROID
        if (ImageWidth == 0 || ImageHeight == 0)
        {
            return;
        }
#endif
        // set the main camera's vertical field of view from the reported focal length
        float aspect = ImageWidth / (float)ImageHeight;
        float yRange = (ImageWidth > ImageHeight) ? 1.0f : 1.0f / aspect;
        Camera.main.fieldOfView = Mathf.Rad2Deg * 2.0f * Mathf.Atan(yRange / Focus);
    }
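
The last line follows the pinhole camera model: with the normalized focal length Focus reported by the tracker, the vertical field of view is 2 * atan(yRange / Focus), where yRange is 1 in landscape and 1/aspect in portrait. The same calculation as a standalone sketch (the CameraFov helper is hypothetical):

    using UnityEngine;

    // Hypothetical helper: the same pinhole-model FOV calculation as in GetCameraInfo().
    public static class CameraFov
    {
        public static float VerticalFovDegrees(int imageWidth, int imageHeight, float focus)
        {
            float aspect = imageWidth / (float)imageHeight;
            // normalized half-height of the image plane: 1 in landscape, 1/aspect in portrait
            float yRange = (imageWidth > imageHeight) ? 1.0f : 1.0f / aspect;
            return Mathf.Rad2Deg * 2.0f * Mathf.Atan(yRange / focus);
        }
    }

Calling Camera.main.fieldOfView = CameraFov.VerticalFovDegrees(ImageWidth, ImageHeight, Focus); would then reproduce the assignment above.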
Example #3
    void Update()
    {
#if (UNITY_IPHONE || UNITY_ANDROID) && UNITY_EDITOR
        // no tracking on iOS/Android while running in the Editor
        return;
#endif

        if (masterTrackingController.CurrentTracker != 0)
        {
            return;
        }

        if (isTracking)
        {
            // find device orientation
            currentOrientation = GetDeviceOrientation();

            // check if orientation or camera device changed
            if (currentOrientation != Orientation || currentDevice != device)
            {
                OpenCamera(currentOrientation, currentDevice, defaultCameraWidth, defaultCameraHeight, isMirrored);
                Orientation = currentOrientation;
                device      = currentDevice;
                texture     = null;
            }

            // grab current frame and start face tracking
            VisageTrackerNative._grabFrame();

            TrackerStatus = VisageTrackerNative._track();

            // get the current head translation and rotation from the tracker
            VisageTrackerNative._get3DData(out Translation.x, out Translation.y, out Translation.z, out Rotation.x, out Rotation.y, out Rotation.z);

            this.transform.position = Translation;
            this.transform.rotation = Quaternion.Euler(Rotation);

            VisageTrackerNative._getCameraInfo(out Focus, out ImageWidth, out ImageHeight);

            float aspect = ImageWidth / (float)ImageHeight;

            float yRange = (ImageWidth > ImageHeight) ? 1.0f : 1.0f / aspect;

            Camera.main.fieldOfView = Mathf.Rad2Deg * 2.0f * Mathf.Atan(yRange / Focus);

            VisageTrackerNative._getFaceModel(out VertexNumber, vertices, out TriangleNumber, triangles, texCoords);

            // vertices
            if (Vertices.Length != VertexNumber)
            {
                Vertices = new Vector3[VertexNumber];
            }

            for (int i = 0; i < VertexNumber; i++)
            {
                Vertices [i] = new Vector3(vertices [i * 3 + 0], vertices [i * 3 + 1], vertices [i * 3 + 2]);
            }

            // triangles
            if (Triangles.Length != TriangleNumber)
            {
                Triangles = new int[TriangleNumber * 3];
            }

            for (int i = 0; i < TriangleNumber * 3; i++)
            {
                Triangles [i] = triangles [i];
            }

            // tex coords
            if (TexCoords.Length != VertexNumber)
            {
                TexCoords = new Vector2[VertexNumber];
            }

            for (int i = 0; i < VertexNumber; i++)
            {
                TexCoords[i] = new Vector2(modelTexCoords[i].x, modelTexCoords[i].y); // alternatively: new Vector2(texCoords[i * 2 + 0], texCoords[i * 2 + 1]);
            }

            // action unit values
            VisageTrackerNative._getActionUnitValues(values);
            ActionUnitValues = values;
        }

        RefreshImage();

        meshFilter.mesh.Clear();
    }
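
This variant fills Vertices, Triangles and TexCoords but only clears the mesh at the end; Example #4 below shows the arrays actually being assigned to the MeshFilter. A hedged sketch of that rebuild step as a standalone helper (FaceMeshBuilder is hypothetical, not part of the sample):

    using UnityEngine;

    // Hypothetical helper: rebuilds a Unity mesh from the arrays the tracker fills,
    // mirroring the mesh assignment done in Example #4.
    public static class FaceMeshBuilder
    {
        public static void Apply(MeshFilter meshFilter, Vector3[] vertices, int[] triangles, Vector2[] texCoords)
        {
            Mesh mesh = meshFilter.mesh;
            mesh.Clear();
            mesh.vertices  = vertices;
            mesh.triangles = triangles;
            mesh.uv        = texCoords;
            mesh.RecalculateNormals();
            mesh.RecalculateBounds();
        }
    }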
Example #4
    void Update()
    {
#if (UNITY_IPHONE || UNITY_ANDROID) && UNITY_EDITOR
        // no tracking on iOS/Android while running in the Editor
        return;
#endif
        if (TrackerStatus != 0)
        {
            // find device orientation
            currentOrientation = GetDeviceOrientation();

            // check if orientation or camera device changed
            if (currentOrientation != Orientation || currentDevice != device)
            {
                OpenCamera(currentOrientation, currentDevice);
                Orientation = currentOrientation;
                device      = currentDevice;
                texture     = null;
            }

            // grab current frame and start face tracking
            VisageTrackerNative._grabFrame();

            TrackerStatus = VisageTrackerNative._track();

            // get the current head translation and rotation from the tracker
            VisageTrackerNative._get3DData(out Translation.x, out Translation.y, out Translation.z, out Rotation.x, out Rotation.y, out Rotation.z);

            for (int i = 0; i < ControllableObjects.Length; i++)
            {
                ControllableObjects[i].transform.position = startingPositions[i] + Translation;
                ControllableObjects[i].transform.rotation = Quaternion.Euler(startingRotations[i] + Rotation);
            }

            transform.position = Translation;
            transform.rotation = Quaternion.Euler(Rotation);

            VisageTrackerNative._getCameraInfo(out Focus, out ImageWidth, out ImageHeight);

            float aspect = ImageWidth / (float)ImageHeight;

            float yRange = (ImageWidth > ImageHeight) ? 1.0f : 1.0f / aspect;

            Camera.main.fieldOfView = Mathf.Rad2Deg * 2.0f * Mathf.Atan(yRange / Focus);

            VisageTrackerNative._getFaceModel(out VertexNumber, vertices, out TriangleNumber, triangles, texCoords);

            // vertices
            if (Vertices.Length != VertexNumber)
            {
                Vertices = new Vector3[VertexNumber];
            }

            for (int i = 0; i < VertexNumber; i++)
            {
                Vertices [i] = new Vector3(vertices [i * 3 + 0], vertices [i * 3 + 1], vertices [i * 3 + 2]);
            }

            // triangles
            if (Triangles.Length != TriangleNumber)
            {
                Triangles = new int[TriangleNumber * 3];
            }

            for (int i = 0; i < TriangleNumber * 3; i++)
            {
                Triangles [i] = triangles [i];
            }

            // tex coords
            if (TexCoords.Length != VertexNumber)
            {
                TexCoords = new Vector2[VertexNumber];
            }

            for (int i = 0; i < VertexNumber; i++)
            {
                TexCoords[i] = new Vector2(modelTexCoords[i].x, modelTexCoords[i].y); // alternatively: new Vector2(texCoords[i * 2 + 0], texCoords[i * 2 + 1]);
            }
        }
        else
        {
            VisageTrackerNative._grabFrame();

            TrackerStatus = VisageTrackerNative._track();
        }

        RefreshImage();

        // rebuild the face mesh (only populated when the Tiger effect is active)
        meshFilter.mesh.Clear();
        if (currentEffect == FaceEffect.Tiger)
        {
            for (int i = 0; i < ControllableObjects.Length; i++)
            {
                ControllableObjects[i].transform.position -= new Vector3(0, 0, 10000);
            }

            meshFilter.mesh.vertices  = Vertices;
            meshFilter.mesh.triangles = Triangles;
            meshFilter.mesh.uv        = TexCoords;
            meshFilter.mesh.uv2       = TexCoords;
            meshFilter.mesh.RecalculateNormals();
            meshFilter.mesh.RecalculateBounds();
        }
    }
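
Both Update() variants treat the value returned by VisageTrackerNative._track() as a simple zero / non-zero flag: when it is zero no face is tracked, and the frame is still grabbed so tracking can re-acquire a face on the next call. A minimal sketch of that convention as a hypothetical wrapper (the name TrackerLoop and the meaning of individual non-zero codes are assumptions):

    // Hypothetical wrapper (not part of the sample or the visage|SDK API):
    // captures the "zero means no face" convention used by both Update() variants.
    public static class TrackerLoop
    {
        // Grabs a frame and runs the tracker; returns true when the status is
        // non-zero, i.e. when 3D data and the face model are worth reading this frame.
        public static bool GrabAndTrack(out int status)
        {
            VisageTrackerNative._grabFrame();
            status = VisageTrackerNative._track();
            return status != 0;
        }
    }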