/// <summary>
/// Appends the current FPS, the face pose (when available), and the texture
/// coordinate count to the supplied message buffer.
/// </summary>
/// <param name="sb">Buffer the diagnostic text is appended to.</param>
/// <param name="fpsResult">Most recent frames-per-second measurement.</param>
/// <param name="face">Tracked face supplying pose and geometry data.</param>
private void UpdateMessageData(StringBuilder sb, float fpsResult, ARFace face)
        {
            sb.Append("FPS= ").Append(fpsResult).Append(System.Environment.NewLine);
            ARPose pose = face.Pose;

            if (pose != null)
            {
                sb.Append("face pose information:");
                sb.Append("face pose tx:[").Append(pose.Tx()).Append("]").Append(System.Environment.NewLine);
                sb.Append("face pose ty:[").Append(pose.Ty()).Append("]").Append(System.Environment.NewLine);
                sb.Append("face pose tz:[").Append(pose.Tz()).Append("]").Append(System.Environment.NewLine);
                sb.Append("face pose qx:[").Append(pose.Qx()).Append("]").Append(System.Environment.NewLine);
                sb.Append("face pose qy:[").Append(pose.Qy()).Append("]").Append(System.Environment.NewLine);
                sb.Append("face pose qz:[").Append(pose.Qz()).Append("]").Append(System.Environment.NewLine);
                sb.Append("face pose qw:[").Append(pose.Qw()).Append("]").Append(System.Environment.NewLine);
            }
            sb.Append(System.Environment.NewLine);

            // Hoist the buffer lookup; the original re-evaluated the full property
            // chain on every loop iteration.
            var coordBuffer = face.FaceGeometry.TextureCoordinates;
            int capacity = coordBuffer.Capacity();
            float[] textureCoordinates = new float[capacity];

            // BUG FIX: the original loop condition was "i == capacity", which skips
            // the copy entirely for any non-zero capacity (and would loop forever
            // only if capacity were 0 == 0 with an immediate out-of-range write).
            for (int i = 0; i < capacity; i++)
            {
                textureCoordinates[i] = coordBuffer.Get(i);
            }
            sb.Append("textureCoordinates length:[ ").Append(textureCoordinates.Length).Append(" ]");
        }
예제 #2
0
    // Awake is called when the script instance is loaded (before Start)


    // Cache the sibling ARFace component and locate the scene-wide face manager.
    void Awake()
    {
        this.arFace = GetComponent<ARFace>();
        this.arFaceManager = FindObjectOfType<ARFaceManager>();

        Debug.Log("Instantiated debug objects");
    }
    // Prepares per-face state: the iOS-only coefficient buffer, the cached ARFace
    // component, and the feature-to-blend-shape mapping.
    void Awake()
    {
#if UNITY_IOS
        // Reusable buffer for reading ARKit blend-shape coefficients on iOS builds.
        s_FaceArkitBlendShapeCoefficients = new List<XRFaceArkitBlendShapeCoefficient>();
#endif
        m_Face = this.GetComponent<ARFace>();
        CreateFeatureBlendMapping();
    }
        /// <summary>
        /// Copies this ARFoundation face's world pose and mesh data into the given
        /// <see cref="ARFoundationFace"/>, allocating the destination when it is null.
        /// NOTE(review): k_Vector3Buffer / k_Vector2Buffer / k_IntBuffer are shared
        /// buffers declared elsewhere in this class — presumably main-thread-only use;
        /// confirm before calling from other threads.
        /// </summary>
        /// <param name="face">Source ARFoundation face.</param>
        /// <param name="xrFaceSubsystem">Face subsystem; used on iOS to compute expressions.</param>
        /// <param name="arFoundationFace">Destination wrapper, created here on first use.</param>
        internal static void ToARFoundationFace(this ARFace face, XRFaceSubsystem xrFaceSubsystem, ref ARFoundationFace arFoundationFace)
        {
            // Lazily create the destination, keyed by the face's trackable id.
            if (arFoundationFace == null)
            {
                arFoundationFace = new ARFoundationFace(face.trackableId.ToMarsId());
            }

            arFoundationFace.pose = face.transform.GetWorldPose();

            var indices = face.indices;

            // Mesh data is only refreshed when the face actually has triangle indices.
            if (indices.Length > 0)
            {
                // Vertices: stage in the reusable list, then push into the mesh.
                k_Vector3Buffer.Clear();
                foreach (var vertex in face.vertices)
                {
                    k_Vector3Buffer.Add(vertex);
                }

                arFoundationFace.Mesh.SetVertices(k_Vector3Buffer);

                // Normals reuse the same Vector3 buffer after clearing it.
                k_Vector3Buffer.Clear();
                foreach (var normal in face.normals)
                {
                    k_Vector3Buffer.Add(normal);
                }

                arFoundationFace.Mesh.SetNormals(k_Vector3Buffer);

                // Texture coordinates go to UV channel 0.
                k_Vector2Buffer.Clear();
                foreach (var uv in face.uvs)
                {
                    k_Vector2Buffer.Add(uv);
                }

                arFoundationFace.Mesh.SetUVs(0, k_Vector2Buffer);
                k_IntBuffer.Clear();
                foreach (var index in indices)
                {
                    k_IntBuffer.Add(index);
                }

                arFoundationFace.Mesh.SetTriangles(k_IntBuffer, 0);

// Expression generation is device-only; skipped entirely in the Editor.
#if !UNITY_EDITOR
#if UNITY_IOS && INCLUDE_ARKIT_FACE_PLUGIN
                // For iOS, we use ARKit Face Blendshapes to determine expressions
                arFoundationFace.GenerateLandmarks();
                arFoundationFace.CalculateExpressions(xrFaceSubsystem, face.trackableId);
#elif UNITY_ANDROID
                // For Android, we use the position of the face landmarks to determine expressions
                arFoundationFace.GenerateLandmarks();
                arFoundationFace.CalculateExpressions(ARCoreFaceLandmarksExtensions.LandmarkPositions);
#endif
#endif
            }
        }
예제 #5
0
        // Mirrors the updated face's local transform into m_FacePose, then refreshes
        // the blend-shape values from the same anchor.
        void FaceUpdated(ARFace anchorData)
        {
            Transform faceTransform = anchorData.transform;
            m_FacePose.position = faceTransform.localPosition;
            m_FacePose.rotation = faceTransform.localRotation;

            UpdateBlendShapes(anchorData);
        }
예제 #6
0
        // Returns the cached ARFoundationFace for this ARFace, creating and/or
        // refreshing it via the ToARFoundationFace extension before re-caching.
        ARFoundationFace GetOrAddFace(ARFace arFace)
        {
            var id = arFace.trackableId;

            ARFoundationFace foundationFace;
            m_TrackedFaces.TryGetValue(id, out foundationFace);

            // ToARFoundationFace allocates when the ref is still null, so a cache
            // miss (foundationFace == null) is handled inside the call.
            arFace.ToARFoundationFace(m_ARFaceManager.subsystem, ref foundationFace);
            m_TrackedFaces[id] = foundationFace;
            return foundationFace;
        }
        /// <summary>
        /// Refreshes the face geometry buffers and renders them for the current frame.
        /// Invoked from FaceRenderManager's OnDrawFrame callback.
        /// </summary>
        /// <param name="camera">ARCamera supplying the projection matrix.</param>
        /// <param name="face">ARFace whose geometry is drawn.</param>
        public void OnDrawFrame(ARCamera camera, ARFace face)
        {
            var geometry = face.FaceGeometry;

            UpdateFaceGeometryData(geometry);
            UpdateModelViewProjectionData(camera, face);
            DrawFaceGeometry();

            // The native geometry handle must be released each frame to avoid leaking it.
            geometry.Release();
        }
예제 #8
0
    // First component to wake claims the static singleton slot; later instances
    // leave it untouched. Also caches the sibling ARFace for subclasses.
    protected virtual void Awake()
    {
        // Deliberately uses Unity's overloaded null check (covers destroyed objects).
        if (Instance == null)
        {
            Instance = this;
        }

        ARFaceComponent = GetComponent<ARFace>();
    }
예제 #9
0
 // Caches the face component and the scene-wide face manager. The debug-marker
 // spawning below was left disabled by the original author.
 void Awake()
 {
     arFace = this.GetComponent<ARFace>();
     arFaceManager = FindObjectOfType<ARFaceManager>();

     // Disabled debug markers — re-enable if visual anchors are needed:
     // forehead = Instantiate(debugObject, transform).transform;
     // nose = Instantiate(debugObject, transform).transform;
     // head = GameObject.CreatePrimitive(PrimitiveType.Sphere).transform;
     // head.SetParent(transform, false);
     Debug.Log("Instantiated debug objects");
 }
예제 #10
0
        // Clears the cached anchor when the removed face owns it, then flags face
        // tracking as inactive regardless.
        void FaceRemoved(ARFace anchorData)
        {
            // TODO: fix edge cases for multiple faces
            bool removedIsCurrentAnchor = faceAnchor == anchorData.transform;
            if (removedIsCurrentAnchor)
            {
                faceAnchor = null;
            }

            m_ARFaceActive = false;
        }
예제 #11
0
        // Resolves an ARTrackable wrapper for a native handle. Cached wrappers are
        // returned directly (after releasing the duplicate native reference); when
        // isCreate is set, an unseen handle is wrapped in the matching subclass and
        // cached. Returns null for IntPtr.Zero or an unseen handle with isCreate false.
        public ARTrackable ARTrackableFactory(IntPtr nativeHandle, bool isCreate = false)
        {
            if (nativeHandle == IntPtr.Zero)
            {
                return null;
            }

            ARTrackable cached;
            if (m_trackableDict.TryGetValue(nativeHandle, out cached))
            {
                // Already wrapped: drop the extra native reference before returning.
                m_ndkSession.TrackableAdapter.Release(nativeHandle);
                return cached;
            }

            if (!isCreate)
            {
                return null;
            }

            NDKARTrackableType ndkTrackableType = m_ndkSession.TrackableAdapter.GetType(nativeHandle);
            ARDebug.LogInfo("trackable type {0}", ndkTrackableType.ToString());

            ARTrackable created;
            switch (ndkTrackableType)
            {
            case NDKARTrackableType.Plane:
                created = new ARPlane(nativeHandle, m_ndkSession);
                break;

            case NDKARTrackableType.Point:
                created = new ARPoint(nativeHandle, m_ndkSession);
                break;

            case NDKARTrackableType.Body:
                created = new ARBody(nativeHandle, m_ndkSession);
                break;

            case NDKARTrackableType.Hand:
                created = new ARHand(nativeHandle, m_ndkSession);
                break;

            case NDKARTrackableType.Face:
                created = new ARFace(nativeHandle, m_ndkSession);
                break;

            case NDKARTrackableType.AugmentedImage:
                created = new ARAugmentedImage(nativeHandle, m_ndkSession);
                break;

            // TODO: add more trackable types here as the NDK exposes them.
            default:
                // Unknown type: release the native handle so it does not leak, then fail loudly.
                m_ndkSession.TrackableAdapter.Release(nativeHandle);
                throw new NotImplementedException("ARTrackableFactory: no constructor for requested type");
            }

            m_trackableDict.Add(nativeHandle, created);
            return created;
        }
예제 #12
0
        // Records the newly added face as the active anchor, seeds m_FacePose from
        // its local transform, and pulls the initial blend-shape values.
        void FaceAdded(ARFace anchorData)
        {
            Transform faceTransform = anchorData.transform;

            m_FacePose.position = faceTransform.localPosition;
            m_FacePose.rotation = faceTransform.localRotation;
            faceAnchor = faceTransform;
            m_ARFaceActive = true;

            UpdateBlendShapes(anchorData);
        }
        // Recomputes mModelViewProjections = projection * facePoseView from the
        // camera's projection matrix and the face pose's view matrix.
        private void UpdateModelViewProjectionData(ARCamera camera, ARFace face)
        {
            const int matrixLength = 16; // both matrices are 4 x 4, flattened

            var projectionMatrix = new float[matrixLength];
            camera.GetProjectionMatrix(projectionMatrix, 0, PROJECTION_MATRIX_NEAR, PROJECTION_MATRIX_FAR);

            var facePoseViewMatrix = new float[matrixLength];
            face.Pose.ToMatrix(facePoseViewMatrix, 0);

            Android.Opengl.Matrix.MultiplyMM(mModelViewProjections, 0, projectionMatrix, 0, facePoseViewMatrix, 0);
        }
예제 #14
0
 // Converts an ARFoundation ARFace into the XRFace value consumed by the
 // subsystem layer, capturing pose, eyes, and fixation point.
 static XRFace ToARFace(ARFace face)
 {
     var xrFace = new XRFace();

     xrFace.trackableId = face.trackableId;
     xrFace.pose = Pose.FromTransform(face.transform);
     xrFace.trackingState = face.trackingState;
     xrFace.nativePtr = face.nativePtr;
     xrFace.leftEyePose = Pose.FromTransform(face.leftEye);
     xrFace.rightEyePose = Pose.FromTransform(face.rightEye);
     xrFace.fixationPoint = face.fixationPoint.localPosition;

     return xrFace;
 }
예제 #15
0
        // Serializes a face's mesh buffers plus its ARKit blend-shape coefficients
        // into a FaceMesh packet for remote transport.
        static ARKitRemotePacket.FaceMesh ToMesh(ARFace face, ARKitFaceSubsystem subsystem)
        {
            var faceId = face.trackableId;

            var mesh = new ARKitRemotePacket.FaceMesh();
            mesh.id = faceId;
            mesh.vertices = face.vertices.ToRawBytes();
            mesh.normals = face.normals.ToRawBytes();
            mesh.indices = face.indices.ToRawBytes();
            mesh.uvs = face.uvs.ToRawBytes();
            // Temp allocator: the coefficient array only needs to live long enough to copy.
            mesh.coefficients = subsystem.GetBlendShapeCoefficients(faceId, Allocator.Temp).ToRawBytes();

            return mesh;
        }
예제 #16
0
        // Gives each face a material: a previously seen trackable id keeps its
        // material; a new id takes the next one from m_FaceMaterials (round-robin).
        void Start()
        {
            var face = GetComponent<ARFace>();
            var meshRenderer = GetComponent<MeshRenderer>();

            Material knownMaterial;
            if (s_FaceTracker.TryGetValue(face.trackableId, out knownMaterial))
            {
                // Assign the material that was already used for the face's unique id.
                meshRenderer.material = knownMaterial;
            }
            else
            {
                var nextMaterial = m_FaceMaterials[s_CurrentMaterialIndex];
                s_FaceTracker.Add(face.trackableId, nextMaterial);
                meshRenderer.material = nextMaterial;
                s_CurrentMaterialIndex = (s_CurrentMaterialIndex + 1) % m_FaceMaterials.Length;
            }
        }
예제 #17
0
    // Resets visual state, grabs the ARKit face subsystem when a manager exists,
    // then subscribes this component to its face's update events.
    void OnEnable()
    {
        SetupInitialBlendShapeValues();
        CreateDebugOverlays();
        CleanUp();

        var faceManager = FindObjectOfType<ARFaceManager>();
        if (faceManager != null)
        {
            // Cast assumes the manager is ARKit-backed — holds on iOS builds.
            faceSubsystem = (ARKitFaceSubsystem)faceManager.subsystem;
        }

        face = GetComponent<ARFace>();
        face.updated += OnUpdated;
    }
예제 #18
0
        // Tracks the first face from add/remove change events and exports the mesh
        // data after every event.
        private void OnFaceUpdate(ARFacesChangedEventArgs args)
        {
            // A newly added face becomes the tracked one.
            if (args.added.Count > 0)
            {
                Debug.Log("face added");
                m_face = args.added[0].gameObject.GetComponent<ARFace>();
            }

            // Drop the reference as soon as the face is lost.
            if (args.removed.Count > 0)
            {
                Debug.Log("face lost");
                m_face = null;
            }

            GetMeshDataAndWriteJson();
        }
        // Caches the first newly added face (unless a write is already in progress),
        // clears the cache when the face is lost, and samples mesh indices while recording.
        private void OnFaceUpdate(ARFacesChangedEventArgs args)
        {
            bool hasNewFace = args.added.Count > 0;
            if (hasNewFace && !write)
            {
                m_face = args.added[0].gameObject.GetComponent<ARFace>();
            }

            if (args.removed.Count > 0)
            {
                m_face = null;
            }

            if (recording)
            {
                GetMeshIndices();
            }
        }
예제 #20
0
 // Subscribes to facesChanged: once a face is known, captures a one-time offset
 // between this object and the face; until then, tries to locate the first face
 // and record its initial head position.
 private void Awake()
 {
     aRFaceManager = FindObjectOfType <ARFaceManager>();
     if (aRFaceManager != null)
     {
         aRFaceManager.facesChanged += delegate
         {
             if (arface != null)
             {
                 // Capture the positional offset exactly once (while it is still zero).
                 if (offset == Vector3.zero)
                 {
                     offset = transform.position - arface.transform.position;
                 }
                 Debug.Log(offset);
             }
             else
             {
                 arface = FindObjectOfType <ARFace>();
                 // BUG FIX: FindObjectOfType can return null (e.g. the event fired
                 // for a removed face, or tracking has not started); the original
                 // dereferenced it unconditionally and threw NullReferenceException.
                 if (arface != null)
                 {
                     initHeadPos = arface.transform.position;
                 }
             }
         };
     }
 }
예제 #21
0
        // Reads the ARKit blend-shape coefficients for the given face and writes them
        // into m_BlendShapes, using m_BlendShapeIndices to map each ARKit location to
        // a slot. The index map is built lazily on first use from the sorted stream
        // settings locations.
        // NOTE(review): assumes the set of blendShapeLocation values is stable across
        // calls, since the map is only built once — confirm against the subsystem docs.
        void UpdateBlendShapes(ARFace anchorData)
        {
            var xrFaceSubsystem    = m_FaceManager.subsystem;
            var arKitFaceSubsystem = (ARKitFaceSubsystem)xrFaceSubsystem;

            var faceId = anchorData.trackableId;

            // Allocator.Temp + using: the native coefficient array is disposed on exit.
            using (var blendShapeCoefficients = arKitFaceSubsystem.GetBlendShapeCoefficients(faceId, Allocator.Temp))
            {
                // Lazy one-time construction of the location -> slot index map.
                if (m_BlendShapeIndices == null)
                {
                    m_BlendShapeIndices = new Dictionary <int, int>();

                    // Sort the configured location names so slot order is deterministic.
                    var names = m_StreamSettings.locations.ToList();
                    names.Sort();

                    foreach (var featureCoefficient in blendShapeCoefficients)
                    {
                        var location = featureCoefficient.blendShapeLocation;
                        var index    = names.IndexOf(location.ToString());
                        // Locations absent from the stream settings are simply skipped.
                        if (index >= 0)
                        {
                            m_BlendShapeIndices[(int)location] = index;
                        }
                    }
                }

                // Copy each known coefficient into its mapped slot.
                foreach (var featureCoefficient in blendShapeCoefficients)
                {
                    var location = (int)featureCoefficient.blendShapeLocation;
                    if (m_BlendShapeIndices.TryGetValue(location, out var index))
                    {
                        m_BlendShapes[index] = featureCoefficient.coefficient;
                    }
                }
            }
        }
 // Caches the sibling ARFace component, then builds the feature/blend-shape lookup.
 void Awake()
 {
     m_Face = this.GetComponent<ARFace>();
     CreateFeatureBlendMapping();
 }
예제 #23
0
 // Caches the ARFace component that lives on the same GameObject.
 void Awake()
 {
     arFace = GetComponent<ARFace>();
 }
예제 #24
0
 // ARFace.updated handler: refreshes visibility first, then the per-feature values.
 void OnUpdated(ARFace face)
 {
     UpdateVisibility();
     UpdateFaceFeatures();
 }
        // Drives an off-axis ("holographic window") projection for eyeCamera:
        //  1. Aligns eyeCamera's rotation with the device camera (flipped 180°).
        //  2. Tracks the viewer's left-eye viewport position from the ARFace to
        //     shift the eye camera laterally (clamped by moveAmount).
        //  3. Builds an off-center frustum whose near plane lies on the physical
        //     device screen, via PerspectiveOffCenter.
        // NOTE(review): the 0.040/0.022/0.135 offsets encode one specific iPhone's
        // screen geometry in meters — presumably measured by hand; confirm per device.
        private void LateUpdate()
        {
            if (deviceCamera == null ||
                eyeCamera == null ||
                arCamera == null)
            {
                Debug.LogWarning("deviceCamera, eyeCamera, arCameraがセットされていません");
                return;
            }


            // Face the eye camera opposite to the device camera (mirror view).
            Quaternion q = deviceCamera.transform.rotation * Quaternion.Euler(Vector3.up * 180f);

            eyeCamera.transform.rotation = q;

            if (arFaceObj == null || arFace == null)
            {
                // Lazily (re)acquire the tracked face; FindWithTag throws on a missing tag.
                try
                {
                    arFaceObj = GameObject.FindWithTag("ARFace");
                    arFace    = arFaceObj.GetComponent <ARFace>();
                }
                catch (System.Exception e)
                {
                    Debug.LogWarning(e);
                }
            }
            else
            {
                // Left-eye position in viewport space, re-centered to [-0.5, 0.5]
                // and clamped, then mirrored in X for the eye camera's local offset.
                Vector2 eyePos = arCamera.WorldToViewportPoint(arFace.leftEye.position);
                eyePos.x -= 0.5f;
                eyePos.y -= 0.5f;
                eyePos.x  = Mathf.Clamp(eyePos.x, -moveAmount.x, moveAmount.x);
                eyePos.y  = Mathf.Clamp(eyePos.y, -moveAmount.y, moveAmount.y);
                eyeCamera.transform.localPosition = new Vector3(-eyePos.x, eyePos.y, 0f);
            }

            // Device camera position/forward expressed in the eye camera's local space;
            // the near distance is the eye's distance to the device screen plane.
            Vector3 deviceCamPos = eyeCamera.transform.worldToLocalMatrix.MultiplyPoint(deviceCamera.transform.position);
            Vector3 fwd          = eyeCamera.transform.worldToLocalMatrix.MultiplyVector(deviceCamera.transform.forward);
            var     devicePlane  = new Plane(fwd, deviceCamPos);
            Vector3 close        = devicePlane.ClosestPointOnPlane(Vector3.zero);

            near = close.magnitude;

            // Set the physical screen bounds of the iPhone (meters), relative to the camera.
            left   = deviceCamPos.x - 0.040f;
            right  = deviceCamPos.x + 0.022f;
            top    = deviceCamPos.y + 0.000f;
            bottom = deviceCamPos.y - 0.135f;
            far    = 10f;

            // Normalize the frustum so the near plane sits at 0.01 while keeping its shape.
            float scale_factor = 0.01f / near;

            near   *= scale_factor;
            left   *= scale_factor;
            right  *= scale_factor;
            top    *= scale_factor;
            bottom *= scale_factor;

            Matrix4x4 m = PerspectiveOffCenter(left, right, bottom, top, near, far);

            eyeCamera.projectionMatrix = m;
        }
예제 #26
0
 // Grabs the ARFace component once so later callbacks avoid repeated lookups.
 void Awake()
 {
     m_Face = this.GetComponent<ARFace>();
 }
예제 #27
0
 // Caches this object's ARFace component and locates the scene's face manager.
 private void Start()
 {
     face = GetComponent<ARFace>();
     faceManager = FindObjectOfType<ARFaceManager>();
 }
예제 #28
0
 // Caches the ARFace component and opens the UDP sender toward the configured target.
 void Awake()
 {
     face = GetComponent<ARFace>();
     udp = new UDPSender(GeneralSettings.DistinationIP, GeneralSettings.DistinationPort);
 }
예제 #29
0
 // Binds the face this component follows, then calls Update() manually so the
 // state reflects the new face immediately instead of waiting for the next frame.
 public void Initialize(ARFace face)
 {
     m_face = face;
     Update();
 }
 // Caches the ARFace component attached to this GameObject.
 void Awake()
 {
     m_Face = gameObject.GetComponent<ARFace>();
 }