/// <summary>
/// Decodes face tracking data received from the data stream into meaningful values.
/// </summary>
/// <param name="data">The space-separated string that encodes the face tracking information.</param>
/// <param name="faceData">The decoded face tracking data.</param>
public static void DecodeFaceTrackingData(string data, out FaceData faceData)
{
    // Token layout: six animation units, then head position (x, y, z),
    // then head rotation (x, y, z). Parsed in order with the invariant
    // culture so the wire format is machine-independent.
    string[] tokens = data.Split(' ');
    var values = new float[12];
    for (int i = 0; i < values.Length; i++)
    {
        values[i] = float.Parse(tokens[i], CultureInfo.InvariantCulture);
    }

    faceData = new FaceData
    {
        Au0 = values[0],
        Au1 = values[1],
        Au2 = values[2],
        Au3 = values[3],
        Au4 = values[4],
        Au5 = values[5],
        PosX = values[6],
        PosY = values[7],
        PosZ = values[8],
        RotX = values[9],
        RotY = values[10],
        RotZ = values[11],
    };
}
/// <summary>
/// Encodes face tracking data for transmission through the data stream.
/// </summary>
/// <param name="data">
/// The face tracking data to encode: six animation units, head position
/// (x, y, z) in meters, and head rotation (x, y, z) as Euler angles.
/// </param>
/// <returns>
/// The string that encodes the face tracking information, prefixed with
/// the frame type and a '|' separator, values space-separated in the
/// invariant culture.
/// </returns>
public static string EncodeFaceTrackingData(FaceData data)
{
    return string.Format(
        CultureInfo.InvariantCulture,
        "{0}|{1} {2} {3} {4} {5} {6} {7} {8} {9} {10} {11} {12}",
        FaceTrackingFrameType,
        data.Au0,
        data.Au1,
        data.Au2,
        data.Au3,
        data.Au4,
        data.Au5,
        data.PosX,
        data.PosY,
        data.PosZ,
        data.RotX,
        data.RotY,
        data.RotZ);
}
/// <summary>
/// Encodes face tracking data for transmission through the data stream.
/// </summary>
/// <param name="data">
/// The face tracking data to encode: six animation units, head position
/// (x, y, z) in meters, and head rotation (x, y, z) as Euler angles.
/// </param>
/// <returns>
/// The string that encodes the face tracking information, prefixed with
/// the frame type and a '|' separator, values space-separated in the
/// invariant culture.
/// </returns>
// NOTE(review): this appears to be an exact duplicate of another
// EncodeFaceTrackingData(FaceData) definition in this file — two identical
// signatures cannot coexist in one type (CS0111). Confirm they live in
// different classes, or remove one copy.
public static string EncodeFaceTrackingData(FaceData data)
{
    return string.Format(
        CultureInfo.InvariantCulture,
        "{0}|{1} {2} {3} {4} {5} {6} {7} {8} {9} {10} {11} {12}",
        FaceTrackingFrameType,
        data.Au0,
        data.Au1,
        data.Au2,
        data.Au3,
        data.Au4,
        data.Au5,
        data.PosX,
        data.PosY,
        data.PosZ,
        data.RotX,
        data.RotY,
        data.RotZ);
}
/// <summary>
/// Updates the face tracking information for this skeleton.
/// </summary>
/// <param name="kinectSensor">Sensor the frames came from; used to lazily create the face tracker.</param>
/// <param name="colorImageFormat">Format of <paramref name="colorImage"/>.</param>
/// <param name="colorImage">Raw color frame bytes.</param>
/// <param name="depthImageFormat">Format of <paramref name="depthImage"/>.</param>
/// <param name="depthImage">Raw depth frame samples.</param>
/// <param name="skeletonOfInterest">Skeleton to run face tracking against.</param>
internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
{
    this._skeletonTrackingState = skeletonOfInterest.TrackingState;

    // Nothing to do with an untracked skeleton.
    if (this._skeletonTrackingState != SkeletonTrackingState.Tracked)
    {
        return;
    }

    // Lazily create the face tracker on first use.
    if (this._faceTracker == null)
    {
        try
        {
            this._faceTracker = new FaceTracker(kinectSensor);
        }
        catch (InvalidOperationException e)
        {
            // During some shutdown scenarios the FaceTracker is unable to be
            // instantiated. Catch that exception and don't track a face.
            Console.WriteLine(Converter.EncodeError("FaceTracker.OnFrameReady - creating a new FaceTracker threw an InvalidOperationException: " + e.Message));
            this._faceTracker = null;
        }
    }

    // Creation failed (shutdown in progress) — skip tracking this frame.
    if (this._faceTracker == null)
    {
        return;
    }

    FaceTrackFrame trackedFrame = this._faceTracker.Track(
        colorImageFormat,
        colorImage,
        depthImageFormat,
        depthImage,
        skeletonOfInterest);

    this._lastFaceTrackSucceeded = trackedFrame.TrackSuccessful;
    if (!this._lastFaceTrackSucceeded)
    {
        return;
    }

    // NOTE(review): FaceTrackFrame may implement IDisposable — confirm
    // whether trackedFrame should be disposed after use here.
    var coefficients = trackedFrame.GetAnimationUnitCoefficients();
    var translation = trackedFrame.Translation;
    var rotation = trackedFrame.Rotation;

    var faceData = new FaceData
    {
        Au0 = coefficients[0],
        Au1 = coefficients[1],
        Au2 = coefficients[2],
        Au3 = coefficients[3],
        Au4 = coefficients[4],
        Au5 = coefficients[5],
        PosX = translation.X,
        PosY = translation.Y,
        PosZ = translation.Z,
        RotX = rotation.X,
        RotY = rotation.Y,
        RotZ = rotation.Z,
    };

    // Emit the encoded frame on stdout for the data stream consumer.
    Console.WriteLine(Converter.EncodeFaceTrackingData(faceData));
}