/// <summary>
/// Updates the face tracking information for this skeleton
/// </summary>
internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
{
    this.skeletonTrackingState = skeletonOfInterest.TrackingState;

    if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
    {
        // nothing to do with an untracked skeleton.
        return;
    }

    if (this.faceTracker == null)
    {
        try
        {
            this.faceTracker = new FaceTracker(kinectSensor);
        }
        catch (InvalidOperationException)
        {
            // During some shutdown scenarios the FaceTracker
            // is unable to be instantiated. Catch that exception
            // and don't track a face.
            Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
            this.faceTracker = null;
        }
    }

    if (this.faceTracker != null)
    {
        FaceTrackFrame frame = this.faceTracker.Track(
            colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

        this.lastFaceTrackSucceeded = frame.TrackSuccessful;
        if (this.lastFaceTrackSucceeded)
        {
            if (faceTriangles == null)
            {
                // only need to get this once. It doesn't change.
                faceTriangles = frame.GetTriangles();
            }

            if (this.DrawFaceMesh || this.DrawFeaturePoints != DrawFeaturePoint.None)
            {
                this.facePoints = frame.GetProjected3DShape();
            }

            // get the shape points array
            if (this.DrawShapePoints)
            {
                // see the !!!README.txt file to add the function
                // to your toolkit project
                this.shapePoints = frame.GetShapePoints();
            }
        }

        // draw/remove the components
        SetFeaturePointsLocations();
        SetShapePointsLocations();
    }
}
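// A minimal sketch, assuming a field layout like the fuller handlers later in
// this section, of the plumbing that usually drives OnFrameReady: an
// AllFramesReady handler that copies the color, depth, and skeleton data and
// forwards the first tracked skeleton. The field names (sensor, colorImage,
// depthImage, skeletonData) are assumptions, not toolkit API; requires
// using System.Linq.
private void KinectSensorOnAllFramesReady(object sender, AllFramesReadyEventArgs e)
{
    using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
    {
        if (colorFrame == null || depthFrame == null || skeletonFrame == null)
        {
            return; // face tracking needs all three streams for this frame
        }

        colorFrame.CopyPixelDataTo(this.colorImage);
        depthFrame.CopyPixelDataTo(this.depthImage);
        skeletonFrame.CopySkeletonDataTo(this.skeletonData);

        Skeleton skeleton = this.skeletonData.FirstOrDefault(
            s => s.TrackingState == SkeletonTrackingState.Tracked);
        if (skeleton != null)
        {
            this.OnFrameReady(this.sensor, colorFrame.Format, this.colorImage,
                              depthFrame.Format, this.depthImage, skeleton);
        }
    }
}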
/// <summary>
/// Updates the face tracking information for this skeleton
/// </summary>
internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest) // <-- Skeleton data is passed in here
{
    // Here a skeletonOfInterest is available
    this.skeletonTrackingState = skeletonOfInterest.TrackingState;

    if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
    {
        // nothing to do with an untracked skeleton.
        return;
    }

    if (this.faceTracker == null)
    {
        try
        {
            this.faceTracker = new FaceTracker(kinectSensor);
        }
        catch (InvalidOperationException)
        {
            // During some shutdown scenarios the FaceTracker
            // is unable to be instantiated. Catch that exception
            // and don't track a face.
            Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
            this.faceTracker = null;
        }
    }

    if (this.faceTracker != null)
    {
        // Here is where the skeletonOfInterest is used:
        // call Track(), passing skeletonOfInterest.
        FaceTrackFrame frame = this.faceTracker.Track(
            colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

        this.lastFaceTrackSucceeded = frame.TrackSuccessful;
        if (this.lastFaceTrackSucceeded)
        {
            if (faceTriangles == null)
            {
                // only need to get this once. It doesn't change.
                faceTriangles = frame.GetTriangles();
            }

            // Assign the facePoints. This gets the face data but does not
            // draw the face or the skeleton.
            this.facePoints = frame.GetProjected3DShape();

            // This code gets the yaw, pitch and roll; capture them here.
            this.rotation = frame.Rotation; // <-- frame is a FaceTrackFrame
        }
    }
}
/// <summary>
/// Updates the face tracking information for this skeleton
/// </summary>
internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
{
    this.skeletonTrackingState = skeletonOfInterest.TrackingState;

    if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
    {
        // nothing to do with an untracked skeleton.
        return;
    }

    if (this.faceTracker == null)
    {
        try
        {
            this.faceTracker = new FaceTracker(kinectSensor);
        }
        catch (InvalidOperationException)
        {
            // During some shutdown scenarios the FaceTracker
            // is unable to be instantiated. Catch that exception
            // and don't track a face.
            this.faceTracker = null;
        }
    }

    if (this.faceTracker != null)
    {
        FaceTrackFrame frame = this.faceTracker.Track(
            colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

        this.lastFaceTrackSucceeded = frame.TrackSuccessful;
        if (this.lastFaceTrackSucceeded)
        {
            if (faceTriangles == null)
            {
                // only need to get this once. It doesn't change.
                faceTriangles = frame.GetTriangles();
            }

            this.facePoints = frame.GetProjected3DShape();

            double dbX = facePoints[5].X;
            double dbY = facePoints[5].Y;

            App thisApp = App.Current as App;
            thisApp.m_dbX = dbX;
            thisApp.m_dbY = dbY;
        }
    }
}
/// <summary>
/// Updates the face tracking information for this skeleton
/// </summary>
internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
{
    this.skeletonTrackingState = skeletonOfInterest.TrackingState;

    if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
    {
        // nothing to do with an untracked skeleton.
        return;
    }

    if (this.faceTracker == null)
    {
        try
        {
            this.faceTracker = new FaceTracker(kinectSensor);
        }
        catch (InvalidOperationException)
        {
            // During some shutdown scenarios the FaceTracker
            // is unable to be instantiated. Catch that exception
            // and don't track a face.
            Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
            this.faceTracker = null;
        }
    }

    if (this.faceTracker != null)
    {
        FaceTrackFrame frame = this.faceTracker.Track(
            colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

        this.lastFaceTrackSucceeded = frame.TrackSuccessful;
        if (this.lastFaceTrackSucceeded)
        {
            if (faceTriangles == null)
            {
                // only need to get this once. It doesn't change.
                faceTriangles = frame.GetTriangles();
            }

            this.facePoints = frame.GetProjected3DShape();
            this.test = frame.Get3DShape();

            // info about rotations
            pitchVal = frame.Rotation.X;
            rollVal = frame.Rotation.Z;
            yawVal = frame.Rotation.Y;
        }
    }
}
protected void UpdateFrame(FaceTrackFrame frame)
{
    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
    if (this.lastFaceTrackSucceeded)
    {
        if (faceTriangles == null)
        {
            // only need to get this once. It doesn't change.
            faceTriangles = frame.GetTriangles();
        }

        this.facePoints = frame.Get3DShape();
        this.FaceRect = frame.FaceRect;
        this.FaceTranslation = frame.Translation;
        this.FaceRotation = frame.Rotation;
    }
}
private void UpdateMesh(FaceTrackFrame faceTrackingFrame)
{
    EnumIndexableCollection<FeaturePoint, Vector3DF> shapePoints = faceTrackingFrame.Get3DShape();
    EnumIndexableCollection<FeaturePoint, PointF> projectedShapePoints = faceTrackingFrame.GetProjected3DShape();

    if (this.triangleIndices == null)
    {
        // Update stuff that doesn't change from frame to frame
        this.triangleIndices = faceTrackingFrame.GetTriangles();
        var indices = new Int32Collection(this.triangleIndices.Length * 3);
        foreach (FaceTriangle triangle in this.triangleIndices)
        {
            indices.Add(triangle.Third);
            indices.Add(triangle.Second);
            indices.Add(triangle.First);
        }

        this.theGeometry.TriangleIndices = indices;
        this.theGeometry.Normals = null; // Let WPF3D calculate these.
        this.theGeometry.Positions = new Point3DCollection(shapePoints.Count);
        this.theGeometry.TextureCoordinates = new PointCollection(projectedShapePoints.Count);
        for (int pointIndex = 0; pointIndex < shapePoints.Count; pointIndex++)
        {
            this.theGeometry.Positions.Add(new Point3D());
            this.theGeometry.TextureCoordinates.Add(new Point());
        }
    }

    // Update the 3D model's vertices and texture coordinates
    for (int pointIndex = 0; pointIndex < shapePoints.Count; pointIndex++)
    {
        Vector3DF point = shapePoints[pointIndex];
        this.theGeometry.Positions[pointIndex] = new Point3D(point.X, point.Y, -point.Z);

        PointF projected = projectedShapePoints[pointIndex];
        this.theGeometry.TextureCoordinates[pointIndex] = new Point(
            projected.X / (double)this.colorImageWritableBitmap.PixelWidth,
            projected.Y / (double)this.colorImageWritableBitmap.PixelHeight);
    }
}
internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
{
    this.skeletonTrackingState = skeletonOfInterest.TrackingState;

    if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
    {
        return;
    }

    if (this.faceTracker == null)
    {
        try
        {
            this.faceTracker = new FaceTracker(kinectSensor);
        }
        catch (InvalidOperationException)
        {
            Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
            this.faceTracker = null;
        }
    }

    if (this.faceTracker != null)
    {
        FaceTrackFrame frame = this.faceTracker.Track(colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

        this.lastFaceTrackSucceeded = frame.TrackSuccessful;
        if (this.lastFaceTrackSucceeded)
        {
            if (faceTriangles == null)
            {
                faceTriangles = frame.GetTriangles();
            }

            this.facePoints = frame.GetProjected3DShape();
        }
    }
}
internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
{
    if (!timerStarted)
    {
        timer.Start();
        timerStarted = true;
    }

    // increment our frames
    numberOfFrames++;

    this.skeletonTrackingState = skeletonOfInterest.TrackingState;

    if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
    {
        // nothing to do with an untracked skeleton.
        return;
    }

    if (this.faceTracker == null)
    {
        try
        {
            this.faceTracker = new FaceTracker(kinectSensor);
        }
        catch (InvalidOperationException)
        {
            // During some shutdown scenarios the FaceTracker
            // is unable to be instantiated. Catch that exception
            // and don't track a face.
            Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
            this.faceTracker = null;
        }
    }

    if (this.faceTracker != null)
    {
        FaceTrackFrame frame = this.faceTracker.Track(
            colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

        this.lastFaceTrackSucceeded = frame.TrackSuccessful;
        if (this.lastFaceTrackSucceeded)
        {
            if (faceTriangles == null)
            {
                // only need to get this once. It doesn't change.
                faceTriangles = frame.GetTriangles();
            }

            this.facePoints = frame.GetProjected3DShape();

            /*if ()
             *{
             *    Debug.WriteLine("hit " + (frameIter * sampleRate) + " frames in " + (timer.Elapsed) + " seconds");
             *    frameIter++;
             *}*/

            // Also grab our points
            EnumIndexableCollection<FeaturePoint, Vector3DF> facePoints3D = frame.Get3DShape();
            int index = 0;

            if (numberOfFrames > frameIter * sampleRate && frameIter < 5) // only grab 4 samples over our given sample rate
            {
                // Create a new thread so we don't make the visual thread throw up all over the place
                new Thread(() =>
                {
                    Thread.CurrentThread.IsBackground = true;
                    List<Tuple<float, float, float>> myPoints = new List<Tuple<float, float, float>>();
                    foreach (Vector3DF vector in facePoints3D)
                    {
                        //csv.Append(string.Format("( ({1}, {2}, {3}){4}", vector.X, vector.Y, vector.Z, Environment.NewLine));
                        myPoints.Add(new Tuple<float, float, float>(vector.X, vector.Y, vector.Z));
                        index++;
                    }

                    calculateDistances(myPoints);
                    frameIter++;
                }).Start();

                //once = true;
            }

            if (frameIter == 5)
            {
                SetStatusText("Generating histograms...");
                Console.WriteLine("We are ready to sample");

                foreach (float distance in sampleOneDistances)
                {
                    int sampleOneIndex = (int)Math.Floor(64 * distance / sampleOneMaxDistance);
                    sampleOneHistogram[sampleOneIndex]++;
                }
                foreach (float distance in sampleTwoDistances)
                {
                    sampleTwoHistogram[(int)Math.Floor(64 * distance / sampleTwoMaxDistance)]++;
                }
                foreach (float distance in sampleThreeDistances)
                {
                    sampleThreeHistogram[(int)Math.Floor(64 * distance / sampleThreeMaxDistance)]++;
                }
                foreach (float distance in sampleFourDistances)
                {
                    sampleFourHistogram[(int)Math.Floor(64 * distance / sampleFourMaxDistance)]++;
                }

                // Normalize each histogram bin by that sample's distance count
                for (int i = 0; i < sampleOneHistogram.Length; i++)
                {
                    sampleOneHistogram[i] = sampleOneHistogram[i] / sampleOneDistances.Count;
                }
                for (int i = 0; i < sampleTwoHistogram.Length; i++)
                {
                    sampleTwoHistogram[i] = sampleTwoHistogram[i] / sampleTwoDistances.Count;
                }
                for (int i = 0; i < sampleThreeHistogram.Length; i++)
                {
                    sampleThreeHistogram[i] = sampleThreeHistogram[i] / sampleThreeDistances.Count;
                }
                for (int i = 0; i < sampleFourHistogram.Length; i++)
                {
                    sampleFourHistogram[i] = sampleFourHistogram[i] / sampleFourDistances.Count;
                }

                int iter = 0;
                foreach (int count in sampleTwoHistogram) // can iterate through any histogram, they're all of size 65
                {
                    Console.WriteLine("Count for hist1/2/3/4[" + iter + "] is " + count + "/" + sampleOneHistogram[iter] + "/" + sampleThreeHistogram[iter] + "/" + sampleFourHistogram[iter]);
                    iter++;
                }

                // Write our histograms to a csv file
                String[] sampleOneHistString = Array.ConvertAll(sampleOneHistogram, x => x.ToString());
                using (System.IO.StreamWriter file = new System.IO.StreamWriter(testFilePath))
                {
                    file.Write(string.Join(",", Enumerable.Range(1, 65).ToArray()) + Environment.NewLine);
                    file.Write(string.Join(",", sampleOneHistString));
                    file.Write(Environment.NewLine);
                    file.Write(string.Join(",", Array.ConvertAll(sampleTwoHistogram, x => x.ToString())));
                    file.Write(Environment.NewLine);
                    file.Write(string.Join(",", Array.ConvertAll(sampleThreeHistogram, x => x.ToString())));
                    file.Write(Environment.NewLine);
                    file.Write(string.Join(",", Array.ConvertAll(sampleFourHistogram, x => x.ToString())));
                }

                // Pass that data file to the jar
                String jarPath = "C:\\Users\\Datalab\\Documents\\GitHub\\WekaClassifier\\jar\\wekaClassifier.jar";
                System.Diagnostics.Process clientProcess = new Process();
                String jarargs = "C:\\Users\\Datalab\\Documents\\GitHub\\WekaClassifier\\data\\training_data.arff C:\\Users\\Datalab\\Documents\\GitHub\\WekaClassifier\\data\\testFormat.dat";
                clientProcess.StartInfo.FileName = "java";
                clientProcess.StartInfo.Arguments = "-jar " + jarPath + " " + jarargs;
                clientProcess.StartInfo.RedirectStandardOutput = true;
                clientProcess.StartInfo.UseShellExecute = false;
                clientProcess.Start();
                String output = clientProcess.StandardOutput.ReadToEnd();
                Console.WriteLine(output);
                clientProcess.WaitForExit();
                int code = clientProcess.ExitCode;

                // write to dat file with 4 histograms averaged
                frameIter++; // only do this once (this makes the conditional above evaluate to false from now on)
                ftNumPeople++;
                SetPeopleText("People tracked : " + ftNumPeople);
                SetStatusText("Status: waiting....");
                SetPredictionText("Guess: " + output);
            }
        }
    }
}
private bool CheckFace(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
{
    this.skeletonTrackingState = skeletonOfInterest.TrackingState;

    if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
    {
        // nothing to do with an untracked skeleton.
        return false;
    }

    if (this.faceTracker == null)
    {
        try
        {
            this.faceTracker = new FaceTracker(kinectSensor);
        }
        catch (InvalidOperationException)
        {
            // During some shutdown scenarios the FaceTracker
            // is unable to be instantiated. Catch that exception
            // and don't track a face.
            Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
            this.faceTracker = null;
        }
    }

    if (this.faceTracker != null)
    {
        FaceTrackFrame frame = this.faceTracker.Track(
            colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

        this.lastFaceTrackSucceeded = frame.TrackSuccessful;
        if (this.lastFaceTrackSucceeded)
        {
            if (faceTriangles == null)
            {
                // only need to get this once. It doesn't change.
                faceTriangles = frame.GetTriangles();
            }

            // getting the Animation Unit Coefficients
            this.AUs = frame.GetAnimationUnitCoefficients();
            var jawLowerer = AUs[AnimationUnit.JawLower];
            var browLower = AUs[AnimationUnit.BrowLower];
            var browRaiser = AUs[AnimationUnit.BrowRaiser];
            var lipDepressor = AUs[AnimationUnit.LipCornerDepressor];
            var lipRaiser = AUs[AnimationUnit.LipRaiser];
            var lipStretcher = AUs[AnimationUnit.LipStretcher];

            // set up file for output
            using (System.IO.StreamWriter file = new System.IO.StreamWriter(@"C:\Users\Public\data.txt"))
            {
                file.WriteLine("FaceTrack Data, started recording at " + DateTime.Now.ToString("HH:mm:ss tt"));
            }

            // here is the algorithm to test different facial features
            // BrowLower is messed up if you wear glasses, works if you don't wear 'em
            string state = "";

            // surprised
            if ((jawLowerer < -0.25 || jawLowerer > 0.25) && browLower < 0)
            {
                state = "surprised";
            }

            // smiling
            if (lipStretcher > 0.4 || lipDepressor < 0)
            {
                state = "smiling";
            }

            // sad
            if (browRaiser < 0 && lipDepressor > 0)
            {
                state = "sad";
            }

            // angry
            if ((browLower > 0 && (jawLowerer > 0.25 || jawLowerer < -0.25)) || (browLower > 0 && lipDepressor > 0))
            {
                state = "angry";
            }

            //System.Diagnostics.Debug.WriteLine(browLower);

            this.facePoints = frame.GetProjected3DShape();

            if (states[currentState] == state)
            {
                Trace.WriteLine("Yo!");
                return true;
            }
        }
    }

    return false;
}
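// The threshold chain above can be factored into a small helper. This is a
// hedged sketch, not the author's code: it reuses the same AnimationUnit
// coefficients and thresholds, and like the original it lets later rules
// override earlier ones (so "angry" wins over "surprised").
private static string ClassifyExpression(EnumIndexableCollection<AnimationUnit, float> aus)
{
    float jawLowerer = aus[AnimationUnit.JawLower];
    float browLower = aus[AnimationUnit.BrowLower];
    float browRaiser = aus[AnimationUnit.BrowRaiser];
    float lipDepressor = aus[AnimationUnit.LipCornerDepressor];
    float lipStretcher = aus[AnimationUnit.LipStretcher];

    string state = "";
    if ((jawLowerer < -0.25 || jawLowerer > 0.25) && browLower < 0) { state = "surprised"; }
    if (lipStretcher > 0.4 || lipDepressor < 0) { state = "smiling"; }
    if (browRaiser < 0 && lipDepressor > 0) { state = "sad"; }
    if ((browLower > 0 && (jawLowerer > 0.25 || jawLowerer < -0.25)) || (browLower > 0 && lipDepressor > 0)) { state = "angry"; }
    return state;
}

// Usage (sketch): string state = ClassifyExpression(frame.GetAnimationUnitCoefficients());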
/// <summary>
/// Updates the face tracking information for this skeleton
/// </summary>
internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
{
    // int cursorX = 500;
    // int cursorY = 500;
    // bool click = false;

    this.skeletonTrackingState = skeletonOfInterest.TrackingState;

    if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
    {
        // nothing to do with an untracked skeleton.
        return;
    }

    if (this.faceTracker == null)
    {
        try
        {
            this.faceTracker = new FaceTracker(kinectSensor);
        }
        catch (InvalidOperationException)
        {
            // During some shutdown scenarios the FaceTracker
            // is unable to be instantiated. Catch that exception
            // and don't track a face.
            Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
            this.faceTracker = null;
        }
    }

    if (this.faceTracker != null)
    {
        FaceTrackFrame frame = this.faceTracker.Track(
            colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

        this.lastFaceTrackSucceeded = frame.TrackSuccessful;
        if (this.lastFaceTrackSucceeded)
        {
            if (faceTriangles == null)
            {
                // only need to get this once. It doesn't change.
                faceTriangles = frame.GetTriangles();
            }

            this.facePoints = frame.GetProjected3DShape();

            if (Globals.cambio == false)
            {
                int Pitch = (int)frame.Rotation.X;
                int Yaw = (int)frame.Rotation.Y;

                /* if ((Yaw > 20) && (Pitch > 0) || (Yaw < -20) && (Pitch > 0) || (Yaw < -20) && (Pitch < -10) || (Yaw > 20) && (Pitch < -10)) // if moving diagonally
                 * {
                 *     if ((Yaw > 20) && (Pitch > 0)) // up-right
                 *     {
                 *         Globals.YAW = 1;
                 *         Globals.PITCH = 1;
                 *     }
                 *     if ((Yaw < -20) && (Pitch > 0)) // down-right
                 *     {
                 *         Globals.YAW = -1;
                 *         Globals.PITCH = 1;
                 *     }
                 *     if ((Yaw < -20) && (Pitch < -10)) // down-left
                 *     {
                 *         Globals.YAW = -1;
                 *         Globals.PITCH = -1;
                 *     }
                 *     if ((Yaw > 20) && (Pitch < -10)) // up-left
                 *     {
                 *         Globals.YAW = 1;
                 *         Globals.PITCH = -1;
                 *     }
                 * }
                 * else
                 * {
                 */
                if (Yaw > 20) // left
                {
                    Globals.YAW = 1;
                }
                if (Yaw < -20) // right
                {
                    Globals.YAW = -1;
                }
                if ((Yaw < 20) && (Yaw > -20))
                {
                    Globals.YAW = 0;
                }

                if (Pitch > 0) // up
                {
                    Globals.PITCH = 1;
                }
                if (Pitch < -10) // down
                {
                    Globals.PITCH = -1;
                }
                if ((Pitch < 0) && (Pitch > -10))
                {
                    Globals.PITCH = 0;
                }
                //}
            }
        }
    }
}
private async Task FaceTrackingAsync(TimeSpan dueTime, TimeSpan interval, CancellationToken token)
{
    if (interval.TotalMilliseconds == 0)
    {
        return;
    }

    // Initial wait time before we begin the periodic loop.
    if (dueTime > TimeSpan.Zero)
    {
        await Task.Delay(dueTime, token);
    }

    DateTime LocalTimestamp = Timestamp;
    FaceTracker tracker = new FaceTracker(Sensor);

    // Repeat this loop until cancelled.
    while (!token.IsCancellationRequested)
    {
        // Skip this iteration if we have already worked with the current data
        if (Timestamp == LocalTimestamp)
        {
            await Task.Delay(interval, token);
            continue;
        }

        // Timestamp the data
        LocalTimestamp = Timestamp;
        FaceTrackWatch.Again();

        // Do the work
        try
        {
            CopyColorData = true;
            CopySkeletons = true;
            FPoints = null;
            Mood = 0;
            if (null != GestureManager && null != GestureManager.Skeleton)
            {
                FaceTrackFrame frame = tracker.Track(ColorFormat, ColorData, DepthFormat, DepthData, GestureManager.Skeleton);
                if (frame.TrackSuccessful)
                {
                    // Only once. It doesn't change.
                    if (FTriangles == null)
                    {
                        FTriangles = frame.GetTriangles();
                    }

                    FPoints = frame.GetProjected3DShape();
                    Mood = frame.GetAnimationUnitCoefficients()[AnimationUnit.LipCornerDepressor];
                    WSRProfileManager.GetInstance().UpdateMood(Mood);
                }
            }
        }
        catch (Exception ex)
        {
            WSRConfig.GetInstance().logError("FACE", ex);
        }

        FaceTrackWatch.Stop();

        // Wait before repeating.
        if (interval > TimeSpan.Zero)
        {
            await Task.Delay(interval, token);
        }
    }

    // Dispose of the tracker
    tracker.Dispose();
}
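// A hedged usage sketch for the periodic loop above, assuming the enclosing
// class exposes FaceTrackingAsync exactly as written. CancellationTokenSource
// is standard .NET; note that Task.Delay throws TaskCanceledException when the
// token is cancelled mid-wait, so callers may want to catch that on shutdown.
var cts = new CancellationTokenSource();
Task trackingLoop = FaceTrackingAsync(
    TimeSpan.FromSeconds(1),        // dueTime: initial delay before the loop starts
    TimeSpan.FromMilliseconds(100), // interval: poll roughly ten times per second
    cts.Token);

// ... later, on shutdown:
cts.Cancel();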
private void UpdateMesh(FaceTrackFrame faceTrackingFrame)
{
    //Console.Out.WriteLine(" ###################### In UpdateMesh ############################# ");
    bool faceInCentre = true;

    EnumIndexableCollection<FeaturePoint, Vector3DF> shapePoints = faceTrackingFrame.Get3DShape();
    EnumIndexableCollection<FeaturePoint, PointF> projectedShapePoints = faceTrackingFrame.GetProjected3DShape();

    if (this.triangleIndices == null)
    {
        // Update stuff that doesn't change from frame to frame
        this.triangleIndices = faceTrackingFrame.GetTriangles();
        var indices = new Int32Collection(this.triangleIndices.Length * 3);
        foreach (FaceTriangle triangle in this.triangleIndices)
        {
            indices.Add(triangle.Third);
            indices.Add(triangle.Second);
            indices.Add(triangle.First);
        }

        this.theGeometry.TriangleIndices = indices;
        this.theGeometry.Normals = null; // Let WPF3D calculate these.
        this.theGeometry.Positions = new Point3DCollection(shapePoints.Count);
        this.theGeometry.TextureCoordinates = new PointCollection(projectedShapePoints.Count);
        for (int pointIndex = 0; pointIndex < shapePoints.Count; pointIndex++)
        {
            this.theGeometry.Positions.Add(new Point3D());
            this.theGeometry.TextureCoordinates.Add(new Point());
        }
    }

    // Update the 3D model's vertices and texture coordinates
    for (int pointIndex = 0; pointIndex < shapePoints.Count; pointIndex++)
    {
        Vector3DF point = shapePoints[pointIndex];
        this.theGeometry.Positions[pointIndex] = new Point3D(point.X, point.Y, -point.Z);

        PointF projected = projectedShapePoints[pointIndex];
        this.theGeometry.TextureCoordinates[pointIndex] = new Point(
            projected.X / (double)this.colorImageWritableBitmap.PixelWidth,
            projected.Y / (double)this.colorImageWritableBitmap.PixelHeight);

        // Console.Out.WriteLine("X = " + projected.X / (double)this.colorImageWritableBitmap.PixelWidth + "Y = " + projected.Y / (double)this.colorImageWritableBitmap.PixelHeight);

        if (projected.X / (double)this.colorImageWritableBitmap.PixelWidth > 0.6 ||
            projected.Y / (double)this.colorImageWritableBitmap.PixelHeight > 0.75)
        {
            faceInCentre = false;
        }
    }

    if (faceInCentre)
    {
        // copyFaceImage();
        FaceMesh tempMeshData = new FaceMesh();
        tempMeshData.FaceViewport = viewport3d;
        FaceMeshData = tempMeshData;
    }
}
/// <summary>
/// Updates the face tracking information for this skeleton
/// </summary>
internal void OnFrameReady(KinectSensor sensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage)
{
    if (this.faceTracker == null)
    {
        try
        {
            this.faceTracker = new FaceTracker(sensor);
        }
        catch (InvalidOperationException)
        {
            // During some shutdown scenarios the FaceTracker
            // is unable to be instantiated. Catch that exception
            // and don't track a face.
            Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
            this.faceTracker = null;
        }
    }

    if (this.faceTracker != null)
    {
        FaceTrackFrame frame = this.faceTracker.Track(colorImageFormat, colorImage, depthImageFormat, depthImage);

        this.lastFaceTrackSucceeded = frame.TrackSuccessful;
        if (this.lastFaceTrackSucceeded)
        {
            if (faceTriangles == null)
            {
                // only need to get this once. It doesn't change.
                faceTriangles = frame.GetTriangles();
            }

            this.facePoints = frame.GetProjected3DShape();
            this.facePoints3D = frame.Get3DShape(); // X-man modification
            this.AU = frame.GetAnimationUnitCoefficients();
        }

        if (faceTriangles != null && facePoints != null)
        {
            faceUpdated(this, new FaceTrackerEventArgs(facePoints, faceTriangles));
        }

        if (FaceRecognition.compare)
        {
            FaceRecognition.recognizer(this);
        }

        if (mouthOpened.detect)
        {
            mouthOpened.mouthRecognizer();
        }

        if (mouthShut.detect)
        {
            mouthShut.mouth2Recognizer();
        }

        if (lookingDirection.detect)
        {
            lookingDirection.lookRecognizer();
        }

        if (pupilRight.detect)
        {
            pupilRight.pupilRecognizer();
        }
    }
}
private void KinectSensorOnAllFramesReady(object sender, AllFramesReadyEventArgs e)
{
    // process every other frame: toggle the flag and bail out on alternate frames
    if (frameProccessed[1] == false)
    {
        frameProccessed[1] = true;
    }
    else
    {
        frameProccessed[1] = false;
        return;
    }

    ColorImageFrame colorImageFrame = null;
    DepthImageFrame depthImageFrame = null;
    SkeletonFrame skeletonFrame = null;

    try
    {
        colorImageFrame = e.OpenColorImageFrame();
        depthImageFrame = e.OpenDepthImageFrame();
        skeletonFrame = e.OpenSkeletonFrame();

        if (colorImageFrame == null || depthImageFrame == null || skeletonFrame == null)
        {
            return;
        }

        if (this.depthImageFormat != depthImageFrame.Format)
        {
            this.depthImage = null;
            this.depthImageFormat = depthImageFrame.Format;
        }

        if (this.colorImageFormat != colorImageFrame.Format)
        {
            this.colorImage = null;
            this.colorImageFormat = colorImageFrame.Format;
        }

        if (this.depthImage == null)
        {
            this.depthImage = new short[depthImageFrame.PixelDataLength];
        }

        if (this.colorImage == null)
        {
            this.colorImage = new byte[colorImageFrame.PixelDataLength];
        }

        if (this.skeletonData == null || this.skeletonData.Length != skeletonFrame.SkeletonArrayLength)
        {
            this.skeletonData = new Skeleton[skeletonFrame.SkeletonArrayLength];
        }

        colorImageFrame.CopyPixelDataTo(this.colorImage);
        depthImageFrame.CopyPixelDataTo(this.depthImage);
        skeletonFrame.CopySkeletonDataTo(this.skeletonData);
    }
    finally
    {
        if (colorImageFrame != null)
        {
            colorImageFrame.Dispose();
        }

        if (depthImageFrame != null)
        {
            depthImageFrame.Dispose();
        }

        if (skeletonFrame != null)
        {
            skeletonFrame.Dispose();
        }

        using (depthImageFrame)
        {
            if (depthImageFrame != null && skeletonData != null)
            {
                foreach (Skeleton sd in skeletonData)
                {
                    if (sd.TrackingState == SkeletonTrackingState.Tracked || sd.TrackingState == SkeletonTrackingState.PositionOnly)
                    {
                        Joint joint = sd.Joints[JointType.Head];
                        DepthImagePoint depthPoint;
                        CoordinateMapper coordinateMapper = new CoordinateMapper(frontSensor);
                        depthPoint = coordinateMapper.MapSkeletonPointToDepthPoint(joint.Position, DepthImageFormat.Resolution320x240Fps30);

                        point = new System.Windows.Point(
                            (int)(frontSensor.ColorStream.FrameWidth * depthPoint.X / depthImageFrame.Width),
                            (int)(frontSensor.ColorStream.FrameHeight * depthPoint.Y / depthImageFrame.Height));

                        /* textBlock1.Text = string.Format("X:{0:0.00} Y:{1:0.00} Z:{2:0.00}",
                         *     point.X,
                         *     point.Y,
                         *     joint.Position.Z);
                         */

                        Canvas.SetLeft(headEllipse, point.X - headEllipse.Width / 2);
                        Canvas.SetTop(headEllipse, point.Y - headEllipse.Height / 2);

                        if (this.faceTracker == null)
                        {
                            try
                            {
                                this.faceTracker = new FaceTracker(frontSensor);
                            }
                            catch (InvalidOperationException)
                            {
                                // During some shutdown scenarios the FaceTracker
                                // is unable to be instantiated. Catch that exception
                                // and don't track a face.
                                this.faceTracker = null;
                            }
                        }

                        if (this.faceTracker != null)
                        {
                            FaceTrackFrame frame = this.faceTracker.Track(
                                colorImageFormat, colorImage, depthImageFormat, depthImage, sd);

                            if (frame.TrackSuccessful)
                            {
                                faceTriangles = frame.GetTriangles();
                                this.facePoints = frame.GetProjected3DShape();

                                var faceModelPts = new List<Point>();
                                var faceModel = new List<FaceModelTriangle>();

                                for (int i = 0; i < this.facePoints.Count; i++)
                                {
                                    faceModelPts.Add(new Point(this.facePoints[i].X + 0.5f, this.facePoints[i].Y + 0.5f));
                                }

                                foreach (var t in faceTriangles)
                                {
                                    var triangle = new FaceModelTriangle();
                                    triangle.P1 = faceModelPts[t.First];
                                    //triangle.P2 = faceModelPts[t.Second];
                                    //triangle.P3 = faceModelPts[t.Third];
                                    faceModel.Add(triangle);
                                }

                                Canvas.SetLeft(noseEllipse, faceModel[108].P1.X - noseEllipse.Width / 2);
                                Canvas.SetTop(noseEllipse, faceModel[108].P1.Y - noseEllipse.Height / 2);
                                nosePoint = new Point(faceModel[108].P1.X, faceModel[108].P1.Y);
                            }
                        }
                    }
                }
            }
        }

        getAttentionAngle(nosePoint);
    }
}
/// <summary>
/// Updates the face tracking information for this skeleton
/// </summary>
internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
{
    this.skeletonTrackingState = skeletonOfInterest.TrackingState;

    if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
    {
        // if the current skeleton is not tracked, track it now
        //kinectSensor.SkeletonStream.ChooseSkeletons(skeletonOfInterest.TrackingId);
    }

    if (this.faceTracker == null)
    {
        try
        {
            this.faceTracker = new FaceTracker(kinectSensor);
        }
        catch (InvalidOperationException)
        {
            // During some shutdown scenarios the FaceTracker
            // is unable to be instantiated. Catch that exception
            // and don't track a face.
            Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
            this.faceTracker = null;
        }
    }

    if (this.faceTracker != null)
    {
        // hack to make this face tracking detect the face even when it is not actually tracked
        // <!> need to confirm if it works
        //skeletonOfInterest.TrackingState = SkeletonTrackingState.Tracked;

        FaceTrackFrame frame = this.faceTracker.Track(
            colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);
        //new Microsoft.Kinect.Toolkit.FaceTracking.Rect(skeletonOfInterest.Position.));

        this.lastFaceTrackSucceeded = frame.TrackSuccessful;
        if (this.lastFaceTrackSucceeded)
        {
            if (faceTriangles == null)
            {
                // only need to get this once. It doesn't change.
                faceTriangles = frame.GetTriangles();
            }

            if (faceTag == null)
            {
                // here call the face detection
                faceTag = new FaceRecognizer().getFaceTag(this.colorImageBmp);

                if (faceTag != null)
                {
                    Global.StatusBarText.Text = "Found " + faceTag + "!";
                    if (Global.trackedPeople.ContainsKey(skeletonOfInterest))
                    {
                        Global.trackedPeople[skeletonOfInterest] = faceTag;
                    }
                    else
                    {
                        Global.trackedPeople.Add(skeletonOfInterest, faceTag);
                    }
                }
            }

            this.facePoints = frame.GetProjected3DShape();
            this.faceRect = frame.FaceRect;
        }
    }
}
/// <summary>
/// Updates the face tracking information for this skeleton
/// </summary>
internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
{
    this.skeletonTrackingState = skeletonOfInterest.TrackingState;

    if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
    {
        // nothing to do with an untracked skeleton.
        return;
    }

    if (this.faceTracker == null)
    {
        try
        {
            this.faceTracker = new FaceTracker(kinectSensor);
        }
        catch (InvalidOperationException)
        {
            // During some shutdown scenarios the FaceTracker
            // is unable to be instantiated. Catch that exception
            // and don't track a face.
            Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
            this.faceTracker = null;
        }
    }

    if (this.faceTracker != null)
    {
        FaceTrackFrame frame = this.faceTracker.Track(
            colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

        this.lastFaceTrackSucceeded = frame.TrackSuccessful;
        if (this.lastFaceTrackSucceeded)
        {
            if (faceTriangles == null)
            {
                // only need to get this once. It doesn't change.
                faceTriangles = frame.GetTriangles();
            }

            this.facePoints3DRaw = frame.Get3DShape();
            this.facePoints = frame.GetProjected3DShape();
            animationUnitsRaw = frame.GetAnimationUnitCoefficients();

            // only read the rotation and publish the raw data once the track succeeded
            x = frame.Rotation.X;
            y = frame.Rotation.Y;
            z = frame.Rotation.Z;
            facePointS3D = this.facePoints3DRaw;
            animationUnits = animationUnitsRaw;
        }

        //Debug.WriteLine(animationUnits[AnimationUnit.JawLower]);
        //Debug.WriteLine(animationUnits[AnimationUnit.BrowLower]);
        //Debug.WriteLine(animationUnits[AnimationUnit.BrowRaiser]);
        //Debug.WriteLine(animationUnits[AnimationUnit.JawLower]);
        //Debug.WriteLine(animationUnits[AnimationUnit.LipCornerDepressor]);
        //Debug.WriteLine(animationUnits[AnimationUnit.LipRaiser]);
        //Debug.WriteLine(animationUnits[AnimationUnit.LipStretcher]);
        //Debug.WriteLine(frame.Translation.ToString());
        //Debug.WriteLine(frame.Rotation.ToString());
        //this.facePoints[FeaturePoint.AboveChin].X+2;
        //Debug.WriteLine(frame.Translation.X.ToString());
        //Debug.WriteLine(frame.Translation.Y.ToString());
        //Debug.WriteLine(frame.Translation.Z.ToString());
    }
}
public void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
{
    this.skeletonTrackingState = skeletonOfInterest.TrackingState;

    if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
    {
        return;
    }

    if (this.faceTracker == null)
    {
        try
        {
            this.faceTracker = new FaceTracker(kinectSensor);
        }
        catch (InvalidOperationException)
        {
            Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
            this.faceTracker = null;
        }
    }

    if (this.faceTracker != null)
    {
        FaceTrackFrame frame = this.faceTracker.Track(
            colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

        this.lastFaceTrackSucceeded = frame.TrackSuccessful;
        if (this.lastFaceTrackSucceeded)
        {
            if (faceTriangles == null)
            {
                faceTriangles = frame.GetTriangles();
            }

            this.facePoints = frame.GetProjected3DShape();

            XmlDocument XmlDocKalibracja = new XmlDocument();
            XmlDocKalibracja.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
            XmlNodeList elemListKalibracja = XmlDocKalibracja.GetElementsByTagName("options");
            kalibracja = Convert.ToBoolean(Convert.ToInt32(elemListKalibracja[0].Attributes["kalibracja"].Value));

            if (kalibracja == false)
            {
                kalibracjaCounter++;
                if (kalibracjaCounter == 1)
                {
                    Kalibracja okno = new Kalibracja();
                    okno.Show();
                }
                if (kalibracjaCounter > 150)
                {
                    oldX = Convert.ToInt32(this.facePoints[23].X);
                    oldY = -Convert.ToInt32(this.facePoints[23].Y);
                    oldMouseX = System.Windows.Forms.Cursor.Position.X;
                    oldMouseY = System.Windows.Forms.Cursor.Position.Y;
                    aktualnyX = oldX;
                    aktualnyY = oldY;
                    kalibracja = true;
                    string newValue = "1";
                    kalibracjaCounter = 0;
                    XmlDocument xmlDoc = new XmlDocument();
                    xmlDoc.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                    XmlNode node = xmlDoc.SelectSingleNode("options");
                    node.Attributes[5].Value = Convert.ToString(newValue);
                    xmlDoc.Save(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                }
            }

            if (kalibracja == true)
            {
                try
                {
                    // gesture settings
                    XmlDocument XmlDoc = new XmlDocument();
                    XmlDoc.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                    XmlNodeList elemList = XmlDoc.GetElementsByTagName("options");
                    gest[0] = Convert.ToInt32(elemList[0].Attributes["lpm"].Value);

                    // gesture settings
                    XmlDocument XmlDoc2 = new XmlDocument();
                    XmlDoc2.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                    XmlNodeList elemList2 = XmlDoc2.GetElementsByTagName("options");
                    gest[1] = Convert.ToInt32(elemList2[0].Attributes["ppm"].Value);

                    // gesture settings
                    XmlDocument XmlDoc3 = new XmlDocument();
                    XmlDoc3.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                    XmlNodeList elemList3 = XmlDoc3.GetElementsByTagName("options");
                    gest[2] = Convert.ToInt32(elemList3[0].Attributes["scrollup"].Value);

                    // gesture settings
                    XmlDocument XmlDoc4 = new XmlDocument();
                    XmlDoc4.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                    XmlNodeList elemList4 = XmlDoc4.GetElementsByTagName("options");
                    gest[3] = Convert.ToInt32(elemList4[0].Attributes["scrolldown"].Value);
                }
                catch
                {
                    MessageBox.Show("Error reading the settings.xml file");
                }

                newX = Convert.ToInt32(this.facePoints[23].X);
                newY = -Convert.ToInt32(this.facePoints[23].Y);
                stosunekX = Math.Abs(newX / oldX);
                stosunekY = Math.Abs(newY / oldY);

                // read the sensitivity from the settings.xml file
                try
                {
                    XmlDocument XmlDoc = new XmlDocument();
                    XmlDoc.Load(System.AppDomain.CurrentDomain.BaseDirectory + "\\settings.xml");
                    XmlNodeList elemList = XmlDoc.GetElementsByTagName("options");
                    sensitive = Convert.ToInt32(elemList[0].Attributes["sensitive"].Value);
                }
                catch
                {
                    sensitive = 80;
                }

                if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) < 28 &&
                    Math.Abs(oldX - newX) > 70 && Math.Abs(oldY - newY) > 70 &&
                    Convert.ToInt32(Math.Abs(oldMouseX + (sensitive * stosunekX - sensitive))) < 1980 ||
                    Convert.ToInt32(Math.Abs(oldMouseY + (sensitive * stosunekY - sensitive))) < 1200 &&
                    Convert.ToInt32(Math.Abs(oldMouseX + (sensitive * stosunekX))) >= 0 &&
                    Convert.ToInt32(Math.Abs(oldMouseY + (sensitive * stosunekY))) >= 0)
                {
                    if (stosunekX > 1.03 && ruchY == false)
                    {
                        System.Windows.Forms.Cursor.Position = new System.Drawing.Point(Convert.ToInt32(Math.Abs(oldMouseX + (sensitive * stosunekX - sensitive))), Convert.ToInt32(Math.Abs(oldMouseY)));
                        oldMouseX = Convert.ToInt32(Math.Abs(oldMouseX + (sensitive * stosunekX - sensitive)));
                        ruchX = true;
                    }
                    if (stosunekX < 0.97 && ruchY == false)
                    {
                        System.Windows.Forms.Cursor.Position = new System.Drawing.Point(Convert.ToInt32(Math.Abs(oldMouseX - (sensitive - (sensitive * stosunekX)))), Convert.ToInt32(Math.Abs(oldMouseY)));
                        oldMouseX = Convert.ToInt32(Math.Abs(oldMouseX - (sensitive - (sensitive * stosunekX))));
                        ruchX = true;
                    }
                    if (stosunekY > 1.03 && ruchX == false)
                    {
                        System.Windows.Forms.Cursor.Position = new System.Drawing.Point(Convert.ToInt32(Math.Abs(oldMouseX)), Convert.ToInt32(Math.Abs(oldMouseY + (sensitive * stosunekY - sensitive))));
                        oldMouseY = Convert.ToInt32(Math.Abs(oldMouseY + (sensitive * stosunekY - sensitive)));
                        ruchY = true;
                    }
                    if (stosunekY < 0.97 && ruchX == false)
                    {
                        System.Windows.Forms.Cursor.Position = new System.Drawing.Point(Convert.ToInt32(Math.Abs(oldMouseX)), Convert.ToInt32(Math.Abs(oldMouseY - (sensitive - (sensitive * stosunekY)))));
                        oldMouseY = Convert.ToInt32(Math.Abs(oldMouseY - (sensitive - (sensitive * stosunekY))));
                        ruchY = true;
                    }
                }

                // idle state
                if (Math.Abs(oldX - newX) < 70)
                {
                    ruchX = false;
                }

                // idle state
                if (Math.Abs(oldY - newY) < 70)
                {
                    ruchY = false;
                }

                // FIRST GESTURE
                if (gest[0] == 0)
                {
                    if (this.facePoints[40].Y - this.facePoints[87].Y > 12)
                    {
                        clickCounter++;
                        if (clickCounter % 10 == 0) { DoMouseClick(0); }
                        if (clickCounter == 10000) { clickCounter = 0; }
                    }
                }
                if (gest[0] == 1)
                {
                    if (this.facePoints[57].Y - this.facePoints[51].Y > 15)
                    {
                        clickCounter++;
                        if (clickCounter % 10 == 0) { DoMouseClick(1); }
                        if (clickCounter == 10000) { clickCounter = 0; }
                    }
                }
                if (gest[0] == 2)
                {
                    if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY > 1.03)
                    {
                        clickCounter++;
                        if (clickCounter % 2 == 0) { DoMouseScroll(1); }
                        if (clickCounter == 10000) { clickCounter = 0; }
                    }
                }
                if (gest[0] == 3)
                {
                    if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY < 0.97)
                    {
                        clickCounter++;
                        if (clickCounter % 2 == 0) { DoMouseScroll(0); }
                        if (clickCounter == 10000) { clickCounter = 0; }
                    }
                }

                // SECOND GESTURE
                if (gest[1] == 0)
                {
                    if (this.facePoints[40].Y - this.facePoints[87].Y > 12)
                    {
                        clickCounter++;
                        if (clickCounter % 10 == 0) { DoMouseClick(0); }
                        if (clickCounter == 10000) { clickCounter = 0; }
                    }
                }
                if (gest[1] == 1)
                {
                    if (this.facePoints[57].Y - this.facePoints[51].Y > 14)
                    {
                        clickCounter++;
                        if (clickCounter % 10 == 0) { DoMouseClick(1); }
                        if (clickCounter == 10000) { clickCounter = 0; }
                    }
                }
                if (gest[1] == 2)
                {
                    if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY > 1.03)
                    {
                        clickCounter++;
                        if (clickCounter % 2 == 0) { DoMouseScroll(1); }
                        if (clickCounter == 10000) { clickCounter = 0; }
                    }
                }
                if (gest[1] == 3)
                {
                    if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY < 0.97)
                    {
                        clickCounter++;
                        if (clickCounter % 2 == 0) { DoMouseScroll(0); }
                        if (clickCounter == 10000) { clickCounter = 0; }
                    }
                }

                // THIRD GESTURE
                if (gest[2] == 0)
                {
                    if (this.facePoints[40].Y - this.facePoints[87].Y > 12)
                    {
                        clickCounter++;
                        if (clickCounter % 10 == 0) { DoMouseClick(0); }
                        if (clickCounter == 10000) { clickCounter = 0; }
                    }
                }
                if (gest[2] == 1)
                {
                    if (this.facePoints[57].Y - this.facePoints[51].Y > 14)
                    {
                        clickCounter++;
                        if (clickCounter % 10 == 0) { DoMouseClick(1); }
                        if (clickCounter == 10000) { clickCounter = 0; }
                    }
                }
                if (gest[2] == 2)
                {
                    if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY > 1.03)
                    {
                        clickCounter++;
                        if (clickCounter % 2 == 0) { DoMouseScroll(1); }
                        if (clickCounter == 10000) { clickCounter = 0; }
                    }
                }
                if (gest[2] == 3)
                {
                    if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY < 0.97)
                    {
                        clickCounter++;
                        if (clickCounter % 2 == 0) { DoMouseScroll(0); }
                        if (clickCounter == 10000) { clickCounter = 0; }
                    }
                }

                // FOURTH GESTURE
                if (gest[3] == 0)
                {
                    if (this.facePoints[40].Y - this.facePoints[87].Y > 12)
                    {
                        clickCounter++;
                        if (clickCounter % 10 == 0) { DoMouseClick(0); }
                        if (clickCounter == 10000) { clickCounter = 0; }
                    }
                }
                if (gest[3] == 1)
                {
                    if (this.facePoints[57].Y - this.facePoints[51].Y > 14)
                    {
                        clickCounter++;
                        if (clickCounter % 10 == 0) { DoMouseClick(1); }
                        if (clickCounter == 10000) { clickCounter = 0; }
                    }
                }
                if (gest[3] == 2)
                {
                    if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY > 1.03)
                    {
                        clickCounter++;
                        if (clickCounter % 2 == 0) { DoMouseScroll(1); }
                        if (clickCounter == 10000) { clickCounter = 0; }
                    }
                }
                if (gest[3] == 3)
                {
                    if (Math.Abs(this.facePoints[88].X - this.facePoints[89].X) > 28 && stosunekY < 0.97)
                    {
                        clickCounter++;
                        if (clickCounter % 2 == 0) { DoMouseScroll(0); }
                        if (clickCounter == 10000) { clickCounter = 0; }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Handles the AllFramesReady event of the kinectSensor control.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="Microsoft.Kinect.AllFramesReadyEventArgs"/> instance containing the event data.</param>
void kinectSensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
{
    // Retrieve each single frame and copy the data
    using (ColorImageFrame colorImageFrame = e.OpenColorImageFrame())
    {
        if (colorImageFrame == null)
        {
            return;
        }

        colorImageFrame.CopyPixelDataTo(colorPixelData);
    }

    using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
    {
        if (depthImageFrame == null)
        {
            return;
        }

        depthImageFrame.CopyPixelDataTo(depthPixelData);
    }

    using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
    {
        if (skeletonFrame == null)
        {
            return;
        }

        skeletonFrame.CopySkeletonDataTo(skeletonData);
    }

    // Retrieve the first tracked skeleton, if any. Otherwise, do nothing.
    var skeleton = skeletonData.FirstOrDefault(s => s.TrackingState == SkeletonTrackingState.Tracked);
    if (skeleton == null)
    {
        return;
    }

    // Have the faceTracker process the data.
    FaceTrackFrame faceFrame = faceTracker.Track(
        kinectSensor.ColorStream.Format, colorPixelData,
        kinectSensor.DepthStream.Format, depthPixelData,
        skeleton);

    // If a face is tracked, then we can use it.
    if (faceFrame.TrackSuccessful)
    {
        var triangles = faceFrame.GetTriangles();

        // Retrieve only the Animation Units coefficients.
        var AUCoeff = faceFrame.GetAnimationUnitCoefficients();

        var jawLowerer = AUCoeff[AnimationUnit.JawLower];
        jawLowerer = jawLowerer < 0 ? 0 : jawLowerer;
        MouthScaleTransform.ScaleY = jawLowerer * 5 + 0.1;
        MouthScaleTransform.ScaleX = AUCoeff[AnimationUnit.LipStretcher] + 1;

        LeftBrow.Y = RightBrow.Y = AUCoeff[AnimationUnit.BrowLower] * 40;
        RightBrowRotate.Angle = AUCoeff[AnimationUnit.BrowRaiser] * 20;
        LeftBrowRotate.Angle = -RightBrowRotate.Angle;
        CanvasRotate.Angle = -faceFrame.Rotation.Z;
        // CanvasTranslate.X = faceFrame.Translation.X;
        // CanvasTranslate.Y = faceFrame.Translation.Y;

        if (logToFile)
        {
            writeToFile(filename_txt.Text, faceFrame);
        }

        if (writeToOSC)
        {
            sendOsc(osc_channel_txt.Text, faceFrame, oscWriter);
        }
    }
}
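// writeToFile and sendOsc above are the author's helpers and are not shown.
// This is a hypothetical sketch of a writeToFile implementation that is
// compatible with the call site; the column layout is an assumption.
private void writeToFile(string path, FaceTrackFrame faceFrame)
{
    // Append one CSV row per tracked frame: timestamp, head rotation, and a
    // few Animation Unit coefficients.
    var au = faceFrame.GetAnimationUnitCoefficients();
    string line = string.Join(",",
        DateTime.Now.Ticks,
        faceFrame.Rotation.X, faceFrame.Rotation.Y, faceFrame.Rotation.Z,
        au[AnimationUnit.JawLower], au[AnimationUnit.LipStretcher],
        au[AnimationUnit.BrowLower], au[AnimationUnit.BrowRaiser]);
    System.IO.File.AppendAllText(path, line + Environment.NewLine);
}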
/// <summary>
/// Updates the face tracking information for this skeleton
/// </summary>
public void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
{
    // No touchy
    this.skeletonTrackingState = skeletonOfInterest.TrackingState;

    if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
    {
        return;
    }

    if (faceTracker == null)
    {
        faceTracker = new FaceTracker(kinectSensor);
    }

    frame = this.faceTracker.Track(
        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
    if (this.lastFaceTrackSucceeded)
    {
        if (faceTriangles == null)
        {
            faceTriangles = frame.GetTriangles();
        }

        this.facePoints = frame.GetProjected3DShape();

        // Touchy
        // Assign reference points
        this.absfacePoints = frame.Get3DShape();
        leftForehead = this.absfacePoints[FeaturePoint.TopLeftForehead];
        rightForehead = this.absfacePoints[FeaturePoint.TopRightForehead];
        jaw = this.absfacePoints[FeaturePoint.BottomOfChin];
        faceRotationX = frame.Rotation.X;
        faceRotationY = frame.Rotation.Y;
        faceRotationZ = frame.Rotation.Z;

        // Calculate reference points
        foreheadReferencePointX = (rightForehead.X - leftForehead.X) / 2;
        foreheadReferencePointY = (rightForehead.Y - leftForehead.Y) / 2;
        foreheadReferencePointZ = (rightForehead.Z - leftForehead.Z) / 2;

        // Set animation units
        AUCoeff = frame.GetAnimationUnitCoefficients();
        jawLowererAU = AUCoeff[AnimationUnit.JawLower];
        lipStretcherAU = AUCoeff[AnimationUnit.LipStretcher];
        browRaiserAU = AUCoeff[AnimationUnit.BrowRaiser];
        setJawData(jaw.Y, leftForehead.Y, rightForehead.Y, jawLowererAU, lipStretcherAU);

        rotations = new float[5];

        // set up MATLAB
        matlab = new MLApp.MLApp();
        matlab.Execute(@"cd C:\Users\Bala\Documents\MATLAB");
        result = null;

        // get rotation values
        rotations[0] = faceRotationX;
        rotations[1] = faceRotationY;
        rotations[2] = faceRotationZ;
        rotations[3] = jawLowererAU;
        rotations[4] = lipStretcherAU;

        // Set up GlovePIE
        OscPacket.LittleEndianByteOrder = false;
        IPEndPoint myapp = new IPEndPoint(IPAddress.Loopback, 1944);
        IPEndPoint glovepie = new IPEndPoint(IPAddress.Loopback, 1945);

        Console.WriteLine(browRaiserAU);

        matlab.Feval("nnW", 1, out result, rotations[0]);
        object[] resW = result as object[];
        nnoutput = (int)((float)resW[0] + 0.5f);
        if (nnoutput == 1)
        {
            commandtoSend = 1;
        }
        else
        {
            result = null;
            matlab.Feval("nnA", 1, out result, rotations[1]);
            object[] resA = result as object[];
            nnoutput = (int)((float)resA[0] + 0.5f);
            if (nnoutput == 1)
            {
                commandtoSend = 2;
            }
            else
            {
                result = null;
                matlab.Feval("nnS", 1, out result, rotations[0]);
                object[] resS = result as object[];
                nnoutput = (int)((float)resS[0] + 0.5f);
                if (nnoutput == 1)
                {
                    commandtoSend = 3;
                }
                else
                {
                    result = null;
                    matlab.Feval("nnd", 1, out result, rotations[1]);
                    object[] resD = result as object[];
                    nnoutput = (int)((float)resD[0] + 0.5f);
                    if (nnoutput == 1)
                    {
                        commandtoSend = 4;
                    }
                    else
                    {
                        result = null;
                        matlab.Feval("nnLC", 1, out result, rotations[2]);
                        object[] resLC = result as object[];
                        nnoutput = (int)((float)resLC[0] + 0.5f);
                        if (nnoutput == 1)
                        {
                            commandtoSend = 5;
                        }
                        else
                        {
                            result = null;
                            matlab.Feval("nnRC", 1, out result, rotations[2]);
                            object[] resRC = result as object[];
                            nnoutput = (int)((float)resRC[0] + 0.5f);
                            if (nnoutput == 1)
                            {
                                commandtoSend = 6;
                            }
                            else
                            {
                                result = null;
                                if (jawLowererAU > 0.7)
                                {
                                    commandtoSend = 7;
                                }
                                /*
                                 * matlab.Feval("nnSpace", 1, out result, rotations[3]);
                                 * object[] resSpace = result as object[];
                                 * nnoutput = (int)((float)resSpace[0] + 0.5f);
                                 * if (nnoutput == 1)
                                 * {
                                 *     commandtoSend = 7;
                                 * }*/
                                else
                                {
                                    result = null;
                                    if (browRaiserAU > 0.4)
                                    {
                                        commandtoSend = 8;
                                    }
                                    else
                                    {
                                        result = null;
                                        commandtoSend = 0;
                                    }
                                    /*result = null;
                                     * matlab.Feval("nnMiddle", 1, out result, lipStretcherAU);
                                     * object[] resMiddle = result as object[];
                                     * nnoutput = (int)((float)resMiddle[0] + 0.5f);
                                     * if (nnoutput == 1)
                                     * {
                                     *     commandtoSend = 8;
                                     * }
                                     * else
                                     * {
                                     *     result = null;
                                     *     commandtoSend = 0;
                                     * }*/
                                }
                            }
                        }
                    }
                }
            }
        }

        //Console.WriteLine("Iteration Complete");

        switch (commandtoSend)
        {
            case 0:
                msg = new OscMessage(myapp, "/move/w", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/a", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/s", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/d", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/lc", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/rc", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/middle", 0.0f); msg.Send(glovepie);
                break;
            case 1:
                msg = new OscMessage(myapp, "/move/w", 10.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/a", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/s", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/d", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/lc", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/rc", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/space", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/middle", 0.0f); msg.Send(glovepie);
                break;
            case 2:
                msg = new OscMessage(myapp, "/move/w", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/a", 10.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/s", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/d", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/lc", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/rc", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/space", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/middle", 0.0f); msg.Send(glovepie);
                break;
            case 3:
                msg = new OscMessage(myapp, "/move/w", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/a", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/s", 10.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/d", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/lc", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/rc", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/space", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/middle", 0.0f); msg.Send(glovepie);
                break;
            case 4:
                msg = new OscMessage(myapp, "/move/w", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/a", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/s", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/d", 10.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/lc", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/rc", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/space", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/middle", 0.0f); msg.Send(glovepie);
                break;
            case 5:
                msg = new OscMessage(myapp, "/move/w", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/a", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/s", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/d", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/lc", 10.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/rc", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/space", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/middle", 0.0f); msg.Send(glovepie);
                break;
            case 6:
                msg = new OscMessage(myapp, "/move/w", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/a", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/s", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/d", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/lc", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/rc", 10.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/space", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/middle", 0.0f); msg.Send(glovepie);
                break;
            case 7:
                msg = new OscMessage(myapp, "/move/w", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/a", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/s", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/d", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/lc", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/rc", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/space", 10.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/middle", 0.0f); msg.Send(glovepie);
                break;
            case 8:
                msg = new OscMessage(myapp, "/move/w", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/a", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/s", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/d", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/lc", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/rc", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/space", 0.0f); msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/middle", 10.0f); msg.Send(glovepie);
                break;
        }
    }
}