/// <summary>
/// Appends one sample row (label, epoch timestamp, and every 3D face shape
/// point) to output\<fileName>, creating the directory and a one-time header
/// row if the file does not exist yet.
/// </summary>
/// <param name="fileName">File name to write inside the output directory.</param>
/// <param name="faceFrame">Tracked frame whose 3D shape points are logged.</param>
public void writeToFile(string fileName, FaceTrackFrame faceFrame)
{
    string path = Directory.GetCurrentDirectory() + @"\output";
    string target = path + "\\" + fileName;

    if (!Directory.Exists(path))
    {
        Directory.CreateDirectory(path);
    }

    if (!File.Exists(target))
    {
        // New file: write a header naming each feature point's X/Y/Z column.
        using (StreamWriter file = new StreamWriter(target, true))
        {
            string heading = "Label ";
            heading += "TimeStamp ";
            foreach (var fp in featurePoints)
            {
                heading += fp.ToString() + ".X ";
                heading += fp.ToString() + ".Y ";
                heading += fp.ToString() + ".Z ";
            }
            // BUG FIX: strings are immutable — the original called
            // heading.TrimEnd(' ') and discarded the result, so the header
            // kept its trailing space. Assign the trimmed value back.
            heading = heading.TrimEnd(' ');
            file.WriteLine(heading);
        }
    }

    using (StreamWriter file = new StreamWriter(target, true))
    {
        string data_string = label_txt.Text + " ";

        // Timestamp: milliseconds since the Unix epoch (UTC).
        TimeSpan t = DateTime.UtcNow - new DateTime(1970, 1, 1);
        String time = ((long)t.TotalMilliseconds).ToString();
        data_string += time + " ";

        // One "X Y Z " triple per tracked 3D shape point.
        // (The original fetched Get3DShape() twice and built an unused
        // float[][] copy; a single enumeration is sufficient.)
        foreach (var v in faceFrame.Get3DShape())
        {
            data_string += v.X + " ";
            data_string += v.Y + " ";
            data_string += v.Z + " ";
        }
        file.WriteLine(data_string);
    }
}
/// <summary>
/// VVVV plugin evaluation: copies face-tracking results from the input frame
/// spread to the output spreads (success flag, translation, rotation,
/// projected 2D points, and 3D points).
/// </summary>
/// <param name="SpreadMax">Spread count supplied by the host (unused directly).</param>
public void Evaluate(int SpreadMax)
{
    // The mesh index buffer is static, so it only needs to be pushed once.
    if (this.first)
    {
        this.FOutIndices.AssignFrom(KinectRuntime.FACE_INDICES);
        this.first = false;
    }

    if (!this.FInFrame.PluginIO.IsConnected)
    {
        // No input connected: clear every output spread.
        this.FOutPosition.SliceCount = 0;
        this.FOutPPTs.SliceCount = 0;
        this.FOutPts.SliceCount = 0;
        this.FOutRotation.SliceCount = 0;
        this.FOutOK.SliceCount = 0;
        return;
    }

    if (!this.FInFrame.IsChanged)
    {
        // Input unchanged since last evaluation; outputs are still valid.
        return;
    }

    int sliceTotal = FInFrame.SliceCount;
    this.FOutOK.SliceCount = sliceTotal;
    this.FOutPosition.SliceCount = sliceTotal;
    this.FOutRotation.SliceCount = sliceTotal;
    this.FOutPts.SliceCount = sliceTotal;
    this.FOutPPTs.SliceCount = sliceTotal;

    for (int slice = 0; slice < this.FInFrame.SliceCount; slice++)
    {
        FaceTrackFrame face = this.FInFrame[slice];

        this.FOutOK[slice] = face.TrackSuccessful;
        this.FOutPosition[slice] = new Vector3(face.Translation.X, face.Translation.Y, face.Translation.Z);
        // Convert rotation into the host's angle units via INVTWOPI.
        this.FOutRotation[slice] = new Vector3(face.Rotation.X, face.Rotation.Y, face.Rotation.Z) * INVTWOPI;

        EnumIndexableCollection <FeaturePoint, PointF> projected = face.GetProjected3DShape();
        EnumIndexableCollection <FeaturePoint, Vector3DF> shape = face.Get3DShape();

        this.FOutPPTs[slice].SliceCount = projected.Count;
        this.FOutPts[slice].SliceCount = shape.Count;

        for (int i = 0; i < projected.Count; i++)
        {
            this.FOutPPTs[slice][i] = new Vector2(projected[i].X, projected[i].Y);
            this.FOutPts[slice][i] = new Vector3(shape[i].X, shape[i].Y, shape[i].Z);
        }
    }
}
/// <summary>
/// Updates the face tracking information for this skeleton.
/// </summary>
internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
{
    this.skeletonTrackingState = skeletonOfInterest.TrackingState;

    // Only a fully tracked skeleton can drive the face tracker.
    if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
    {
        return;
    }

    if (this.faceTracker == null)
    {
        try
        {
            this.faceTracker = new FaceTracker(kinectSensor);
        }
        catch (InvalidOperationException)
        {
            // During some shutdown scenarios the FaceTracker cannot be
            // instantiated; swallow the exception and skip face tracking.
            Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
            this.faceTracker = null;
        }
    }

    if (this.faceTracker == null)
    {
        return;
    }

    FaceTrackFrame trackedFrame = this.faceTracker.Track(
        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

    this.lastFaceTrackSucceeded = trackedFrame.TrackSuccessful;
    if (!this.lastFaceTrackSucceeded)
    {
        return;
    }

    if (faceTriangles == null)
    {
        // The triangle topology never changes, so fetch it only once.
        faceTriangles = trackedFrame.GetTriangles();
    }

    this.facePoints = trackedFrame.GetProjected3DShape();
    this.test = trackedFrame.Get3DShape();

    // Head pose angles taken from the frame's rotation vector.
    pitchVal = trackedFrame.Rotation.X;
    rollVal = trackedFrame.Rotation.Z;
    yawVal = trackedFrame.Rotation.Y;
}
/// <summary>
/// Copies the latest tracking results (shape, rectangle, translation,
/// rotation) from the given frame when tracking succeeded.
/// </summary>
/// <param name="frame">Face tracking frame to read from.</param>
protected void UpdateFrame(FaceTrackFrame frame)
{
    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
    if (!this.lastFaceTrackSucceeded)
    {
        return;
    }

    // Triangle topology is constant across frames; fetch it only once.
    faceTriangles = faceTriangles ?? frame.GetTriangles();

    this.facePoints = frame.Get3DShape();
    this.FaceRect = frame.FaceRect;
    this.FaceTranslation = frame.Translation;
    this.FaceRotation = frame.Rotation;
}
/// <summary>
/// Refreshes the WPF 3D face mesh from the current tracking frame: builds the
/// static topology on the first call, then updates vertex positions and
/// texture coordinates every frame.
/// </summary>
/// <param name="faceTrackingFrame">Frame providing 3D and projected 2D shape points.</param>
private void UpdateMesh(FaceTrackFrame faceTrackingFrame)
{
    EnumIndexableCollection <FeaturePoint, Vector3DF> vertices = faceTrackingFrame.Get3DShape();
    EnumIndexableCollection <FeaturePoint, PointF> projections = faceTrackingFrame.GetProjected3DShape();

    if (this.triangleIndices == null)
    {
        // One-time setup: topology and pre-sized vertex buffers.
        this.triangleIndices = faceTrackingFrame.GetTriangles();
        var indices = new Int32Collection(this.triangleIndices.Length * 3);

        // Indices are added in reverse order (Third, Second, First).
        foreach (FaceTriangle tri in this.triangleIndices)
        {
            indices.Add(tri.Third);
            indices.Add(tri.Second);
            indices.Add(tri.First);
        }

        this.theGeometry.TriangleIndices = indices;
        this.theGeometry.Normals = null; // Let WPF3D calculate these.
        this.theGeometry.Positions = new Point3DCollection(vertices.Count);
        this.theGeometry.TextureCoordinates = new PointCollection(projections.Count);

        // Placeholder entries, overwritten by the per-frame loop below.
        for (int i = 0; i < vertices.Count; i++)
        {
            this.theGeometry.Positions.Add(new Point3D());
            this.theGeometry.TextureCoordinates.Add(new Point());
        }
    }

    // Per-frame update: vertex positions (Z negated) and texture coordinates
    // normalized against the color bitmap's pixel dimensions.
    for (int i = 0; i < vertices.Count; i++)
    {
        Vector3DF vertex = vertices[i];
        this.theGeometry.Positions[i] = new Point3D(vertex.X, vertex.Y, -vertex.Z);

        PointF uv = projections[i];
        this.theGeometry.TextureCoordinates[i] =
            new Point(
                uv.X / (double)this.colorImageWritableBitmap.PixelWidth,
                uv.Y / (double)this.colorImageWritableBitmap.PixelHeight);
    }
}
/// <summary>
/// Saves the currently tracked face's 3D shape points to
/// C:\Kex\data\&lt;name&gt;&lt;number&gt;.txt, one "X , Y , Z" line per point.
/// At most saves while number &lt;= 3; increments number on each save.
/// </summary>
private void saveFaceModel()
{
    // One-shot save request; clear the flag immediately.
    this.saveModel = false;

    // Locate the skeleton we are tracking, if it is still visible.
    Skeleton skeletonOfInterest = this.skeletonData.FirstOrDefault(
        skeleton => skeleton.TrackingId == this.trackingId
            && skeleton.TrackingState != SkeletonTrackingState.NotTracked);

    if (skeletonOfInterest == null || skeletonOfInterest.TrackingState != SkeletonTrackingState.Tracked)
    {
        return;
    }

    FaceTrackFrame faceTrackFrame = this.faceTracker.Track(
        this.colorImageFormat,
        this.colorImage,
        this.depthImageFormat,
        this.depthImage,
        skeletonOfInterest);

    if (faceTrackFrame.TrackSuccessful && number <= 3)
    {
        EnumIndexableCollection <FeaturePoint, Vector3DF> shapePoints = faceTrackFrame.Get3DShape();
        MessageBox.Show("saved model " + number + " for " + name);
        //saveColorImage(name);

        // BUG FIX: the original first wrote the name with File.WriteAllText and
        // then immediately re-created the same file with a truncating
        // StreamWriter, discarding that write. The dead call is removed.
        using (System.IO.StreamWriter file = new System.IO.StreamWriter(@"C:\Kex\data\" + name + number + ".txt"))
        {
            foreach (Vector3DF fp in shapePoints)
            {
                file.WriteLine("" + fp.X + " , " + fp.Y + " , " + fp.Z);
            }
        }
        number++;
    }
}
/// <summary>
/// Sends every 3D face shape point as an OSC element "/kinect&lt;i&gt;" carrying
/// (X, Y, Z), bundled into a single packet over the given UDP writer.
/// </summary>
/// <param name="channel">Unused; kept for interface compatibility.
/// NOTE(review): addresses are hard-coded to "/kinect&lt;i&gt;" — confirm whether
/// this parameter was meant to prefix them.</param>
/// <param name="faceFrame">Tracked frame whose 3D shape is transmitted.</param>
/// <param name="oscWriter">Destination UDP writer.</param>
public void sendOsc(string channel, FaceTrackFrame faceFrame, UdpWriter oscWriter)
{
    // DEAD CODE REMOVED: the original computed an epoch-millisecond string and
    // a float[][] copy of the shape, neither of which was ever used.
    var shape = faceFrame.Get3DShape();

    OscBundle bundle = new OscBundle(0);
    int i = 0;
    foreach (var point in shape)
    {
        bundle.AddElement(new OscElement("/kinect" + i++, point.X, point.Y, point.Z));
    }
    oscWriter.Send(bundle);
}
/// <summary>
/// Per-frame entry point: tracks the face, samples 3D shape points on a
/// background thread at a fixed rate, and — once 5 sampling rounds have
/// completed — builds four distance histograms, writes them to a CSV file,
/// and runs an external Weka classifier on the result.
/// </summary>
internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
{
    // Start the elapsed-time timer on the very first frame only.
    if (!timerStarted)
    {
        timer.Start();
        timerStarted = true;
    }

    //increment our frames
    numberOfFrames++;

    this.skeletonTrackingState = skeletonOfInterest.TrackingState;

    if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
    {
        // nothing to do with an untracked skeleton.
        return;
    }

    if (this.faceTracker == null)
    {
        try
        {
            this.faceTracker = new FaceTracker(kinectSensor);
        }
        catch (InvalidOperationException)
        {
            // During some shutdown scenarios the FaceTracker
            // is unable to be instantiated. Catch that exception
            // and don't track a face.
            Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
            this.faceTracker = null;
        }
    }

    if (this.faceTracker != null)
    {
        FaceTrackFrame frame = this.faceTracker.Track(
            colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

        this.lastFaceTrackSucceeded = frame.TrackSuccessful;
        if (this.lastFaceTrackSucceeded)
        {
            if (faceTriangles == null)
            {
                // only need to get this once. It doesn't change.
                faceTriangles = frame.GetTriangles();
            }

            this.facePoints = frame.GetProjected3DShape();

            /*if ()
             * {
             * Debug.WriteLine("hit " + (frameIter * sampleRate) + " frames in " + (timer.Elapsed) + " seconds");
             * frameIter++;
             * }*/

            //Also grab our points
            EnumIndexableCollection <FeaturePoint, Vector3DF> facePoints3D = frame.Get3DShape();
            int index = 0;

            // Sample a snapshot of the face every `sampleRate` frames, at most
            // 5 times total (frameIter also counts the final histogram pass).
            if (numberOfFrames > frameIter * sampleRate && frameIter < 5) //only grab 4 samples over our given sample rate
            {
                //Create a new thread so we don't make the visual thread throw up all over the place
                // NOTE(review): frameIter and the distance lists are mutated on
                // this background thread while read on the Kinect callback
                // thread — there is no synchronization; confirm this race is benign.
                new Thread(() =>
                {
                    Thread.CurrentThread.IsBackground = true;
                    List <Tuple <float, float, float> > myPoints = new List <Tuple <float, float, float> >();
                    foreach (Vector3DF vector in facePoints3D)
                    {
                        //csv.Append(string.Format("( ({1}, {2}, {3}){4}",vector.X, vector.Y, vector.Z, Environment.NewLine));
                        myPoints.Add(new Tuple <float, float, float>(vector.X, vector.Y, vector.Z));
                        index++;
                    }
                    calculateDistances(myPoints);
                    frameIter++;
                }).Start();
                //once = true;
            }

            // Once all sampling rounds are done, build the histograms and
            // classify. frameIter is bumped past 5 at the end so this runs once.
            if (frameIter == 5)
            {
                SetStatusText("Generating histograms...");
                Console.WriteLine("We are ready to sample");

                // Bin each distance into one of 65 buckets (index 64 is hit
                // exactly when distance == max, hence the size-65 arrays).
                foreach (float distance in sampleOneDistances)
                {
                    int sampleOneIndex = (int)Math.Floor(64 * distance / sampleOneMaxDistance);
                    sampleOneHistogram[sampleOneIndex]++;
                }
                foreach (float distance in sampleTwoDistances)
                {
                    sampleTwoHistogram[(int)Math.Floor(64 * distance / sampleTwoMaxDistance)]++;
                }
                foreach (float distance in sampleThreeDistances)
                {
                    sampleThreeHistogram[(int)Math.Floor(64 * distance / sampleThreeMaxDistance)]++;
                }
                foreach (float distance in sampleFourDistances)
                {
                    sampleFourHistogram[(int)Math.Floor(64 * distance / sampleFourMaxDistance)]++;
                }

                //Go through histogram and divide by distances
                //Get
                // NOTE(review): the histograms hold ints (see the foreach below),
                // so this division truncates — most bins will become 0 unless a
                // bin count exceeds the distance count. Confirm this is intended.
                for (int i = 0; i < sampleOneHistogram.Length; i++)
                {
                    sampleOneHistogram[i] = sampleOneHistogram[i] / sampleOneDistances.Count;
                }
                for (int i = 0; i < sampleTwoHistogram.Length; i++)
                {
                    sampleTwoHistogram[i] = sampleTwoHistogram[i] / sampleTwoDistances.Count;
                }
                for (int i = 0; i < sampleThreeHistogram.Length; i++)
                {
                    sampleThreeHistogram[i] = sampleThreeHistogram[i] / sampleThreeDistances.Count;
                }
                for (int i = 0; i < sampleFourHistogram.Length; i++)
                {
                    sampleFourHistogram[i] = sampleFourHistogram[i] / sampleFourDistances.Count;
                }

                int iter = 0;
                foreach (int count in sampleTwoHistogram)//can iterate through any histogram, they're all of size 65
                {
                    Console.WriteLine("Count for hist1/2/3/4[" + iter + "] is " + count + "/" + sampleOneHistogram[iter] + "/" + sampleThreeHistogram[iter] + "/" + sampleFourHistogram[iter]);
                    iter++;
                }

                //Write our histograms to a csv file
                String[] sampleOneHistString = Array.ConvertAll(sampleOneHistogram, x => x.ToString());
                using (System.IO.StreamWriter file = new System.IO.StreamWriter(testFilePath))
                {
                    // Header row: column indices 1..65, then one row per histogram.
                    file.Write(string.Join(",", Enumerable.Range(1, 65).ToArray()) + Environment.NewLine);
                    file.Write(string.Join(",", sampleOneHistString));
                    file.Write(Environment.NewLine);
                    file.Write(string.Join(",", Array.ConvertAll(sampleTwoHistogram, x => x.ToString())));
                    file.Write(Environment.NewLine);
                    file.Write(string.Join(",", Array.ConvertAll(sampleThreeHistogram, x => x.ToString())));
                    file.Write(Environment.NewLine);
                    file.Write(string.Join(",", Array.ConvertAll(sampleFourHistogram, x => x.ToString())));
                }

                //pass that data file to jar
                // Launches the Weka classifier synchronously and captures stdout
                // as the prediction. NOTE(review): paths are machine-specific and
                // WaitForExit after ReadToEnd blocks this Kinect callback thread.
                String jarPath = "C:\\Users\\Datalab\\Documents\\GitHub\\WekaClassifier\\jar\\wekaClassifier.jar";
                System.Diagnostics.Process clientProcess = new Process();
                String jarargs = "C:\\Users\\Datalab\\Documents\\GitHub\\WekaClassifier\\data\\training_data.arff C:\\Users\\Datalab\\Documents\\GitHub\\WekaClassifier\\data\\testFormat.dat";
                clientProcess.StartInfo.FileName = "java";
                clientProcess.StartInfo.Arguments = "-jar " + jarPath + " " + jarargs;
                clientProcess.StartInfo.RedirectStandardOutput = true;
                clientProcess.StartInfo.UseShellExecute = false;
                clientProcess.Start();
                String output = clientProcess.StandardOutput.ReadToEnd();
                Console.WriteLine(output);
                clientProcess.WaitForExit();
                int code = clientProcess.ExitCode;

                //write to dat file with 4 histograms averaged
                frameIter++; //only do this once (will make conditional evaluate to false. Is this clean and clear? Not really? Do I care? Not particularly. At least it's documented.
                ftNumPeople++;
                SetPeopleText("People tracked : " + ftNumPeople);
                SetStatusText("Status: waiting....");
                SetPredictionText("Guess: " + output);
            }
        }
    }
}
/// <summary>
/// Track a face and update the states.
/// </summary>
/// <param name="sensor">Instance of KinectSensor</param>
/// <param name="colorImageFormat">Format of the colorImage array</param>
/// <param name="colorImage">Input color image frame retrieved from Kinect sensor</param>
/// <param name="depthImageFormat">Format of the depthImage array</param>
/// <param name="depthImage">Input depth image frame retrieved from Kinect sensor</param>
/// <param name="skeletonOfInterest">Input skeleton to track. Head and shoulder joints in the skeleton are used to calculate the head vector</param>
/// <param name="computedBoundingBox">Whether compute the bounding box of the face mask</param>
public void TrackFace(
    KinectSensor sensor,
    ColorImageFormat colorImageFormat,
    byte[] colorImage,
    DepthImageFormat depthImageFormat,
    short[] depthImage,
    Skeleton skeletonOfInterest,
    bool computedBoundingBox)
{
    // Assume failure until the tracker reports success further down.
    this.faceInfo.TrackValid = false;

    if (null == this.faceTracker)
    {
        try
        {
            this.faceTracker = new FaceTracker(sensor);
        }
        catch (InvalidOperationException)
        {
            // Fail silently
            this.faceTracker = null;
            return;
        }
    }

    // Cache the color image width for later use.
    Size colorImageSize = Helper.GetImageSize(colorImageFormat);
    this.colorWidth = colorImageSize.Width;

    bool readyToTrack = this.faceTracker != null
        && skeletonOfInterest != null
        && skeletonOfInterest.TrackingState == SkeletonTrackingState.Tracked;

    if (readyToTrack)
    {
        FaceTrackFrame faceTrackFrame = this.faceTracker.Track(
            colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

        this.faceInfo.TrackValid = faceTrackFrame.TrackSuccessful;
        if (this.faceInfo.TrackValid)
        {
            this.faceInfo.FaceRect = faceTrackFrame.FaceRect;
            this.faceInfo.Rotation = faceTrackFrame.Rotation;
            this.faceInfo.Translation = faceTrackFrame.Translation;

            // Get the bounding box of face mask
            if (computedBoundingBox)
            {
                var shapePoints = faceTrackFrame.Get3DShape();

                // Reset the minimum and maximum points of bounding box,
                // then grow the axis-aligned box to include every shape point.
                this.ResetBoundingBox();
                foreach (var point in shapePoints)
                {
                    if (point.X < this.minimumPoint.X) { this.minimumPoint.X = point.X; }
                    if (point.X > this.maximumPoint.X) { this.maximumPoint.X = point.X; }
                    if (point.Y < this.minimumPoint.Y) { this.minimumPoint.Y = point.Y; }
                    if (point.Y > this.maximumPoint.Y) { this.maximumPoint.Y = point.Y; }
                    if (point.Z < this.minimumPoint.Z) { this.minimumPoint.Z = point.Z; }
                    if (point.Z > this.maximumPoint.Z) { this.maximumPoint.Z = point.Z; }
                }
            }
        }
    }

    // To render the face rectangle
    Dispatcher.BeginInvoke((Action)(() => this.InvalidateVisual()));
}
/// <summary>
/// Refreshes the WPF 3D face mesh from the tracking frame (building the
/// static topology on first use) and, when every projected point stays within
/// the central region of the color image (X &lt;= 0.6, Y &lt;= 0.75 normalized),
/// publishes a new FaceMesh snapshot.
/// </summary>
/// <param name="faceTrackingFrame">Frame providing 3D and projected 2D shape points.</param>
private void UpdateMesh(FaceTrackFrame faceTrackingFrame)
{
    //Console.Out.WriteLine(" ###################### In UpdateMesh ############################# ");
    bool faceInCentre = true;

    EnumIndexableCollection <FeaturePoint, Vector3DF> vertices = faceTrackingFrame.Get3DShape();
    EnumIndexableCollection <FeaturePoint, PointF> projections = faceTrackingFrame.GetProjected3DShape();

    if (this.triangleIndices == null)
    {
        // One-time setup: static topology (indices added in reverse order:
        // Third, Second, First) and pre-sized vertex buffers.
        this.triangleIndices = faceTrackingFrame.GetTriangles();
        var indices = new Int32Collection(this.triangleIndices.Length * 3);
        foreach (FaceTriangle tri in this.triangleIndices)
        {
            indices.Add(tri.Third);
            indices.Add(tri.Second);
            indices.Add(tri.First);
        }

        this.theGeometry.TriangleIndices = indices;
        this.theGeometry.Normals = null; // Let WPF3D calculate these.
        this.theGeometry.Positions = new Point3DCollection(vertices.Count);
        this.theGeometry.TextureCoordinates = new PointCollection(projections.Count);

        // Placeholder entries, overwritten by the per-frame loop below.
        for (int i = 0; i < vertices.Count; i++)
        {
            this.theGeometry.Positions.Add(new Point3D());
            this.theGeometry.TextureCoordinates.Add(new Point());
        }
    }

    // Per-frame update: vertex positions (Z negated) and texture coordinates
    // normalized against the color bitmap's pixel dimensions.
    for (int i = 0; i < vertices.Count; i++)
    {
        Vector3DF vertex = vertices[i];
        this.theGeometry.Positions[i] = new Point3D(vertex.X, vertex.Y, -vertex.Z);

        PointF uv = projections[i];
        double normX = uv.X / (double)this.colorImageWritableBitmap.PixelWidth;
        double normY = uv.Y / (double)this.colorImageWritableBitmap.PixelHeight;
        this.theGeometry.TextureCoordinates[i] = new Point(normX, normY);

        // Console.Out.WriteLine("X = " + normX + "Y = " + normY);
        if (normX > .6 || normY > .75)
        {
            faceInCentre = false;
        }
    }

    if (faceInCentre)
    {
        // copyFaceImage();
        FaceMesh tempMeshData = new FaceMesh();
        tempMeshData.FaceViewport = viewport3d;
        FaceMeshData = tempMeshData;
    }
}
/// <summary>
/// VVVV plugin evaluation: copies face-tracking results to the output spreads
/// (success flag, translation, rotation, projected 2D points, 3D points) and
/// computes smoothed per-vertex normals from the static face index buffer.
/// </summary>
/// <param name="SpreadMax">Spread count supplied by the host (unused directly).</param>
public void Evaluate(int SpreadMax)
{
    //Output static indices all the time
    if (this.first)
    {
        this.FOutIndices.AssignFrom(KinectRuntime.FACE_INDICES);
        this.first = false;
    }

    if (this.FInFrame.IsConnected)
    {
        // Only recompute when the input spread actually changed.
        if (this.FInFrame.IsChanged)
        {
            this.FOutOK.SliceCount = FInFrame.SliceCount;
            this.FOutPosition.SliceCount = FInFrame.SliceCount;
            this.FOutRotation.SliceCount = FInFrame.SliceCount;
            this.FOutPts.SliceCount = FInFrame.SliceCount;
            this.FOutPPTs.SliceCount = FInFrame.SliceCount;

            for (int cnt = 0; cnt < this.FInFrame.SliceCount; cnt++)
            {
                FaceTrackFrame frame = this.FInFrame[cnt];
                this.FOutOK[cnt] = frame.TrackSuccessful;
                this.FOutPosition[cnt] = new Vector3(frame.Translation.X, frame.Translation.Y, frame.Translation.Z);
                // Convert degrees to the host's cyclic angle unit.
                this.FOutRotation[cnt] = new Vector3(frame.Rotation.X, frame.Rotation.Y, frame.Rotation.Z) * (float)VMath.DegToCyc;

                EnumIndexableCollection <FeaturePoint, PointF> pp = frame.GetProjected3DShape();
                EnumIndexableCollection <FeaturePoint, Vector3DF> p = frame.Get3DShape();

                this.FOutPPTs[cnt].SliceCount = pp.Count;
                this.FOutPts[cnt].SliceCount = p.Count;
                this.FOutNormals[cnt].SliceCount = p.Count;

                //Compute smoothed normals
                // Each triangle's (unnormalized) cross product is accumulated
                // onto its three vertices; unnormalized contributions weight
                // larger triangles more heavily. Normalization happens in the
                // copy loop below.
                Vector3[] norms = new Vector3[p.Count];
                int[] inds = KinectRuntime.FACE_INDICES;
                int tricount = inds.Length / 3;
                for (int j = 0; j < tricount; j++)
                {
                    int i1 = inds[j * 3];
                    int i2 = inds[j * 3 + 1];
                    int i3 = inds[j * 3 + 2];
                    Vector3 v1 = p[i1].SlimVector();
                    Vector3 v2 = p[i2].SlimVector();
                    Vector3 v3 = p[i3].SlimVector();

                    // Edge orientation (v2-v1, v1-v3) and the Cross argument
                    // order determine the normal's sign — do not reorder.
                    Vector3 faceEdgeA = v2 - v1;
                    Vector3 faceEdgeB = v1 - v3;
                    Vector3 norm = Vector3.Cross(faceEdgeB, faceEdgeA);
                    norms[i1] += norm;
                    norms[i2] += norm;
                    norms[i3] += norm;
                }

                // Copy out points and the normalized smoothed normals.
                for (int i = 0; i < pp.Count; i++)
                {
                    this.FOutPPTs[cnt][i] = new Vector2(pp[i].X, pp[i].Y);
                    this.FOutPts[cnt][i] = new Vector3(p[i].X, p[i].Y, p[i].Z);
                    this.FOutNormals[cnt][i] = Vector3.Normalize(norms[i]);
                }

                /*FaceTriangle[] d = frame.GetTriangles();
                 * this.FOutIndices.SliceCount = d.Length * 3;
                 * for (int i = 0; i < d.Length; i++)
                 * {
                 * this.FOutIndices[i * 3] = d[i].First;
                 * this.FOutIndices[i * 3 + 1] = d[i].Second;
                 * this.FOutIndices[i * 3 + 2] = d[i].Third;
                 * }*/
            }
        }
    }
    else
    {
        // No input connected: clear every output spread.
        this.FOutPosition.SliceCount = 0;
        this.FOutPPTs.SliceCount = 0;
        this.FOutPts.SliceCount = 0;
        this.FOutRotation.SliceCount = 0;
        this.FOutOK.SliceCount = 0;
    }
}
/// <summary>
/// Updates the face tracking information for this skeleton, raises the
/// faceUpdated event when tracking data is available, and dispatches to any
/// enabled recognizers.
/// </summary>
internal void OnFrameReady(KinectSensor sensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage)
{
    if (this.faceTracker == null)
    {
        try
        {
            this.faceTracker = new FaceTracker(sensor);
        }
        catch (InvalidOperationException)
        {
            // During some shutdown scenarios the FaceTracker cannot be
            // instantiated; swallow the exception and skip face tracking.
            Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
            this.faceTracker = null;
        }
    }

    if (this.faceTracker == null)
    {
        return;
    }

    FaceTrackFrame trackedFrame = this.faceTracker.Track(colorImageFormat, colorImage, depthImageFormat, depthImage);

    this.lastFaceTrackSucceeded = trackedFrame.TrackSuccessful;
    if (this.lastFaceTrackSucceeded)
    {
        if (faceTriangles == null)
        {
            // Triangle topology never changes; fetch it only once.
            faceTriangles = trackedFrame.GetTriangles();
        }
        this.facePoints = trackedFrame.GetProjected3DShape();
        this.facePoints3D = trackedFrame.Get3DShape(); //X-man modification
        this.AU = trackedFrame.GetAnimationUnitCoefficients();
    }

    // Notify listeners once at least one full set of tracking data exists
    // (possibly from an earlier successful frame).
    if (faceTriangles != null && facePoints != null)
    {
        faceUpdated(this, new FaceTrackerEventArgs(facePoints, faceTriangles));
    }

    // Dispatch to whichever recognizers are currently enabled, in order.
    if (FaceRecognition.compare)
    {
        FaceRecognition.recognizer(this);
    }
    if (mouthOpened.detect)
    {
        mouthOpened.mouthRecognizer();
    }
    if (mouthShut.detect)
    {
        mouthShut.mouth2Recognizer();
    }
    if (lookingDirection.detect)
    {
        lookingDirection.lookRecognizer();
    }
    if (pupilRight.detect)
    {
        pupilRight.pupilRecognizer();
    }
}
/// <summary>
/// Allows the game to run logic such as updating the world,
/// checking for collisions, gathering input, and playing audio.
/// Tracks the nearest skeleton's face and derives head pose (yaw/pitch/roll)
/// and a screen-space vector from feature point 4.
/// </summary>
/// <param name="gameTime">Provides a snapshot of timing values.</param>
protected override void Update(GameTime gameTime)
{
    // Allows the game to exit
    if (GamePad.GetState(PlayerIndex.One).Buttons.Back == ButtonState.Pressed)
    {
        this.Exit();
    }

    colorData = colorStream.ColorData;
    depthData = depthStream.DepthData;
    nearestSkeleton = skeletonStream.Skel;

    if (nearestSkeleton != null && nearestSkeleton.TrackingState == SkeletonTrackingState.Tracked)
    {
        if (this.faceTracker == null)
        {
            try
            {
                this.faceTracker = new FaceTracker(this.chooser.Sensor);
            }
            catch (InvalidOperationException)
            {
                // FaceTracker construction can fail (e.g. during shutdown);
                // skip face tracking until it succeeds.
                this.faceTracker = null;
            }
        }

        if (this.faceTracker != null)
        {
            FaceTrackFrame faceTrackFrame = this.faceTracker.Track(
                colorImageFormat, colorData, depthImageFormat, depthData, nearestSkeleton);

            if (faceTrackFrame.TrackSuccessful)
            {
                EnumIndexableCollection <FeaturePoint, Vector3DF> shapePoints = faceTrackFrame.Get3DShape();
                EnumIndexableCollection <FeaturePoint, PointF> projectedShapePoints = faceTrackFrame.GetProjected3DShape();

                // Head pose; yaw and pitch are negated for this scene's
                // coordinate convention.
                yaw = -MathHelper.ToRadians(faceTrackFrame.Rotation.Y);
                pitch = -MathHelper.ToRadians(faceTrackFrame.Rotation.X);
                roll = MathHelper.ToRadians(faceTrackFrame.Rotation.Z);

                // Perspective-project shape point 4 into screen space.
                // NOTE(review): 9.3 and 0.95 are empirically tuned scale
                // factors — confirm against the render setup before changing.
                vector.X = 9.3f * (shapePoints[4].X / shapePoints[4].Z);
                vector.Y = 9.3f * (shapePoints[4].Y / shapePoints[4].Z) * 0.95f;
                vector.Z = 0;

                scale = 0.4f;

                Window.Title = shapePoints[4].X.ToString() + " " + shapePoints[4].Y.ToString() + " " + shapePoints[4].Z.ToString();
            }
            else
            {
                // Hide the face-driven visual while tracking is lost.
                scale = 0;
            }
        }
    }

    // DEAD CODE REMOVED: an empty "if (gameTime.TotalGameTime.Seconds > 3) { }"
    // block did nothing and has been deleted.

    base.Update(gameTime);
}
/// <summary>
/// Updates the face tracking information for this skeleton, classifies the
/// head pose / animation units via a cascade of MATLAB neural nets, and sends
/// the resulting movement command (0-8) to GlovePIE as OSC messages.
/// </summary>
///
public void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
{
    //No Touchy
    this.skeletonTrackingState = skeletonOfInterest.TrackingState;

    if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
    {
        return;
    }

    // NOTE(review): unlike the sibling handlers, this constructor call is not
    // wrapped in try/catch — an InvalidOperationException during shutdown
    // would propagate. Confirm whether that is acceptable here.
    if (faceTracker == null)
    {
        faceTracker = new FaceTracker(kinectSensor);
    }

    frame = this.faceTracker.Track(
        colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

    this.lastFaceTrackSucceeded = frame.TrackSuccessful;
    if (this.lastFaceTrackSucceeded)
    {
        if (faceTriangles == null)
        {
            // Topology is static; fetch it once.
            faceTriangles = frame.GetTriangles();
        }
        this.facePoints = frame.GetProjected3DShape();

        //Touchy
        //Assign Reference points
        this.absfacePoints = frame.Get3DShape();
        leftForehead = this.absfacePoints[FeaturePoint.TopLeftForehead];
        rightForehead = this.absfacePoints[FeaturePoint.TopRightForehead];
        jaw = this.absfacePoints[FeaturePoint.BottomOfChin];
        faceRotationX = frame.Rotation.X;
        faceRotationY = frame.Rotation.Y;
        faceRotationZ = frame.Rotation.Z;

        //Calculate Reference Points
        foreheadReferencePointX = ((rightForehead.X - leftForehead.X) / 2);
        foreheadReferencePointY = ((rightForehead.Y - leftForehead.Y) / 2);
        foreheadReferencePointZ = ((rightForehead.Z - leftForehead.Z) / 2);

        //Set Animation Units
        AUCoeff = frame.GetAnimationUnitCoefficients();
        jawLowererAU = AUCoeff[AnimationUnit.JawLower];
        lipStretcherAU = AUCoeff[AnimationUnit.LipStretcher];
        browRaiserAU = AUCoeff[AnimationUnit.BrowRaiser];
        setJawData(jaw.Y, leftForehead.Y, rightForehead.Y, jawLowererAU, lipStretcherAU);

        rotations = new float[5];

        //set up matlab
        // NOTE(review): a new MATLAB COM instance is created on every tracked
        // frame — this is very expensive; consider hoisting to a field.
        matlab = new MLApp.MLApp();
        matlab.Execute(@"cd C:\Users\Bala\Documents\MATLAB");
        result = null;

        //get rotation values
        // Inputs to the classifier cascade: X/Y/Z head rotation plus the two
        // mouth animation units.
        rotations[0] = faceRotationX;
        rotations[1] = faceRotationY;
        rotations[2] = faceRotationZ;
        rotations[3] = jawLowererAU;
        rotations[4] = lipStretcherAU;

        //Set up GlovePie
        OscPacket.LittleEndianByteOrder = false;
        IPEndPoint myapp = new IPEndPoint(IPAddress.Loopback, 1944);
        IPEndPoint glovepie = new IPEndPoint(IPAddress.Loopback, 1945);
        Console.WriteLine(browRaiserAU);

        // Classifier cascade: each MATLAB net returns a score; the first one
        // that rounds to 1 selects the command. Order: W, A, S, D, left-click,
        // right-click, then jaw (space) and brow (middle) thresholds, else 0.
        matlab.Feval("nnW", 1, out result, rotations[0]);
        object[] resW = result as object[];
        nnoutput = (int)((float)resW[0] + 0.5f);
        if (nnoutput == 1)
        {
            commandtoSend = 1;
        }
        else
        {
            result = null;
            matlab.Feval("nnA", 1, out result, rotations[1]);
            object[] resA = result as object[];
            nnoutput = (int)((float)resA[0] + 0.5f);
            if (nnoutput == 1)
            {
                commandtoSend = 2;
            }
            else
            {
                result = null;
                matlab.Feval("nnS", 1, out result, rotations[0]);
                object[] resS = result as object[];
                nnoutput = (int)((float)resS[0] + 0.5f);
                if (nnoutput == 1)
                {
                    commandtoSend = 3;
                }
                else
                {
                    result = null;
                    matlab.Feval("nnd", 1, out result, rotations[1]);
                    object[] resD = result as object[];
                    nnoutput = (int)((float)resD[0] + 0.5f);
                    if (nnoutput == 1)
                    {
                        commandtoSend = 4;
                    }
                    else
                    {
                        result = null;
                        matlab.Feval("nnLC", 1, out result, rotations[2]);
                        object[] resLC = result as object[];
                        nnoutput = (int)((float)resLC[0] + 0.5f);
                        if (nnoutput == 1)
                        {
                            commandtoSend = 5;
                        }
                        else
                        {
                            result = null;
                            matlab.Feval("nnRC", 1, out result, rotations[2]);
                            object[] resRC = result as object[];
                            nnoutput = (int)((float)resRC[0] + 0.5f);
                            if (nnoutput == 1)
                            {
                                commandtoSend = 6;
                            }
                            else
                            {
                                result = null;
                                // Jaw threshold replaces the disabled nnSpace net.
                                if (jawLowererAU > 0.7)
                                {
                                    commandtoSend = 7;
                                }
                                /*
                                 * matlab.Feval("nnSpace", 1, out result, rotations[3]);
                                 * object[] resSpace = result as object[];
                                 * nnoutput = (int)((float)resSpace[0] + 0.5f);
                                 * if (nnoutput == 1)
                                 * {
                                 * commandtoSend = 7;
                                 * }*/
                                else
                                {
                                    result = null;
                                    // Brow threshold replaces the disabled nnMiddle net.
                                    if (browRaiserAU > 0.4)
                                    {
                                        commandtoSend = 8;
                                    }
                                    else
                                    {
                                        result = null;
                                        commandtoSend = 0;
                                    }
                                    /*result = null;
                                     * matlab.Feval("nnMiddle", 1, out result, lipStretcherAU);
                                     * object[] resMiddle = result as object[];
                                     * nnoutput = (int)((float)resMiddle[0] + 0.5f);
                                     * if (nnoutput == 1)
                                     * {
                                     * commandtoSend = 8;
                                     * }
                                     * else
                                     *
                                     * {
                                     * result = null;
                                     * commandtoSend = 0;
                                     * }*/
                                }
                            }
                        }
                    }
                }
            }
        }

        //Console.WriteLine("Iteration Complete");

        // Emit the chosen command: the selected axis gets 10.0, everything
        // else is reset to 0.0.
        // NOTE(review): case 0 does not reset "/move/space" — a held space
        // command would persist after returning to idle; confirm intent.
        switch (commandtoSend)
        {
            case 0:
                msg = new OscMessage(myapp, "/move/w", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/a", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/s", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/d", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/lc", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/rc", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/middle", 0.0f);
                msg.Send(glovepie);
                break;
            case 1:
                msg = new OscMessage(myapp, "/move/w", 10.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/a", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/s", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/d", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/lc", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/rc", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/space", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/middle", 0.0f);
                msg.Send(glovepie);
                break;
            case 2:
                msg = new OscMessage(myapp, "/move/w", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/a", 10.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/s", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/d", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/lc", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/rc", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/space", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/middle", 0.0f);
                msg.Send(glovepie);
                break;
            case 3:
                msg = new OscMessage(myapp, "/move/w", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/a", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/s", 10.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/d", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/lc", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/rc", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/space", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/middle", 0.0f);
                msg.Send(glovepie);
                break;
            case 4:
                msg = new OscMessage(myapp, "/move/w", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/a", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/s", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/d", 10.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/lc", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/rc", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/space", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/middle", 0.0f);
                msg.Send(glovepie);
                break;
            case 5:
                msg = new OscMessage(myapp, "/move/w", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/a", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/s", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/d", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/lc", 10.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/rc", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/space", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/middle", 0.0f);
                msg.Send(glovepie);
                break;
            case 6:
                msg = new OscMessage(myapp, "/move/w", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/a", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/s", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/d", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/lc", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/rc", 10.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/space", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/middle", 0.0f);
                msg.Send(glovepie);
                break;
            case 7:
                msg = new OscMessage(myapp, "/move/w", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/a", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/s", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/d", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/lc", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/rc", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/space", 10.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/middle", 0.0f);
                msg.Send(glovepie);
                break;
            case 8:
                msg = new OscMessage(myapp, "/move/w", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/a", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/s", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/d", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/lc", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/rc", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/space", 0.0f);
                msg.Send(glovepie);
                msg = new OscMessage(myapp, "/move/middle", 10.0f);
                msg.Send(glovepie);
                break;
        }
    }
}
/// <summary>
/// Updates the face tracking information for this skeleton: projected and raw
/// 3D shape points, animation unit coefficients, and head rotation angles.
/// </summary>
internal void OnFrameReady(KinectSensor kinectSensor, ColorImageFormat colorImageFormat, byte[] colorImage, DepthImageFormat depthImageFormat, short[] depthImage, Skeleton skeletonOfInterest)
{
    this.skeletonTrackingState = skeletonOfInterest.TrackingState;

    if (this.skeletonTrackingState != SkeletonTrackingState.Tracked)
    {
        // nothing to do with an untracked skeleton.
        return;
    }

    if (this.faceTracker == null)
    {
        try
        {
            this.faceTracker = new FaceTracker(kinectSensor);
        }
        catch (InvalidOperationException)
        {
            // During some shutdown scenarios the FaceTracker
            // is unable to be instantiated. Catch that exception
            // and don't track a face.
            Debug.WriteLine("AllFramesReady - creating a new FaceTracker threw an InvalidOperationException");
            this.faceTracker = null;
        }
    }

    if (this.faceTracker != null)
    {
        FaceTrackFrame frame = this.faceTracker.Track(
            colorImageFormat, colorImage, depthImageFormat, depthImage, skeletonOfInterest);

        this.lastFaceTrackSucceeded = frame.TrackSuccessful;
        if (this.lastFaceTrackSucceeded)
        {
            if (faceTriangles == null)
            {
                // only need to get this once. It doesn't change.
                faceTriangles = frame.GetTriangles();
            }
            this.facePoints3DRaw = frame.Get3DShape();
            this.facePoints = frame.GetProjected3DShape();
            animationUnitsRaw = frame.GetAnimationUnitCoefficients();
        }

        // NOTE(review): the assignments below run even when TrackSuccessful is
        // false — rotation is read from an unsuccessful frame and the
        // raw-point/AU fields may still be null on the first frames. Confirm
        // whether these should move inside the success branch.
        x = frame.Rotation.X;
        y = frame.Rotation.Y;
        z = frame.Rotation.Z;
        facePointS3D = this.facePoints3DRaw;
        animationUnits = animationUnitsRaw;

        //Debug.WriteLine(animationUnits[AnimationUnit.JawLower]);
        //Debug.WriteLine(animationUnits[AnimationUnit.BrowLower]);
        //Debug.WriteLine(animationUnits[AnimationUnit.BrowRaiser]);
        //Debug.WriteLine(animationUnits[AnimationUnit.JawLower]);
        //Debug.WriteLine(animationUnits[AnimationUnit.LipCornerDepressor]);
        //Debug.WriteLine(animationUnits[AnimationUnit.LipRaiser]);
        //Debug.WriteLine(animationUnits[AnimationUnit.LipStretcher]);
        //Debug.WriteLine(frame.Translation.ToString());
        //Debug.WriteLine(frame.Rotation.ToString());
        //this.facePoints[FeaturePoint.AboveChin].X+2;
        //Debug.WriteLine(frame.Translation.X.ToString());
        //Debug.WriteLine(frame.Translation.Y.ToString());
        //Debug.WriteLine(frame.Translation.Z.ToString());
    }
}