/// <summary>
/// Body frame handler: forwards the frame to the Kinect controller, then updates the
/// UI text boxes with the pointing arm, the pointed coordinates and the pointed zone.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
public void Reader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    kc.Controller_FrameArrived(sender, e);

    // Fill all text boxes
    FillBoxes();
    arm.Text = kc.Arm.ToString();

    // The Right and Left branches were byte-identical duplicates — collapsed into one.
    if (kc.Arm == ArmPointing.Right || kc.Arm == ArmPointing.Left)
    {
        float pointedX = kc.GetPointedX();
        float pointedY = kc.GetPointedY();
        pointedPoint.Text = "X= " + pointedX.ToString() + "\nY= " + pointedY.ToString();
        zoneBox.Text = kc.GetPointedZone().ToString();
    }
    else
    {
        this.pointedPoint.Text = "notPointing";
        zoneBox.Text = "0";
    }
}
/// <summary>
/// Body frame handler: snapshots the latest body data out of the frame and runs
/// gesture detection on every body slot.
/// </summary>
public void Reader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    Body[] bodies = null;
    bool dataReceived = false;

    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            bodies = new Body[bodyFrame.BodyCount];
            bodyFrame.GetAndRefreshBodyData(bodies);
            dataReceived = true;
        }
    }

    if (!dataReceived)
    {
        return;
    }

    foreach (Body body in bodies)
    {
        gestureCollectionManager.DetectGestures(body);
        //BasicHandGesture(body);
    }
}
/// <summary>
/// Calibration handler: when an arm is pointing, logs the pointed coordinates,
/// writes them as a JSON list to the calibration file and closes the window.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Calibration_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    kc.Controller_FrameArrived(sender, e);

    if (kc.Arm == ArmPointing.Nothing)
    {
        return;
    }

    float pointedX = kc.GetPointedX();
    float pointedY = kc.GetPointedY();
    DataLog.Log(DataLog.DebugLevel.Message,
        "Calibrated with X=" + pointedX.ToString() + " Y=" + pointedY.ToString());

    // File.CreateText creates (or truncates) the file itself, so the previous
    // File.Exists / File.Create(...).Close() pre-step was redundant and is removed.
    List<float> l = new List<float> { pointedX, pointedY };
    string data = JsonConvert.SerializeObject(l);

    // using guarantees the writer and the underlying stream are flushed and
    // closed even if WriteRaw throws (the original leaked both on exception).
    using (StreamWriter cal_file = File.CreateText(OPT_FILE))
    using (JsonTextWriter cal_writer = new JsonTextWriter(cal_file))
    {
        cal_writer.WriteRaw(data);
    }

    this.Close();
}
/// <summary>
/// Body frame handler: clears the overlay to the full color-space extent, then for
/// every tracked body draws the right shoulder→elbow→hand bones and updates the
/// computed arm angle.
/// </summary>
void BodyFrame_Arrived(object sender, BodyFrameArrivedEventArgs e)
{
    var frameReference = e.FrameReference;
    if (frameReference == null)
        return;

    var frame = frameReference.AcquireFrame();
    if (frame == null)
        return;

    using (frame)
    {
        frame.GetAndRefreshBodyData(bodies);

        using (var dc = drawingGroup.Open())
        {
            // Transparent rectangle pins the drawing group to the full color frame size.
            dc.DrawRectangle(Brushes.Transparent, null, new Rect(0.0, 0.0, colorSpaceWidth, colorSpaceHeight));

            foreach (var body in bodies)
            {
                if (body.IsTracked)
                {
                    var rShoulder = body.Joints[JointType.ShoulderRight];
                    var rElbow = body.Joints[JointType.ElbowRight];
                    // NOTE(review): named "rWrist" but reads JointType.HandRight —
                    // confirm whether JointType.WristRight was intended.
                    var rWrist = body.Joints[JointType.HandRight];

                    DrawBone(rShoulder, rElbow, dc);
                    DrawBone(rElbow, rWrist, dc);
                    UpdateAngle(rShoulder, rElbow, rWrist);
                }
            }
        }
    }
}
/// <summary>
/// Copies the latest body data out of the arrived frame into the shared array.
/// </summary>
private void UpdateBodyFrame( BodyFrameArrivedEventArgs e )
{
    using ( var bodyFrame = e.FrameReference.AcquireFrame() )
    {
        // The frame can be null when it was already consumed or dropped.
        if ( bodyFrame != null )
        {
            bodyFrame.GetAndRefreshBodyData( bodies );
        }
    }
}
/// <summary>
/// Refreshes the body array from the new frame and caches the first tracked body.
/// </summary>
private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    using (var bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame == null)
        {
            return;
        }

        bodyFrame.GetAndRefreshBodyData(_bodies);
        _body = _bodies.FirstOrDefault(b => b.IsTracked);
    }
}
/// <summary>
/// Acquires the arrived body frame and renders it; failures are traced and swallowed.
/// </summary>
void bosyReader_FrameArrived( object sender, BodyFrameArrivedEventArgs e )
{
    try
    {
        using ( var bodyFrame = e.FrameReference.AcquireFrame() )
        {
            if ( bodyFrame == null )
            {
                return;
            }

            ShowBody( bodyFrame );
        }
    }
    catch ( Exception ex )
    {
        // Deliberate best-effort: a rendering error must not kill the sensor pipeline.
        Trace.WriteLine( ex.Message );
    }
}
/// <summary>
/// Copies the arrived skeleton data into the shared buffer under the lock,
/// then flags the view for redraw.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void SkeletonReady(object sender, BodyFrameArrivedEventArgs e)
{
    using (BodyFrame skeletonFrame = e.FrameReference.AcquireFrame())
    {
        if (skeletonFrame != null)
        {
            // Lock so readers of lastframe never see a half-written update.
            lock (m_lock)
            {
                skeletonFrame.GetAndRefreshBodyData(this.lastframe);
            }
            // The explicit skeletonFrame.Dispose() that used to be here was
            // redundant: the using block already disposes the frame.
        }
    }

    this.FInvalidate = true;
}
/// <summary>
/// Refreshes body data and caches the first tracked body — unless the frame
/// stream is currently frozen.
/// </summary>
private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    if (frame_freeze)
    {
        return;
    }

    using (var bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            bodyFrame.GetAndRefreshBodyData(_bodies);
            _body = _bodies.FirstOrDefault(b => b.IsTracked);
        }
    }
}
/// <summary>
/// Refreshes body data and, when the face frame source has lost its tracking id,
/// re-binds it to the first tracked body.
/// </summary>
private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs args)
{
    using (var bodyFrame = args.FrameReference.AcquireFrame())
    {
        if (bodyFrame == null)
        {
            return;
        }

        bodyFrame.GetAndRefreshBodyData(_bodies);

        var tracked = _bodies.FirstOrDefault(b => b.IsTracked);
        if (tracked != null && !_faceFrameSource.IsTrackingIdValid)
        {
            _faceFrameSource.TrackingId = tracked.TrackingId;
        }
    }
}
/// <summary>
/// Caches the most recent body frame (and optionally a second copy for saving)
/// under the frame lock, together with its arrival time, and marks it ready
/// for consumers.
/// </summary>
private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    lock (_bodyFrameLock)
    {
        // NOTE(review): e.BodyFrame / e.Frame suggest custom event args rather than
        // the SDK's BodyFrameArrivedEventArgs — confirm who owns/disposes the frame.
        _bodyFrame = e.BodyFrame;
        if (_saveLatestFrames)
        {
            _latestBodyFrame = e.Frame;
        }
        // Wall-clock arrival time; the sensor-relative timestamp alternative is
        // kept commented for reference.
        _bodyFrameTime = DateTime.Now.Ticks; // _bodyFrame.SystemRelativeTime.Value.Ticks;
        _bodyFrameReady = true;
    }
}
// Event handler invoked when a body frame arrives.
void bodyFrameReader_FrameArrived(BodyFrameReader sender, BodyFrameArrivedEventArgs args)
{
    // Acquire the actual frame data from the frame reference.
    using (BodyFrame bodyframe = args.FrameReference.AcquireFrame())
    {
        if (bodyframe != null)
        {
            // Refresh the retained body data with the latest frame.
            bodyframe.GetAndRefreshBodyData(this.bodies);
            // Write the joint data to the console.
            this.writeJointsData();
        }
    }
}
/// <summary>
/// Handles the body frame data arriving from the sensor: refreshes the body array,
/// then sticks to a single tracked body, re-acquiring one when tracking is lost.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    bool dataReceived = false;

    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            if (this.bodies == null)
            {
                this.bodies = new Body[bodyFrame.BodyCount];
            }
            bodyFrame.GetAndRefreshBodyData(this.bodies);
            dataReceived = true;
        }
    }

    if (!dataReceived)
    {
        return;
    }

    Body body = null;

    // Keep following the previously selected body while it stays tracked.
    if (this.bodyTracked)
    {
        if (this.bodies[this.bodyIndex].IsTracked)
        {
            body = this.bodies[this.bodyIndex];
        }
        else
        {
            bodyTracked = false;
        }
    }

    // Lost (or never had) a body: pick the first tracked slot.
    if (!bodyTracked)
    {
        for (int i = 0; i < this.bodies.Length; ++i)
        {
            if (this.bodies[i].IsTracked)
            {
                this.bodyIndex = i;
                this.bodyTracked = true;
                // Fix: use the newly acquired body immediately instead of leaving
                // `body` null until the next frame (the original never assigned it here).
                body = this.bodies[i];
                break;
            }
        }
    }

    if (body != null && this.bodyTracked && body.IsTracked)
    {
        // body represents your single tracked skeleton
    }
}
/// <summary>
/// Body frame handler used while recording: refreshes both the live body array and
/// a fresh per-frame copy, then (when actively recording) stores the copy with its
/// elapsed time point and writes the rig to disk.
/// </summary>
private void Reader_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    if (recordMode != RecordMode.Playingback)
    {
        bool dataReceived = false;
        Body[] tempo = null;

        using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
        {
            if (bodyFrame != null)
            {
                if (this.recordingBodies == null)
                {
                    this.recordingBodies = new Body[bodyFrame.BodyCount];
                }

                // Fresh array each frame so the recorded snapshot is not later
                // overwritten when recordingBodies is refreshed again.
                tempo = new Body[bodyFrame.BodyCount];

                // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
                // As long as those body objects are not disposed and not set to null in the array,
                // those body objects will be re-used.
                bodyFrame.GetAndRefreshBodyData(this.recordingBodies);
                bodyFrame.GetAndRefreshBodyData(tempo);
                dataReceived = true;
            }
        }

        if (dataReceived)
        {
            // Trigger a redraw of the RGB board.
            rgbBoard.Invalidate();

            if (recordMode == RecordMode.Recording && this.rigWriter != null)
            {
                if (tmspStartRecording.HasValue)
                {
                    // Save a copy of rig for later replay
                    recordedRigs.Add(tempo.Where(body => body.IsTracked).ToArray());

                    var currentTime = DateTime.Now.TimeOfDay;
                    TimeSpan elapse = currentTime - tmspStartRecording.Value;
                    recordedRigTimePoints.Add((int)elapse.TotalMilliseconds);
                    WriteRig(elapse);
                }
            }
        }
    }
}
/// <summary>
/// Refreshes body data and forwards each slot's tracking id and joints to its
/// gesture and joint detectors.
/// </summary>
private void OnBodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        frame.GetAndRefreshBodyData(Bodies);

        for (int slot = 0; slot < BFSource.BodyCount; ++slot)
        {
            GestureDetectors[slot].TrackingID = Bodies[slot].TrackingId;
            JointDetectors[slot].OnBodyUpdate(Bodies[slot]);
        }
    }
}
/// <summary>
/// Pulls the latest body data and analyzes it for gestures; errors are logged
/// rather than propagated into the sensor pipeline.
/// </summary>
public void FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    try
    {
        using (var frame = e.FrameReference.AcquireFrame())
        {
            // No frame means nothing new to analyze this tick.
            if (frame == null)
            {
                return;
            }

            frame.GetAndRefreshBodyData(Bodies);
        }

        AnalyzeNewBodyData();
    }
    catch (Exception ex)
    {
        Logger.ErrorFormat("An error occured during processing of gesture: {0}", ex.Message);
    }
}
/// <summary>
/// Refreshes body data, projects every body into a KinectBody snapshot and raises
/// the FrameReceived event with the result.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    // using guarantees the frame is disposed even if GetAndRefreshBodyData throws;
    // the original only called Dispose() on the happy path.
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        frame.GetAndRefreshBodyData(this.bodies);
    }

    KinectBody[] newFrame = this.bodies.Select(body => new KinectBody(body)).ToArray();

    if (this.FrameReceived != null)
    {
        this.FrameReceived(this, new KinectBodyFrameDataEventArgs(newFrame));
    }
}
// Body update handler, called whenever a new frame has been produced by the sensor.
private void UpdateBodyFrame(BodyFrameArrivedEventArgs e)
{
    // The using block releases the frame deterministically instead of leaving it
    // to the GC. A frame is the smallest unit of data sent by the Kinect and is
    // obtained via e.FrameReference.AcquireFrame().
    using (var bodyFrame = e.FrameReference.AcquireFrame())
    {
        // The frame may be null (already consumed or dropped) — bail out if so.
        if (bodyFrame == null)
        {
            return;
        }

        // Copy the body data into bodies.
        bodyFrame.GetAndRefreshBodyData(bodies);
    }
}
/// <summary>
/// Lazily allocates the body array on the first frame, then refreshes it with
/// the latest body data.
/// </summary>
private void BodyFrameReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    using (var bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame == null)
        {
            return;
        }

        if (bodies == null)
        {
            bodies = new Body[bodyFrame.BodyCount];
        }

        bodyFrame.GetAndRefreshBodyData(bodies);
    }
}
/// <summary>
/// Body frame handler: processes pending hand states, refreshes the body array,
/// updates the body view, then re-binds every gesture detector in each body slot
/// whose tracking id has changed.
/// </summary>
private void Reader_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    bool dataReceived = false;
    processHandStates();

    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            if (bodies == null)
            {
                // creates an array of 6 bodies, which is the max number of bodies that Kinect can track simultaneously
                bodies = new Body[bodyFrame.BodyCount];
            }

            // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
            // As long as those body objects are not disposed and not set to null in the array,
            // those body objects will be re-used.
            bodyFrame.GetAndRefreshBodyData(bodies);
            dataReceived = true;
        }
    }

    if (dataReceived)
    {
        kinectBodyView.UpdateBodyFrame(bodies);

        if (bodies != null)
        {
            int maxBodies = kinectSensor.BodyFrameSource.BodyCount;

            for (int i = 0; i < maxBodies; ++i)
            {
                Body body = bodies[i];
                ulong trackingId = body.TrackingId;

                // Re-target each detector for this slot when the body's id changes;
                // a tracking id of 0 means the slot is empty, so pause the detector.
                foreach (GestureDetector det in detectorPerBodyList[i])
                {
                    if (trackingId != det.TrackingId)
                    {
                        det.TrackingId = trackingId;
                        det.IsPaused = trackingId == 0;
                    }
                }
            }
        }
    }
}
/// <summary>
/// Handles the body frame data arriving from the sensor and keeps each per-body
/// gesture detector bound to the tracking id of the body in its slot.
/// </summary>
private void BodyFrameReaderOnFrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    bool dataReceived = false;

    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            if (this.bodies == null)
            {
                // creates an array of 6 bodies, which is the max number of bodies that Kinect can track simultaneously
                this.bodies = new Body[bodyFrame.BodyCount];
            }

            // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
            // As long as those body objects are not disposed and not set to null in the array,
            // those body objects will be re-used.
            bodyFrame.GetAndRefreshBodyData(this.bodies);
            dataReceived = true;
        }
    }

    if (dataReceived)
    {
        // we may have lost/acquired bodies, so update the corresponding gesture detectors
        if (this.bodies != null)
        {
            // loop through all bodies to see if any of the gesture detectors need to be updated
            int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;

            for (int i = 0; i < maxBodies; ++i)
            {
                Body body = this.bodies[i];
                ulong trackingId = body.TrackingId;

                // if the current body TrackingId changed, update the corresponding gesture detector with the new value
                if (trackingId != this.gestureDetectorList[i].TrackingId)
                {
                    this.gestureDetectorList[i].TrackingId = trackingId;

                    // if the current body is tracked, unpause its detector to get VisualGestureBuilderFrameArrived events
                    // if the current body is not tracked, pause its detector so we don't waste resources trying to get invalid gesture results
                    this.gestureDetectorList[i].IsPaused = trackingId == 0;
                }
            }
        }
    }
}
/// <summary>
/// Handles the body frame data arriving from the sensor
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    bool dataReceived = false;

    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            if (Bodies == null)
            {
                Bodies = new Body[bodyFrame.BodyCount];
            }

            // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
            // As long as those body objects are not disposed and not set to null in the array,
            // those body objects will be re-used.
            bodyFrame.GetAndRefreshBodyData(Bodies);
            dataReceived = true;
        }
    }

    if (dataReceived)
    {
        // Build one BodyMessage (tracking id + head position) per tracked body.
        List <BodyMessage> bodyMessages = new List <BodyMessage>();

        foreach (Body body in Bodies)
        {
            if (body.IsTracked)
            {
                CameraSpacePoint headPosition = body.Joints[JointType.Head].Position;

                // Inferred joints can report a negative depth; clamp the local copy
                // so the outgoing message never carries a nonsensical Z.
                if (headPosition.Z < 0)
                {
                    headPosition.Z = 0.1f;
                }

                BodyMessage bodyMessage = new BodyMessage(body.TrackingId, headPosition.X, headPosition.Y, headPosition.Z);
                bodyMessages.Add(bodyMessage);
            }
        }

        // Notify listeners only when at least one body was seen this frame.
        if (bodyMessages.Count > 0)
        {
            BodyUpdated(bodyMessages);
        }
    }
}
/// <summary>
/// Body frame handler: updates the status indicator, then redraws the skeleton
/// canvas with every tracked body.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void OnBodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    BodyFrameReference refer = e.FrameReference;
    if (refer == null)
    {
        return;
    }

    // using guarantees the frame is disposed even if a status/draw call throws;
    // the original acquired the frame outside any using/try protection and also
    // wrapped the body in a pointless bare block.
    using (BodyFrame frame = refer.AcquireFrame())
    {
        if (frame == null)
        {
            StatusText.Text = "BodyOff";
            StatusText.Visibility = Visibility.Visible;
            return;
        }

        StatusText.Text = "BodyOn";
        StatusText.Visibility = Visibility.Visible;

        // Acquire body data
        frame.GetAndRefreshBodyData(_bodies);

        // Clear Skeleton Canvas
        SkeletonCanvas.Children.Clear();

        // Only process tracked bodies
        foreach (Body body in _bodies)
        {
            if (body.IsTracked)
            {
                DrawBody(body);
            }
        }
    }
}
/// <summary>
/// Handles the body frame data arriving from the sensor and updates the associated gesture detector object for each body
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    bool dataReceived = false;

    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            if (this.bodies == null)
            {
                // creates an array of 6 bodies, which is the max number of bodies that Kinect can track simultaneously
                this.bodies = new Body[bodyFrame.BodyCount];
            }

            // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
            // As long as those body objects are not disposed and not set to null in the array,
            // those body objects will be re-used.
            bodyFrame.GetAndRefreshBodyData(this.bodies);
            dataReceived = true;
        }
    }

    if (dataReceived)
    {
        // visualize the new body data
        this.kinectBodyView.UpdateBodyFrame(this.bodies);

        // we may have lost/acquired bodies, so update the corresponding gesture detectors
        if (this.bodies != null)
        {
            // Only detector slot 0 is used: it follows the first tracked body, and
            // falls back to bodies[0] (TrackingId 0 pauses the detector) when no
            // body is currently tracked.
            Body body = this.bodies[0];

            if (this.bodies.Where(b => b.IsTracked == true).Count() != 0)
            {
                body = this.bodies.Where(b => b.IsTracked == true).First();
            }

            if (body != null)
            {
                ulong trackingId = body.TrackingId;

                if (trackingId != this.gestureDetectorList[0].TrackingId)
                {
                    this.gestureDetectorList[0].TrackingId = trackingId;
                    this.gestureDetectorList[0].IsPaused = trackingId == 0;
                }
            }
        }
    }
}
/// <summary>
/// Skeleton (body) frame event: refreshes body data, binds the face source to the
/// first tracked body when needed, then runs posture detection for every tracked
/// body against each word taken from the current task sentence.
/// </summary>
private void BodyFrameReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame == null)
        {
            return;
        }

        // NOTE(review): a fresh Body[] is allocated every frame, which defeats the
        // SDK's body-object reuse — confirm before changing to a one-time allocation.
        bodies = new Body[bodyFrame.BodyCount];
        bodyFrame.GetAndRefreshBodyData(bodies);

        Body body = bodies.Where(b => b.IsTracked).FirstOrDefault();

        if (!_faceSource.IsTrackingIdValid)
        {
            if (body != null)
            {
                //Assign a tracking ID to the face source
                _faceSource.TrackingId = body.TrackingId;
            }
        }

        // Evaluate each skeleton and pass it to posture detection.
        for (int i = 0; i < bodies.Length; i++)
        {
            // Only run detection when this skeleton is actually tracked.
            if (bodies[i].IsTracked)
            {
                if (StopDetect != true)
                {
                    //Body class ,posture_number ,face class
                    //poseture.Poseture_Detected(bodies[i], vocabulary.Give_Vocabulary(posture_number), face_result);

                    // No task sentence available: nothing to check against.
                    if (TaskWords == null)
                    {
                        return;
                    }

                    // Run posture detection against every dictionary word taken
                    // from the sentence.
                    for (int j = 0; j < TaskWords.Count; j++)
                    {
                        poseture.Poseture_Detected(bodies[i], TaskWords[j], face_result);
                    }
                }
            }
        }
    }
}
/// <summary>
/// Handles the body frame data arriving from the sensor
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    bool dataReceived = false;

    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            if (this.bodies == null)
            {
                this.bodies = new Body[bodyFrame.BodyCount];
            }

            // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
            // As long as those body objects are not disposed and not set to null in the array,
            // those body objects will be re-used.
            bodyFrame.GetAndRefreshBodyData(this.bodies);
            dataReceived = true;
        }
    }

    if (dataReceived)
    {
        foreach (Body body in this.bodies)
        {
            if (body.IsTracked)
            {
                IReadOnlyDictionary <JointType, Joint> joints = body.Joints;

                foreach (JointType jointType in joints.Keys)
                {
                    // sometimes the depth(Z) of an inferred joint may show as negative
                    // clamp down to 0.1f to prevent coordinatemapper from returning (-Infinity, -Infinity)
                    // NOTE(review): CameraSpacePoint is a struct, so this writes to a
                    // local copy that is never read afterwards — the clamp currently
                    // has no effect; confirm whether the clamped value was meant to
                    // be used by SetBodyFrameValues below.
                    CameraSpacePoint position = joints[jointType].Position;
                    if (position.Z < 0)
                    {
                        position.Z = InferredZPositionClamp;
                    }
                }

                SetBodyFrameValues(body);
                string data = "";
                // Raise the data event with the raw frame args and a timestamp.
                OnKinectDataReceived(new clsKinectArgs(e, data, DateTime.Now));
            }
        }
    }
}
/// <summary>
/// Handles the body frame data arriving from the sensor and keeps each per-body
/// gesture detector bound to the tracking id of the body in its slot.
/// </summary>
private static void Reader_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    bool dataReceived = false;

    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            if (bodies == null)
            {
                // creates an array of 6 bodies, which is the max number of bodies that Kinect can track simultaneously
                bodies = new Body[bodyFrame.BodyCount];
            }

            // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
            // As long as those body objects are not disposed and not set to null in the array,
            // those body objects will be re-used.
            bodyFrame.GetAndRefreshBodyData(bodies);
            dataReceived = true;
        }
    }

    if (dataReceived)
    {
        // we may have lost/acquired bodies, so update the corresponding gesture detectors
        if (bodies != null)
        {
            // loop through all bodies to see if any of the gesture detectors need to be updated
            int maxBodies = kinectSensor.BodyFrameSource.BodyCount;

            for (int i = 0; i < maxBodies; ++i)
            {
                Body body = bodies[i];
                ulong trackingId = body.TrackingId;

                // if the current body TrackingId changed, update the corresponding gesture detector with the new value
                if (trackingId != gestureDetectorList[i].TrackingId)
                {
                    gestureDetectorList[i].TrackingId = trackingId;

                    // if the current body is tracked, unpause its detector to get VisualGestureBuilderFrameArrived events
                    // if the current body is not tracked, pause its detector so we don't waste resources trying to get invalid gesture results
                    gestureDetectorList[i].IsPaused = trackingId == 0;
                }
            }
        }
    }
}
/// <summary>
/// Refreshes body data and feeds the first tracked body to the recorder.
/// </summary>
void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        frame.GetAndRefreshBodyData(_bodies);

        Body tracked = _bodies.FirstOrDefault(b => b != null && b.IsTracked);
        if (tracked != null)
        {
            _recorder.Update(tracked);
        }
    }
}
// Body frame handler: refreshes body data, redraws the skeleton view and updates
// the on-screen frame-rate counter.
void bodyFrameReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    using (var bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame == null)
        {
            return;
        }

        // Copy the latest body data into the shared array.
        bodyFrame.GetAndRefreshBodyData(bodies);

        DrawBodyFrame();

        // Refresh the displayed frame rate.
        TextFps.Text = counter.Update().ToString();
    }
}
/// <summary>
/// Extracts the single tracked body from the arrived frame, or null when the
/// frame is unavailable or nobody is tracked.
/// </summary>
private Body ExtractBody(BodyFrameArrivedEventArgs args)
{
    using (var frame = args.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return null;
        }

        var snapshot = new Body[frame.BodyCount];
        frame.GetAndRefreshBodyData(snapshot);

        // ASSUMPTION: We are just tracking 1 body
        return snapshot.FirstOrDefault(x => x.IsTracked);
    }
}
/// <summary>
/// Body tracking handler: refreshes body data, then — for the followed body index —
/// feeds any available face result to the fatigue scheduler, or re-binds the face
/// source to that body when it is tracked but has no face result yet.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void Reader_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    using (var bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            // Refresh the body data.
            bodyFrame.GetAndRefreshBodyData(this.bodies);

            bool drawFaceResult = false;

            if (this.myBodyIndex >= 0)
            {
                if (this.faceFrameResults[this.myBodyIndex] != null)
                {
                    // A face result is available: update the fatigue estimate.
                    this.NumeFaigute = this.myClock.Scheduler(this.faceFrameResults[this.myBodyIndex]);
                    drawFaceResult = true;
                    this.is_Face = true;
                }
                else
                {
                    // Check whether the corresponding body is being tracked.
                    if (this.bodies[this.myBodyIndex].IsTracked)
                    {
                        // Update the face source to track this body.
                        this.faceFrameSources[this.myBodyIndex].TrackingId = this.bodies[this.myBodyIndex].TrackingId;
                    }
                }
            }

            if (!drawFaceResult)
            {
                // if no faces were drawn then this indicates one of the following:
                // a body was not tracked
                // a body was tracked but the corresponding face was not tracked
                // a body and the corresponding face was tracked though the face box or the face points were not valid
                //Console.WriteLine("当前没有检测到Face");
                //com.WriteLine("0");
                this.is_Face = false;
            }

            UpdateDebugWindow();
        }
    }
    //this.changeFrame();
}
/// <summary>
/// Handles the body frame data arriving from the sensor
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    bool dataReceived = false;
    bool hasTrackedBody = false;

    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            bodyFrame.GetAndRefreshBodyData(this.bodies);
            dataReceived = true;
        }
    }

    if (dataReceived)
    {
        // Begin a batched update of all body visuals for this frame.
        this.BeginBodiesUpdate();

        // iterate through each body
        for (int bodyIndex = 0; bodyIndex < this.bodies.Length; bodyIndex++)
        {
            Body body = this.bodies[bodyIndex];

            if (body.IsTracked)
            {
                // check if this body clips an edge
                this.UpdateClippedEdges(body, hasTrackedBody);
                this.UpdateBody(body, bodyIndex);
                hasTrackedBody = true;
            }
            else
            {
                // collapse this body from canvas as it goes out of view
                this.ClearBody(bodyIndex);
            }
        }

        if (!hasTrackedBody)
        {
            // clear clipped edges if no bodies are tracked
            this.ClearClippedEdges();
        }
    }
}
/// <summary>
/// Skeleton frame handler: refreshes body data, binds the face source to the first
/// tracked body when it has lost its target, then runs posture detection for every
/// tracked body against the vocabulary word selected by the server.
/// </summary>
private void BodyFrameReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame == null)
        {
            return;
        }

        // NOTE(review): a fresh Body[] is allocated every frame, which defeats the
        // SDK's body-object reuse — confirm before changing to a one-time allocation.
        bodies = new Body[bodyFrame.BodyCount];
        bodyFrame.GetAndRefreshBodyData(bodies);

        Body body = bodies.Where(b => b.IsTracked).FirstOrDefault();

        if (!_faceSource.IsTrackingIdValid)
        {
            if (body != null)
            {
                // 4) Assign a tracking ID to the face source
                _faceSource.TrackingId = body.TrackingId;
            }
        }

        // Evaluate each skeleton and pass it into posture detection.
        for (int i = 0; i < bodies.Length; i++)
        {
            if (bodies[i].IsTracked) // only run when this skeleton is tracked
            /*
             * poseture.Poseture_Detected(
             *  bodies[i].Joints[JointType.Head],
             *  bodies[i].Joints[JointType.HandRight],
             *  bodies[i].Joints[JointType.ElbowRight],
             *  bodies[i].Joints[JointType.HandLeft],
             *  bodies[i].Joints[JointType.ElbowLeft],
             *  bodies[i].Joints[JointType.ShoulderRight],
             *  bodies[i].Joints[JointType.ShoulderLeft],
             *  bodies[i].Joints[JointType.SpineMid],
             *  posture_number, // posture_number: value received from the server, passed into the check
             *  face_result);   // face-features instance passed through so its properties can be read
             */
            {
                poseture.Poseture_Detected(bodies[i], vocabulary.Give_Vocabulary(posture_number), face_result);
            }
        }
    }
}
/// <summary>
/// Handle the new body frames: refresh the body collection and start a face
/// tracker for every tracked body that does not have one yet.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void OnBodiesArrive(object sender, BodyFrameArrivedEventArgs e)
{
    // Fix: the method was declared 'async void' but contained no awaits (CS1998);
    // the async modifier has been dropped. The event-handler signature is unchanged.

    // Retrieve the body reference
    BodyFrameReference bodyRef = e.FrameReference;
    if (bodyRef == null)
    {
        return;
    }

    // Acquire the body frame
    using (BodyFrame frame = bodyRef.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        // Create a new collection when required
        if (_bodies == null || _bodies.Count() != frame.BodyCount)
        {
            _bodies = new Body[frame.BodyCount];
        }

        // Refresh the bodies
        frame.GetAndRefreshBodyData(_bodies);

        // Start tracking faces for tracked bodies we haven't seen before
        foreach (Body body in _bodies)
        {
            if (body.IsTracked && !_trackers.ContainsKey(body.TrackingId))
            {
                FaceTracker tracker = new FaceTracker(body.TrackingId, _faceFrameFeatures, _kinect);
                tracker.FaceAnalyticsAvailable += OnFaceAnalyticsAvailable;

                // Add to dictionary
                _trackers.Add(body.TrackingId, tracker);
            }
        }
    }
}
/// <summary>
/// Body frame handler: refreshes body data, redraws all skeletons, then tracks
/// the currently engaged user and reacts to any recognised gestures.
/// </summary>
private void bfr_FrameArrived(object o, BodyFrameArrivedEventArgs args)
{
    using (var bodyFrame = args.FrameReference.AcquireFrame())
    {
        if (bodyFrame == null)
        {
            return;
        }

        bodyFrame.GetAndRefreshBodyData(bodies);

        bodyCanvas.Children.Clear();
        engagerCanvas.Children.Clear();

        // NOTE(review): data is refreshed into the 'bodies' field but iteration is
        // over 'Bodies' — presumably a property wrapping the same array; confirm.
        // Bodies are added to the engagement manager without an IsTracked check —
        // verify untracked slots are handled downstream.
        foreach (var body in Bodies)
        {
            if (!eManager.users.Contains(body))
            {
                eManager.users.Add(body);
            }

            // Multithreading maybe
            drawer.CurrentCanvas = bodyCanvas;
            drawer.drawSkeleton(body);
        }

        // Nothing more to do when no one is engaged.
        if (!eManager.IsEngage)
        {
            return;
        }

        eTracker.Engager = eManager.Engager;

        // Multithreading maybe
        drawer.CurrentCanvas = engagerCanvas;
        // Drawing the engager doubles as a liveness check: a failed draw disengages.
        eManager.IsEngage = drawer.drawSkeleton(eTracker);

        if (eManager.DisablingEngagement)
        {
            return;
        }

        var recognisedGestures = recogniser.recognise();
        man.reactGesture(recognisedGestures);
    }
}
/// <summary>
/// Frame handler: (re)sizes the body array to the frame's body count, refreshes it,
/// clears the console while paused, then draws every tracked body slot.
/// </summary>
void OnFrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if ((frame != null) && (frame.BodyCount > 0))
        {
            // Reallocate only when the body count changes (first frame or a sensor change).
            if ((this.bodies == null) || (this.bodies.Length != frame.BodyCount))
            {
                this.bodies = new Body[frame.BodyCount];
            }

            frame.GetAndRefreshBodyData(this.bodies);

            // clear if running
            if (Constants.bPaused == true)
            {
                Console.Clear();
            }

#if ZERO
            bool first = true;
#endif

            // One drawer per colour slot; only slots with a tracked body are drawn.
            for (int i = 0; i < colours.Length; i++)
            {
                if (this.bodies[i].IsTracked)
                {
#if ZERO
                    this.bodyDrawers[i].DrawFrame(
                        this.bodies[i],
                        this.sensor.CoordinateMapper,
                        this.DepthFrameSize,
                        first);
                    first = false;
#else
                    this.bodyDrawers[i].DrawFrame(
                        this.bodies[i],
                        this.sensor.CoordinateMapper,
                        this.DepthFrameSize);
#endif
                }
            }
        }
    }
}
/// <summary>
/// Handles the body frame data arriving from the sensor: refreshes the body array,
/// then routes the engaged body's hand state (left or right, per the engaged hand
/// type) to the hand-state processor.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    bool dataReceived = false;

    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            if (this.bodies == null)
            {
                // Kinect allocates each Body on the first GetAndRefreshBodyData call
                // and re-uses them as long as they stay non-null and undisposed.
                this.bodies = new Body[bodyFrame.BodyCount];
            }

            bodyFrame.GetAndRefreshBodyData(this.bodies);
            dataReceived = true;
        }
    }

    if (!dataReceived)
    {
        return;
    }

    foreach (Body body in bodies)
    {
        if (!body.IsTracked || body.TrackingId != engagedBodyId)
        {
            continue;
        }

        if (engagedHandType == HandType.LEFT)
        {
            processHandState(body.HandLeftState);
        }
        else if (engagedHandType == HandType.RIGHT)
        {
            processHandState(body.HandRightState);
        }
        else
        {
            processHandStateClosed(false);
        }
    }
}
// Body frame handler: for every tracked body (once the timer has elapsed), a closed
// left hand shows the previous photo and a closed right hand shows the next one.
void lecteurBody_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    BodyFrameReference refer = e.FrameReference;

    if (refer == null)
    {
        return;
    }

    // Get the body frame
    BodyFrame frame = refer.AcquireFrame();

    if (frame == null)
    {
        return;
    }

    using (frame)
    {
        // Get the players' body data
        frame.GetAndRefreshBodyData(bodies);

        // Clear Skeleton Canvas
        CanvasCameraPrincipale.Children.Clear();

        // Loop all bodies
        foreach (Body body in bodies)
        {
            // Only process tracked bodies and if the timer is over
            if (body.IsTracked && tempsFini)
            {
                if (body.HandLeftState == HandState.Closed) // Left hand closed: show the previous image
                {
                    indexPhoto--;
                    afficherPhoto();
                }
                else if (body.HandRightState == HandState.Closed) // Right hand closed: show the next image
                {
                    indexPhoto++;
                    afficherPhoto();
                }
            }
        }
    }
}
/// <summary>
/// Handles the body frame data arriving from the sensor
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    bool dataReceived = false;

    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            if (this.bodies == null)
            {
                this.bodies = new Body[bodyFrame.BodyCount];
            }

            // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
            // As long as those body objects are not disposed and not set to null in the array,
            // those body objects will be re-used.
            bodyFrame.GetAndRefreshBodyData(this.bodies);
            dataReceived = true;
        }
    }

    if (dataReceived)
    {
        // Rebuild the list of currently tracked bodies every frame.
        activeBodies = new List <Body>();

        for (int i = 0; i < this.bodies.Length; i++)
        {
            Body body = this.bodies[i];

            if (body.IsTracked)
            {
                /*Dictionary<JointType, Windows.Kinect.Joint> joints = body.Joints;
                 *
                 * foreach (JointType jointType in joints.Keys) {
                 *  // sometimes the depth(Z) of an inferred joint may show as negative
                 *  // clamp down to 0.1f to prevent coordinatemapper from returning (-Infinity, -Infinity)
                 *  CameraSpacePoint position = joints[jointType].Position;
                 *  if (position.Z < 0) {
                 *      position.Z = InferredZPositionClamp;
                 *  }
                 * }*/
                activeBodies.Add(body);
                //Debug.Log(joints[JointType.HandLeft].Position.X + " " + joints[JointType.HandLeft].Position.Y + " " + joints[JointType.HandLeft].Position.Z);
            }
        }
    }
}
/// <summary>
/// Refreshes body data and, unless stopped, runs hand-over-head engagement tracking.
/// </summary>
private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs args)
{
    bool gotData = false;

    using (var frame = args.FrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            frame.GetAndRefreshBodyData(bodies);
            gotData = true;
        }
    }

    if (!gotData || stopped)
    {
        return;
    }

    this.TrackEngagedPlayersViaHandOverHead();
}
/// <summary>
/// Refreshes a locally-sized body array from the reader's source and selects the
/// first tracked body.
/// </summary>
/// <param name="sender">the BodyFrameReader raising the event</param>
/// <param name="e">event arguments</param>
private static void Reader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    // get total number of bodies from BodyFrameSource
    var bodies = new Body[(sender as BodyFrameReader).BodyFrameSource.BodyCount];

    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            bodyFrame.GetAndRefreshBodyData(bodies);

            // FirstOrDefault replaces the original ToList().Any(...) check plus
            // ToList().Where(...).First() — two needless list materializations
            // and a double scan collapsed into one pass.
            var body = bodies.FirstOrDefault(x => x.IsTracked);
        }
    }
}
/// <summary>
/// Watches the sensor's floor clip plane: when it drifts more than a squared
/// distance of 0.1 from the last seen value, flags that recalibration is needed,
/// then records the new plane.
/// </summary>
private void Reader_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    using (BodyFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        Vector4 temp = frame.FloorClipPlane;

        // Squared Euclidean distance of (X, Y, Z) from the previous plane vector.
        float dx = temp.X - up.X;
        float dy = temp.Y - up.Y;
        float dz = temp.Z - up.Z;

        if (dx * dx + dy * dy + dz * dz > .1)
        {
            this.needsCal = true;
        }

        up = temp;
    }
}
/// <summary>
/// Refreshes body data (allocating the array on first use) and broadcasts the
/// tracked bodies as serialized JSON to every connected client.
/// </summary>
private static void bodyFrameReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    bool dataReceived = false;

    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            if (bodies == null)
            {
                bodies = new Body[bodyFrame.BodyCount];
            }

            // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
            // As long as those body objects are not disposed and not set to null in the array,
            // those body objects will be re-used.
            bodyFrame.GetAndRefreshBodyData(bodies);
            dataReceived = true;
        }
    }

    if (!dataReceived)
    {
        return;
    }

    // Compute the tracked-user list and serialize once instead of once per client
    // (the payload is identical for every client), and use the boolean directly
    // instead of the IsTracked.Equals(true) anti-idiom.
    var users = bodies.Where(s => s.IsTracked).ToList();
    if (users.Count == 0)
    {
        return;
    }

    string json = users.Serialize(_coordinateMapper, _mode);

    foreach (var client in clients)
    {
        Console.WriteLine("jsonstring: " + json);
        Console.WriteLine("After body serialization and to send");
        client.Send(json);
    }
}
/// <summary>
/// When the VGB gesture source has lost its tracking id, scans the current bodies,
/// picks the tracked body whose head is closest to the camera and re-binds the
/// gesture source to it.
/// </summary>
void bfr_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    //Check to see if VGB has a valid tracking id, if not find a new body to track
    if (!pgfs.IsTrackingIdValid)
    {
        using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
        {
            if (bodyFrame != null)
            {
                Body[] bodies = new Body[6];
                bodyFrame.GetAndRefreshBodyData(bodies);

                Body closestBody = null;

                //iterate through the bodies and pick the one closest to the camera
                foreach (Body b in bodies)
                {
                    if (b.IsTracked)
                    {
                        if (closestBody == null)
                        {
                            closestBody = b;
                        }
                        else
                        {
                            // A smaller head Z means nearer to the sensor; only compare
                            // when the candidate's head joint is actually tracked.
                            Joint newHeadJoint = b.Joints[Microsoft.Kinect.JointType.Head];
                            Joint oldHeadJoint = closestBody.Joints[Microsoft.Kinect.JointType.Head];
                            if (newHeadJoint.TrackingState == TrackingState.Tracked && newHeadJoint.Position.Z < oldHeadJoint.Position.Z)
                            {
                                closestBody = b;
                            }
                        }
                    }
                }

                //if we found a tracked body, update the trackingid for vgb
                if (closestBody != null)
                {
                    pgfs.TrackingId = closestBody.TrackingId;
                }
            }
        }
    }
}
/// <summary>
/// Takes a fresh body snapshot and re-binds the face source's tracking id to the
/// first tracked body when it has lost its target.
/// </summary>
private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        var snapshot = new Body[frame.BodyCount];
        frame.GetAndRefreshBodyData(snapshot);

        Body tracked = snapshot.FirstOrDefault(b => b.IsTracked);
        if (!_faceSource.IsTrackingIdValid && tracked != null)
        {
            _faceSource.TrackingId = tracked.TrackingId;
        }
    }
}
// Refreshes the reusable body array and, when the face tracker has no valid
// tracking id, assigns it the id of the first tracked body.
private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }
        frame.GetAndRefreshBodyData(_bodies);
        if (_faceSource.IsTrackingIdValid)
        {
            return;
        }
        foreach (var candidate in _bodies)
        {
            if (candidate.IsTracked)
            {
                // Assign a tracking ID to the face source.
                _faceSource.TrackingId = candidate.TrackingId;
                break;
            }
        }
    }
}
// For every newly tracked body, creates a GestureScan wired to the navigation
// events and registers it in the scan map keyed by tracking id.
private void OnBodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    using (var bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame == null)
        {
            return;
        }
        bodyFrame.GetAndRefreshBodyData(_bodies);
        foreach (var body in _bodies)
        {
            if (!body.IsTracked || _scans.ContainsKey(body.TrackingId))
            {
                continue;
            }
            var scan = new GestureScan(_kinectSensor, body.TrackingId, _visualGestureBuilderDatabase);
            scan.Next += OnNext;
            scan.Previous += OnPrevious;
            scan.TakeControl += OnTakeControl;
            scan.TrackingIdLost += OnTrackingIdLost;
            scan.End += OnEnd;
            _scans.TryAdd(body.TrackingId, scan);
        }
    }
}
// Refreshes the body array and raises BodyUpdated for either the pinned body
// slot (when IsBodyIndexFixed) or the first tracked body.
private void OnFrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    bool dataReceived = false;
    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            if (_bodies == null)
            {
                _bodies = new Body[bodyFrame.BodyCount];
            }
            bodyFrame.GetAndRefreshBodyData(_bodies);
            dataReceived = true;
        }
    }
    if (!dataReceived)
    {
        return;
    }
    if (IsBodyIndexFixed)
    {
        // FIX: guard against an out-of-range FixedBodyIndex, which previously
        // threw IndexOutOfRangeException from inside the frame-arrived handler.
        if (FixedBodyIndex < 0 || FixedBodyIndex >= _bodies.Length)
        {
            return;
        }
        Body fixedBody = _bodies[FixedBodyIndex];
        if (fixedBody != null && fixedBody.IsTracked)
        {
            BodyUpdated?.Invoke(this, new BodyEventArgs(fixedBody));
        }
    }
    else
    {
        Body body = _bodies.FirstOrDefault(b => b.IsTracked);
        if (body != null)
        {
            BodyUpdated?.Invoke(this, new BodyEventArgs(body));
        }
    }
}
// Renders the skeleton view using the smoothing strategy selected in the UI:
// index 0 = raw, index 1 = Kalman, anything else = exponential.
void _bodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    if (_displayType != FrameTypes.Body)
    {
        return;
    }
    IEnumerable<IBody> bodies = null; // to make the GetBitmap call a little cleaner
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            switch (SmoothingCombo.SelectedIndex)
            {
                case 0:
                    frame.GetAndRefreshBodyData(_bodies);
                    bodies = _bodies;
                    break;
                case 1:
                    frame.GetAndRefreshBodyData(_kalmanBodies);
                    bodies = _kalmanBodies;
                    break;
                default:
                    frame.GetAndRefreshBodyData(_exponentialBodies);
                    bodies = _exponentialBodies;
                    break;
            }
        }
    }
    if (bodies == null)
    {
        OutputImage.Source = null;
        return;
    }
    bodies.MapDepthPositions();
    OutputImage.Source = bodies.GetBitmap(Colors.LightGreen, Colors.Yellow);
}
// Clears the wall surface and redraws every joint of every tracked body
// as a white point.
private void bodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }
        Wall.Clear();
        var snapshot = new Body[frame.BodyCount];
        frame.GetAndRefreshBodyData(snapshot);
        foreach (var body in snapshot)
        {
            if (!body.IsTracked)
            {
                continue;
            }
            foreach (var joint in body.Joints.Values)
            {
                Wall.DrawPoint(joint, Brushes.White);
            }
        }
    }
}
// Process frames: refresh the shared body array, then run the
// hand-over-head gesture detector against the fresh data.
private static void Reader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    using (var bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame == null)
        {
            return;
        }
        // Lazily allocate the body array to the sensor's capacity.
        _bodies = _bodies ?? new Body[bodyFrame.BodyCount];
        bodyFrame.GetAndRefreshBodyData(_bodies);
    }
    // Check if a hand is over the head.
    _handOverHeadDetector.UpdateData(_bodies);
}
/// <summary>
/// Handles a new body frame by creating a list of mapped bodies and sending it over the wire.
/// </summary>
/// <param name="sender">Sender</param>
/// <param name="e">Event arguments</param>
private void OnFrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    // FIX: snapshot the delegate once. Null-checking the FrameReady field and
    // invoking the field again later is a race — a subscriber detaching between
    // the two reads would make the invocation throw NullReferenceException.
    var frameReady = FrameReady;
    if (frameReady == null)
    {
        return;
    }
    var frame = e.FrameReference.AcquireFrame();
    if (frame == null)
    {
        return;
    }
    using (frame)
    {
        frame.GetAndRefreshBodyData(Bodies);
        var resultingBodies = Bodies.Where(b => b.IsTracked)
            .Select(body => MapBody(body, Manager.SensorId))
            .ToList();
        frameReady(this, new KinectItemListEventArgs<KinectBody>(Manager.SensorId, resultingBodies));
    }
}
/// <summary>
/// Handles the body frame data arriving from the sensor: refreshes the body
/// array, draws the dojo background for the current game phase, renders each
/// tracked skeleton, and advances the posture/phase state machine that drives
/// the music. (Original comments were in Spanish; translated to English.)
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    bool dataReceived = false;
    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            // Lazily allocate the body array with the sensor's body capacity.
            if (this.bodies == null)
            {
                this.bodies = new Body[bodyFrame.BodyCount];
            }
            // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
            // As long as those body objects are not disposed and not set to null in the array,
            // those body objects will be re-used.
            bodyFrame.GetAndRefreshBodyData(this.bodies);
            dataReceived = true;
        }
    }
    if (dataReceived)
    {
        using (DrawingContext dc = this.drawingGroup.Open())
        {
            // Draw the full-size background brush matching the active phase (dojo 1..4).
            if (fase1)
            {
                dc.DrawRectangle(dojo1, null, new Rect(0.0, 0.0, this.displayWidth, this.displayHeight));
            }
            else if (fase2)
            {
                dc.DrawRectangle(dojo2, null, new Rect(0.0, 0.0, this.displayWidth, this.displayHeight));
            }
            else if (fase3)
            {
                dc.DrawRectangle(dojo3, null, new Rect(0.0, 0.0, this.displayWidth, this.displayHeight));
            }
            else if (fase4)
            {
                dc.DrawRectangle(dojo4, null, new Rect(0.0, 0.0, this.displayWidth, this.displayHeight));
            }
            int penIndex = 0;
            foreach (Body body in this.bodies)
            {
                // Each body slot has its own pen color; the index advances even for
                // untracked slots so colors stay stable per slot.
                Pen drawPen = this.bodyColors[penIndex++];
                if (body.IsTracked)
                {
                    this.DrawClippedEdges(body, dc);
                    IReadOnlyDictionary<JointType, Joint> joints = body.Joints;
                    // Convert the joint points to depth (display) space.
                    foreach (JointType jointType in joints.Keys)
                    {
                        // sometimes the depth(Z) of an inferred joint may show as negative
                        // clamp down to 0.1f to prevent coordinatemapper from returning (-Infinity, -Infinity)
                        CameraSpacePoint position = joints[jointType].Position;
                        if (position.Z < 0)
                        {
                            position.Z = InferredZPositionClamp;
                        }
                        DepthSpacePoint depthSpacePoint = this.coordinateMapper.MapCameraPointToDepthSpace(position);
                        jointPoints[jointType] = new Point(depthSpacePoint.X, depthSpacePoint.Y);
                    }
                    this.DrawBody(joints, jointPoints, dc, drawPen);
                    // "Music sphere" position: slightly above the head joint.
                    Point posEsferaMusica = new Point(jointPoints[JointType.Head].X, (jointPoints[JointType.Head].Y) - 35.0);
                    // CUSTOM CODE: initial dojo drawing and the yin-yang sphere.
                    if (PaintStart)
                    {
                        dc.DrawEllipse(yinyangBrush, null, posEsferaMusica, 20, 20);
                    }
                    // Start-posture detection arms the music trigger; the frame counter
                    // resets whenever the posture is dropped.
                    if (PosturaStart())
                    {
                        tocarYin = true;
                    }
                    if (tocarYin)
                    {
                        contadorFramesMusica += 1;
                    }
                    if (!PosturaStart())
                    {
                        contadorFramesMusica = 0;
                    }
                    // First time the trigger fires: start the music once and move
                    // from phase 1 to phase 2.
                    if (tocarYin)
                    {
                        if (!NowPlaying)
                        {
                            NowPlaying = true;
                            PaintStart = false;
                            kungfu.Play();
                            fase1 = false;
                            fase2 = true;
                        }
                    }
                    // Start of the first posture: count consecutive frames it is held.
                    if (Postura1())
                    {
                        posturaInicial = true;
                    }
                    else if (!Postura1())
                    {
                        posturaInicial = false;
                        contadorFrames1 = 0;
                    }
                    if (posturaInicial)
                    {
                        contadorFrames1 += 1;
                    }
                    // Posture 1 held for >= 40 frames: advance from phase 2 to phase 3.
                    if (contadorFrames1 >= 40)
                    {
                        fase2 = false;
                        fase3 = true;
                        contadorFrames2 += 1;
                    }
                    if (Postura2())
                    {
                        postura2 = true;
                    }
                    else if (!Postura2())
                    {
                        contadorFrames2 = 0;
                    }
                    if (postura2)
                    {
                        contadorFrames2 += 1;
                    }
                    // Posture 2 held for >= 40 frames: advance from phase 3 to phase 4.
                    if (contadorFrames2 >= 40)
                    {
                        fase3 = false;
                        fase4 = true;
                    }
                    if (RightHandOnHead())
                    {
                        RHand_Head = true;
                    }
                    // The final posture stops the music.
                    if (PosturaFinal())
                    {
                        kungfu.Stop();
                    }
                }
                // Prevent drawing outside of our render area.
                // NOTE(review): this assignment sits inside the foreach, so it runs once
                // per body slot; it only needs to run once after the loop — harmless but
                // redundant as written.
                this.drawingGroup.ClipGeometry = new RectangleGeometry(new Rect(0.0, 0.0, this.displayWidth, this.displayHeight));
            }
        }
    }
}
/// <summary>
/// Handles the body frame data arriving from the sensor: refreshes the body
/// array, feeds the KAIT body-tracking service, and draws every tracked
/// skeleton (bones plus hand-state indicators) into the drawing group.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    bool dataReceived = false;
    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            // Lazily allocate the body array with the sensor's body capacity.
            if (this.bodies == null)
            {
                this.bodies = new Body[bodyFrame.BodyCount];
            }
            // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
            // As long as those body objects are not disposed and not set to null in the array,
            // those body objects will be re-used.
            bodyFrame.GetAndRefreshBodyData(this.bodies);
            dataReceived = true;
            #region this code calls the KAIT Advanced Body Tracking
            _bodyTrackingService.TrackBodies(this.bodies);
            // NOTE(review): activeBodyId is computed for its side effect on the
            // tracking service; the returned id itself is unused here.
            var activeBodyId = _bodyTrackingService.SetActivePlayer(this.bodies);
            RefreshBindings();
            #endregion
        }
    }
    if (dataReceived)
    {
        using (DrawingContext dc = this.drawingGroup.Open())
        {
            // Draw a transparent background to set the render size
            dc.DrawRectangle(Brushes.Black, null, new Rect(0.0, 0.0, this.displayWidth, this.displayHeight));
            int penIndex = 0;
            foreach (Body body in this.bodies)
            {
                // One pen color per body slot; advances even for untracked slots.
                Pen drawPen = this.bodyColors[penIndex++];
                if (body.IsTracked)
                {
                    this.DrawClippedEdges(body, dc);
                    IReadOnlyDictionary<JointType, Joint> joints = body.Joints;
                    // convert the joint points to depth (display) space
                    Dictionary<JointType, Point> jointPoints = new Dictionary<JointType, Point>();
                    foreach (JointType jointType in joints.Keys)
                    {
                        // sometimes the depth(Z) of an inferred joint may show as negative
                        // clamp down to 0.1f to prevent coordinatemapper from returning (-Infinity, -Infinity)
                        CameraSpacePoint position = joints[jointType].Position;
                        if (position.Z < 0)
                        {
                            position.Z = InferredZPositionClamp;
                        }
                        DepthSpacePoint depthSpacePoint = this.coordinateMapper.MapCameraPointToDepthSpace(position);
                        jointPoints[jointType] = new Point(depthSpacePoint.X, depthSpacePoint.Y);
                    }
                    this.DrawBody(joints, jointPoints, dc, drawPen);
                    // Overlay the open/closed/lasso hand-state indicators.
                    this.DrawHand(body.HandLeftState, jointPoints[JointType.HandLeft], dc);
                    this.DrawHand(body.HandRightState, jointPoints[JointType.HandRight], dc);
                }
            }
            // prevent drawing outside of our render area
            this.drawingGroup.ClipGeometry = new RectangleGeometry(new Rect(0.0, 0.0, this.displayWidth, this.displayHeight));
        }
    }
}
// Acquires the latest body frame (if any) and refreshes the cached body array.
void bodyFrameReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }
        if (bodies == null)
        {
            bodies = new Body[frame.BodyCount];
        }
        frame.GetAndRefreshBodyData(bodies);
    }
}
/// <summary>
/// Handles the body frame data arriving from the sensor: refreshes body data,
/// publishes a hand-closed signal for body slot 0, and draws face tracking
/// results (or a "face not tracked" message) for every face frame source.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_BodyFrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    using (var bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            // update body data
            bodyFrame.GetAndRefreshBodyData(this.bodies);
            // NOTE(review): only body slot 0 is inspected, whether or not that slot
            // is currently tracked — confirm this is intended before relying on it.
            if (this.bodies[0].HandLeftState == HandState.Closed)
            {
                this.handSignals.OnNext(this.bodies[0].HandLeftState);
            }
            using (DrawingContext dc = this.drawingGroup.Open())
            {
                // draw the dark background
                dc.DrawRectangle(Brushes.Black, null, this.displayRect);
                bool drawFaceResult = false;
                // iterate through each face source
                for (int i = 0; i < this.bodyCount; i++)
                {
                    // check if a valid face is tracked in this face source
                    if (this.faceFrameSources[i].IsTrackingIdValid)
                    {
                        // check if we have valid face frame results
                        if (this.faceFrameResults[i] != null)
                        {
                            // draw face frame results
                            this.DrawFaceFrameResults(i, this.faceFrameResults[i], dc);
                            if (!drawFaceResult)
                            {
                                drawFaceResult = true;
                            }
                        }
                    }
                    else
                    {
                        // check if the corresponding body is tracked
                        if (this.bodies[i].IsTracked)
                        {
                            // update the face frame source to track this body
                            this.faceFrameSources[i].TrackingId = this.bodies[i].TrackingId;
                        }
                    }
                }
                if (!drawFaceResult)
                {
                    // if no faces were drawn then this indicates one of the following:
                    // a body was not tracked
                    // a body was tracked but the corresponding face was not tracked
                    // a body and the corresponding face was tracked though the face box or the face points were not valid
                    dc.DrawText(
                        this.textFaceNotTracked,
                        this.textLayoutFaceNotTracked);
                }
                // clip all drawing to the display rectangle
                this.drawingGroup.ClipGeometry = new RectangleGeometry(this.displayRect);
            }
        }
    }
}
// Refreshes the body array and forwards every tracked body to the JavaScript
// callback as an anonymous object with depth-space joint coordinates.
private void BodyReader_FrameArrived(object sender, BodyFrameArrivedEventArgs e)
{
    bool gotFrame = false;
    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            if (this.bodies == null)
            {
                this.bodies = new Body[bodyFrame.BodyCount];
            }
            bodyFrame.GetAndRefreshBodyData(this.bodies);
            gotFrame = true;
        }
    }
    if (!gotFrame)
    {
        return;
    }
    var jsBodies = new ArrayList();
    foreach (Body trackedBody in this.bodies)
    {
        if (!trackedBody.IsTracked)
        {
            continue;
        }
        IReadOnlyDictionary<JointType, Joint> joints = trackedBody.Joints;
        // convert the joint points to depth (display) space
        IDictionary<String, Object> jsJoints = new Dictionary<String, Object>();
        foreach (JointType jointType in joints.Keys)
        {
            DepthSpacePoint mapped = this.coordinateMapper.MapCameraPointToDepthSpace(joints[jointType].Position);
            jsJoints[jointType.ToString()] = new { x = mapped.X, y = mapped.Y };
        }
        jsBodies.Add(new
        {
            trackingId = trackedBody.TrackingId,
            handLeftState = trackedBody.HandLeftState,
            handRightState = trackedBody.HandRightState,
            joints = jsJoints
        });
    }
    this.bodyFrameCallback(jsBodies);
}