/// <summary>
/// Handles a multi-source frame: acquires the body frame, refreshes the set of
/// tracked bodies, and sends one OSC message per enabled joint of the first
/// tracked body found in this frame.
/// </summary>
/// <param name="sender">The multi-source frame reader raising the event.</param>
/// <param name="e">Event arguments carrying the frame reference.</param>
public void MultiSourceFrameHandler(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Fix: AcquireFrame returns null when the frame has already expired.
    var reference = e.FrameReference.AcquireFrame();
    if (reference == null)
    {
        return;
    }

    // Acquire skeleton data
    using (var frame = reference.BodyFrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        _bodies = new Body[frame.BodyFrameSource.BodyCount];
        frame.GetAndRefreshBodyData(_bodies);

        foreach (var body in _bodies)
        {
            if (body == null || !body.IsTracked)
            {
                continue;
            }

            foreach (var joint in body.Joints)
            {
                // Skip joints that are disabled in the configuration.
                if (!_configurationFlags.JointFlags[joint.Key])
                {
                    continue;
                }

                var jointMessage = OscSerializer.BuildJointMessage(joint.Value);
                _dataSender.SendMessage(jointMessage);
            }

            // Only the first tracked body is reported per frame (as in the original).
            return;
        }
    }
}
/// <summary>
/// Shows the color frame in the viewer (when color visualization is active)
/// and feeds the closest tracked body to the gesture controller.
/// </summary>
/// <param name="sender">The multi-source frame reader raising the event.</param>
/// <param name="e">Event arguments carrying the frame reference.</param>
void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Fix: AcquireFrame returns null when the frame has already expired.
    var reference = e.FrameReference.AcquireFrame();
    if (reference == null)
    {
        return;
    }

    // Color
    using (var frame = reference.ColorFrameReference.AcquireFrame())
    {
        if (frame != null && viewer.Visualization == Visualization.Color)
        {
            viewer.Image = frame.ToBitmap();
        }
    }

    // Body
    using (var frame = reference.BodyFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            Body body = frame.Bodies().Closest();
            if (body != null)
            {
                _gestureController.Update(body);
            }
        }
    }
}
/// <summary>
/// Combines the color, depth, and body-index frames into a background-removed
/// ("green screen") image and shows it in the camera element.
/// </summary>
/// <param name="sender">The multi-source frame reader raising the event.</param>
/// <param name="e">Event arguments carrying the frame reference.</param>
void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Fix: AcquireFrame returns null when the frame has already expired.
    var reference = e.FrameReference.AcquireFrame();
    if (reference == null)
    {
        return;
    }

    using (var colorFrame = reference.ColorFrameReference.AcquireFrame())
    using (var depthFrame = reference.DepthFrameReference.AcquireFrame())
    using (var bodyIndexFrame = reference.BodyIndexFrameReference.AcquireFrame())
    {
        // All three frames are required to composite the green screen.
        if (colorFrame != null && depthFrame != null && bodyIndexFrame != null)
        {
            // Update the image source.
            camera.Source = _backgroundRemovalTool.GreenScreen(colorFrame, depthFrame, bodyIndexFrame);
        }
    }
    // Removed dead commented-out manual Dispose block: the using statements
    // already dispose every frame.
}
/// <summary>
/// Renders the latest color frame into myBitmap and draws a simple stick
/// figure (head/feet to spine base, hands to spine shoulder) for every
/// tracked body.
/// </summary>
/// <param name="sender">The multi-source frame reader raising the event.</param>
/// <param name="e">Event arguments carrying the frame reference.</param>
void myReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrameReference multiRef = e.FrameReference;
    MultiSourceFrame multiFrame = multiRef.AcquireFrame();
    if (multiFrame == null)
    {
        return; // frame expired before we could acquire it
    }

    using (ColorFrame colorFrame = multiFrame.ColorFrameReference.AcquireFrame())
    using (BodyFrame bodyFrame = multiFrame.BodyFrameReference.AcquireFrame())
    {
        if (colorFrame == null || bodyFrame == null)
        {
            return;
        }

        // Convert color data (lazily allocate the pixel buffer once).
        if (myPixels == null)
        {
            myPixels = new byte[colorFrame.FrameDescription.Width * colorFrame.FrameDescription.Height * ((PixelFormats.Bgr32.BitsPerPixel + 7) / 8)];
        }

        // Raw copy is cheaper when the sensor already delivers BGRA.
        if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
        {
            colorFrame.CopyRawFrameDataToArray(myPixels);
        }
        else
        {
            colorFrame.CopyConvertedFrameDataToArray(myPixels, ColorImageFormat.Bgra);
        }

        myBitmap.WritePixels(
            new Int32Rect(0, 0, myBitmap.PixelWidth, myBitmap.PixelHeight),
            myPixels,
            myBitmap.PixelWidth * sizeof(int),
            0);

        // Handle skeletal data (lazily allocate the body array once).
        if (myBodies == null)
        {
            myBodies = new Body[bodyFrame.BodyCount];
        }
        bodyFrame.GetAndRefreshBodyData(myBodies);

        foreach (Body body in myBodies)
        {
            // Fix: guard against null entries before reading IsTracked.
            if (body != null && body.IsTracked)
            {
                IReadOnlyDictionary<JointType, Joint> joints = body.Joints;
                drawABone(joints[JointType.Head], joints[JointType.SpineBase]);
                drawABone(joints[JointType.FootLeft], joints[JointType.SpineBase]);
                drawABone(joints[JointType.FootRight], joints[JointType.SpineBase]);
                drawABone(joints[JointType.HandLeft], joints[JointType.SpineShoulder]);
                drawABone(joints[JointType.HandRight], joints[JointType.SpineShoulder]);
            }
        }
    }
}
/// <summary>
/// Passes the color, depth, and body-index frames to the green-screen
/// (background removal) utility when all three are available.
/// </summary>
/// <param name="sender">The multi-source frame reader raising the event.</param>
/// <param name="e">Event arguments carrying the frame reference.</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Fix: AcquireFrame returns null when the frame has already expired.
    var reference = e.FrameReference.AcquireFrame();
    if (reference == null)
    {
        return;
    }

    using (var colorFrame = reference.ColorFrameReference.AcquireFrame())
    using (var depthFrame = reference.DepthFrameReference.AcquireFrame())
    using (var bodyIndexFrame = reference.BodyIndexFrameReference.AcquireFrame())
    {
        if (colorFrame != null && depthFrame != null && bodyIndexFrame != null)
        {
            _utils.GreenScreen(colorFrame, depthFrame, bodyIndexFrame);
        }
    }
}
/// <summary>
/// Displays the color frame (when in color mode), draws tracked skeletons on
/// the canvas, and appends every tracked joint position to the accumulated
/// log text, which is then written via txtCreator.
/// </summary>
/// <param name="sender">The multi-source frame reader raising the event.</param>
/// <param name="e">Event arguments carrying the frame reference.</param>
void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Fix: AcquireFrame returns null when the frame has already expired.
    var reference = e.FrameReference.AcquireFrame();
    if (reference == null)
    {
        return;
    }

    // Color
    using (var frame = reference.ColorFrameReference.AcquireFrame())
    {
        if (frame != null && _mode == Mode.Color)
        {
            camera.Source = frame.ToBitmap();
        }
    }

    // Body
    using (var frame = reference.BodyFrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        canvas.Children.Clear();
        _bodies = new Body[frame.BodyFrameSource.BodyCount];
        frame.GetAndRefreshBodyData(_bodies);

        // Fix: build the per-frame joint log in a StringBuilder instead of
        // O(n^2) string concatenation, then append once to the running log.
        var builder = new System.Text.StringBuilder();
        foreach (var body in _bodies)
        {
            if (body == null || !body.IsTracked)
            {
                continue;
            }

            foreach (Joint joint in body.Joints.Values)
            {
                builder.AppendFormat("{0}, {1}, {2}", joint.Position.X, joint.Position.Y, joint.Position.Z);
                builder.Append(Environment.NewLine);
            }

            if (_displayBody)
            {
                canvas.DrawSkeleton(body);
            }
        }
        lines = lines + builder.ToString();

        if (lines != "")
        {
            txtCreator(lines);
        }
    }
}
/// <summary>
/// Computes the right-hand velocity (per axis, m/s) of each tracked body from
/// the positional delta since the previous frame, shows it in the sensor
/// label, and adds a data point to the chart view-model.
/// </summary>
/// <param name="sender">The multi-source frame reader raising the event.</param>
/// <param name="e">Event arguments carrying the frame reference.</param>
private void MainWindow_KinectFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Fix: AcquireFrame returns null when the frame has already expired.
    var frameRef = e.FrameReference.AcquireFrame();
    if (frameRef == null)
    {
        return;
    }

    using (var frame = frameRef.BodyFrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        var bodies = new Body[frame.BodyFrameSource.BodyCount];
        frame.GetAndRefreshBodyData(bodies);

        foreach (var body in bodies)
        {
            if (!body.IsTracked)
            {
                continue;
            }

            Joint handJoint = body.Joints[JointType.HandRight];
            var tstamp = frame.RelativeTime;

            // Elapsed time since the previous sample, in seconds.
            // NOTE(review): if two events carry the same RelativeTime this is 0 and
            // the velocities become +/-Infinity; consider skipping such samples.
            double timeDelta = ((double)(tstamp - LastRelativeTime).TotalMilliseconds) / 1000;

            double x = handJoint.Position.X;
            double y = handJoint.Position.Y;
            double z = handJoint.Position.Z;
            double xVel = (x - LastX) / timeDelta;
            double yVel = (y - LastY) / timeDelta;
            double zVel = (z - LastZ) / timeDelta;

            // Fix: LastRelativeTime was assigned twice in the original; once suffices.
            LastRelativeTime = tstamp;
            LastX = x;
            LastY = y;
            LastZ = z;

            sensorValueLabel.Content = String.Format("{0:0.000}, {1:0.000}, {2:0.000}", xVel, yVel, zVel);
            ((KinectDataViewModel)this.DataContext).AddDataPoint(DateTime.Now, xVel, yVel, 0);
        }
    }
}
/// <summary>
/// Copies the latest color frame into colorBitmap's back buffer and the
/// latest depth frame into depthBitmap, provided both frames match the
/// preallocated bitmap sizes.
/// </summary>
/// <param name="sender">The multi-source frame reader raising the event.</param>
/// <param name="e">Event arguments carrying the frame reference.</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Fix: AcquireFrame returns null when the frame has already expired.
    var reference = e.FrameReference.AcquireFrame();
    if (reference == null)
    {
        return;
    }

    using (var colorFrame = reference.ColorFrameReference.AcquireFrame())
    using (var depthFrame = reference.DepthFrameReference.AcquireFrame())
    {
        var description = sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);

        // Require both frames and matching bitmap dimensions before copying.
        if (colorFrame == null || !IsSameSize(description, colorBitmap) ||
            depthFrame == null || !IsSameSize(sensor.DepthFrameSource.FrameDescription, depthBitmap))
        {
            return;
        }

        // Color: write directly into the bitmap back buffer (no intermediate array).
        using (var colorBuffer = colorFrame.LockRawImageBuffer())
        {
            colorBitmap.Lock();
            colorFrame.CopyConvertedFrameDataToIntPtr(
                colorBitmap.BackBuffer,
                (uint)(description.Width * description.Height * 4),
                ColorImageFormat.Bgra);
            colorBitmap.AddDirtyRect(new Int32Rect(0, 0, colorBitmap.PixelWidth, colorBitmap.PixelHeight));
            colorBitmap.Unlock();
        }

        // Depth
        description = sensor.DepthFrameSource.FrameDescription;
        using (var depthBuffer = depthFrame.LockImageBuffer())
        {
            // Lazily allocate the intermediate depth pixel array once.
            if (depthPixels == null)
            {
                this.depthPixels = new ushort[description.Height * description.Width];
            }
            ProcessDepthFrameData(depthBuffer, depthFrame);
            depthBitmap.WritePixels(
                new Int32Rect(0, 0, depthBitmap.PixelWidth, depthBitmap.PixelHeight),
                depthPixels,
                2 * depthBitmap.PixelWidth, // stride: 2 bytes per ushort pixel
                0);
        }
    }
}
/// <summary>
/// Shows whichever frame type matches the viewer's current visualization:
/// color, depth, or infrared.
/// </summary>
/// <param name="sender">The multi-source frame reader raising the event.</param>
/// <param name="e">Event arguments carrying the frame reference.</param>
void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Fix: AcquireFrame returns null when the frame has already expired.
    var reference = e.FrameReference.AcquireFrame();
    if (reference == null)
    {
        return;
    }

    // Color
    using (var frame = reference.ColorFrameReference.AcquireFrame())
    {
        if (frame != null && viewer.Visualization == Visualization.Color)
        {
            viewer.Image = frame.ToBitmap();
        }
    }

    // Depth
    using (var frame = reference.DepthFrameReference.AcquireFrame())
    {
        if (frame != null && viewer.Visualization == Visualization.Depth)
        {
            viewer.Image = frame.ToBitmap();
        }
    }

    // Infrared
    using (var frame = reference.InfraredFrameReference.AcquireFrame())
    {
        if (frame != null && viewer.Visualization == Visualization.Infrared)
        {
            viewer.Image = frame.ToBitmap();
        }
    }
}
/// <summary>
/// Acquires the multi-source frame and forwards its color and body frame
/// references to the corresponding processing helpers.
/// </summary>
/// <param name="sender">The multi-source frame reader raising the event.</param>
/// <param name="e">Event arguments carrying the frame reference.</param>
void multiSourceFrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrameReference msFrameReference = e.FrameReference;
    try
    {
        MultiSourceFrame msFrame = msFrameReference.AcquireFrame();
        if (msFrame == null)
        {
            return; // frame expired before we could acquire it
        }

        using (msFrame)
        {
            useColorFrame(msFrame.ColorFrameReference);
            useBodyFrame(msFrame.BodyFrameReference);
        }
    }
    catch (Exception) // fix: drop the unused `ex` variable (compiler warning)
    {
        // Best-effort: frames can expire between the event and AcquireFrame;
        // drop this frame rather than crash the pipeline.
    }
}
/// <summary>
/// Shows the color frame (when color visualization is active), updates the
/// user reporter with all bodies, then draws the closest body and displays
/// the joint angle defined by the _start/_center/_end joints.
/// </summary>
/// <param name="sender">The multi-source frame reader raising the event.</param>
/// <param name="e">Event arguments carrying the frame reference.</param>
void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Fix: AcquireFrame returns null when the frame has already expired.
    var reference = e.FrameReference.AcquireFrame();
    if (reference == null)
    {
        return;
    }

    // Color
    using (var frame = reference.ColorFrameReference.AcquireFrame())
    {
        if (frame != null && viewer.Visualization == Visualization.Color)
        {
            viewer.Image = frame.ToBitmap();
        }
    }

    // Body
    using (var frame = reference.BodyFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            var bodies = frame.Bodies();
            _userReporter.Update(bodies);

            Body body = bodies.Closest();
            if (body != null)
            {
                viewer.DrawBody(body);
                angle.Update(body.Joints[_start], body.Joints[_center], body.Joints[_end], 100);
                tblAngle.Text = ((int)angle.Angle).ToString();
            }
        }
    }
}
// Color frame analysis
/// <summary>
/// Per-frame game-state machine for the ping-pong tracker. While a volley is
/// in progress it tracks the ball's position, detects direction changes,
/// bounces, serves, and time-limit / below-table scoring; otherwise it waits
/// for the ball to sit still in a start position for 1.5 s to begin a volley.
/// </summary>
/// <param name="sender">The multi-source frame reader raising the event.</param>
/// <param name="e">Event arguments carrying the frame reference.</param>
// NOTE(review): multiSourceFrame is not null-checked before being passed to
// FindBall / DepthFrameReference — AcquireFrame can return null for expired
// frames; confirm FindBall tolerates null.
private void Frame_Arrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    bool changeDir = false;
    bool pause = false;
    if (scoreDelay != DateTime.MinValue)
    {
        if ((float)DateTime.Now.Subtract(this.scoreDelay).TotalSeconds < 2)
        {
            pause = true;
        }
        else
        {
            scoreDelay = DateTime.MinValue;
        }
    }
    if (!gameOver && !pause)
    {
        MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
        if (inVolley)
        {
            // If too much time passes without bounce/return, someone missed
            if (this.hitTime != DateTime.MinValue)
            {
                if ((float)DateTime.Now.Subtract(this.hitTime).TotalSeconds > 2)
                {
                    if (this.Direction == "Left" && bounce1) { Score("P2", "Time Limit"); }
                    else if (this.Direction == "Left") { Score("P1", "Time Limit"); }
                    else if (this.Direction == "Right" && bounce1) { Score("P1", "Time Limit"); }
                    else { Score("P2", "Time Limit"); }
                }
            }
            // Get Ball xy coordinates
            DataPoint BallLocation = FindBall(multiSourceFrame);
            int xavg = (int)BallLocation.X;
            int yavg = (int)BallLocation.Y;
            // If good data point, analyze it
            if (xavg > 1)
            {
                // Off (or rather under) table
                if (yavg < tableLevel - 100)
                {
                    if (bounce1)
                    {
                        if (this.Direction == "Left") { Score("P2", "Below Table"); } else { Score("P1", "Below Table"); }
                    }
                    else
                    {
                        if (this.Direction == "Left") { Score("P1", "Below Table"); } else { Score("P2", "Below Table"); }
                    }
                }
                // Determine direction and do game processing
                float xdelta = 0;
                float ydelta = 0;
                if (AllData.Count > 0)
                {
                    xdelta = AllData[AllData.Count - 1].X - xavg;
                    ydelta = AllData[AllData.Count - 1].Y - yavg;
                }
                if (served)
                {
                    // Horizontal direction determination and direction change detection
                    if (xdelta > 5)
                    {
                        if (this.Direction == "Right") { ChangeDirection(); changeDir = true; }
                        this.Direction = "Left";
                    }
                    else if (xdelta < -5)
                    {
                        if (this.Direction == "Left") { ChangeDirection(); changeDir = true; }
                        this.Direction = "Right";
                    }
                    //this.tempBounceXYZ = BounceLocation(PreviousDepthFrame, xavg, yavg);
                    using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
                    {
                        this.tempBounceXYZ = BounceLocation(depthFrame, xavg, yavg);
                        //if (depthFrame != null)
                        //{
                        //    depthFrame.CopyFrameDataToArray(this.PreviousDepthFrame);
                        //}
                    }
                    // Vertical direction and bounce detection
                    if (ydelta > 5)
                    {
                        this.VertDir = "Down";
                        PossibleBounce = false;
                    }
                    else if (ydelta < -5)
                    {
                        if (this.VertDir == "Down" && !changeDir) // Log possible bounce
                        {
                            PossibleBounce = true;
                            this.hitTime = DateTime.Now;
                            // Get xyz coords for potential bounce
                            //using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
                            //{
                            //    this.tempBounceXYZ = BounceLocation(depthFrame, xavg, yavg);
                            //}
                            this.tempBounce = new DataPoint(xavg, yavg, 0, 0);
                        }
                        else if (PossibleBounce) // if no direction change one frame later, possible bounce is a bounce
                        {
                            if (!changeDir)
                            {
                                // Handle bounce processing
                                Bounce(new DataPoint(tempBounce.X, tempBounce.Y, 0, (float)(DateTime.Now.Subtract(this.timeStarted).TotalSeconds)));
                                this.Bounces.Add(tempBounceXYZ);
                            }
                            PossibleBounce = false;
                        }
                        this.VertDir = "Up";
                    }
                    else if (ydelta <= 0)
                    {
                        if (this.VertDir == "Down" && !changeDir) // Log possible bounce
                        {
                            PossibleBounce = true;
                            this.hitTime = DateTime.Now;
                            // Get xyz coords for potential bounce
                            // NOTE(review): this using acquires and immediately disposes a
                            // depth frame without using it (the call is commented out) —
                            // confirm whether the BounceLocation call should be restored.
                            using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
                            {
                                //this.tempBounceXYZ = BounceLocation(depthFrame, xavg, yavg);
                            }
                            this.tempBounce = new DataPoint(xavg, yavg, 0, 0);
                        }
                    }
                }
                else if (inVolley) // if not served yet, check for serve hit
                {
                    // Serve defined as moving in x and negative y, and decently above table
                    // NOTE(review): (xdelta > 10 || xdelta < 10) is true for every value
                    // except exactly 10; this probably should read
                    // (xdelta > 10 || xdelta < -10) — confirm intent before changing.
                    if ((xdelta > 10 || xdelta < 10) && ydelta > 10)
                    {
                        this.served = true;
                        this.PointScored = "Served";
                        this.startPosTime = DateTime.MinValue;
                        this.VertDir = "Down";
                        if (xdelta > 0) { this.Direction = "Left"; } else { this.Direction = "Right"; }
                    }
                }
                // Add current location to points list
                this.AllData.Add(new DataPoint(xavg, yavg, 0, (float)(DateTime.Now.Subtract(this.timeStarted).TotalSeconds)));
            }
        }
        else // Check for ball in start position to start volley
        {
            // Get Ball xy coordinates
            DataPoint BallLocation = FindBall(multiSourceFrame);
            int xavg = (int)BallLocation.X;
            int yavg = (int)BallLocation.Y;
            // Determine if ball in start position 1.5 seconds to signal start of volley
            if (Math.Abs(yavg - this.tableLevel) < 30 && (xavg > netLocation + 300 || xavg < netLocation - 300))
            {
                if (!startPosition) // Log ball in possible start position
                {
                    this.startPosTime = DateTime.Now;
                    this.startPosition = true;
                    this.startLocation.X = xavg;
                    this.startLocation.Y = yavg;
                    this.PointScored = "See it";
                }
                else if (Math.Abs(this.startLocation.X - xavg) > 10 || Math.Abs(this.startLocation.Y - yavg) > 10) // Check ball not moving
                {
                    this.startPosTime = DateTime.Now;
                    this.startLocation.X = xavg;
                    this.startLocation.Y = yavg;
                    this.PointScored = "unstable";
                }
                else if ((float)(DateTime.Now.Subtract(this.startPosTime).TotalSeconds) > 1.5) // Start volley if ball not moved in 1.5 seconds
                {
                    this.PointScored = "Starting Volley";
                    this.startPosTime = DateTime.MinValue;
                    this.startPosition = false;
                    StartVolley();
                }
                else
                {
                    this.PointScored = "stable";
                }
            }
        }
    }
}
/// <summary>
/// Routes the incoming multi-source frame to the visualization selected by
/// tipoFrame: color, depth, infrared, or skeleton-only (camera hidden).
/// </summary>
/// <param name="sender">The multi-source frame reader raising the event.</param>
/// <param name="e">Event arguments carrying the frame reference.</param>
private void SReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Get the multi-frame reference.
    // Fix: AcquireFrame returns null when the frame has already expired.
    var reference = e.FrameReference.AcquireFrame();
    if (reference == null)
    {
        return;
    }

    // Color frame
    using (var frame = reference.ColorFrameReference.AcquireFrame())
    {
        if (i_camera.Visibility == Visibility.Hidden)
        {
            i_camera.Visibility = Visibility.Visible;
        }
        if (frame != null && tipoFrame == Mode.Color)
        {
            i_camera.Source = ReadersFrames.ToBitmap(frame);
        }
    }

    // Depth frame
    using (var frame = reference.DepthFrameReference.AcquireFrame())
    {
        if (i_camera.Visibility == Visibility.Hidden)
        {
            i_camera.Visibility = Visibility.Visible;
        }
        if (frame != null && tipoFrame == Mode.Depth)
        {
            i_camera.Source = ReadersFrames.ToBitmap(frame);
        }
    }

    // Infrared frame
    using (var frame = reference.InfraredFrameReference.AcquireFrame())
    {
        if (i_camera.Visibility == Visibility.Hidden)
        {
            i_camera.Visibility = Visibility.Visible;
        }
        if (frame != null && tipoFrame == Mode.Infrared)
        {
            i_camera.Source = ReadersFrames.ToBitmap(frame);
        }
    }

    // Body frame
    using (var frame = reference.BodyFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            // Clear the skeletons drawn for the previous frame.
            c_body.Children.Clear();
            if (tipoFrame == Mode.Body)
            {
                // Hide the camera background; only the skeleton is shown.
                i_camera.Visibility = Visibility.Hidden;
                bodies = new Body[frame.BodyFrameSource.BodyCount];
                frame.GetAndRefreshBodyData(bodies);
                foreach (var bo in bodies)
                {
                    if (bo != null && bo.IsTracked)
                    {
                        // Draw the joints and bones.
                        c_body.DrawSkeleton(bo);
                    }
                }
            }
        }
    }
}
/// <summary>
/// Dispatches the multi-source frame to the renderer selected by
/// CurrentDisplayFrameType (infrared / color / depth / body mask / body
/// joints / background removed) and always feeds the body frame to gesture
/// registration first.
/// </summary>
/// <param name="sender">The multi-source frame reader raising the event.</param>
/// <param name="e">Event arguments carrying the frame reference.</param>
private void Reader_MultiSourceFrameArrived(MultiSourceFrameReader sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }
    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    InfraredFrame infraredFrame = null;
    BodyFrame bodyFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    IBuffer depthFrameDataBuffer = null;
    IBuffer bodyIndexFrameData = null;
    // Com interface for unsafe byte manipulation
    IBufferByteAccess bufferByteAccess = null;
    //lab 13
    // NOTE(review): bodyFrame may be null here; RegisterGesture is assumed to
    // tolerate a null argument — confirm.
    using (bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
    {
        RegisterGesture(bodyFrame);
    }
    switch (CurrentDisplayFrameType)
    {
        case DisplayFrameType.Infrared:
            using (infraredFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame())
            {
                ShowInfraredFrame(infraredFrame);
            }
            break;
        case DisplayFrameType.Color:
            using (colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
            {
                ShowColorFrame(colorFrame);
            }
            break;
        case DisplayFrameType.Depth:
            using (depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
            {
                ShowDepthFrame(depthFrame);
            }
            break;
        case DisplayFrameType.BodyMask:
            // Put in a try catch to utilise finally() and clean up frames
            try
            {
                depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
                bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();
                colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
                if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
                {
                    return; // finally still disposes whatever was acquired
                }
                // Access the depth frame data directly via LockImageBuffer to avoid making a copy
                depthFrameDataBuffer = depthFrame.LockImageBuffer();
                this.coordinateMapper.MapColorFrameToDepthSpaceUsingIBuffer(depthFrameDataBuffer, this.colorMappedToDepthPoints);
                // Process Color
                colorFrame.CopyConvertedFrameDataToBuffer(this.bitmap.PixelBuffer, ColorImageFormat.Bgra);
                // Access the body index frame data directly via LockImageBuffer to avoid making a copy
                bodyIndexFrameData = bodyIndexFrame.LockImageBuffer();
                ShowMappedBodyFrame(depthFrame.FrameDescription.Width, depthFrame.FrameDescription.Height, bodyIndexFrameData, bufferByteAccess);
            }
            finally
            {
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                }
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                }
                if (bodyIndexFrame != null)
                {
                    bodyIndexFrame.Dispose();
                }
                if (depthFrameDataBuffer != null)
                {
                    // We must force a release of the IBuffer in order to ensure that we have dropped all references to it.
                    System.Runtime.InteropServices.Marshal.ReleaseComObject(depthFrameDataBuffer);
                }
                if (bodyIndexFrameData != null)
                {
                    System.Runtime.InteropServices.Marshal.ReleaseComObject(bodyIndexFrameData);
                }
                if (bufferByteAccess != null)
                {
                    System.Runtime.InteropServices.Marshal.ReleaseComObject(bufferByteAccess);
                }
            }
            break;
        case DisplayFrameType.BodyJoints:
            using (bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
            {
                ShowBodyJoints(bodyFrame);
            }
            break;
        case DisplayFrameType.BackgroundRemoved:
            // Put in a try catch to utilise finally() and clean up frames
            try
            {
                depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
                colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
                if ((depthFrame == null) || (colorFrame == null))
                {
                    return;
                }
                depthFrame.CopyFrameDataToArray(depthFrameData);
                this.coordinateMapper.MapColorFrameToDepthSpace(depthFrameData, this.colorMappedToDepthPoints);
                // Process Color.
                colorFrame.CopyConvertedFrameDataToBuffer(this.bitmap.PixelBuffer, ColorImageFormat.Bgra);
                ShowMappedColorBackgroundRemoved(colorMappedToDepthPoints, depthFrameData, depthFrame.FrameDescription);
            }
            finally
            {
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                }
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                }
            }
            break;
        default:
            break;
    }
}
/// <summary>
/// Shows the color camera image, draws both hands and thumbs for every
/// tracked body, and reports the open/closed/lasso state of each hand in
/// the UI text blocks.
/// </summary>
/// <param name="sender">The multi-source frame reader raising the event.</param>
/// <param name="e">Event arguments carrying the frame reference.</param>
void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Fix: AcquireFrame returns null when the frame has already expired.
    var reference = e.FrameReference.AcquireFrame();
    if (reference == null)
    {
        return;
    }

    // Color
    using (var frame = reference.ColorFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            camera.Source = frame.ToBitmap();
        }
    }

    // Body
    using (var frame = reference.BodyFrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        canvas.Children.Clear();
        _bodies = new Body[frame.BodyFrameSource.BodyCount];
        frame.GetAndRefreshBodyData(_bodies);

        foreach (var body in _bodies)
        {
            if (body == null || !body.IsTracked)
            {
                continue;
            }

            // Find the joints
            Joint handRight = body.Joints[JointType.HandRight];
            Joint thumbRight = body.Joints[JointType.ThumbRight];
            Joint handLeft = body.Joints[JointType.HandLeft];
            Joint thumbLeft = body.Joints[JointType.ThumbLeft];

            // Draw hands and thumbs
            canvas.DrawHand(handRight, _sensor.CoordinateMapper);
            canvas.DrawHand(handLeft, _sensor.CoordinateMapper);
            canvas.DrawThumb(thumbRight, _sensor.CoordinateMapper);
            canvas.DrawThumb(thumbLeft, _sensor.CoordinateMapper);

            // Translate the hand states to display strings; states not listed
            // below leave the previous value unchanged.
            switch (body.HandRightState)
            {
                case HandState.Open: rightHandState = "Open"; break;
                case HandState.Closed: rightHandState = "Closed"; break;
                case HandState.Lasso: rightHandState = "Lasso"; break;
                case HandState.Unknown: rightHandState = "Unknown..."; break;
                case HandState.NotTracked: rightHandState = "Not tracked"; break;
                default: break;
            }
            switch (body.HandLeftState)
            {
                case HandState.Open: leftHandState = "Open"; break;
                case HandState.Closed: leftHandState = "Closed"; break;
                case HandState.Lasso: leftHandState = "Lasso"; break;
                case HandState.Unknown: leftHandState = "Unknown..."; break;
                case HandState.NotTracked: leftHandState = "Not tracked"; break;
                default: break;
            }

            tblRightHandState.Text = rightHandState;
            tblLeftHandState.Text = leftHandState;
        }
    }
}
/// <summary>
/// Dispatches the multi-source frame to the renderer selected by
/// CurrentDisplayFrameType (infrared / color / depth / body mask / body
/// joints / background removed / face-on-color / face-on-infrared / face
/// game) and always feeds the body frame to gesture registration first.
/// </summary>
/// <param name="sender">The multi-source frame reader raising the event.</param>
/// <param name="e">Event arguments carrying the frame reference.</param>
private void Reader_MultiSourceFrameArrived(MultiSourceFrameReader sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }
    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    InfraredFrame infraredFrame = null;
    BodyFrame bodyFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    IBuffer depthFrameDataBuffer = null;
    IBuffer bodyIndexFrameData = null;
    // Com interface for unsafe byte manipulation
    IBufferByteAccess bufferByteAccess = null;
    //lab 13
    // NOTE(review): bodyFrame may be null here; RegisterGesture is assumed to
    // tolerate a null argument — confirm.
    using (bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
    {
        RegisterGesture(bodyFrame);
    }
    switch (CurrentDisplayFrameType)
    {
        case DisplayFrameType.Infrared:
            using (infraredFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame())
            {
                ShowInfraredFrame(infraredFrame);
            }
            break;
        case DisplayFrameType.Color:
            using (colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
            {
                ShowColorFrame(colorFrame);
            }
            break;
        case DisplayFrameType.Depth:
            using (depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
            {
                ShowDepthFrame(depthFrame);
            }
            break;
        case DisplayFrameType.BodyMask:
            // Put in a try catch to utilise finally() and clean up frames
            try
            {
                depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
                bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();
                colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
                if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
                {
                    return; // finally still disposes whatever was acquired
                }
                // Access the depth frame data directly via LockImageBuffer to avoid making a copy
                depthFrameDataBuffer = depthFrame.LockImageBuffer();
                this.coordinateMapper.MapColorFrameToDepthSpaceUsingIBuffer(depthFrameDataBuffer, this.colorMappedToDepthPoints);
                // Process Color
                colorFrame.CopyConvertedFrameDataToBuffer(this.bitmap.PixelBuffer, ColorImageFormat.Bgra);
                // Access the body index frame data directly via LockImageBuffer to avoid making a copy
                bodyIndexFrameData = bodyIndexFrame.LockImageBuffer();
                ShowMappedBodyFrame(depthFrame.FrameDescription.Width, depthFrame.FrameDescription.Height, bodyIndexFrameData, bufferByteAccess);
            }
            finally
            {
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                }
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                }
                if (bodyIndexFrame != null)
                {
                    bodyIndexFrame.Dispose();
                }
                if (depthFrameDataBuffer != null)
                {
                    // We must force a release of the IBuffer in order to ensure that we have dropped all references to it.
                    System.Runtime.InteropServices.Marshal.ReleaseComObject(depthFrameDataBuffer);
                }
                if (bodyIndexFrameData != null)
                {
                    System.Runtime.InteropServices.Marshal.ReleaseComObject(bodyIndexFrameData);
                }
                if (bufferByteAccess != null)
                {
                    System.Runtime.InteropServices.Marshal.ReleaseComObject(bufferByteAccess);
                }
            }
            break;
        case DisplayFrameType.BodyJoints:
            using (bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
            {
                ShowBodyJoints(bodyFrame);
            }
            break;
        case DisplayFrameType.BackgroundRemoved:
            // Put in a try catch to utilise finally() and clean up frames
            try
            {
                depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
                colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
                if ((depthFrame == null) || (colorFrame == null))
                {
                    return;
                }
                depthFrame.CopyFrameDataToArray(depthFrameData);
                this.coordinateMapper.MapColorFrameToDepthSpace(depthFrameData, this.colorMappedToDepthPoints);
                // Process Color.
                colorFrame.CopyConvertedFrameDataToBuffer(this.bitmap.PixelBuffer, ColorImageFormat.Bgra);
                ShowMappedColorBackgroundRemoved(colorMappedToDepthPoints, depthFrameData, depthFrame.FrameDescription);
            }
            finally
            {
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                }
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                }
            }
            break;
        case DisplayFrameType.FaceOnColor:
            using (colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
            {
                ShowColorFrame(colorFrame);
                this.faceManager.DrawLatestFaceResults(this.FacePointsCanvas, this.faceFrameFeatures);
            }
            break;
        case DisplayFrameType.FaceOnInfrared:
            using (infraredFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame())
            {
                ShowInfraredFrame(infraredFrame);
                DrawFaceOnInfrared();
            }
            break;
        case DisplayFrameType.FaceGame:
            FaceGameLookUpdate();
            break;
        default:
            break;
    }
}
/// <summary>
/// Handles the depth/color/body index frame data arriving from the sensor.
/// Maps each color pixel to depth space and zeroes every color pixel that
/// does not belong to a tracked body, producing a body-mask image in
/// this.bitmap.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    int depthWidth = 0;
    int depthHeight = 0;
    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    bool isBitmapLocked = false;
    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }
    // We use a try/finally to ensure that we clean up before we exit the function.
    // This includes calling Dispose on any Frame objects that we may have and unlocking the bitmap back buffer.
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();
        // If any frame has expired by the time we process this event, return.
        // The "finally" statement will Dispose any that are not null.
        if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
        {
            return;
        }
        // Process Depth
        FrameDescription depthFrameDescription = depthFrame.FrameDescription;
        depthWidth = depthFrameDescription.Width;
        depthHeight = depthFrameDescription.Height;
        // Access the depth frame data directly via LockImageBuffer to avoid making a copy
        using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
        {
            this.coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                depthFrameData.UnderlyingBuffer,
                depthFrameData.Size,
                this.colorMappedToDepthPoints);
        }
        // We're done with the DepthFrame
        depthFrame.Dispose();
        depthFrame = null;
        // Process Color
        // Lock the bitmap for writing
        this.bitmap.Lock();
        isBitmapLocked = true;
        colorFrame.CopyConvertedFrameDataToIntPtr(this.bitmap.BackBuffer, this.bitmapBackBufferSize, ColorImageFormat.Bgra);
        // We're done with the ColorFrame
        colorFrame.Dispose();
        colorFrame = null;
        // We'll access the body index data directly to avoid a copy
        // (requires compiling with /unsafe).
        using (KinectBuffer bodyIndexData = bodyIndexFrame.LockImageBuffer())
        {
            unsafe
            {
                byte* bodyIndexDataPointer = (byte*)bodyIndexData.UnderlyingBuffer;
                int colorMappedToDepthPointCount = this.colorMappedToDepthPoints.Length;
                fixed (DepthSpacePoint* colorMappedToDepthPointsPointer = this.colorMappedToDepthPoints)
                {
                    // Treat the color data as 4-byte pixels
                    uint* bitmapPixelsPointer = (uint*)this.bitmap.BackBuffer;
                    // Loop over each row and column of the color image
                    // Zero out any pixels that don't correspond to a body index
                    for (int colorIndex = 0; colorIndex < colorMappedToDepthPointCount; ++colorIndex)
                    {
                        float colorMappedToDepthX = colorMappedToDepthPointsPointer[colorIndex].X;
                        float colorMappedToDepthY = colorMappedToDepthPointsPointer[colorIndex].Y;
                        // The sentinel value is -inf, -inf, meaning that no depth pixel corresponds to this color pixel.
                        if (!float.IsNegativeInfinity(colorMappedToDepthX) && !float.IsNegativeInfinity(colorMappedToDepthY))
                        {
                            // Make sure the depth pixel maps to a valid point in color space
                            int depthX = (int)(colorMappedToDepthX + 0.5f);
                            int depthY = (int)(colorMappedToDepthY + 0.5f);
                            // If the point is not valid, there is no body index there.
                            if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                            {
                                int depthIndex = (depthY * depthWidth) + depthX;
                                // If we are tracking a body for the current pixel, do not zero out the pixel
                                if (bodyIndexDataPointer[depthIndex] != 0xff)
                                {
                                    continue;
                                }
                            }
                        }
                        bitmapPixelsPointer[colorIndex] = 0;
                    }
                }
                this.bitmap.AddDirtyRect(new Int32Rect(0, 0, this.bitmap.PixelWidth, this.bitmap.PixelHeight));
            }
        }
    }
    finally
    {
        if (isBitmapLocked)
        {
            this.bitmap.Unlock();
        }
        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }
        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }
        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }
    }
}
/// <summary>
/// Acquires matching color and depth frames from the multi-source frame and
/// copies their pixel data into colorImagePixels / depthImagePixels under
/// rawDataLock. Expired or mismatched frames are silently skipped.
/// </summary>
/// <param name="e">Event arguments carrying the frame reference.</param>
private void ProcessFrameData(MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrameReference frameReference = e.FrameReference;
    MultiSourceFrame multiSourceFrame = null;
    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    try
    {
        multiSourceFrame = frameReference.AcquireFrame();
        if (multiSourceFrame != null)
        {
            lock (rawDataLock)
            {
                ColorFrameReference colorFrameReference = multiSourceFrame.ColorFrameReference;
                DepthFrameReference depthFrameReference = multiSourceFrame.DepthFrameReference;
                colorFrame = colorFrameReference.AcquireFrame();
                depthFrame = depthFrameReference.AcquireFrame();
                if ((depthFrame != null) && (colorFrame != null))
                {
                    FrameDescription colorFrameDescription = colorFrame.FrameDescription;
                    int colorWidth = colorFrameDescription.Width;
                    int colorHeight = colorFrameDescription.Height;
                    // Only copy when the frame size matches the preallocated buffer.
                    if ((colorWidth * colorHeight * sizeof(int)) == colorImagePixels.Length)
                    {
                        colorFrame.CopyConvertedFrameDataToArray(colorImagePixels, ColorImageFormat.Bgra);
                    }
                    FrameDescription depthFrameDescription = depthFrame.FrameDescription;
                    int depthWidth = depthFrameDescription.Width;
                    int depthHeight = depthFrameDescription.Height;
                    if ((depthWidth * depthHeight) == depthImagePixels.Length)
                    {
                        depthFrame.CopyFrameDataToArray(depthImagePixels);
                    }
                }
            }
        }
        // NOTE(review): this clears the capture request even when no frame (or
        // only one of the two frames) was available — it likely belongs inside
        // the (depthFrame != null && colorFrame != null) branch. Behavior
        // preserved as-is pending confirmation.
        CaptureCurrent = false; //We got both color and depth, everything went ok, stop trying to capture this image
    }
    catch (Exception)
    {
        // ignore if the frame is no longer available
    }
    finally
    {
        // DepthFrame and ColorFrame are IDisposable and must be released promptly.
        if (depthFrame != null)
        {
            depthFrame.Dispose();
            depthFrame = null;
        }
        if (colorFrame != null)
        {
            colorFrame.Dispose();
            colorFrame = null;
        }
        // Fix: removed the no-op "multiSourceFrame = null" reassignment of a
        // local variable; dropping the reference at method exit is sufficient.
    }
}
/// <summary>
/// Handles an arriving Kinect multi-source frame: refreshes the 2D top view,
/// optionally kicks off template detection in the background, renders the depth
/// frame, updates per-person tracking/gesture state, and records activities.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments carrying the multi-source frame reference.</param>
private void MultiSourceFrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Shut down the whole application when a shutdown was requested elsewhere.
    if (isDownApplication)
    {
        Application.Current.Shutdown();
    }

    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    if (multiSourceFrame != null)
    {
        using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
        {
            using (BodyFrame bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
            {
                using (DrawingContext drawingContext = drawingGroup.Open())
                {
                    // Both depth and body data are required for this pipeline.
                    if (depthFrame != null && bodyFrame != null)
                    {
                        // Refresh the foreground of 2D top view for positioning.
                        Plot.RefreshForegroundCanvas(Canvas_Position_Foreground, activities);

                        // Find templates (one-shot: the flag is cleared below once started).
                        if (isFindingTemplate)
                        {
                            // Map the full depth frame into camera space for the detector.
                            ushort[] depthFrameData = new ushort[depthFrame.FrameDescription.Height * depthFrame.FrameDescription.Width];
                            depthFrame.CopyFrameDataToArray(depthFrameData);
                            cameraSpacePoints = new CameraSpacePoint[depthFrame.FrameDescription.Height * depthFrame.FrameDescription.Width];
                            kinectSensor.CoordinateMapper.MapDepthFrameToCameraSpace(depthFrameData, cameraSpacePoints);

                            // Height band (meters, ground-relative) the detector searches in.
                            TemplateDetector.heightLow = -2.4f;
                            TemplateDetector.heightHigh = -1.9f;
                            TemplateDetector.canvas_width = Canvas_Position_Background.Width;
                            TemplateDetector.canvas_height = Canvas_Position_Background.Height;
                            TemplateDetector.canvas_environment = Canvas_Position_Environment;

                            // Run detection asynchronously so the frame handler stays responsive.
                            BackgroundWorker worker = new BackgroundWorker();
                            worker.WorkerReportsProgress = true;
                            worker.DoWork += TemplateDetector.DoInBackgrond;
                            worker.ProgressChanged += TemplateDetector.OnProgress;
                            worker.RunWorkerCompleted += TemplateDetector.OnPostExecute;
                            worker.RunWorkerAsync();
                            isFindingTemplate = false;
                        }

                        // Display depth frame.
                        // Uncomment to enable the display for height segmentation result.
                        //if (!isHeightSegmented)
                        if (true)
                        {
                            drawingContext.DrawImage(Transformation.ToBitmap(depthFrame, depthFramePixels, true),
                                new Rect(0.0, 0.0, kinectSensor.DepthFrameSource.FrameDescription.Width, kinectSensor.DepthFrameSource.FrameDescription.Height));
                        }
                        else
                        {
                            drawingContext.DrawImage(Transformation.ToBitmap(depthFrame, segmentedDepthFramePixels, false),
                                new Rect(0.0, 0.0, kinectSensor.DepthFrameSource.FrameDescription.Width, kinectSensor.DepthFrameSource.FrameDescription.Height));
                        }

                        // Display top view in height once the detector reports a finished drawing.
                        if (TemplateDetector.isDrawDone)
                        {
                            using (DrawingContext drawingContext_heightview = drawingGroup_topView.Open())
                            {
                                drawingContext_heightview.DrawImage(
                                    Transformation.ToBitmap(TemplateDetector.area_width, TemplateDetector.area_height, TemplateDetector.pixels),
                                    new Rect(0.0, 0.0, TemplateDetector.area_width, TemplateDetector.area_height));

                                // Outline each detected template in its own color.
                                foreach (Template t in TemplateDetector.templates)
                                {
                                    drawingContext_heightview.DrawRectangle(null, new Pen(t.Brush, 2),
                                        new Rect(new Point(t.TopLeft.X, t.TopLeft.Y), new Size(t.Width, t.Height)));
                                }
                            }
                            TemplateDetector.isDrawDone = false;
                        }

                        // Load raw body joints info from Kinect.
                        bodyFrame.GetAndRefreshBodyData(bodies);

                        // Update personal information from the raw joints.
                        for (int i = 0; i < kinectSensor.BodyFrameSource.BodyCount; ++i)
                        {
                            if (persons[i] == null)
                            {
                                persons[i] = new Person();
                            }

                            // Keep the per-slot gesture detector bound to the current tracking id;
                            // pause it when the slot is empty (id == 0).
                            ulong trackingId = bodies[i].TrackingId;
                            if (trackingId != gestureDetectorList[i].TrackingId)
                            {
                                gestureDetectorList[i].TrackingId = trackingId;
                                gestureDetectorList[i].IsPaused = trackingId == 0;
                            }

                            if (bodies[i].IsTracked)
                            {
                                // Update tracking status.
                                persons[i].IsTracked = true;
                                persons[i].ID = bodies[i].TrackingId;

                                // Assign color to person in the top view for positioning.
                                persons[i].Color = Plot.BodyColors[i];

                                // Get person's 3D position in the camera's coordinate system (meters).
                                CameraSpacePoint headPositionCamera = bodies[i].Joints[JointType.Head].Position;

                                // Convert to 3D position in the horizontal (ground) coordinate system,
                                // compensating for the sensor tilt.
                                CameraSpacePoint headPositionGournd = Transformation.RotateBackFromTilt(TILT_ANGLE, true, headPositionCamera);

                                // Convert to 2D top view position on canvas.
                                Transformation.ConvertGroundSpaceToPlane(headPositionGournd, persons[i]);

                                // Determine body orientation using the two shoulder joints.
                                CameraSpacePoint leftShoulderPositionGround = Transformation.RotateBackFromTilt(TILT_ANGLE, true, bodies[i].Joints[JointType.ShoulderLeft].Position);
                                CameraSpacePoint rightShoulderPositionGround = Transformation.RotateBackFromTilt(TILT_ANGLE, true, bodies[i].Joints[JointType.ShoulderRight].Position);
                                BodyOrientation.DecideOrientation(leftShoulderPositionGround, rightShoulderPositionGround, persons[i],
                                    Transformation.CountZeroInRec(depthFramePixels,
                                        kinectSensor.CoordinateMapper.MapCameraPointToDepthSpace(headPositionCamera), 16,
                                        kinectSensor.DepthFrameSource.FrameDescription.Width),
                                    Canvas_Position_Foreground);
                            }
                            else
                            {
                                persons[i].IsTracked = false;
                            }
                        }

                        // Render people overlays and refresh the system status displays.
                        DrawPeopleOnDepth(drawingContext);
                        DrawPeopleOnCanvas();
                        DetermineSystemStatus();
                        DrawSystemStatus();

                        // Recognize and record activities when recording requirements are satisfied.
                        if (isRecording)
                        {
                            CheckActivity();
                            DrawActivityOnCanvas();
                            Record();
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Renders the newest color, depth, or infrared frame depending on the active
/// display mode; in color mode it also copies the frame into the reusable
/// WriteableBitmap back buffer.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments carrying the multi-source frame reference.</param>
public void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Get a reference to the multi-frame.
    var reference = e.FrameReference.AcquireFrame();

    // Open color frame.
    using (var frame = reference.ColorFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            if (_mode == Mode.Color)
            {
                camera.Source = ToBitmap(frame);

                // BUGFIX: the frame was previously re-wrapped in a nested
                // "using (ColorFrame colorFrame = frame)", which disposed the
                // same ColorFrame twice (once at the inner using's end, again
                // at the outer one). Use the already-acquired frame directly.
                FrameDescription colorFrameDescription = frame.FrameDescription;

                using (KinectBuffer colorBuffer = frame.LockRawImageBuffer())
                {
                    this.colorBitmap.Lock();

                    // Verify data and write the new color frame data to the display bitmap.
                    if ((colorFrameDescription.Width == this.colorBitmap.PixelWidth) && (colorFrameDescription.Height == this.colorBitmap.PixelHeight))
                    {
                        frame.CopyConvertedFrameDataToIntPtr(
                            this.colorBitmap.BackBuffer,
                            (uint)(colorFrameDescription.Width * colorFrameDescription.Height * 4), // 4 bytes per BGRA pixel
                            ColorImageFormat.Bgra);

                        this.colorBitmap.AddDirtyRect(new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight));
                    }

                    this.colorBitmap.Unlock();
                }
            }
        }
    }

    // Open depth frame.
    using (var frame = reference.DepthFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            if (_mode == Mode.Depth)
            {
                camera.Source = ToBitmap(frame);
            }
        }
    }

    // Open infrared frame.
    using (var frame = reference.InfraredFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            if (_mode == Mode.Infrared)
            {
                camera.Source = ToBitmap(frame);
            }
        }
    }
}
/// <summary>
/// Pushes the stream selected by the viewer (color, depth, or infrared) to the
/// display, then feeds body data to the players controller and optionally draws
/// each skeleton.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments carrying the multi-source frame reference.</param>
void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    var multiFrame = e.FrameReference.AcquireFrame();

    // Color stream: shown only while the viewer is in color mode.
    using (var colorFrame = multiFrame.ColorFrameReference.AcquireFrame())
    {
        if (colorFrame != null && viewer.Visualization == Visualization.Color)
        {
            viewer.Image = colorFrame.ToBitmap();
        }
    }

    // Depth stream: prefer a body-index-aware rendering when that frame is available.
    using (var depthFrame = multiFrame.DepthFrameReference.AcquireFrame())
    using (var bodyIndexFrame = multiFrame.BodyIndexFrameReference.AcquireFrame())
    {
        if (depthFrame != null && viewer.Visualization == Visualization.Depth)
        {
            viewer.Image = bodyIndexFrame != null
                ? depthFrame.ToBitmap(bodyIndexFrame)
                : depthFrame.ToBitmap();
        }
    }

    // Infrared stream.
    using (var irFrame = multiFrame.InfraredFrameReference.AcquireFrame())
    {
        if (irFrame != null && viewer.Visualization == Visualization.Infrared)
        {
            viewer.Image = irFrame.ToBitmap();
        }
    }

    // Body stream: always update the players controller; draw skeletons on demand.
    using (var bodyFrame = multiFrame.BodyFrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            var trackedBodies = bodyFrame.Bodies();
            _playersController.Update(trackedBodies);

            foreach (Body trackedBody in trackedBodies)
            {
                if (_displaySkeleton)
                {
                    viewer.DrawBody(trackedBody);
                }
            }
        }
    }
}
/// <summary>
/// Displays the color, depth, or infrared stream (whichever source is selected)
/// and updates the on-screen FPS counter.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments carrying the multi-source frame reference.</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Get a reference to the multi-frame; it may have expired already.
    var reference = e.FrameReference.AcquireFrame();
    if (reference == null)
    {
        return;
    }

    // Open color frame.
    using (var frame = reference.ColorFrameReference.AcquireFrame())
    {
        if (frame != null && cameraSource == ECameraSource.Color)
        {
            camera.Source = KinectManager.Instance.ToBitmap(frame);
            UpdateFpsCounter();
        }
    }

    // Open depth frame.
    using (var frame = reference.DepthFrameReference.AcquireFrame())
    {
        if (frame != null && cameraSource == ECameraSource.Depth)
        {
            camera.Source = KinectManager.Instance.ToBitmap(frame);
            UpdateFpsCounter();
        }
    }

    // Open infrared frame.
    using (var frame = reference.InfraredFrameReference.AcquireFrame())
    {
        if (frame != null && cameraSource == ECameraSource.Infrared)
        {
            camera.Source = KinectManager.Instance.ToBitmap(frame);
            UpdateFpsCounter();
        }
    }
}

/// <summary>
/// Counts one rendered frame and, once at least a second has elapsed, publishes
/// the accumulated count as the FPS value and restarts the stopwatch.
/// Extracted to replace the counter logic that was copy-pasted into all three
/// stream branches.
/// </summary>
private void UpdateFpsCounter()
{
    ++totalFrames;

    if (sw.ElapsedMilliseconds >= 1000)
    {
        fps = totalFrames;
        lbl_fps.Content = fps;
        totalFrames = 0;
        sw.Restart();
    }
}
/* public void Delay(int ms)
 * {
 *     int time = Environment.TickCount;
 *
 *     do
 *     {
 *         if (Environment.TickCount - time >= ms)
 *             return;
 *     } while (true);
 * }*/

/* static void Sleep(int ms)
 * {
 *     new System.Threading.ManualResetEvent(false).WaitOne(ms);
 * }
 */

/// <summary>
/// Processes one multi-source frame: refreshes body data while the gesture
/// source has no valid tracking id, copies the color frame into _colorPixels,
/// re-binds the gesture source to the first tracked body, and asynchronously
/// pushes the new pixels into the writeable bitmap on the UI thread.
/// </summary>
/// <param name="args">Event arguments carrying the multi-source frame reference.</param>
private void OnMultiFrame(MultiSourceFrameArrivedEventArgs args)
{
    // Reset the per-call "did we get data" flags before touching the frame.
    _bodiesProcessed = _colorFrameProcessed = false;

    using (var multiSourceFrame = args.FrameReference.AcquireFrame())
    {
        if (multiSourceFrame != null)
        {
            // Only refresh body data while the gesture source is not already
            // locked onto a valid tracking id.
            if (!_gestureFrameSource.IsTrackingIdValid)
            {
                // For each skeleton being tracked get the ID and tell the face trackers to track that ID.
                using (var bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
                {
                    if (bodyFrame != null)
                    {
                        bodyFrame.GetAndRefreshBodyData(_bodies);
                        _bodiesProcessed = true;
                    }
                }
            }

            using (var color = multiSourceFrame.ColorFrameReference.AcquireFrame())
            {
                if (color != null)
                {
                    // NOTE(review): colorFrameDescription is never read — looks like leftover code.
                    FrameDescription colorFrameDescription = color.FrameDescription;

                    // Raw copy when the sensor already delivers BGRA; convert otherwise.
                    if (color.RawColorImageFormat == ColorImageFormat.Bgra)
                    {
                        color.CopyRawFrameDataToArray(_colorPixels);
                    }
                    else
                    {
                        color.CopyConvertedFrameDataToArray(_colorPixels, ColorImageFormat.Bgra);
                    }
                    _colorFrameProcessed = true;
                }
            }
        }

        // Bind the gesture source to the first tracked body and resume the reader.
        if (_bodiesProcessed == true)
        {
            foreach (var body in _bodies)
            {
                if (body != null && body.IsTracked)
                {
                    _gestureFrameSource.TrackingId = body.TrackingId;
                    _gestureFrameReader.IsPaused = false;
                    break;
                }
            }
        }

        // Hand the pixel buffer to the UI thread; the copy into the bitmap must
        // happen on the dispatcher because WriteableBitmap is UI-thread affine.
        if (_colorFrameProcessed == true)
        {
            Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
            {
                _colorPixels.CopyTo(_writeableBitmap.PixelBuffer);
                _writeableBitmap.Invalidate();
                MyImage.Source = _writeableBitmap;
            });
        }
    }
}
/// <summary>
/// Reads information from the sensor each frame: shows the camera feed, draws the
/// tracked skeleton, and either records gesture keyframes (Recording mode) or
/// matches the body against stored gestures (alarm mode).
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments carrying the multi-source frame reference.</param>
void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    var reference = e.FrameReference.AcquireFrame();

    // Displaying the camera feed via color frame.
    // Comment out this part to only display skeleton for displaying movement instructions.
    using (var frame = reference.ColorFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            camera.Source = frame.ToBitmap();
        }
    }

    // Displaying the skeleton via the body frame.
    using (var frame = reference.BodyFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            canvas.Children.Clear();

            Bodies = new Body[frame.BodyFrameSource.BodyCount];
            frame.GetAndRefreshBodyData(Bodies);

            foreach (var body in Bodies)
            {
                // Lazily create the gesture from the first body slot.
                // NOTE(review): this runs BEFORE the null check below, so the
                // gesture may be constructed from a null body — confirm Gesture
                // tolerates that, or move this inside the IsTracked branch.
                if (newGesture == null)
                {
                    newGesture = new Gesture(body);
                }

                if (body != null)
                {
                    if (body.IsTracked)
                    {
                        canvas.DrawSkeleton(body);

                        List<double> settings = NextFrame(body);

                        // Recording mode.
                        if (Recording)
                        {
                            // If 1 second has passed (assumes ~30 body frames per second).
                            if (++CurrentNumFrames == Sec * 30)
                            {
                                // Capture a keyframe while frames remain in the budget.
                                if ((FramesLeft -= CurrentNumFrames) >= 0)
                                {
                                    Gesture.AddKeyframe(new KeyFrame(body, settings));
                                }
                                else
                                {
                                    // Pass newGesture into ui main window
                                    // StreamWriter file = new StreamWriter(@"C:\Users\Public\TestFolder\WriteLines2.txt"));
                                }
                                CurrentNumFrames = 0;
                            }
                        }
                        else // alarm mode
                        {
                            // Loop through gestList, match keyframes for number of repetitions.
                            foreach (Gesture gest in gestList)
                            {
                                gest.setBody(body); // use the right body instance
                                gest.Repeat();
                            }
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// When a complete set of color/depth/body/body-index frames is available,
/// updates the green-screened silhouette and skeleton overlay images, refreshes
/// the body array, and re-evaluates which body is being tracked.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments carrying the multi-source frame reference.</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    bool detected = false;
    var reference = e.FrameReference.AcquireFrame();

    using (var colorFrame = reference.ColorFrameReference.AcquireFrame())
    using (var depthFrame = reference.DepthFrameReference.AcquireFrame())
    using (var bodyFrame = reference.BodyFrameReference.AcquireFrame())
    using (var bodyIndexFrame = reference.BodyIndexFrameReference.AcquireFrame())
    {
        // All four frames are needed for one coherent update.
        if (colorFrame != null && depthFrame != null && bodyIndexFrame != null && bodyFrame != null)
        {
            // Update the image sources: background-removed silhouette + skeleton overlay.
            KinectShilloute.Source = _backgroundRemovalTool.GreenScreen(colorFrame, depthFrame, bodyIndexFrame);
            KinectSkeleton.Source = _drawSkeleton.DrawBodySkeleton(bodyFrame);

            // Lazily allocate the body array on the first complete frame set.
            if (this.bodies == null)
            {
                this.bodies = new Body[bodyFrame.BodyCount];
            }

            bodyFrame.GetAndRefreshBodyData(this.bodies);
            detected = true;
        }
    }

    if (detected)
    {
        // Called for its side effects; the returned pause flag was assigned to an
        // unused "pauseTracking" local before — that dead variable has been removed.
        GetTrackingId(this.bodies);
    }
}
/// <summary>
/// Handles the depth/color/body index frame data arriving from the sensor.
/// Produces a background-removed color image: every color pixel that does not
/// map onto a depth pixel belonging to a tracked body is zeroed out.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    int depthWidth = 0;
    int depthHeight = 0;

    DepthFrame depthFrame = null;
    IBuffer depthFrameData = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    IBuffer bodyIndexFrameData = null;
    IBufferByteAccess bodyIndexByteAccess = null;

    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }

    // We use a try/finally to ensure that we clean up before we exit the function.
    // This includes calling Dispose on any Frame objects that we may have and
    // releasing any COM buffer references we took.
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();

        // If any frame has expired by the time we process this event, return.
        // The "finally" statement will Dispose any that are not null.
        if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
        {
            return;
        }

        // Process Depth.
        FrameDescription depthFrameDescription = depthFrame.FrameDescription;
        depthWidth = depthFrameDescription.Width;
        depthHeight = depthFrameDescription.Height;

        // Access the depth frame data directly via LockImageBuffer to avoid making a copy.
        depthFrameData = depthFrame.LockImageBuffer();
        this.coordinateMapper.MapColorFrameToDepthSpaceUsingIBuffer(depthFrameData, this.colorMappedToDepthPoints);

        // We're done with the DepthFrame — dispose it early so the sensor can reuse it.
        depthFrame.Dispose();
        depthFrame = null;

        // We must force a release of the IBuffer in order to ensure that we have dropped all references to it.
        System.Runtime.InteropServices.Marshal.ReleaseComObject(depthFrameData);
        depthFrameData = null;

        // Process Color: write the converted BGRA pixels straight into the bitmap's buffer.
        colorFrame.CopyConvertedFrameDataToBuffer(this.bitmap.PixelBuffer, ColorImageFormat.Bgra);

        // We're done with the ColorFrame.
        colorFrame.Dispose();
        colorFrame = null;

        FrameDescription bodyIndexFrameDescription = bodyIndexFrame.FrameDescription;

        // Access the body index frame data directly via LockImageBuffer to avoid making a copy.
        bodyIndexFrameData = bodyIndexFrame.LockImageBuffer();

        int colorMappedToDepthPointCount = this.colorMappedToDepthPoints.Length;

        unsafe
        {
            // Get a raw byte pointer into the body index buffer.
            bodyIndexByteAccess = (IBufferByteAccess)bodyIndexFrameData;
            byte *bodyIndexBytes = null;
            bodyIndexByteAccess.Buffer(out bodyIndexBytes);

            fixed(DepthSpacePoint *colorMappedToDepthPointsPointer = this.colorMappedToDepthPoints)
            {
                // Get a raw pointer into the output bitmap's back buffer.
                IBufferByteAccess bitmapBackBufferByteAccess = (IBufferByteAccess)this.bitmap.PixelBuffer;
                byte *bitmapBackBufferBytes = null;
                bitmapBackBufferByteAccess.Buffer(out bitmapBackBufferBytes);

                // Treat the color data as 4-byte pixels.
                uint *bitmapPixelsPointer = (uint *)bitmapBackBufferBytes;

                // Loop over each row and column of the color image.
                // Zero out any pixels that don't correspond to a body index.
                for (int colorIndex = 0; colorIndex < colorMappedToDepthPointCount; ++colorIndex)
                {
                    float colorMappedToDepthX = colorMappedToDepthPointsPointer[colorIndex].X;
                    float colorMappedToDepthY = colorMappedToDepthPointsPointer[colorIndex].Y;

                    // The sentinel value is -inf, -inf, meaning that no depth pixel corresponds to this color pixel.
                    if (!float.IsNegativeInfinity(colorMappedToDepthX) && !float.IsNegativeInfinity(colorMappedToDepthY))
                    {
                        // Make sure the depth pixel maps to a valid point in color space
                        // (+0.5f rounds to the nearest depth pixel).
                        int depthX = (int)(colorMappedToDepthX + 0.5f);
                        int depthY = (int)(colorMappedToDepthY + 0.5f);

                        // If the point is not valid, there is no body index there.
                        if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                        {
                            int depthIndex = (depthY * depthWidth) + depthX;

                            // If we are tracking a body for the current pixel, do not zero out the pixel
                            // (0xff marks "no body" in the body index frame).
                            if (bodyIndexBytes[depthIndex] != 0xff)
                            {
                                continue;
                            }
                        }
                    }

                    bitmapPixelsPointer[colorIndex] = 0;
                }
            }

            this.bitmap.Invalidate();
        }
    }
    finally
    {
        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (depthFrameData != null)
        {
            // We must force a release of the IBuffer in order to ensure that we have dropped all references to it.
            System.Runtime.InteropServices.Marshal.ReleaseComObject(depthFrameData);
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }

        if (bodyIndexFrameData != null)
        {
            // We must force a release of the IBuffer in order to ensure that we have dropped all references to it.
            System.Runtime.InteropServices.Marshal.ReleaseComObject(bodyIndexFrameData);
        }

        if (bodyIndexByteAccess != null)
        {
            // We must force a release of the IBuffer in order to ensure that we have dropped all references to it.
            System.Runtime.InteropServices.Marshal.ReleaseComObject(bodyIndexByteAccess);
        }
    }
}
/// <summary>
/// Dispatches each arriving multi-source frame to the per-stream handlers
/// (depth, then — unless in stand-by — color, body, and body index), timing the
/// whole pass with AllFrameWatch.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments carrying the multi-source frame reference.</param>
private void OnMultipleFramesArrivedHandler(object sender, MultiSourceFrameArrivedEventArgs e)
{
    init = true;

    // Retrieve multisource frame reference.
    MultiSourceFrameReference multiRef = e.FrameReference;
    MultiSourceFrame multiFrame = null;

    try
    {
        AllFrameWatch.Again();

        multiFrame = multiRef.AcquireFrame();
        if (multiFrame == null)
        {
            // Frame expired before we could process it.
            AllFrameWatch.Stop();
            return;
        }

        HandleDepthFrame(multiFrame.DepthFrameReference);

        // Motion check: while in stand-by only the depth stream is processed.
        if (Task.StandBy)
        {
            AllFrameWatch.Stop();
            return;
        }

        HandleColorFrame(multiFrame.ColorFrameReference);
        HandleBodyFrame(multiFrame.BodyFrameReference);
        HandleBodyIndexFrame(multiFrame.BodyIndexFrameReference);

        AllFrameWatch.Stop();
    }
    catch (Exception)
    {
        // Ignore if the frame is no longer available.
        // (The previous empty "finally { }" block served no purpose and was removed.)
    }
}
/// <summary>
/// Renders the camera feed, draws the 22 bars ("teeth") of the virtual ranad
/// (Thai xylophone), tracks the player's hands, and plays a bar's sound when the
/// left hand enters that bar's hit region.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments carrying the multi-source frame reference.</param>
void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    var reference = e.FrameReference.AcquireFrame();
    if (reference == null)
    {
        // The multi-source frame may have expired before we handled the event.
        return;
    }

    // Color
    using (var frame = reference.ColorFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            camera.Source = frame.ToBitmap();
        }
    }

    // Body
    using (var frame = reference.BodyFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            canvas.Children.Clear();

            _bodies = new Body[frame.BodyFrameSource.BodyCount];
            frame.GetAndRefreshBodyData(_bodies);

            DrawRanadTeethRow();

            foreach (var body in _bodies)
            {
                if (body != null)
                {
                    if (body.IsTracked)
                    {
                        // Find the hand joints.
                        Joint handRight = body.Joints[JointType.HandRight];
                        Joint handLeft = body.Joints[JointType.HandLeft];

                        // Left-hand position scaled to canvas-ish units; X is mirrored.
                        double leftX = handLeft.Position.X * (-100);
                        double leftY = handLeft.Position.Y * 100;

                        // Draw both hands.
                        canvas.DrawHand(handRight, _sensor.CoordinateMapper);
                        canvas.DrawHand(handLeft, _sensor.CoordinateMapper);

                        // A downward strike only counts while the hand is above the bar line.
                        bool handIsUp = leftY > 5;

                        // NOTE(review): both labels display LEFT-hand coordinates
                        // (X in the "right hand" label, Y in the "left hand" one) —
                        // confirm this is intentional debug output.
                        tblRightHandState.Text = System.Convert.ToString(leftX);
                        tblLeftHandState.Text = System.Convert.ToString(leftY);

                        // Hit detection: each branch is one bar's (x, y) region.
                        if ((leftX <= 107) && (leftX >= 102) && ((leftY >= 7.9) && (leftY <= 10.1)))
                        {
                            System.Media.SoundPlayer startSound = new System.Media.SoundPlayer(@"C:\Users\USER\Desktop\project thesis kinect\sound\air.wav");
                            if (handIsUp)
                            {
                                startSound.Play();
                            }
                        }
                        else if ((leftX <= 50) && (leftX >= 40) && ((leftY >= 7.9) && (leftY <= 10.1)))
                        {
                            System.Media.SoundPlayer midSound = new System.Media.SoundPlayer(@"C:\Users\USER\Desktop\project thesis kinect\sound\bullet.wav");
                            if (handIsUp)
                            {
                                midSound.Play();
                            }
                        }
                        else if ((leftX <= 30) && (leftX >= 20) && ((leftY >= 7.9) && (leftY <= 10.1)))
                        {
                            System.Media.SoundPlayer test = new System.Media.SoundPlayer(@"C:\Users\USER\Desktop\project thesis kinect\sound\cry.wav");
                            if (handIsUp)
                            {
                                test.Play();
                            }
                        }
                    }
                }
            }
        }
    }
}

/// <summary>
/// Draws the 22 ranad teeth as a row of 30x200 rectangles, 40 px apart,
/// starting at x = 120, all at y = 600. Replaces 22 copy-pasted blocks that
/// each created and added one rectangle by hand.
/// </summary>
private void DrawRanadTeethRow()
{
    for (int i = 0; i < 22; i++)
    {
        Rectangle tooth = new Rectangle();
        DrawingRanadTeeth(tooth, 30, 200, 120 + (40 * i), 600);
        canvas.Children.Add(tooth);
    }
}
/// <summary>
/// Renders the selected stream (color/depth/infrared) and translates body
/// gestures into keyboard shortcuts: swipe right/left sends arrow keys,
/// raising a hand near head height sends Ctrl+Add / Ctrl+Subtract (zoom).
/// Each gesture uses a latch flag so the keystroke fires once per gesture.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments carrying the multi-source frame reference.</param>
void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    var reference = e.FrameReference.AcquireFrame();
    if (reference == null)
    {
        // The multi-source frame may have expired before we handled the event.
        return;
    }

    // Color
    using (var frame = reference.ColorFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            if (_mode == Mode.Color)
            {
                camera.Source = frame.ToBitmap();
            }
        }
    }

    // Depth
    using (var frame = reference.DepthFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            if (_mode == Mode.Depth)
            {
                camera.Source = frame.ToBitmap();
            }
        }
    }

    // Infrared
    using (var frame = reference.InfraredFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            if (_mode == Mode.Infrared)
            {
                camera.Source = frame.ToBitmap();
            }
        }
    }

    // Body
    using (var frame = reference.BodyFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            canvas.Children.Clear();

            _bodies = new Body[frame.BodyFrameSource.BodyCount];
            frame.GetAndRefreshBodyData(_bodies);

            foreach (var body in _bodies)
            {
                if (body != null)
                {
                    if (body.IsTracked)
                    {
                        // Draw skeleton.
                        if (_displayBody)
                        {
                            canvas.DrawSkeleton(body);
                        }

                        Joint head = body.Joints[JointType.Head];
                        Joint lefthand = body.Joints[JointType.HandLeft];
                        Joint righthand = body.Joints[JointType.HandRight];
                        Joint waist = body.Joints[JointType.SpineBase];

                        // Swiping right: right hand well to the right of the waist (0.6 m).
                        if (righthand.Position.X > waist.Position.X + 0.6)
                        {
                            if (!isForwardGestureActive)
                            {
                                System.Windows.Forms.SendKeys.SendWait("{Right}");
                                isForwardGestureActive = true;
                            }
                        }
                        else
                        {
                            isForwardGestureActive = false;
                        }

                        // Swiping left: left hand well to the left of the waist.
                        if (lefthand.Position.X < waist.Position.X - 0.6)
                        {
                            if (!isBackGestureActive)
                            {
                                System.Windows.Forms.SendKeys.SendWait("{Left}");
                                isBackGestureActive = true;
                            }
                        }
                        else
                        {
                            isBackGestureActive = false;
                        }

                        // Zoom in: right hand raised near head height.
                        if (righthand.Position.Y > head.Position.Y - 0.5)
                        {
                            if (!isZoomin)
                            {
                                System.Windows.Forms.SendKeys.SendWait("^{ADD}");
                                isZoomin = true;
                            }
                        }
                        else
                        {
                            isZoomin = false;
                        }

                        // Zoom out: left hand raised near head height.
                        // BUGFIX: this branch was missing the "!isZoomout" guard that the
                        // other three gestures use, so ^{SUBTRACT} was sent on EVERY frame
                        // (~30x per second) while the hand stayed raised.
                        if (lefthand.Position.Y > head.Position.Y - 0.5)
                        {
                            if (!isZoomout)
                            {
                                System.Windows.Forms.SendKeys.SendWait("^{SUBTRACT}");
                                isZoomout = true;
                            }
                        }
                        else
                        {
                            isZoomout = false;
                        }
                    }
                }
            }
        }
    }
}
// *** Only StoreApp with WindowsPreview.Kinect;
// private void msfr_MultiSourceFrameArrived(MultiSourceFrameReader sender, MultiSourceFrameArrivedEventArgs args)

/// <summary>
/// Processes body + infrared frames: renders the infrared image into irBitmap
/// (after validating buffer and bitmap dimensions), then draws a marker circle
/// at the tracked left hand and checks it for a page-turn gesture.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments carrying the multi-source frame reference.</param>
private void msfr_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // using (MultiSourceFrame msf = e.FrameReference.AcquireFrame()) *** Only StoreApp with WindowsPreview.Kinect;
    // {
    MultiSourceFrame msf = e.FrameReference.AcquireFrame();

    if (msf != null)
    {
        using (BodyFrame bodyframe = msf.BodyFrameReference.AcquireFrame())
        {
            using (InfraredFrame irf = msf.InfraredFrameReference.AcquireFrame())
            {
                // Both frames are required for a coherent update.
                if (bodyframe != null && irf != null)
                {
                    /* *** only with Windows.UI.Xaml.Media.Imaging;
                     * irf.CopyFrameDataToArray(irData);
                     * for (int i = 0; i < irData.Length; i++)
                     * {
                     *
                     *     byte intensity = (byte)(irData[i] >> 8);
                     *     irDataConverted[i * 4] = intensity;
                     *     irDataConverted[i * 4 + 1] = intensity;
                     *     irDataConverted[i * 4 + 2] = intensity;
                     *     irDataConverted[i * 4 + 3] = 255;
                     * }
                     *
                     * irDataConverted.CopyTo(irBitmap.PixelBuffer);
                     * irBitmap.Invalidate();
                     */

                    // The below is from Kinect Studio WPF infrared sample.
                    // irf.CopyFrameDataToArray(irData);
                    using (Microsoft.Kinect.KinectBuffer infraredBuffer = irf.LockImageBuffer())
                    {
                        // Verify data and write the new infrared frame data to the display bitmap:
                        // buffer size must match the frame description, and the bitmap must
                        // match the frame dimensions.
                        if (((this.fd.Width * this.fd.Height) == (infraredBuffer.Size / this.fd.BytesPerPixel)) &&
                            (this.fd.Width == this.irBitmap.PixelWidth) && (this.fd.Height == this.irBitmap.PixelHeight))
                        {
                            this.ProcessInfraredFrameData(infraredBuffer.UnderlyingBuffer, infraredBuffer.Size);
                        }
                    }

                    // Refresh body data and redraw the hand marker from scratch.
                    bodyframe.GetAndRefreshBodyData(bodies);
                    bodyCanvas.Children.Clear();

                    foreach (Body b in bodies)
                    {
                        if (b.IsTracked)
                        {
                            Joint hand = b.Joints[JointType.HandLeft];

                            if (hand.TrackingState == TrackingState.Tracked)
                            {
                                // Project the 3D hand position into depth space for drawing.
                                DepthSpacePoint dsp = ksensor.CoordinateMapper.MapCameraPointToDepthSpace(hand.Position);
                                var circle = CreateCircle(dsp);

                                // Debug readout; coordinates are shown at half scale.
                                tbox.Content = "x:" + (dsp.X / 2).ToString() + " y" + (dsp.Y / 2).ToString(); //
                                bodyCanvas.Children.Add(circle);

                                DetectPageTurn(dsp, circle);

                                // Canvas.SetLeft(circle, dsp.X);
                                //Canvas.SetTop(circle, dsp.Y);
                            }
                        }
                    }
                }
            }
        }

        // NOTE(review): nulling a local has no effect; MultiSourceFrame is not
        // disposed here (matches the StoreApp-only using commented out above).
        msf = null;
    }
    //}
}
/// <summary>
/// Kinect frame reader: shows the color stream, caches all 25 joints of the
/// closest body, updates the four on-screen elbow/shoulder angles, writes one
/// CSV row of joint coordinates per frame while recording, and shows the depth
/// stream in the secondary viewer.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments carrying the multi-source frame reference.</param>
void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    var reference = e.FrameReference.AcquireFrame();

    // Color
    using (var frame = reference.ColorFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            if (viewer.Visualization == Visualization.Color)
            {
                viewer.Image = frame.ToBitmap();
            }
        }
    }

    // Body
    using (var frame = reference.BodyFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            var bodies = frame.Bodies();
            _playersController.Update(bodies);

            // Only the body closest to the sensor is analyzed.
            Body body = bodies.Closest();

            if (body != null)
            {
                // Define the joints after the skeleton has been tracked.
                Head = body.Joints[JTHead];
                Neck = body.Joints[JTNeck];
                SpineShoulder = body.Joints[JTSpineShoulder];
                SpineMid = body.Joints[JTSpineMid];
                SpineBase = body.Joints[JTSpineBase];
                ShoulderLeft = body.Joints[JTShoulderLeft];
                ElbowLeft = body.Joints[JTElbowLeft];
                WristLeft = body.Joints[JTWristLeft];
                HandLeft = body.Joints[JTHandLeft];
                ShoulderRight = body.Joints[JTShoulderRight];
                ElbowRight = body.Joints[JTElbowRight];
                WristRight = body.Joints[JTWristRight];
                HandRight = body.Joints[JTHandRight];
                AnkleLeft = body.Joints[JTAnkleLeft];
                AnkleRight = body.Joints[JTAnkleRight];
                FootLeft = body.Joints[JTFootLeft];
                FootRight = body.Joints[JTFootRight];
                HandTipLeft = body.Joints[JTHandTipLeft];
                HandTipRight = body.Joints[JTHandTipRight];
                HipLeft = body.Joints[JTHipLeft];
                HipRight = body.Joints[JTHipRight];
                KneeLeft = body.Joints[JTKneeLeft];
                KneeRight = body.Joints[JTKneeRight];
                ThumbLeft = body.Joints[JTThumbLeft];
                ThumbRight = body.Joints[JTThumbRight];

                if (body.IsTracked)
                {
                    viewer.DrawBody(body); // Draw body skeleton view.

                    // Apply angles to the skeleton body view (last arg: display radius).
                    AngleLeftElbow.Update(ShoulderLeft, ElbowLeft, WristLeft, 100);
                    AngleLeftShoulder.Update(SpineShoulder, ShoulderLeft, ElbowLeft, 100);
                    AngleRightElbow.Update(WristRight, ElbowRight, ShoulderRight, 100);
                    AngleRightShoulder.Update(ElbowRight, ShoulderRight, SpineShoulder, 100);

                    // If rec button is clicked.
                    if (RecBtnClick == true)
                    {
                        TotalFrameCount++; // keep record of frame count

                        /*
                         * Write joint data to file, one frame per CSV row, columns in this order:
                         * 1. Head, 2. Neck, 3. Spine_Shoulder, 4. Spine_Mid, 5. Spine_Base,
                         * 6. Shoulder_Left, 7. Elbow_Left, 8. Wrist_Left, 9. Hand_Left, 10. Hand_Tip_Left, 11. Thumb_Left,
                         * 12. Shoulder_Right, 13. Elbow_Right, 14. Wrist_Right, 15. Hand_Right, 16. Hand_Tip_Right, 17. Thumb_Right,
                         * 18. Hip_Left, 19. Knee_Left, 20. Ankle_Left, 21. Foot_Left,
                         * 22. Hip_Right, 23. Knee_Right, 24. Ankle_Right, 25. Foot_Right
                         */
                        File.Write(TotalFrameCount + ",");
                        GetPositionCood(Head, File);
                        GetPositionCood(Neck, File);
                        GetPositionCood(SpineShoulder, File);
                        GetPositionCood(SpineMid, File);
                        GetPositionCood(SpineBase, File);
                        GetPositionCood(ShoulderLeft, File);
                        GetPositionCood(ElbowLeft, File);
                        GetPositionCood(WristLeft, File);
                        GetPositionCood(HandLeft, File);
                        GetPositionCood(HandTipLeft, File);
                        GetPositionCood(ThumbLeft, File);
                        GetPositionCood(ShoulderRight, File);
                        GetPositionCood(ElbowRight, File);
                        GetPositionCood(WristRight, File);
                        GetPositionCood(HandRight, File);
                        GetPositionCood(HandTipRight, File);
                        GetPositionCood(ThumbRight, File);
                        GetPositionCood(HipLeft, File);
                        GetPositionCood(KneeLeft, File);
                        GetPositionCood(AnkleLeft, File);
                        GetPositionCood(FootLeft, File);
                        GetPositionCood(HipRight, File);
                        GetPositionCood(KneeRight, File);
                        GetPositionCood(AnkleRight, File);
                        GetPositionCood(FootRight, File);
                        File.Write("\r\n");
                    }
                }
            }
        }
    }

    // Depth
    using (var depthframe = reference.DepthFrameReference.AcquireFrame())
    {
        if (depthframe != null)
        {
            depthviewer.Image = depthframe.ToBitmap();
        }
    }
}
/// <summary>
/// Grabs matching color and depth frames, copies their pixels into a fresh
/// KinectData instance, maps depth into camera and color space and pushes the
/// result into the scene.
/// </summary>
private void FrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    var multiFrame = e.FrameReference.AcquireFrame();
    if (multiFrame == null)
    {
        return;
    }

    // Dispose both frames on every path; Kinect will not deliver another
    // frame until the previous ones are released.
    using (var colorFrame = multiFrame.ColorFrameReference.AcquireFrame())
    using (var depthFrame = multiFrame.DepthFrameReference.AcquireFrame())
    {
        // Both streams are required to update the point cloud.
        if (depthFrame == null || colorFrame == null)
        {
            return;
        }

        var kinectData = new KinectData
        {
            ColorPixels = new byte[Kinect2Metrics.ColorBufferLength],
            DepthPixels = new ushort[Kinect2Metrics.DepthFrameWidth * Kinect2Metrics.DepthFrameHeight]
        };

        // Pin the color buffer so the sensor can copy (and, if necessary,
        // convert) the pixels straight into it.
        var colorHandle = System.Runtime.InteropServices.GCHandle.Alloc(kinectData.ColorPixels, System.Runtime.InteropServices.GCHandleType.Pinned);
        try
        {
            var colorTarget = colorHandle.AddrOfPinnedObject();
            if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
            {
                colorFrame.CopyRawFrameDataToIntPtr(colorTarget, (uint)Kinect2Metrics.ColorBufferLength);
            }
            else
            {
                colorFrame.CopyConvertedFrameDataToIntPtr(colorTarget, (uint)Kinect2Metrics.ColorBufferLength, ColorImageFormat.Bgra);
            }
        }
        finally
        {
            colorHandle.Free();
        }

        // Same treatment for the depth buffer.
        var depthHandle = System.Runtime.InteropServices.GCHandle.Alloc(kinectData.DepthPixels, System.Runtime.InteropServices.GCHandleType.Pinned);
        try
        {
            depthFrame.CopyFrameDataToIntPtr(depthHandle.AddrOfPinnedObject(), (uint)Kinect2Metrics.DepthBufferLength);
        }
        finally
        {
            depthHandle.Free();
        }

        // Map depth to camera space (3D rendering) and to color space
        // (coloring the depth map), then hand everything to the scene.
        MapperUtils.MapDepthFrameToCameraSpace(this.sensor.CoordinateMapper, kinectData);
        MapperUtils.MapDepthFrameToColorSpace(this.sensor.CoordinateMapper, kinectData);
        this.KinectScene.Update(kinectData);
    }
}
///<summary>
/// Canvas extension: renders the stream selected by <paramref name="_mode"/>
/// (color / depth / infrared) into <paramref name="cameraIMG"/> and draws the
/// tracked skeletons on the canvas when a body frame arrives.
/// </summary>
public static void Reader_MultiSourceFrameArrived(this Canvas canvas, object sender, MultiSourceFrameArrivedEventArgs e, SpaceMode _mode, Image cameraIMG, IList<Body> _bodies, CoordinateMapper coMapper, bool _displayBody)
{
    // Get a reference to the multi-source frame.
    var reference = e.FrameReference.AcquireFrame();

    // FIX: the multi-source frame may have expired before this handler ran.
    if (reference == null)
    {
        return;
    }

    // Color
    using (var frame = reference.ColorFrameReference.AcquireFrame())
    {
        if (frame != null && _mode == SpaceMode.Color)
        {
            int width = frame.FrameDescription.Width;
            int height = frame.FrameDescription.Height;
            PixelFormat format = PixelFormats.Bgr32;

            byte[] pixels = new byte[width * height * ((format.BitsPerPixel + 7) / 8)];

            if (frame.RawColorImageFormat == ColorImageFormat.Bgra)
            {
                frame.CopyRawFrameDataToArray(pixels);
            }
            else
            {
                frame.CopyConvertedFrameDataToArray(pixels, ColorImageFormat.Bgra);
            }

            int stride = width * format.BitsPerPixel / 8;
            // Pass the frame bitmap to the MainWindow <Image>.
            cameraIMG.Source = BitmapSource.Create(width, height, 96, 96, format, null, pixels, stride);
        }
    }

    // Depth
    using (var frame = reference.DepthFrameReference.AcquireFrame())
    {
        if (frame != null && _mode == SpaceMode.Depth)
        {
            int width = frame.FrameDescription.Width;
            int height = frame.FrameDescription.Height;
            PixelFormat format = PixelFormats.Bgr32;

            // Hard-coded "reliable" depth window; the author deliberately
            // replaced the sensor-reported DepthMin/MaxReliableDistance with
            // fixed 2m-5m bounds (original commented-out code).
            ushort minDepth = 2000;
            ushort maxDepth = 5000;

            ushort[] depthData = new ushort[width * height];
            // FIX: the bytes-per-pixel term needs its own parentheses; without
            // them the original allocated width*height*39/8 bytes instead of
            // width*height*4 (oversized buffer, wasted memory).
            byte[] pixelData = new byte[width * height * ((format.BitsPerPixel + 7) / 8)];

            frame.CopyFrameDataToArray(depthData);

            int colorIndex = 0;
            for (int depthIndex = 0; depthIndex < depthData.Length; ++depthIndex)
            {
                ushort depth = depthData[depthIndex];

                // Depth inside the window is folded into 0..255 bands; anything
                // outside maps to 0.
                ushort intensity = (ushort)(depth >= minDepth && depth <= maxDepth ? depth % 256 : 0);

                // NOTE(review): for intensity > 127 these expressions rely on
                // unchecked byte wrap-around (values go negative / above 255);
                // presumably intentional banded false-coloring - confirm.
                pixelData[colorIndex++] = (byte)(intensity <= 127 ? intensity * 2 : 255 - intensity * 2); // Blue
                pixelData[colorIndex++] = (byte)(intensity <= 127 ? 255 - intensity * 2 : intensity * 2); // Green
                pixelData[colorIndex++] = (byte)(intensity <= 127 ? 255 - intensity * 2 : intensity * 2); // Red
                ++colorIndex; // skip the alpha byte
            }

            int stride = width * format.BitsPerPixel / 8;
            // Pass the frame bitmap to the MainWindow <Image>.
            cameraIMG.Source = BitmapSource.Create(width, height, 96, 96, format, null, pixelData, stride);
        }
    }

    // Infrared
    using (var frame = reference.InfraredFrameReference.AcquireFrame())
    {
        if (frame != null && _mode == SpaceMode.Infrared)
        {
            int width = frame.FrameDescription.Width;
            int height = frame.FrameDescription.Height;
            PixelFormat format = PixelFormats.Bgr32;

            ushort[] frameData = new ushort[width * height];
            // FIX: parenthesized bytes-per-pixel (see depth branch).
            byte[] pixels = new byte[width * height * ((format.BitsPerPixel + 7) / 8)];

            frame.CopyFrameDataToArray(frameData);

            int colorIndex = 0;
            for (int infraredIndex = 0; infraredIndex < frameData.Length; infraredIndex++)
            {
                // Reduce the 16-bit IR value; note ir >> 7 can exceed 255 and
                // wraps when cast to byte (unchanged from the original).
                ushort ir = frameData[infraredIndex];
                byte intensity = (byte)(ir >> 7);

                pixels[colorIndex++] = intensity; // Blue  (was intensity / 1)
                pixels[colorIndex++] = intensity; // Green (was intensity / 1)
                // FIX: intensity / 0.4 can reach ~637; casting an out-of-range
                // double to byte is undefined in C#, so clamp to 255 first.
                pixels[colorIndex++] = (byte)Math.Min(255.0, intensity / 0.4); // Red
                colorIndex++; // skip the alpha byte
            }

            int stride = width * format.BitsPerPixel / 8;
            // Pass the frame bitmap to the MainWindow <Image>.
            cameraIMG.Source = BitmapSource.Create(width, height, 96, 96, format, null, pixels, stride);
        }
    }

    // Body: draw the skeletons.
    using (var frame = reference.BodyFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            canvas.Children.Clear();

            // NOTE(review): this reassigns the parameter, so the caller's
            // IList<Body> is NOT updated - confirm that is intended.
            _bodies = new Body[frame.BodyFrameSource.BodyCount];
            frame.GetAndRefreshBodyData(_bodies);

            foreach (var body in _bodies)
            {
                if (body != null && body.IsTracked && _displayBody)
                {
                    canvas.DrawSkeleton(body, coMapper, _mode);
                }
            }
        }
    }
}
private static void OnMultipleFramesArrivedHandler(object sender, MultiSourceFrameArrivedEventArgs e) { // Retrieve multisource frame reference MultiSourceFrameReference multiRef = e.FrameReference; MultiSourceFrame multiFrame = multiRef.AcquireFrame(); if (multiFrame == null) return; // Retrieve data stream frame references BodyFrameReference bodyRef = multiFrame.BodyFrameReference; using (BodyFrame bodyFrame = bodyRef.AcquireFrame()) { if (bodyFrame == null) return; if (bodyFrame != null) { _bodies = new Body[bodyFrame.BodyFrameSource.BodyCount]; bodyFrame.GetAndRefreshBodyData(_bodies); foreach (var body in _bodies) { if (body != null) { if (body.IsTracked) { // Find the joints Joint handRight = body.Joints[JointType.HandRight]; Joint thumbRight = body.Joints[JointType.ThumbRight]; Joint handLeft = body.Joints[JointType.HandLeft]; Joint thumbLeft = body.Joints[JointType.ThumbLeft]; if (_joints == null || _joints.Length != 4) { _joints = new Joint[4]; } Joint j = handRight; CameraSpacePoint skeletonPoint = j.Position; ColorSpacePoint colorPoint = sensor.CoordinateMapper.MapCameraPointToColorSpace(skeletonPoint); // 2D coordinates in pixels // Skeleton-to-Color mapping screenX = Screen.PrimaryScreen.Bounds; Point point = new Point(); point.X = (int)colorPoint.X; point.Y = (int)colorPoint.Y; VirtualMouse.MoveTo(point.X,point.Y); //Console.WriteLine(string.Format("\r{0},{1}", Cursor.Position.X, Cursor.Position.Y)); //Console.WriteLine(body.HandRightState); if (body.HandRightState != HandState.NotTracked && (((long)DateTime.UtcNow.TimeOfDay.TotalMilliseconds - lasttime) >= 1000)) { if (body.HandRightState == HandState.Closed) { lasttime = (long)DateTime.UtcNow.TimeOfDay.TotalMilliseconds; VirtualMouse.LeftClick(); } if (body.HandRightState == HandState.Lasso) { lasttime = (long)DateTime.UtcNow.TimeOfDay.TotalMilliseconds; VirtualMouse.RightClick(); } } Console.WriteLine((((long)DateTime.UtcNow.TimeOfDay.TotalMilliseconds - lasttime))); } } } } } }
/// <summary>
/// Acquires every frame type enabled in _FrameSourceTypes from the
/// multi-source frame, copies the raw pixel data into the pre-allocated
/// (pinned) buffers, and - only when all required streams delivered - raises
/// FrameArrived with the collected data. Per-frame timing is measured with
/// _Stopwatch and reported via RaiseKinectFrameComplete.
/// </summary>
/// <param name="sender">Object sending the event.</param>
/// <param name="e">Event arguments.</param>
private unsafe void FrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    _Stopwatch.Restart();

    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
    if (multiSourceFrame != null)
    {
        ColorFrame colorFrame = null;
        DepthFrame depthFrame = null;
        BodyFrame bodyFrame = null;
        BodyIndexFrame bodyIndexFrame = null;
        try
        {
            // Becomes false as soon as any enabled stream fails to deliver;
            // subsequent acquisitions are then skipped and no event is raised.
            bool allRequiredDataReceived = true;

            if (_FrameSourceTypes.HasFlag(FrameSourceTypes.Color))
            {
                colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
                if (colorFrame != null)
                {
                    // Pin ColorPixels and copy directly (converting to
                    // ImageFormat when the raw format differs).
                    fixed (byte* colorBytesPointer = ColorPixels)
                    {
                        IntPtr colorPtr = (IntPtr)colorBytesPointer;
                        uint size = (uint)(_ColorFrameDescription.Width * _ColorFrameDescription.Height * _ColorFrameDescription.BytesPerPixel);
                        if (colorFrame.RawColorImageFormat == ImageFormat)
                        {
                            colorFrame.CopyRawFrameDataToIntPtr(colorPtr, size);
                        }
                        else
                        {
                            colorFrame.CopyConvertedFrameDataToIntPtr(colorPtr, size, ImageFormat);
                        }
                    }
                }
                else
                {
                    allRequiredDataReceived = false;
                }
            }

            if (_FrameSourceTypes.HasFlag(FrameSourceTypes.Depth) && allRequiredDataReceived)
            {
                depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
                if (depthFrame != null)
                {
                    // Pin DepthPixels and copy the 16-bit depth values.
                    fixed (ushort* depthBytesPointer = DepthPixels)
                    {
                        IntPtr depthPtr = (IntPtr)depthBytesPointer;
                        depthFrame.CopyFrameDataToIntPtr(depthPtr, (uint)(_DepthFrameDescription.Width * _DepthFrameDescription.Height * _DepthFrameDescription.BytesPerPixel));
                    }
                }
                else
                {
                    allRequiredDataReceived = false;
                }
            }

            if (_FrameSourceTypes.HasFlag(FrameSourceTypes.Body) && allRequiredDataReceived)
            {
                bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();
                if (bodyFrame != null)
                {
                    bodyFrame.GetAndRefreshBodyData(Bodies);
                }
                else
                {
                    allRequiredDataReceived = false;
                }
            }

            if (_FrameSourceTypes.HasFlag(FrameSourceTypes.BodyIndex) && allRequiredDataReceived)
            {
                bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();
                if (bodyIndexFrame != null)
                {
                    // Pin BodyIndexPixels and copy the per-pixel body indices.
                    fixed (byte* bodyIndexBytesPointer = BodyIndexPixels)
                    {
                        IntPtr bodyIndexPtr = (IntPtr)bodyIndexBytesPointer;
                        bodyIndexFrame.CopyFrameDataToIntPtr(bodyIndexPtr, (uint)(_BodyIndexFrameDescription.Width * _BodyIndexFrameDescription.Height * _BodyIndexFrameDescription.BytesPerPixel));
                    }
                }
                else
                {
                    allRequiredDataReceived = false;
                }
            }

            if (allRequiredDataReceived)
            {
                // Publish the shared buffers through the (reused) event args.
                _KinectFrameArrivedEventArgs.ColorPixels = ColorPixels;
                _KinectFrameArrivedEventArgs.DepthPixels = DepthPixels;
                _KinectFrameArrivedEventArgs.Bodies = Bodies;
                _KinectFrameArrivedEventArgs.BodyIndexPixels = BodyIndexPixels;
                _KinectFrameArrivedEventArgs.KinectSensor = multiSourceFrame.KinectSensor;
                _KinectFrameArrivedEventArgs.FrameNumber = _FrameNumber;

                EventHandler<KinectFrameArrivedEventArgs> handler = FrameArrived;
                if (handler != null)
                {
                    handler(this, _KinectFrameArrivedEventArgs);
                }
            }
        }
        finally
        {
            // Acquired frames must always be disposed, or the sensor stops
            // delivering new ones.
            if (colorFrame != null)
            {
                colorFrame.Dispose();
            }
            if (depthFrame != null)
            {
                depthFrame.Dispose();
            }
            if (bodyFrame != null)
            {
                bodyFrame.Dispose();
            }
            if (bodyIndexFrame != null)
            {
                bodyIndexFrame.Dispose();
            }
        }
    }

    _Stopwatch.Stop();
    RaiseKinectFrameComplete(_Stopwatch.Elapsed);
    _FrameNumber++;
}
void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e) { var reference = e.FrameReference.AcquireFrame(); // Color using (var frame = reference.ColorFrameReference.AcquireFrame()) { if (frame != null) { if (_mode == Mode.Color) { camera.Source = frame.ToBitmap(); } } } // Depth using (var frame = reference.DepthFrameReference.AcquireFrame()) { if (frame != null) { if (_mode == Mode.Depth) { camera.Source = frame.ToBitmap(); } } } // Infrared using (var frame = reference.InfraredFrameReference.AcquireFrame()) { if (frame != null) { if (_mode == Mode.Infrared) { camera.Source = frame.ToBitmap(); } } } // Body using (var frame = reference.BodyFrameReference.AcquireFrame()) { if (frame != null) { canvas.ClearSkeletons(); tblHeights.Text = string.Empty; _bodies = frame.Bodies().Where(body => body.IsTracked); foreach (var body in _bodies) { if (body != null) { // Update skeleton gestures. _gestureController.Update(body); // Draw skeleton. canvas.DrawSkeleton(body); // Display user height. tblHeights.Text += string.Format("\nUser {0}: {1}cm", body.TrackingId, body.Height()); } } } } }
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e) { var reference = e.FrameReference.AcquireFrame(); // Color // ... // Depth // ... // Infrared // ... // Body frameLock.WaitOne(); using (var frame = reference.BodyFrameReference.AcquireFrame()) { if (frame != null) { bodies = new Microsoft.Kinect.Body[frame.BodyFrameSource.BodyCount]; frame.GetAndRefreshBodyData(bodies); int index = 0; double[] angles = new double[actions[currentExercise].Length]; Body b = null; foreach (var body in bodies) { if (body != null) { if (body.IsTracked) { b = body; break; } } } if (b == null) { return; } bool pass = true; bool good = true; foreach (var act in actions[currentExercise]) { Joint j1 = b.Joints[(JointType)act.x]; Vector3 v1 = new Vector3(j1); Joint j2 = b.Joints[(JointType)act.y]; Vector3 v2 = new Vector3(j2); Joint j3 = b.Joints[(JointType)act.z]; Vector3 v3 = new Vector3(j3); double angle = Calc.getAngle(v1, v2, v3); angles[index] = angle; if (started) { if (angle > exercises[currentExercise][exercises[currentExercise].Length - 1][index] * 1.3 || angle < exercises[currentExercise][exercises[currentExercise].Length - 1][index] * 0.7) { good = false; } } else { if (Math.Abs(exercises[currentExercise][0][index] - angle) > exercises[currentExercise][0][index] * 0.3) { pass = false; } } index++; } if (started == false && pass) { started = true; Console.WriteLine("Started"); } else if (started && good) { count[currentExercise]++; started = false; Console.WriteLine("Count: " + count[currentExercise]); } Console.Clear(); foreach (double a in angles) { Console.WriteLine("angle: " + a); } Console.WriteLine("Count: " + count[currentExercise]); } } frameLock.ReleaseMutex(); }
/// <summary>
/// Renders the color stream, draws the tracked skeleton and classifies the
/// user's posture into one of six predefined poses; a newly detected pose is
/// submitted via submitToFinal. Kinect captures ~30 frames per second.
/// </summary>
void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    var reference = e.FrameReference.AcquireFrame();

    // FIX: the multi-source frame expires if this handler runs late;
    // previously a null reference was dereferenced. (An unused local frame
    // counter was also removed.)
    if (reference == null)
    {
        return;
    }

    // Color
    using (var frame = reference.ColorFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            camera.Source = frame.ToBitmap();
        }
    }

    // Body
    using (var frame = reference.BodyFrameReference.AcquireFrame())
    {
        // Check that we actually received a body frame from the Kinect.
        if (frame == null)
        {
            return;
        }

        // Setup: clear the overlay and refresh the body array.
        canvas.Children.Clear();
        Bodies = new Body[frame.BodyFrameSource.BodyCount];
        frame.GetAndRefreshBodyData(Bodies);

        // Iterate through the bodies and evaluate the pose rules.
        foreach (var body in Bodies)
        {
            if (body == null || !body.IsTracked)
            {
                continue;
            }

            // Joints referenced by the pose rules (unused joints from the
            // original - thumbs and knees - were removed).
            Joint handRight = body.Joints[JointType.HandRight];
            Joint elbowRight = body.Joints[JointType.ElbowRight];
            Joint handLeft = body.Joints[JointType.HandLeft];
            Joint elbowLeft = body.Joints[JointType.ElbowLeft];
            Joint head = body.Joints[JointType.Head];
            Joint shoulderLeft = body.Joints[JointType.ShoulderLeft];
            Joint shoulderRight = body.Joints[JointType.ShoulderRight];
            Joint hipLeft = body.Joints[JointType.HipLeft];
            Joint hipRight = body.Joints[JointType.HipRight];
            Joint bodyMiddle = body.Joints[JointType.SpineMid];

            // Draw the entire skeleton.
            canvas.DrawSkeleton(body, Sensor.CoordinateMapper);

            // Right-hand position translated into canvas coordinates
            // (Y is flipped: camera space grows upward, screen grows downward).
            double rhx = canvas.ActualWidth * handRight.Position.X + canvas.ActualWidth / 2;
            double rhy = canvas.ActualHeight * -handRight.Position.Y + canvas.ActualHeight / 2;
            rightHandLocation.X = rhx;
            rightHandLocation.Y = rhy;

            // Pose 1: arms crossed in front of the chest
            // (thug-style crossing and stacked forearms also trigger this).
            if (handRight.Position.X < handLeft.Position.X &&
                handRight.Position.X < head.Position.X &&
                handLeft.Position.X > head.Position.X &&
                handRight.Position.Y > elbowRight.Position.Y &&
                handLeft.Position.Y > elbowLeft.Position.Y &&
                Math.Abs(handRight.Position.Y - elbowRight.Position.Y) > .05 &&
                Math.Abs(handLeft.Position.Y - elbowLeft.Position.Y) > .05 &&
                handRight.Position.Y < head.Position.Y &&
                handLeft.Position.Y < head.Position.Y &&
                handRight.Position.Y > bodyMiddle.Position.Y &&
                handLeft.Position.Y > bodyMiddle.Position.Y)
            {
                if (predetermined_state != 1)
                {
                    predetermined_state = 1;
                    submitToFinal(predetermined_state);
                }
            }

            // Pose 2: right hand over head with the left arm down by the left
            // leg ("energy channeling"; saluting also triggers this).
            if (handRight.Position.Y > head.Position.Y &&
                handRight.Position.X > shoulderLeft.Position.X &&
                handRight.Position.X < shoulderRight.Position.X &&
                handLeft.Position.Y < hipLeft.Position.Y)
            {
                if (predetermined_state != 2)
                {
                    predetermined_state = 2;
                    submitToFinal(predetermined_state);
                }
            }

            Console.WriteLine("hand right difference: " + Math.Abs(handRight.Position.Y - hipRight.Position.Y));
            Console.WriteLine("hand left difference: " + Math.Abs(handLeft.Position.Y - hipLeft.Position.Y));

            // Pose 3: hands on hips.
            if (handRight.Position.X > handLeft.Position.X &&
                handRight.Position.X > hipRight.Position.X &&
                handLeft.Position.X < hipLeft.Position.X &&
                Math.Abs(handRight.Position.X - hipRight.Position.X) < .16 &&
                Math.Abs(handLeft.Position.X - hipLeft.Position.X) < .16 &&
                handRight.Position.Y > hipRight.Position.Y &&
                handLeft.Position.Y > hipLeft.Position.Y &&
                Math.Abs(handRight.Position.Y - hipRight.Position.Y) > .06 &&
                Math.Abs(handLeft.Position.Y - hipLeft.Position.Y) > .06 &&
                Math.Abs(handRight.Position.Y - hipRight.Position.Y) < .2 &&
                Math.Abs(handLeft.Position.Y - hipLeft.Position.Y) < .2 &&
                handLeft.Position.Y < bodyMiddle.Position.Y)
            {
                if (predetermined_state != 3)
                {
                    predetermined_state = 3;
                    submitToFinal(predetermined_state);
                }
            }

            // Pose 4: both hands above the head.
            if (handRight.Position.X < head.Position.X &&
                handLeft.Position.X > head.Position.X &&
                handRight.Position.Y > head.Position.Y &&
                handLeft.Position.Y > head.Position.Y &&
                handRight.Position.Y > elbowRight.Position.Y &&
                handLeft.Position.Y > elbowLeft.Position.Y)
            {
                if (predetermined_state != 4)
                {
                    predetermined_state = 4;
                    submitToFinal(predetermined_state);
                }
            }

            // Pose 5: hands on knees - the author notes the conditions are not
            // smoothed out yet. NOTE(review): the rule below does not reference
            // the knees at all and the guard checks != 4 rather than != 5;
            // looks like a copy-paste placeholder - confirm intended behavior.
            if (handRight.Position.X < shoulderRight.Position.X &&
                handLeft.Position.X > head.Position.X &&
                handRight.Position.Y > head.Position.Y &&
                handLeft.Position.Y > head.Position.Y &&
                handRight.Position.Y > elbowRight.Position.Y &&
                handLeft.Position.Y > elbowLeft.Position.Y)
            {
                if (predetermined_state != 4)
                {
                    predetermined_state = 5;
                    submitToFinal(predetermined_state);
                }
            }

            // Pose 6: dabbing. NOTE(review): this condition is byte-identical
            // to pose 4 and also gated on != 4, so it can never fire after
            // pose 4 runs - presumably another placeholder; confirm.
            if (handRight.Position.X < head.Position.X &&
                handLeft.Position.X > head.Position.X &&
                handRight.Position.Y > head.Position.Y &&
                handLeft.Position.Y > head.Position.Y &&
                handRight.Position.Y > elbowRight.Position.Y &&
                handLeft.Position.Y > elbowLeft.Position.Y)
            {
                if (predetermined_state != 4)
                {
                    predetermined_state = 6;
                    submitToFinal(predetermined_state);
                }
            }
        }
    }
}
/// <summary> /// Kinectのデータ取得時に呼ばれる処理 /// </summary> /// <param name="sender"></param> /// <param name="e"></param> private void FrameArrived(object sender, MultiSourceFrameArrivedEventArgs e) { MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame(); // カラー画像取得時の処理 using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame()) { if (colorFrame != null) { // OpenCVの画像にKinectのカラー画像を複製 colorFrame.CopyConvertedFrameDataToIntPtr( this.colorImage.Data, // カラー画像Matのデータ部分ポインタ (uint)(this.colorImage.Total() * this.colorImage.ElemSize()), // 全画素数 × 一画素のバイト数 ColorImageFormat.Bgra // RGBでなくBGRAの順番 ); // これでOpenCVでの画像処理ができる // (例としてRGBごとの閾値処理) Cv2.Threshold(this.colorImage, this.colorOutputImage, 127.0, 255.0, ThresholdTypes.Binary); // PictureBoxへ描画 UpdatePictureBox(this.colorOutputImage); // this.colorImage を渡すと元のカラー画像が見られる } } // 深度画像取得時の処理 using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame()) { if (depthFrame != null) { // OpenCVの画像にKinectの深度画像を複製 depthFrame.CopyFrameDataToIntPtr( this.depthImage.Data, (uint)(this.depthImage.Total() * this.depthImage.ElemSize()) // 全画素数 × 一画素のバイト数 ); // 0mm~8000mmの16ビット深度画像を256階調に変換 this.depthImage.ConvertTo(this.depthOutputImage, this.depthOutputImage.Type(), 255.0 / 8000.0); // 深度画像からPictureBoxへの描画 UpdateDepthPictureBox(this.depthOutputImage); // ここでは this.depthImage は渡せない。色深度が異なる。 } } }
private void Reader_MultiSourceFrameArrived(MultiSourceFrameReader sender, MultiSourceFrameArrivedEventArgs e) { MultiSourceFrame multiSouceFrame = e.FrameReference.AcquireFrame(); // If the Frame has expired by the time we process this event, return. if(multiSouceFrame==null) { return; } switch (currentDisplayFrameType) { case DisplayFrameType.Infrared: using (InfraredFrame infraredFrame = multiSouceFrame.InfraredFrameReference.AcquireFrame()) { ShowInfraredFrame(infraredFrame); } break; case DisplayFrameType.Color: using (ColorFrame colorFrame = multiSouceFrame.ColorFrameReference.AcquireFrame()) { ShowColorFrame(colorFrame); } break; case DisplayFrameType.Depth: using(DepthFrame depthFrame= multiSouceFrame.DepthFrameReference.AcquireFrame()) { ShowDepthFrame(depthFrame); } break; default: break; } }
/// <summary> /// センサーから骨格データを受け取り処理します /// </summary> private void OnMultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e) { var frame = e.FrameReference.AcquireFrame(); if (frame == null) return; // BodyFrameに関してフレームを取得する using (var bodyFrame = frame.BodyFrameReference.AcquireFrame()) { if (bodyFrame != null) { if (bodies == null) bodies = new Body[bodyFrame.BodyCount]; // 骨格データを格納 bodyFrame.GetAndRefreshBodyData(bodies); // FaceTrackingが開始されていないか確認 if (!this.hdFaceFrameSource.IsTrackingIdValid) { // トラッキング先の骨格を選択 var target = (from body in this.bodies where body.IsTracked select body).FirstOrDefault(); if (target != null) { // 検出されたBodyに対してFaceTrackingを行うよう、FaceFrameSourceを設定 hdFaceFrameSource.TrackingId = target.TrackingId; // FaceModelBuilderを初期化 if (this.faceModelBuilder != null) { this.faceModelBuilder.Dispose(); this.faceModelBuilder = null; } this.faceModelBuilder = this.hdFaceFrameSource.OpenModelBuilder(DefaultAttributes); // FaceModelBuilderがモデルの構築を完了した時に発生するイベント this.faceModelBuilder.CollectionCompleted += this.OnModelBuilderCollectionCompleted; // FaceModelBuilderの状態を報告するイベント this.faceModelBuilder.CaptureStatusChanged += faceModelBuilder_CaptureStatusChanged; this.faceModelBuilder.CollectionStatusChanged += faceModelBuilder_CollectionStatusChanged; // キャプチャの開始 this.faceModelBuilder.BeginFaceDataCollection(); } } } } }
private void Reader_MultiSourceFrameArrived(MultiSourceFrameReader sender, MultiSourceFrameArrivedEventArgs e) { MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame(); // If the Frame has expired by the time we process this event, return. if (multiSourceFrame == null) { return; } DepthFrame depthFrame = null; ColorFrame colorFrame = null; InfraredFrame infraredFrame = null; BodyFrame bodyFrame = null; //BodyIndexFrame bodyIndexFrame = null; //IBuffer depthFrameData = null; //IBuffer bodyIndexFrameData = null; // Com interface for unsafe byte manipulation //IBufferByteAccess bodyIndexByteAccess = null; switch (currentDisplayFrameType) { case DisplayFrameType.Infrared: using (infraredFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame()) { ShowInfraredFrame(infraredFrame); } break; case DisplayFrameType.Color: using (colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame()) { ShowColorFrame(colorFrame); } break; case DisplayFrameType.Depth: using (depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame()) { ShowDepthFrame(depthFrame); } break; case DisplayFrameType.BodyJoints: using (bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame()) { ShowBodyJoints(bodyFrame); } break; default: break; } }
/// <summary>
/// Copies the latest color frame into the display bitmap's back buffer and
/// draws every tracked skeleton (joints, bones, clipped edges, hand states)
/// mapped into color space.
/// </summary>
private void MultiSourceFrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    bool dataReceived = false;
    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // FIX: AcquireFrame returns null when the frame has expired; previously
    // this dereferenced a null multi-source frame.
    if (multiSourceFrame == null)
    {
        return;
    }

    using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
    {
        if (colorFrame != null)
        {
            using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
            {
                colorBitmap.Lock();
                if ((colorFrameDescription.Width == colorBitmap.PixelWidth) && (colorFrameDescription.Height == colorBitmap.PixelHeight))
                {
                    colorFrame.CopyConvertedFrameDataToIntPtr(
                        colorBitmap.BackBuffer,
                        (uint)(colorFrameDescription.Width * colorFrameDescription.Height * 4),
                        ColorImageFormat.Bgra);
                    colorBitmap.AddDirtyRect(new Int32Rect(0, 0, colorBitmap.PixelWidth, colorBitmap.PixelHeight));
                }
                colorBitmap.Unlock();
            }
        }
    }

    using (BodyFrame bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            if (bodies == null)
            {
                bodies = new Body[bodyFrame.BodyCount];
            }
            bodyFrame.GetAndRefreshBodyData(bodies);
            dataReceived = true;
        }
    }

    if (dataReceived)
    {
        canvas.Children.Clear();

        int colorIndex = 0;
        foreach (Body body in bodies.Where(b => b.IsTracked))
        {
            // FIX: one brush per body - the original advanced the index once
            // per JOINT, overrunning bodyColors after a handful of joints.
            SolidColorBrush colorBrush = bodyColors[colorIndex++];

            // FIX: collect the color-space points of ALL joints before drawing
            // bones / hand states. The original re-created this dictionary
            // inside the joint loop, so it only ever held the current joint and
            // jointColorPoints[JointType.HandRight] could throw
            // KeyNotFoundException.
            Dictionary<JointType, Point> jointColorPoints = new Dictionary<JointType, Point>();
            foreach (var joint in body.Joints)
            {
                CameraSpacePoint position = joint.Value.Position;
                if (position.Z < 0)
                {
                    // Avoid an invalid mapping for points behind the sensor.
                    position.Z = 0.1f;
                }

                ColorSpacePoint colorSpacePoint = coordinateMapper.MapCameraPointToColorSpace(position);
                jointColorPoints[joint.Key] = new Point(colorSpacePoint.X, colorSpacePoint.Y);

                if (joint.Value.TrackingState == TrackingState.Tracked)
                {
                    DrawJoint(new Point(colorSpacePoint.X, colorSpacePoint.Y), new SolidColorBrush(Colors.Purple));
                }
                else if (joint.Value.TrackingState == TrackingState.Inferred)
                {
                    DrawJoint(new Point(colorSpacePoint.X, colorSpacePoint.Y), new SolidColorBrush(Colors.LightGray));
                }
            }

            foreach (var bone in bones)
            {
                DrawBone(body.Joints, jointColorPoints, bone.Item1, bone.Item2, colorBrush);
            }
            DrawClippedEdges(body);
            DrawHandStates(body.HandRightState, jointColorPoints[JointType.HandRight]);
            DrawHandStates(body.HandLeftState, jointColorPoints[JointType.HandLeft]);
        }
    }
}
/// <summary>
/// Renders the color stream into the writeable bitmap, tracks the arm/hand
/// joints of every tracked body (drawing them on the canvas) and feeds the
/// depth buffer into the finger-tracking controller.
/// </summary>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    var frame = e.FrameReference.AcquireFrame();

    // FIX: the multi-source frame can expire before the handler runs;
    // previously a null reference was dereferenced.
    if (frame == null)
    {
        return;
    }

    #region colorFrame
    using (ColorFrame colorFrame = frame.ColorFrameReference.AcquireFrame())
    {
        if (colorFrame != null)
        {
            FrameDescription frameDescription = colorFrame.FrameDescription;
            using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
            {
                WriteableBitmap.Lock();
                if ((frameDescription.Width == WriteableBitmap.PixelWidth) && (frameDescription.Height == WriteableBitmap.PixelHeight))
                {
                    colorFrame.CopyConvertedFrameDataToIntPtr(WriteableBitmap.BackBuffer, (uint)(frameDescription.Width * frameDescription.Height * 4), ColorImageFormat.Bgra);
                    WriteableBitmap.AddDirtyRect(new Int32Rect(0, 0, WriteableBitmap.PixelWidth, WriteableBitmap.PixelHeight));
                }
                WriteableBitmap.Unlock();
            }
        }
        camera.Source = WriteableBitmap;
    }
    #endregion

    #region handTracking
    bool dataReceived = false;
    canvas.Children.Clear();

    using (BodyFrame bodyFrame = frame.BodyFrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            if (bodies == null)
            {
                bodies = new Body[bodyFrame.BodyCount];
            }
            bodyFrame.GetAndRefreshBodyData(bodies);
            dataReceived = true;
        }
    }

    if (dataReceived)
    {
        foreach (Body body in bodies)
        {
            if (!body.IsTracked)
            {
                continue;
            }

            // Arm/hand joints of interest for gesture recognition.
            JointType[] armJointTypes =
            {
                JointType.ShoulderRight, JointType.ElbowRight, JointType.WristRight,
                JointType.HandRight, JointType.ThumbRight, JointType.HandTipRight,
                JointType.ShoulderLeft, JointType.ElbowLeft, JointType.WristLeft,
                JointType.HandLeft, JointType.ThumbLeft, JointType.HandTipLeft
            };

            Dictionary<JointType, Joint> joints = new Dictionary<JointType, Joint>();
            foreach (JointType jointType in armJointTypes)
            {
                joints[jointType] = body.Joints[jointType];
            }

            // Project each joint into 2D screen coordinates.
            Dictionary<JointType, Point> jointPoints = new Dictionary<JointType, Point>();
            foreach (JointType jointType in joints.Keys)
            {
                Point colorSpacePoint = Scale(joints[jointType], coordinateMapper);
                jointPoints[jointType] = new Point(colorSpacePoint.X, colorSpacePoint.Y);
            }

            if (position_idx == 1)
            {
                RecognizeStart(jointPoints);
            }

            DrawRec(canvas, jointPoints[JointType.HandLeft], jointPoints[JointType.ShoulderLeft]);
            DrawBody(joints, jointPoints, canvas);
        }
    }
    #endregion

    #region fingerTracking
    using (DepthFrame depthFrame = frame.DepthFrameReference.AcquireFrame())
    {
        // FIX: guard against bodies being null when no body frame has ever
        // arrived (previously a NullReferenceException in the foreach).
        if (depthFrame != null && bodies != null)
        {
            using (KinectBuffer kinectBuffer = depthFrame.LockImageBuffer())
            {
                foreach (Body body in bodies)
                {
                    handsController.Update(kinectBuffer.UnderlyingBuffer, body);
                }
            }
        }
    }
    #endregion
}
/// <summary> /// Handles the depth/color/body index frame data arriving from the sensor /// </summary> /// <param name="sender">object sending the event</param> /// <param name="e">event arguments</param> private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e) { DepthFrame depthFrame = null; ColorFrame colorFrame = null; BodyIndexFrame bodyIndexFrame = null; BodyFrame bodyFrame = null; bool isBitmapLocked = false; MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame(); // If the Frame has expired by the time we process this event, return. if (multiSourceFrame == null) { return; } // We use a try/finally to ensure that we clean up before we exit the function. // This includes calling Dispose on any Frame objects that we may have and unlocking the _kinectColorBitmap back buffer. try { depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame(); colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame(); bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame(); bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame(); if (bodyFrame != null) { this.RenderBodyFrame(bodyFrame); } // If any frame has expired by the time we process this event, return. // The "finally" statement will Dispose any that are not null. if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null)) { return; } } finally { if (isBitmapLocked) { this._kinectColorBitmap.Unlock(); } if (depthFrame != null) { depthFrame.Dispose(); } if (bodyFrame != null) { bodyFrame.Dispose(); } if (colorFrame != null) { colorFrame.Dispose(); } if (bodyIndexFrame != null) { bodyIndexFrame.Dispose(); } } }
public void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e) { }
/// <summary>
/// Handles arrival of a multi-source frame: caches the color, body-index and depth
/// data, then redraws in either the color or the depth coordinate system depending
/// on the UI checkbox.
/// NOTE(review): async void is tolerable only because this is a top-level event
/// handler — exceptions thrown inside it cannot be observed by callers.
/// </summary>
async void multiReader_MultiSourceFrameArrived( MultiSourceFrameReader sender, MultiSourceFrameArrivedEventArgs args )
{
    var multiFrame = args.FrameReference.AcquireFrame();
    // The frame may have expired before this event was processed.
    if ( multiFrame == null ) {
        return;
    }

    // Pull each data stream out of the multi-source frame.
    UpdateColorFrame( multiFrame );
    UpdateBodyIndexFrame( multiFrame );
    UpdateDepthFrame( multiFrame );

    // Render in whichever coordinate space the UI currently selects.
    if ( IsColorCoodinate.IsChecked == true ) {
        await DrawColorCoodinate();
    }
    else {
        await DrawDepthCoodinate();
    }
}
/// <summary>
/// Multi-source frame handler: renders the color frame, refreshes the body array,
/// and — when a tracked body and an active hand pointer exist — forwards the hand
/// position to the map-control logic.
/// </summary>
void reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Get a reference to the multi-frame
    var reference = e.FrameReference.AcquireFrame();

    // Open color frame
    using (var frame = reference.ColorFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            ColorImage.Source = ToBitmap(frame);
        }
    }

    // Open depth frame
    using (var frame = reference.DepthFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            // Do something with the frame...
        }
    }

    // Open body frame
    using (var frame = reference.BodyFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            bodies = new Body[frame.BodyCount];
            frame.GetAndRefreshBodyData(bodies);
        }

        // NOTE(review): the code below runs even when the body frame was null, so
        // "bodies" may hold stale or null data here; FirstTrackedBody's null
        // handling is not visible from this file — confirm.
        Body trackedBody = FirstTrackedBody(bodies);
        currentBodies.Add(trackedBody); // NOTE(review): may append null entries — confirm consumers tolerate that.

        // Animation stuff - Implement this later
        if (trackedBody == null)
        {
            // Implement this later
        }
        else
        {
            // Implement this later
            ableToControl = true;
        }

        // Control-of-map implementation.
        // NOTE(review): ableToControl appears to persist across frames once set, so
        // trackedBody can be null on a later frame and trackedBody.TrackingId below
        // would throw — confirm against the declaring class.
        if (ableToControl == true)
        {
            if (handPointer == null || handPointer.Properties.BodyTrackingId != trackedBody.TrackingId)
            {
                return;
            }

            JointType currentHand = 0;
            // NOTE(review): the empty catch silently swallows any exception from the
            // HandType checks; nothing here obviously throws — confirm intent.
            try
            {
                if (handPointer.Properties.HandType == HandType.LEFT)
                {
                    currentHand = JointType.HandLeft;
                }
                else if (handPointer.Properties.HandType == HandType.RIGHT)
                {
                    currentHand = JointType.HandRight;
                }
            }
            catch (Exception ex1)
            {
            }

            // And FINALLY perform the action on the map
            if (handPointer != null && trackedBody.Joints.Where(j => j.Key == currentHand).FirstOrDefault().Value.TrackingState == TrackingState.Tracked)
            {
                ESRI.ArcGIS.Client.Geometry.Envelope newEx = new ESRI.ArcGIS.Client.Geometry.Envelope();
                /*newEx = MyMap.Extent;
                 * if (MyMap.Extent.YMax >= 88)
                 * {
                 * newEx.YMax = 85;
                 * newEx.YMin = MyMap.Extent.YMin + 1;
                 * newEx.XMax = MyMap.Extent.XMax;
                 * newEx.XMin = MyMap.Extent.XMin;
                 * }
                 * else if (MyMap.Extent.YMin <= -88)
                 * {
                 * newEx.YMin = -85;
                 * newEx.YMax = MyMap.Extent.YMax - 1;
                 * newEx.XMin = MyMap.Extent.XMin;
                 * newEx.XMax = MyMap.Extent.XMax;
                 * }
                 * MyMap.Extent = newEx;*/

                // Track the right amount of tracked data
                /* if (actions.Count > 11)
                 * actions.RemoveAt(0);
                 *
                 * if (zooms.Count > 10)
                 * zooms.RemoveAt(0);
                 *
                 * if (handPointerX.Count > 3 && handPointerY.Count > 3)
                 * {
                 * handPointerX.RemoveAt(0);
                 * handPointerY.RemoveAt(0);
                 * } */
                MoveTheMapPointer(handPointer, 5, trackedBody);
            }
        }
    }
}
/// <summary>
/// One-shot body recorder: when the UI button has been clicked, captures the next
/// available body frame into <c>positionBuffor</c> and advances the sample counter.
/// The click flag is consumed at the end regardless of whether data was captured.
/// </summary>
/// <param name="sender">the multi-source frame reader raising the event</param>
/// <param name="e">event arguments</param>
public void WritingData(MultiSourceFrameReader sender, MultiSourceFrameArrivedEventArgs e)
{
    if (buttonWasClicked)
    {
        // FIX: AcquireFrame() returns null when the frame has expired by the time
        // this event is processed; the original dereferenced it unconditionally,
        // which throws NullReferenceException under load.
        MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
        if (multiSourceFrame != null)
        {
            using (BodyFrame bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
            {
                if (bodyFrame != null)
                {
                    Body[] bodies = new Body[kinectSensor.BodyFrameSource.BodyCount];
                    bodyFrame.GetAndRefreshBodyData(bodies);
                    positionBuffor.UpdateBodies(bodies, howmanytimes);
                    howmanytimes++;
                }
            }
        }
    }

    // Consume the click even if no frame was captured this time.
    buttonWasClicked = false;
}
/// <summary>
/// Renders the color camera image, refreshes the body array, then projects each
/// tracked body's joints into color space (scaled to the canvas) and draws its
/// skeleton. Finally reports the number of tracked people to the UI.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrame frameReference = e.FrameReference.AcquireFrame();

    // Color: show the camera feed.
    using (ColorFrame colorFrame = frameReference.ColorFrameReference.AcquireFrame())
    {
        if (colorFrame != null)
        {
            mainWindow.cameraImage.Source = colorFrame.ToBitmap();
        }
    }

    bool dataReceived = false;
    using (BodyFrame bodyFrame = frameReference.BodyFrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            mainWindow.bodyCanvas.Children.Clear();
            // Allocate the body array once; GetAndRefreshBodyData reuses the Body objects.
            if (this.bodies == null)
            {
                this.bodies = new Body[bodyFrame.BodyCount];
            }
            bodyFrame.GetAndRefreshBodyData(bodies);
            dataReceived = true;
        }
    }

    if (dataReceived)
    {
        int bodyColor = 0;
        if (this.bodies != null)
        {
            mainWindow.client.Bodies = this.bodies;
            int maxBodies = this.kinectSensor.BodyFrameSource.BodyCount;
            int trackedBodies = 0;
            for (int i = 0; i < maxBodies; i++)
            {
                if (bodies[i] != null)
                {
                    if (bodies[i].IsTracked)
                    {
                        trackedBodies++;
                        IReadOnlyDictionary<JointType, Joint> joints = bodies[i].Joints;
                        Dictionary<JointType, Point> jointPoints = new Dictionary<JointType, Point>();
                        foreach (JointType jointType in joints.Keys)
                        {
                            CameraSpacePoint position = joints[jointType].Position;
                            // A negative Z (inferred joint) would make the mapper return -Infinity.
                            if (position.Z < 0)
                            {
                                position.Z = InferredZPosition;
                            }
                            ColorSpacePoint colorSpacePoint = this.coordinateMapper.MapCameraPointToColorSpace(position);
                            // Scale from color-space resolution to the canvas size; infinite
                            // coordinates (unmappable points) collapse to 0.
                            double x = float.IsInfinity(colorSpacePoint.X) ? 0 : colorSpacePoint.X / COLOR_SPACE_MAX_WIDTH * mainWindow.bodyCanvas.ActualWidth;
                            double y = float.IsInfinity(colorSpacePoint.Y) ? 0 : colorSpacePoint.Y / COLOR_SPACE_MAX_HEIGHT * mainWindow.bodyCanvas.ActualHeight;
                            jointPoints[jointType] = new Point(x, y);
                        }
                        // NOTE(review): bodyColor advances for every body slot (tracked or
                        // not), so color[] must hold 2 entries per slot — confirm its size.
                        mainWindow.bodyCanvas.DrawSkeleton(joints, jointPoints, color[2 * bodyColor], color[2 * bodyColor + 1]);
                    }
                }
                bodyColor++;
            }
            changeNumberOfTrackedPeople(trackedBodies);
        }
    }
}
/// <summary>
/// Multi-source frame handler: acquires (and promptly releases) the color, depth
/// and infrared frames, then redraws skeletons for every tracked body on the canvas.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Get a reference to the multi-frame.
    // FIX: AcquireFrame() returns null when the frame has expired; guard it.
    var reference = e.FrameReference.AcquireFrame();
    if (reference == null)
    {
        return;
    }

    // Open color frame (rendering currently disabled).
    using (var frame = reference.ColorFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            //camera.Source = ToBitmap(frame); //Display the camera
        }
    }

    // Open depth frame
    using (var frame = reference.DepthFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            // Do something with the frame...
        }
    }

    // Open infrared frame
    using (var frame = reference.InfraredFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            // Do something with the frame...
        }
    }

    using (var frame2 = reference.BodyFrameReference.AcquireFrame())
    {
        if (frame2 != null)
        {
            canvas.Children.Clear();
            _bodies = new Body[frame2.BodyFrameSource.BodyCount];
            frame2.GetAndRefreshBodyData(_bodies);

            foreach (var body in _bodies)
            {
                // FIX: only draw bodies that are actually tracked. The original
                // called DrawSkeleton for every non-null body, which renders
                // untracked slots (default joint data) at bogus positions.
                if (body != null && body.IsTracked)
                {
                    DrawSkeleton(canvas, body);
                }
            }
        }
    }
}
/// <summary>
/// Handles the body frame data arriving from the sensor: refreshes the body array,
/// then redraws the scene — skeletons, hand states, and per-body "ink" strokes that
/// are recorded while the right hand is closed and erased with a left-hand lasso.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
// TODO: is this really OK?
// NOTE(review): e.FrameReference.AcquireFrame() is called twice below (once for the
// body frame, once for the color frame) and each result is dereferenced without a
// null check — an expired multi-source frame would throw here. Confirm and guard.
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    bool dataReceived = false;

    using (BodyFrame bodyFrame = e.FrameReference.AcquireFrame().BodyFrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            if (this.bodies == null)
            {
                this.bodies = new Body[bodyFrame.BodyCount];
            }

            // The first time GetAndRefreshBodyData is called, Kinect will allocate each Body in the array.
            // As long as those body objects are not disposed and not set to null in the array,
            // those body objects will be re-used.
            bodyFrame.GetAndRefreshBodyData(this.bodies);
            dataReceived = true;
        }
    }

    using (ColorFrame colorFrame = e.FrameReference.AcquireFrame().ColorFrameReference.AcquireFrame())
    {
        if (colorFrame != null)
        {
            // NOTE(review): this branch is empty — the color frame is acquired and
            // immediately discarded. Presumably unfinished; confirm intent.
            if (this.bodyColors == null)
            {
            }
        }
    }

    using (DrawingContext dc = this.drawingGroup.Open())
    {
        if (dataReceived)
        {
            // Draw a transparent background to set the render size
            dc.DrawRectangle(Brushes.Black, null, new Rect(0.0, 0.0, this.displayWidth, this.displayHeight));

            int penIndex = 0;
            foreach (Body body in this.bodies)
            {
                Pen drawPen = this.bodyColors[penIndex++];
                if (body.IsTracked)
                {
                    // Lazily create the stroke storage (a list of point lists) for this body.
                    if (!bodyDrawDictionary.ContainsKey(body))
                    {
                        List<List<Point>> newPointListList = new List<List<Point>>();
                        newPointListList.Add(new List<Point>());
                        bodyDrawDictionary.Add(body, newPointListList);
                    }

                    List<List<Point>> pointListList = bodyDrawDictionary[body];
                    // Keep at most 10 strokes per body; drop the oldest first.
                    if (pointListList.Count > 10)
                    {
                        pointListList.RemoveAt(0);
                    }

                    this.DrawClippedEdges(body, dc);

                    IReadOnlyDictionary<JointType, Joint> joints = body.Joints;

                    // convert the joint points to depth (display) space
                    Dictionary<JointType, Point> jointPoints = new Dictionary<JointType, Point>();
                    foreach (JointType jointType in joints.Keys)
                    {
                        // sometimes the depth(Z) of an inferred joint may show as negative
                        // clamp down to 0.1f to prevent coordinatemapper from returning (-Infinity, -Infinity)
                        CameraSpacePoint position = joints[jointType].Position;
                        if (position.Z < 0)
                        {
                            position.Z = InferredZPositionClamp;
                        }

                        DepthSpacePoint depthSpacePoint = this.coordinateMapper.MapCameraPointToDepthSpace(position);
                        jointPoints[jointType] = new Point(depthSpacePoint.X, depthSpacePoint.Y);
                    }

                    this.DrawBody(joints, jointPoints, dc, drawPen);
                    this.DrawHand(body.HandLeftState, jointPoints[JointType.HandLeft], dc);
                    this.DrawHand(body.HandRightState, jointPoints[JointType.HandRight], dc);

                    // Clear (erase everything)
                    //if ( body.HandLeftState == HandState.Closed )
                    //{
                    //    pointListList = new List<List<Point>>();
                    //    pointListList.Add(new List<Point>());
                    //}

                    List<Point> lastPointList = pointListList[pointListList.Count - 1];
                    // Trim overly long strokes from their oldest end
                    if (lastPointList.Count > 100)
                    {
                        lastPointList.RemoveAt(0);
                    }

                    // While the right hand stays closed, append points to the current
                    // stroke — but only when it moved more than 10 px in either axis.
                    if (lastPointList.Count > 0 && body.HandRightState == HandState.Closed)
                    {
                        Point tmpPoint = jointPoints[JointType.HandRight];
                        Point lastPoint = lastPointList[lastPointList.Count - 1];
                        if (Math.Abs(tmpPoint.X - lastPoint.X) > 10 || Math.Abs(tmpPoint.Y - lastPoint.Y) > 10)
                        {
                            lastPointList.Add(tmpPoint);
                        }
                    }

                    // Previous right-hand state, for edge detection (Unknown when unseen).
                    HandState prevHandState;
                    if (prevHandStateDictionary.ContainsKey(body))
                    {
                        prevHandState = prevHandStateDictionary[body];
                    }
                    else
                    {
                        prevHandState = HandState.Unknown;
                    }

                    if (body.HandRightState != HandState.NotTracked && body.HandRightState != HandState.Unknown && prevHandState != body.HandRightState)
                    {
                        if (body.HandRightState == HandState.Closed)
                        {
                            // The moment the hand changed to closed: start a stroke.
                            isRightHandClosedDictionary[body] = false;
                            String str = "";
                            str += " " + DateTime.Now.ToString();
                            str += " Start";
                            Console.WriteLine(str);
                            controlState = ControlState.Start;
                            startPoint = jointPoints[JointType.HandRight];
                            lastPointList.Add(startPoint);
                        }
                        else if (prevHandState == HandState.Closed)
                        {
                            // The moment a closed hand changed to anything else: end the stroke.
                            isRightHandClosedDictionary[body] = true;
                            String str = "";
                            str += body.ToString();
                            str += " " + DateTime.Now.ToString();
                            str += " End";
                            str += " tmpHandState:" + prevHandState;
                            str += " body.HandRightState: " + body.HandRightState;
                            Console.WriteLine(str);
                            controlState = ControlState.None;
                            endPoint = jointPoints[JointType.HandRight];
                            lastPointList.Add(endPoint);
                            pointListList.Add(new List<Point>());
                        }
                    }

                    if (body.HandRightState != HandState.Unknown && body.HandRightState != HandState.NotTracked)
                    {
                        prevHandStateDictionary[body] = body.HandRightState;
                    }

                    // Left-hand lasso erases any stroke passing "near" the hand.
                    // NOTE(review): the distance expression below divides by dX (which can
                    // be 0) and ignores the segment's offset — looks like an approximate
                    // point-to-line test at best; confirm the intended geometry.
                    if (body.HandLeftState == HandState.Lasso)
                    {
                        Point currentPoint = jointPoints[JointType.HandLeft];
                        double cX = currentPoint.X;
                        double cY = currentPoint.Y;
                        for (int j = 0; j < pointListList.Count; j++)
                        {
                            List<Point> pointList = pointListList[j];
                            for (int i = 0; i < pointList.Count - 1; i++)
                            {
                                Point prevPoint = pointList[i];
                                Point nextPoint = pointList[i + 1];
                                double dX = nextPoint.X - prevPoint.X;
                                double dY = nextPoint.Y - prevPoint.Y;
                                double d = Math.Abs(dY / dX * cX - cY);
                                if (d < 100)
                                {
                                    pointListList.RemoveAt(j);
                                    break;
                                }
                            }
                        }
                    }
                }
            }

            // prevent drawing outside of our render area
            this.drawingGroup.ClipGeometry = new RectangleGeometry(new Rect(0.0, 0.0, this.displayWidth, this.displayHeight));
        }

        // Re-draw every recorded stroke for every body.
        foreach (KeyValuePair<Body, List<List<Point>>> pair in bodyDrawDictionary)
        {
            List<List<Point>> pointListList = pair.Value;
            /* draw the strokes */
            for (int j = 0; j < pointListList.Count; j++)
            {
                List<Point> pointList = pointListList[j];
                for (int i = 0; i < pointList.Count - 1; i++)
                {
                    Point lineStartPoint = pointList[i];
                    Point lineEndPoint = pointList[i + 1];
                    dc.DrawLine(pen, lineStartPoint, lineEndPoint);
                }
            }
        }
    }
}
/// <summary>
/// The method where everything happens: renders the camera image, then drives the
/// game state machine (Initialize → MainScreen → InitialPosition → ShowStart →
/// Running → Ended) off the first tracked body, draws its upper-body skeleton,
/// and mirrors both hand states into the UI. Processes only one body per frame.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    var reference = e.FrameReference.AcquireFrame();

    // Color
    using (var frame = reference.ColorFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            camera.Source = frame.ToBitmap();
        }
    }

    // Body
    using (var frame = reference.BodyFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            canvas.Children.Clear();
            _bodies = new Body[frame.BodyFrameSource.BodyCount];
            frame.GetAndRefreshBodyData(_bodies);

            foreach (var body in _bodies)
            {
                if (body != null)
                {
                    if (body.IsTracked)
                    {
                        // Joints used for game interaction and for skeleton drawing.
                        Joint head = body.Joints[JointType.Head];
                        Joint SpineShoulderJoint = body.Joints[JointType.SpineShoulder];
                        Joint ShoulderLeft = body.Joints[JointType.ShoulderLeft];
                        Joint ShoulderRight= body.Joints[JointType.ShoulderRight];
                        Joint ElbowRightJoint = body.Joints[JointType.ElbowRight];
                        Joint ElbowLeftJoint = body.Joints[JointType.ElbowLeft];
                        Joint WristLeftJoint = body.Joints[JointType.WristLeft];
                        Joint WristRightJoint = body.Joints[JointType.WristRight];
                        Joint handRight = body.Joints[JointType.HandRight];
                        Joint thumbRight = body.Joints[JointType.ThumbRight];
                        Joint handLeft = body.Joints[JointType.HandLeft];
                        Joint thumbLeft = body.Joints[JointType.ThumbLeft];

                        // Head and both hands are the joints that can hit UI targets.
                        jointArray[0] = head;
                        jointArray[1] = handLeft;
                        jointArray[2] = handRight;

                        // Game state machine.
                        switch (actualState){
                            case GameState.Initialize:
                                // One-time UI setup, then show the main screen.
                                for (int i = 0; i < structRectangleArray.Length; i++)
                                    UpdateUIRectangleMain(ref structRectangleArray[i]);
                                actualState = GameState.MainScreen;
                                countdown.Start();
                                break;
                            case GameState.MainScreen:
                                // Touching "Reset" with head/hand starts the tutorial flow;
                                // otherwise the joints just interact with the rectangles.
                                foreach (Joint joint in jointArray)
                                    if (joint.Intersection(Reset))
                                    {
                                        actualState = GameState.InitialPosition;
                                        ResetText.Visibility = System.Windows.Visibility.Hidden;
                                        Logo.Visibility = System.Windows.Visibility.Hidden;
                                        Circle1.Visibility = System.Windows.Visibility.Hidden;
                                        Circle2.Visibility = System.Windows.Visibility.Hidden;
                                        Arrow1.Visibility = System.Windows.Visibility.Hidden;
                                        Arrow2.Visibility = System.Windows.Visibility.Hidden;
                                        TutorialText1.Visibility = System.Windows.Visibility.Hidden;
                                        TutorialText2.Visibility = System.Windows.Visibility.Hidden;
                                        pink.Visibility = System.Windows.Visibility.Visible;
                                        break;
                                    }
                                    else
                                        for (int i = 0; i < structRectangleArray.Length; i++)
                                            InteractionRectangle(ref structRectangleArray[i], joint);
                                break;
                            case GameState.InitialPosition:
                                // Wait until the player stands in the required start pose.
                                CheckInitialConditions(head);
                                break;
                            case GameState.ShowStart:
                                // Brief "start" splash, then switch to Running.
                                if(countdown.Elapsed.Seconds == 2){
                                    countdown.Reset();
                                    actualState = GameState.Running;
                                    for (int i = 0; i < structRectangleArray.Length; i++ )
                                        UpdateUIRectangle(ref structRectangleArray[i]);
                                    ScoreBox.Visibility = System.Windows.Visibility.Visible;
                                    TimerBox.Visibility = System.Windows.Visibility.Visible;
                                    countdown.Start();
                                }
                                break;
                            case GameState.Running:
                                // Main gameplay: 60-second countdown, difficulty bump at
                                // ~30 s, game over at ~59 s.
                                if (InGameConditions(head)){
                                    TimerBox.Text = (60 - countdown.Elapsed.Seconds).ToString();
                                    Point handPoint = handRight.Scale(_sensor.CoordinateMapper);
                                    rightHandState = body.HandRightState;
                                    leftHandState = body.HandLeftState;
                                    for (int i = 0; i < structRectangleArray.Length; i++)
                                        InteractionRectangle(ref structRectangleArray[i],jointArray);
                                    ScoreBox.Text = score.ToString();
                                    if (countdown.Elapsed.Seconds > 29)
                                    {
                                        lowRandom = 4;
                                        if (countdown.Elapsed.Seconds > 58)
                                        {
                                            actualState = GameState.Ended;
                                            countdown.Reset();
                                            ScoreBox.Visibility = System.Windows.Visibility.Hidden;
                                            TimerBox.Visibility = System.Windows.Visibility.Hidden;
                                            Countdown.Text = "Your Swaggy score is " + score.ToString();
                                            GameOver.Visibility = System.Windows.Visibility.Visible;
                                            ResetText.Text = "Try again!";
                                            ResetText.Visibility = System.Windows.Visibility.Visible;
                                            for (int i = 0; i < structRectangleArray.Length; i++)
                                                UpdateRectangleThings(ref structRectangleArray[i]);
                                        }
                                    }
                                }
                                break;
                            case GameState.Ended:
                                // Touching "Reset" restarts the game from the start splash.
                                foreach(Joint joint in jointArray)
                                    if (joint.Intersection(Reset))
                                    {
                                        actualState = GameState.ShowStart;
                                        Countdown.Text = "STAAAAAART!!!";
                                        ResetText.Visibility = System.Windows.Visibility.Hidden;
                                        GameOver.Visibility = System.Windows.Visibility.Hidden;
                                        TimerBox.Text = "60";
                                        countdown.Start();
                                        break;
                                    }
                                break;
                            default:
                                break;
                        }

                        // Draw the upper-body skeleton joints.
                        canvas.DrawPoint(head,_sensor.CoordinateMapper);
                        canvas.DrawPoint(SpineShoulderJoint,_sensor.CoordinateMapper);
                        canvas.DrawPoint(ShoulderLeft, _sensor.CoordinateMapper);
                        canvas.DrawPoint(ShoulderRight, _sensor.CoordinateMapper);
                        canvas.DrawPoint(ElbowRightJoint, _sensor.CoordinateMapper);
                        canvas.DrawPoint(ElbowLeftJoint, _sensor.CoordinateMapper);
                        canvas.DrawPoint(WristLeftJoint, _sensor.CoordinateMapper);
                        canvas.DrawPoint(WristRightJoint, _sensor.CoordinateMapper);
                        // Draw hands and thumbs
                        canvas.DrawPoint(handRight, _sensor.CoordinateMapper);
                        canvas.DrawPoint(handLeft, _sensor.CoordinateMapper);
                        canvas.DrawPoint(thumbRight, _sensor.CoordinateMapper);
                        canvas.DrawPoint(thumbLeft, _sensor.CoordinateMapper);
                        // Connect the joints with bones.
                        canvas.DrawLine(WristLeftJoint, handLeft, _sensor.CoordinateMapper);
                        canvas.DrawLine(WristRightJoint, handRight, _sensor.CoordinateMapper);
                        canvas.DrawLine(head,SpineShoulderJoint, _sensor.CoordinateMapper);
                        canvas.DrawLine(SpineShoulderJoint, ShoulderLeft, _sensor.CoordinateMapper);
                        canvas.DrawLine(SpineShoulderJoint, ShoulderRight, _sensor.CoordinateMapper);
                        canvas.DrawLine(ShoulderLeft, ElbowLeftJoint, _sensor.CoordinateMapper);
                        canvas.DrawLine(ShoulderRight, ElbowRightJoint, _sensor.CoordinateMapper);
                        canvas.DrawLine(ElbowRightJoint, WristRightJoint, _sensor.CoordinateMapper);
                        canvas.DrawLine(ElbowLeftJoint, WristLeftJoint, _sensor.CoordinateMapper);
                        //canvas.DrawSkeleton(body, _sensor.CoordinateMapper);

                        // Find the hand states
                        string RightHandState = "-";
                        string LeftHandState = "-";

                        switch (body.HandRightState)
                        {
                            case HandState.Open:
                                RightHandState = "Open";
                                break;
                            case HandState.Closed:
                                RightHandState = "Closed";
                                break;
                            case HandState.Lasso:
                                RightHandState = "Lasso";
                                break;
                            case HandState.Unknown:
                                RightHandState = "Unknown...";
                                break;
                            case HandState.NotTracked:
                                RightHandState = "Not tracked";
                                break;
                            default:
                                break;
                        }

                        switch (body.HandLeftState)
                        {
                            case HandState.Open:
                                LeftHandState = "Open";
                                break;
                            case HandState.Closed:
                                LeftHandState = "Closed";
                                break;
                            case HandState.Lasso:
                                LeftHandState = "Lasso";
                                break;
                            case HandState.Unknown:
                                LeftHandState = "Unknown...";
                                break;
                            case HandState.NotTracked:
                                LeftHandState = "Not tracked";
                                break;
                            default:
                                break;
                        }

                        tblRightHandState.Text = RightHandState;
                        tblLeftHandState.Text = LeftHandState;

                        //To prevent Kinect for detecting other bodies
                        break;
                    }
                }
            }
        }
    }
}
/// <summary>
/// Multi-frame reader event handler. The depth branch maps the depth frame into
/// camera space and averages each pre-detected blob's points (indexes held in
/// list_arr_index) into a 3-D center coordinate shown on screen; the IR branch
/// renders the infrared image and re-runs blob detection on it.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void ReaderMultiFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Get a reference to the multi-frame
    var reference = e.FrameReference.AcquireFrame();

    // depth
    using (DepthFrame depthFrame = reference.DepthFrameReference.AcquireFrame())
    {
        string label_coords_blob = "";// text shown in the label_blobs label
        if (depthFrame != null)
        {
            FrameDescription depthFrameDescription = depthFrame.FrameDescription;
            int width = depthFrameDescription.Width;
            int height = depthFrameDescription.Height;
            var depthData = new ushort[width * height];
            depthFrame.CopyFrameDataToArray(depthData);
            this.depthBitmap.WritePixels(depthRect, depthData, width * (int)depthFrameDescription.BytesPerPixel, 0);

            // depthData -> camera space data
            CameraSpacePoint[] cs_points = new CameraSpacePoint[width * height];
            _sensor.CoordinateMapper.MapDepthFrameToCameraSpace(depthData, cs_points);

            // calculate camera space coordinates of each marker(blob)
            // plan: move the loop below into its own function
            float[,] coord_blobs_center = new float[list_arr_index.Count(), 3]; // 2-D array of blob center coordinates
            //label_sample.Content = list_arr_index.Count().ToString();
            int i_blob = 0; // blob index
            foreach (int[] arr_index in list_arr_index)
            {
                // Extract x/y/z from each blob's camera-space points, then average.
                float[] coord_blob_center = new float[3];// receives the center coordinate of the blob (reflective marker)
                // select camera space points corresponding each blob
                CameraSpacePoint[] cs_points_blob = new CameraSpacePoint[arr_index.Length];// camera-space point array for this blob
                // lists of x/y/z coordinates
                List<float> list_x_cs_points_blob = new List<float>();
                List<float> list_y_cs_points_blob = new List<float>();
                List<float> list_z_cs_points_blob = new List<float>();
                // running sums (later averages) of the x/y/z coordinates
                float x_coord_cs_points_blob = 0;
                float y_coord_cs_points_blob = 0;
                float z_coord_cs_points_blob = 0;
                // clear the lists, just in case
                list_x_cs_points_blob.Clear();
                list_y_cs_points_blob.Clear();
                list_z_cs_points_blob.Clear();
                // for loop
                int i_points_blob = 0; // index of cs_points within this blob
                //int i_coord_blob = 0; // index of coordinates within this blob
                foreach (int i_point in arr_index)
                {
                    // arr_index: indexes into cs_points for this blob's pixels
                    // collect the blob's camera-space points
                    cs_points_blob[i_points_blob] = cs_points[i_point];
                    i_points_blob += 1;
                    // build the coordinate lists, excluding infinity (unmappable pixels)
                    if (!Double.IsInfinity(cs_points[i_point].X))
                    {
                        list_x_cs_points_blob.Add(cs_points[i_point].X);
                        list_y_cs_points_blob.Add(cs_points[i_point].Y);
                        list_z_cs_points_blob.Add(cs_points[i_point].Z);
                        // accumulate coordinate sums
                        x_coord_cs_points_blob += cs_points[i_point].X;
                        y_coord_cs_points_blob += cs_points[i_point].Y;
                        z_coord_cs_points_blob += cs_points[i_point].Z;
                    }
                }
                // convert the lists to arrays
                float[] arr_x_cs_points_blob = list_x_cs_points_blob.ToArray();
                float[] arr_y_cs_points_blob = list_y_cs_points_blob.ToArray();
                float[] arr_z_cs_points_blob = list_z_cs_points_blob.ToArray();
                // compute the blob center from cs_points_blob ////////////////////
                // fraction of valid (finite) points in the blob
                float ratio_valid_points_blob = (float)arr_x_cs_points_blob.Length / (float)arr_index.Length;// fraction of blob points that were not infinity
                // NOTE(review): the original comment said "if at least 10%", but the
                // code accepts any fraction > 0 — confirm which is intended.
                if (ratio_valid_points_blob > 0.0)
                {
                    // divide the sums by the number of valid points
                    x_coord_cs_points_blob = x_coord_cs_points_blob / (float)arr_x_cs_points_blob.Count();
                    y_coord_cs_points_blob = y_coord_cs_points_blob / (float)arr_y_cs_points_blob.Count(); // denominators are all equal
                    z_coord_cs_points_blob = z_coord_cs_points_blob / (float)arr_z_cs_points_blob.Count(); // denominators are all equal
                }
                else
                {
                    x_coord_cs_points_blob = 0;
                    y_coord_cs_points_blob = 0;
                    z_coord_cs_points_blob = 0;
                }
                coord_blob_center = new float[] { x_coord_cs_points_blob, y_coord_cs_points_blob, z_coord_cs_points_blob };
                // pack coord_blob_center into the 2-D array and build the label string
                for (int i_xyz = 0; i_xyz < 3; i_xyz++)
                {
                    coord_blobs_center[i_blob, i_xyz] = coord_blob_center[i_xyz];
                }
                label_coords_blob += string.Format("X: {0:+000.0;-000.0;+ 0.0}, ", coord_blob_center[0] * 100) +
                    string.Format("Y: {0:+000.0;-000.0;+ 0.0}, ", coord_blob_center[1] * 100) +
                    string.Format("Z: {0:+000.0;-000.0;+ 0.0}\n", coord_blob_center[2] * 100);
                i_blob += 1;
            }
            // show coord_blobs_center on screen
            label_coords.Content = label_coords_blob;
        }
    }

    // IR
    using (InfraredFrame infraredFrame = reference.InfraredFrameReference.AcquireFrame())
    {
        if (infraredFrame != null)
        {
            FrameDescription infraredFrameDescription = infraredFrame.FrameDescription;
            int width = infraredFrameDescription.Width;
            int height = infraredFrameDescription.Height;
            //ushort[] infraredData = new ushort[width * height];
            // http://www.naturalsoftware.jp/entry/2014/07/25/020750
            var infraredData = new ushort[width * height]; // ushort array
            infraredFrame.CopyFrameDataToArray(infraredData);
            this.infraredBitmap.Lock();
            this.infraredBitmap.WritePixels(infraredRect, infraredData, width * (int)infraredFrameDescription.BytesPerPixel, 0);
            //depthImage.WritePixels(depthRect, depthBuffer, depthStride, 0);// template
            this.infraredBitmap.Unlock();
            ColorImage.Source = this.infraredBitmap;
            // OpenCV: count blobs in the infrared image (refreshes list_arr_index — presumably; confirm inside CountBlobs)
            CountBlobs(this.infraredBitmap);
        }
    }
}
/// <summary>
/// Handles the depth/color/body index frame data arriving from the sensor.
/// Acquires all four frame types; when all are present, updates the FPS counter,
/// forwards the body frame (disposing it immediately after), and hands the
/// remaining frames to the green-screen mapping. The finally block disposes
/// whatever is still owned.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrameReference frameReference = e.FrameReference;

    // If you hit an E_FAIL here and the Kinect is starting up and shutting down repeatedly,
    // check your "USB Suspend" and "Link state power management" advanced power settings:
    // see https://social.msdn.microsoft.com/Forums/en-US/fb5d5590-4cb9-4c99-918a-4af18017b86f/kinect-service-shutting-down?forum=kinectv2sdk&prof=required
    MultiSourceFrame multiSourceFrame = frameReference.AcquireFrame();

    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    BodyFrame bodyFrame = null;

    try
    {
        if (multiSourceFrame != null)
        {
            DepthFrameReference depthFrameReference = multiSourceFrame.DepthFrameReference;
            ColorFrameReference colorFrameReference = multiSourceFrame.ColorFrameReference;
            BodyIndexFrameReference bodyIndexFrameReference = multiSourceFrame.BodyIndexFrameReference;
            BodyFrameReference bodyFrameReference = multiSourceFrame.BodyFrameReference;

            depthFrame = depthFrameReference.AcquireFrame();
            colorFrame = colorFrameReference.AcquireFrame();
            bodyIndexFrame = bodyIndexFrameReference.AcquireFrame();
            bodyFrame = bodyFrameReference.AcquireFrame();

            // Only process when every stream delivered a frame for this event.
            if ((depthFrame != null) && (colorFrame != null) && (bodyFrame != null) && (bodyIndexFrame != null))
            {
                FrameDescription depthFrameDescription = depthFrame.FrameDescription;
                FrameDescription colorFrameDescription = colorFrame.FrameDescription;
                FrameDescription bodyIndexFrameDescription = bodyIndexFrame.FrameDescription;

                int depthWidth = depthFrameDescription.Width;
                int depthHeight = depthFrameDescription.Height;
                int colorWidth = colorFrameDescription.Width;
                int colorHeight = colorFrameDescription.Height;
                int bodyIndexWidth = bodyIndexFrameDescription.Width;
                int bodyIndexHeight = bodyIndexFrameDescription.Height;

                // Simple frames-per-second bookkeeping over one-second windows.
                ++m_totalFrames;
                DateTime cur = DateTime.Now;
                if (cur.Subtract(m_lastTime) > TimeSpan.FromSeconds(1))
                {
                    // NOTE(review): frameDiff is unused while the Title line stays
                    // commented out — confirm whether the display was meant to return.
                    int frameDiff = m_totalFrames - m_lastFrames;
                    m_lastFrames = m_totalFrames;
                    m_lastTime = cur;
                    // Title = frameDiff.ToString() + " fps";
                }

                BodyFrameReady(bodyFrame);
                // Done with bodyFrame: release it early so the runtime can recycle it.
                bodyFrame.Dispose();
                bodyFrame = null;

                GreenScreenMappingDepthToColorSplats(ref depthFrame, ref colorFrame, ref bodyIndexFrame, depthWidth, depthHeight, colorWidth, colorHeight);
            }
        }
    }
    catch (Exception ex)
    {
        // NOTE(review): catches every exception and only asserts — in release
        // builds errors are effectively swallowed; confirm this is intentional.
        HoloDebug.Assert(false, ex.Message);
    }
    finally
    {
        // MultiSourceFrame, DepthFrame, ColorFrame, BodyIndexFrame are IDisposable
        if (depthFrame != null)
        {
            depthFrame.Dispose();
            depthFrame = null;
        }
        if (colorFrame != null)
        {
            colorFrame.Dispose();
            colorFrame = null;
        }
        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
            bodyIndexFrame = null;
        }
        if (bodyFrame != null)
        {
            bodyFrame.Dispose();
            bodyFrame = null;
        }
    }
}
/// <summary>
/// Handles the depth/color/body index frame data arriving from the sensor.
/// Maps every color pixel to depth space, copies the color frame into the bitmap
/// back buffer, then zeroes out every color pixel that does not land on a tracked
/// body in the body-index frame (green-screen effect). Frames are disposed as soon
/// as they are no longer needed; the finally block cleans up whatever remains and
/// unlocks the bitmap.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    int depthWidth = 0;
    int depthHeight = 0;

    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    bool isBitmapLocked = false;

    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }

    // We use a try/finally to ensure that we clean up before we exit the function.
    // This includes calling Dispose on any Frame objects that we may have and unlocking the bitmap back buffer.
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();

        // If any frame has expired by the time we process this event, return.
        // The "finally" statement will Dispose any that are not null.
        if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
        {
            return;
        }

        // Process Depth
        FrameDescription depthFrameDescription = depthFrame.FrameDescription;

        depthWidth = depthFrameDescription.Width;
        depthHeight = depthFrameDescription.Height;

        // Access the depth frame data directly via LockImageBuffer to avoid making a copy
        using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
        {
            this.coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                depthFrameData.UnderlyingBuffer,
                depthFrameData.Size,
                this.colorMappedToDepthPoints);
        }

        // We're done with the DepthFrame
        depthFrame.Dispose();
        depthFrame = null;

        // Process Color
        // Lock the bitmap for writing
        this.bitmap.Lock();
        isBitmapLocked = true;

        colorFrame.CopyConvertedFrameDataToIntPtr(this.bitmap.BackBuffer, this.bitmapBackBufferSize, ColorImageFormat.Bgra);

        // We're done with the ColorFrame
        colorFrame.Dispose();
        colorFrame = null;

        // We'll access the body index data directly to avoid a copy
        using (KinectBuffer bodyIndexData = bodyIndexFrame.LockImageBuffer())
        {
            unsafe
            {
                byte* bodyIndexDataPointer = (byte*)bodyIndexData.UnderlyingBuffer;

                int colorMappedToDepthPointCount = this.colorMappedToDepthPoints.Length;

                fixed (DepthSpacePoint* colorMappedToDepthPointsPointer = this.colorMappedToDepthPoints)
                {
                    // Treat the color data as 4-byte pixels
                    uint* bitmapPixelsPointer = (uint*)this.bitmap.BackBuffer;

                    // Loop over each row and column of the color image
                    // Zero out any pixels that don't correspond to a body index
                    for (int colorIndex = 0; colorIndex < colorMappedToDepthPointCount; ++colorIndex)
                    {
                        float colorMappedToDepthX = colorMappedToDepthPointsPointer[colorIndex].X;
                        float colorMappedToDepthY = colorMappedToDepthPointsPointer[colorIndex].Y;

                        // The sentinel value is -inf, -inf, meaning that no depth pixel corresponds to this color pixel.
                        if (!float.IsNegativeInfinity(colorMappedToDepthX) &&
                            !float.IsNegativeInfinity(colorMappedToDepthY))
                        {
                            // Make sure the depth pixel maps to a valid point in color space
                            int depthX = (int)(colorMappedToDepthX + 0.5f);
                            int depthY = (int)(colorMappedToDepthY + 0.5f);

                            // If the point is not valid, there is no body index there.
                            if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                            {
                                int depthIndex = (depthY * depthWidth) + depthX;

                                // If we are tracking a body for the current pixel, do not zero out the pixel
                                // (0xff is the body-index value for "no body").
                                if (bodyIndexDataPointer[depthIndex] != 0xff)
                                {
                                    continue;
                                }
                            }
                        }

                        bitmapPixelsPointer[colorIndex] = 0;
                    }
                }

                this.bitmap.AddDirtyRect(new Int32Rect(0, 0, this.bitmap.PixelWidth, this.bitmap.PixelHeight));
            }
        }
    }
    finally
    {
        if (isBitmapLocked)
        {
            this.bitmap.Unlock();
        }

        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }
    }
}
/// <summary>
/// Dispatches each stream of the multi-source frame: the viewer shows whichever
/// visualization (color/depth/infrared) is currently selected, the user reporter
/// is fed the latest bodies, and skeletons are drawn when display is enabled.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    var multiFrame = e.FrameReference.AcquireFrame();

    // Color stream
    using (var colorFrame = multiFrame.ColorFrameReference.AcquireFrame())
    {
        if (colorFrame != null && viewer.Visualization == Visualization.Color)
        {
            viewer.Image = colorFrame.ToBitmap();
        }
    }

    // Depth stream
    using (var depthFrame = multiFrame.DepthFrameReference.AcquireFrame())
    {
        if (depthFrame != null && viewer.Visualization == Visualization.Depth)
        {
            viewer.Image = depthFrame.ToBitmap();
        }
    }

    // Infrared stream
    using (var infraredFrame = multiFrame.InfraredFrameReference.AcquireFrame())
    {
        if (infraredFrame != null && viewer.Visualization == Visualization.Infrared)
        {
            viewer.Image = infraredFrame.ToBitmap();
        }
    }

    // Body stream
    using (var bodyFrame = multiFrame.BodyFrameReference.AcquireFrame())
    {
        if (bodyFrame == null)
        {
            return;
        }

        var trackedBodies = bodyFrame.Bodies();
        _userReporter.Update(trackedBodies);

        foreach (Body singleBody in trackedBodies)
        {
            if (_displaySkeleton)
            {
                viewer.DrawBody(singleBody);
            }
        }
    }
}
/// <summary>
/// Renders the color camera image, draws the tracked hands and thumbs on the
/// canvas, logs the right-hand position while the tracking phase is active,
/// and shows the current hand states in the UI.
/// </summary>
/// <param name="sender">The <see cref="MultiSourceFrameReader"/> raising the event.</param>
/// <param name="e">Event arguments carrying the multi-source frame reference.</param>
void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    var reference = e.FrameReference.AcquireFrame();

    // AcquireFrame returns null when the frame arrived late or was recycled.
    if (reference == null)
    {
        return;
    }

    // Color
    using (var frame = reference.ColorFrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            camera.Source = frame.ToBitmap();
        }
    }

    // Body
    using (var frame = reference.BodyFrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        canvas.Children.Clear();

        _bodies = new Body[frame.BodyFrameSource.BodyCount];
        frame.GetAndRefreshBodyData(_bodies);

        foreach (var body in _bodies)
        {
            if (body == null || !body.IsTracked)
            {
                continue;
            }

            // Find the joints of interest.
            Joint handRight = body.Joints[JointType.HandRight];
            Joint thumbRight = body.Joints[JointType.ThumbRight];
            Joint handLeft = body.Joints[JointType.HandLeft];
            Joint thumbLeft = body.Joints[JointType.ThumbLeft];

            // Draw hands and thumbs.
            canvas.DrawHand(handRight, _sensor.CoordinateMapper);
            canvas.DrawHand(handLeft, _sensor.CoordinateMapper);
            canvas.DrawThumb(thumbRight, _sensor.CoordinateMapper);
            canvas.DrawThumb(thumbLeft, _sensor.CoordinateMapper);

            // BUG FIX: the original blocked this UI event handler with
            // Thread.Sleep(2000) and with busy-wait loops such as
            // `while (body.HandRightState != HandState.Open) { }`. A Body is a
            // per-frame snapshot, so its hand state can never change inside this
            // handler: the sleep froze the UI for two seconds and each loop either
            // never executed or spun forever. Hand-state transitions
            // (Closed -> Open -> Lasso) must be observed across successive frames,
            // so we react only to the state visible in this frame.
            // TODO(review): persist the Closed/Open "init/start" phases in instance
            // fields if the original start-gesture protocol is still required.
            if (body.HandRightState == HandState.Closed)
            {
                // Original "init" gesture marker.
                System.Diagnostics.Debug.WriteLine("drd0");
            }
            else if (body.HandRightState == HandState.Open)
            {
                // Original "tracking" phase: log the right-hand position each frame
                // until the lasso pose ends it.
                System.Diagnostics.Debug.WriteLine(handRight.Position.X);
                System.Diagnostics.Debug.WriteLine(handRight.Position.Y);
                System.Diagnostics.Debug.WriteLine(handRight.Position.Z);
                System.Diagnostics.Debug.WriteLine("---------------------------------");
            }

            // Show the current hand states in the UI.
            tblRightHandState.Text = DescribeHandState(body.HandRightState);
            tblLeftHandState.Text = DescribeHandState(body.HandLeftState);
        }
    }
}

/// <summary>Maps a <see cref="HandState"/> to the label shown in the UI.</summary>
private static string DescribeHandState(HandState state)
{
    switch (state)
    {
        case HandState.Open: return "Open";
        case HandState.Closed: return "Closed";
        case HandState.Lasso: return "Lasso";
        case HandState.Unknown: return "Unknown...";
        case HandState.NotTracked: return "Not tracked";
        default: return "-";
    }
}
/// <summary>
/// Offloads frame processing to background tasks: acquires the multi-source
/// frame, then dispatches RGB-video and joint processing as separate tasks.
/// The outer task is tracked in <c>iProcessingTasks</c> until it completes.
/// </summary>
/// <param name="sender">The <see cref="MultiSourceFrameReader"/> raising the event.</param>
/// <param name="e">Event arguments carrying the multi-source frame reference.</param>
private void KFrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Move the heavy work off the sensor callback thread.
    var work = Task.Factory.StartNew(() =>
    {
        // Retrieve multisource frame reference
        MultiSourceFrameReference multiRef = e.FrameReference;

        MultiSourceFrame multiFrame;
        try
        {
            multiFrame = multiRef.AcquireFrame();
        }
        catch (InvalidOperationException)
        {
            // The frame was already disposed/recycled by the runtime.
            return;
        }

        if (multiFrame == null)
        {
            return;
        }

        // Retrieve data stream frame references
        ColorFrameReference colorRef = multiFrame.ColorFrameReference;
        BodyFrameReference bodyRef = multiFrame.BodyFrameReference;
        BodyIndexFrameReference bodyIndexRef = multiFrame.BodyIndexFrameReference;
        DepthFrameReference depthRef = multiFrame.DepthFrameReference;

        Task.Factory.StartNew(() => ProcessRGBVideo(colorRef, bodyIndexRef, depthRef));
        Task.Factory.StartNew(() => ProcessJoints(bodyRef));
    });

    // BUG FIX: the original stored the ContinueWith continuation task in the
    // list but removed the *antecedent* task inside the continuation — an
    // instance that was never added — so no entry was ever removed and
    // iProcessingTasks grew without bound. Add first, then attach the
    // continuation that removes the same instance.
    // NOTE(review): iProcessingTasks is mutated from multiple threads here —
    // confirm it is a thread-safe collection.
    iProcessingTasks.Add(work);
    work.ContinueWith(t => iProcessingTasks.Remove(t));
}
/// <summary>
/// Processes one multi-source frame: copies the color frame into
/// <c>rawColorBitmap</c> (BGRA), converts the depth frame into displayable
/// pixels, maps color to depth space, and records a clone of both bitmaps.
/// </summary>
/// <param name="sender">The <see cref="MultiSourceFrameReader"/> raising the event.</param>
/// <param name="e">Event arguments carrying the multi-source frame reference.</param>
private void MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    var reference = e.FrameReference.AcquireFrame();

    // BUG FIX: AcquireFrame returns null when the frame arrived late or was
    // recycled; the original dereferenced it unconditionally.
    if (reference == null)
    {
        return;
    }

    // ---- Color frame: copy into the display bitmap as BGRA. ----
    using (ColorFrame colorFrame = reference.ColorFrameReference.AcquireFrame())
    {
        if (colorFrame != null)
        {
            FrameDescription colorFrameDescription = colorFrame.FrameDescription;

            using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
            {
                this.rawColorBitmap.Lock();
                try
                {
                    // Verify data and write the new color frame data to the display bitmap.
                    if ((colorFrameDescription.Width == this.rawColorBitmap.PixelWidth) &&
                        (colorFrameDescription.Height == this.rawColorBitmap.PixelHeight))
                    {
                        colorFrame.CopyConvertedFrameDataToIntPtr(
                            this.rawColorBitmap.BackBuffer,
                            (uint)(colorFrameDescription.Width * colorFrameDescription.Height * 4), // 4 bytes/pixel (BGRA)
                            ColorImageFormat.Bgra);

                        this.rawColorBitmap.AddDirtyRect(
                            new Int32Rect(0, 0, this.rawColorBitmap.PixelWidth, this.rawColorBitmap.PixelHeight));
                    }
                }
                finally
                {
                    // BUG FIX: Unlock was unprotected before — an exception during
                    // the copy would leave the bitmap permanently locked.
                    this.rawColorBitmap.Unlock();
                }
            }
        }
    }

    // ---- Depth frame ----
    bool depthFrameProcessed = false;

    using (DepthFrame depthFrame = reference.DepthFrameReference.AcquireFrame())
    {
        if (depthFrame != null)
        {
            // The fastest way to process the depth data is to directly access
            // the underlying buffer.
            using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
            {
                // Verify the buffer matches the expected frame geometry before touching it.
                if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) ==
                        (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)) &&
                    (this.depthFrameDescription.Width == this.depthBitmap.PixelWidth) &&
                    (this.depthFrameDescription.Height == this.depthBitmap.PixelHeight))
                {
                    // Use the extreme potential depth range (including the less
                    // reliable far field). To filter by reliable distance instead:
                    //// maxDepth = depthFrame.DepthMaxReliableDistance;
                    ushort maxDepth = ushort.MaxValue;

                    this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, maxDepth);
                    depthFrameProcessed = true;

                    // Copy the raw depth values so the color image can be aligned to them.
                    ushort[] depthFrameData = new ushort[depthFrameDescription.Width * depthFrameDescription.Height];
                    depthFrame.CopyFrameDataToArray(depthFrameData);
                    MapColorToDepth(depthFrameData);
                }
            }
        }
    }

    if (depthFrameProcessed)
    {
        this.RenderDepthPixels();
    }

    // Record a per-frame snapshot of both images.
    // NOTE(review): these lists grow without bound (one clone per frame), and
    // this clones `colorBitmap` while the code above writes `rawColorBitmap` —
    // confirm both are intentional.
    //new Task(() => SaveFrame(string.Format("{0}{1}_color.png", saveRoot, frameId), colorBitmap)).Start();
    //new Task(() => SaveFrame(string.Format("{0}{1}_depth.png", saveRoot, frameId), depthBitmap)).Start();
    allColorImgs.Add(colorBitmap.Clone());
    allDepthImgs.Add(depthBitmap.Clone());
    frameId++;
}