// Event for handling depth stream updates from the Kinect. Zooms on the
// tracked hand, paints a gradient-magnitude debug overlay, then updates
// finger tracking from the raw depth data.
private void SensorDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    // "using" param ensures depth frame is disposed after use.
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        // Frames can be null when the sensor drops one or is shutting
        // down — the original dereferenced without checking and would
        // throw NullReferenceException here.
        if (depthFrame == null)
        {
            return;
        }

        // Set depth frame parameters
        DFP.Bitmask = DepthImageFrame.PlayerIndexBitmaskWidth;
        DFP.Width = depthFrame.Width;
        DFP.Height = depthFrame.Height;

        #region DEBUG_HAND_OUTLINE
        depthFrame.CopyPixelDataTo(this.depthPixels);

        // Duplicate the buffer locally instead of copying from the frame a
        // second time (the original called CopyPixelDataTo twice).
        short[] tempDepth = new short[depthPixels.Length];
        Array.Copy(depthPixels, tempDepth, depthPixels.Length);

        // Updates the hand position based on the kinematic data. Dampened by a multiple to reduce oscillation.
        //Hands[0].position_Pixels += 0.8 * Hands[0].elapsedMillis * Hands[0].velocity_Pixels;
        HandFrame HF = ProcessDepthImage.zoomOnHand(tempDepth, DFP, Hands[0]);

        IntPtr dataStartPtr = bmpData[0].Scan0;
        int dataLength = bmpData[0].Stride * bmpData[0].Height;

        // Print to debug left hand
        rgbValues = new byte[bmpData[0].Stride * bmpData[0].Height];
        for (int y = 1; y < HF.dx.GetLength(0); y++)
        {
            for (int x = 1; x < HF.dy.GetLength(1); x++)
            {
                // Gradient magnitude at (y, x), truncated to a byte.
                int dD = (byte)(Math.Sqrt(HF.dx[y, x] * HF.dx[y, x] + HF.dy[y, x] * HF.dy[y, x]));

                // Offsets that center the hand window inside the debug
                // bitmap (hoisted; the original recomputed these in every
                // operand of the bounds check below).
                int offX = (debugBitmapSizes[0].Width - HF.dx.GetLength(1)) / 2;
                int offY = (debugBitmapSizes[0].Height - HF.dx.GetLength(0)) / 2;
                int val = debugBitmapSizes[0].Width * (y + offY) + x + offX;

                // Paint a white BGRA pixel where the edge response is
                // strong and the target lies inside the debug bitmap.
                if (dD > 5
                    && x + offX > 0 && x + offX < debugBitmapSizes[0].Width
                    && y + offY > 0 && y + offY < debugBitmapSizes[0].Height)
                {
                    rgbValues[4 * val + 0] = 255;
                    rgbValues[4 * val + 1] = 255;
                    rgbValues[4 * val + 2] = 255;
                    rgbValues[4 * val + 3] = 255;
                }
            }
        }
        System.Runtime.InteropServices.Marshal.Copy(rgbValues, 0, dataStartPtr, dataLength);
        #endregion

        if (checkBox2.Checked)
        {
            // Reset second-hand tracking state when requested via the UI.
            Hands[1].tracked = false;
            foreach (ObjectPoint f in Hands[1].fingers)
            {
                f.tracked = false;
            }
        }

        ProcessDepthImage.updateFingersFromDepth(depthPixels, DFP, Hands);
        this.FPScount++;
    }
}
// Depth-stream handler: paints the latest depth frame onto the canvas background.
void sensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (var frame = e.OpenDepthImageFrame())
    {
        if (frame == null)
        {
            return;
        }

        this.imageCanvas.Background = new ImageBrush(frame.ToBitmapSource());
    }
}
// Forwards each depth frame's raw pixel data to the interaction stream.
private void SensorOnDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (var depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame == null)
        {
            return;
        }

        stream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
    }
}
// Hands each depth frame to FramesReady along with the interaction stream.
private void KinectDepthFrameReady(object sender, DepthImageFrameReadyEventArgs args)
{
    using (DepthImageFrame depthFrame = args.OpenDepthImageFrame())
    {
        if (depthFrame != null)
        {
            FramesReady.DepthFrameReady(depthFrame, ref _interactionStream);
        }
    }
}
// Queues each non-null depth frame for later processing.
// NOTE(review): ownership of the frame transfers to the queue's consumer,
// which presumably disposes it — confirm, since frames are never disposed here.
private void nuiRuntime_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    var frame = e.OpenDepthImageFrame();
    if (frame == null)
    {
        return;
    }

    this.queue.Enqueue(frame);
}
// Depth handler: renders the frame into the WinForms picture box.
private void KinectSensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (var cuadro = e.OpenDepthImageFrame())
    {
        if (cuadro == null)
        {
            return;
        }

        // Dispose the previously displayed bitmap: assigning a new Image
        // every frame without disposing the old one leaks GDI handles at
        // the sensor's frame rate.
        var previous = pbCaptura.Image;
        pbCaptura.Image = cuadro.ToBitmap();
        previous?.Dispose();
    }
}
// Copies each depth frame's raw pixel values into the depthRawValues buffer.
// NOTE(review): the frame is assigned to the depthImageFrame *field* and then
// disposed by the using block on exit, so the field is left pointing at a
// disposed frame — any later reader of depthImageFrame would hit a disposed
// object. Consider a local variable instead; confirm nothing else reads the
// field before changing it.
void myKinect_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e) { using (depthImageFrame = e.OpenDepthImageFrame()) { if (depthImageFrame != null) { depthImageFrame.CopyPixelDataTo(depthRawValues); } } }
// Depth handler: when the view selector is on 2, converts the depth frame
// to a grayscale BGR32 image and writes it into colorBitmap.
private void SensorDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        if (pic == 2 && depthFrame != null)
        {
            // Pull the pixel data into the reusable buffer.
            depthFrame.CopyDepthImagePixelDataTo(this.depthPixels);

            // Reliable depth range for this frame; anything outside maps to black.
            int minDepth = depthFrame.MinDepth;
            int maxDepth = depthFrame.MaxDepth;

            // Expand each depth sample into a gray BGR32 pixel.
            int outIndex = 0;
            for (int i = 0; i < this.depthPixels.Length; ++i)
            {
                short depth = depthPixels[i].Depth;

                // Truncating to a byte discards the most-significant bits,
                // so intensity "wraps" but fine detail is preserved.
                // Out-of-range samples become 0 (black).
                byte intensity = (depth >= minDepth && depth <= maxDepth) ? (byte)depth : (byte)0;

                this.colorPixels[outIndex++] = intensity; // blue
                this.colorPixels[outIndex++] = intensity; // green
                this.colorPixels[outIndex++] = intensity; // red
                ++outIndex;                               // unused BGR32 byte
            }

            // Push the converted pixels into the displayed bitmap.
            this.colorBitmap.WritePixels(
                new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight),
                this.colorPixels,
                this.colorBitmap.PixelWidth * sizeof(int),
                0);
        }
    }
}
// Caches the latest depth pixel data into the _depthPixels buffer.
private void SensorDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (var depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame != null)
        {
            depthFrame.CopyDepthImagePixelDataTo(this._depthPixels);
        }
    }
}
// Event handler for sensor's DepthFrameReady event.
// Colors each pixel by distance band — red for -1, yellow for 0,
// green for 800–1800, white otherwise — and writes into depthBitmap.
private void SensorDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame == null)
        {
            return;
        }

        depthFrame.CopyDepthImagePixelDataTo(depthPixels);

        // One BGR32 pixel (4 bytes) per depth sample.
        // (Removed unused minDepth/maxDepth locals from the original —
        // they were computed but never read.)
        byte[] depthdata = new byte[depthPixels.Length * 4];

        for (int i = 0; i < depthPixels.Length; ++i)
        {
            short depth = depthPixels[i].Depth;

            if (depth == -1)
            {
                // Sentinel value: red. NOTE(review): confirm this branch is
                // reachable — not established by the visible code.
                depthdata[i * 4 + 0] = 0;
                depthdata[i * 4 + 1] = 0;
                depthdata[i * 4 + 2] = 255;
                depthdata[i * 4 + 3] = 0;
            }
            else if (depth == 0)
            {
                // Zero depth: yellow.
                depthdata[i * 4 + 0] = 0;
                depthdata[i * 4 + 1] = 255;
                depthdata[i * 4 + 2] = 255;
                depthdata[i * 4 + 3] = 0;
            }
            else if (depth >= 800 && depth <= 1800)
            {
                // 800–1800 band: green.
                depthdata[i * 4 + 0] = 0;
                depthdata[i * 4 + 1] = 255;
                depthdata[i * 4 + 2] = 0;
                depthdata[i * 4 + 3] = 0;
            }
            else
            {
                // Everything else: white.
                depthdata[i * 4 + 0] = 255;
                depthdata[i * 4 + 1] = 255;
                depthdata[i * 4 + 2] = 255;
                depthdata[i * 4 + 3] = 0;
            }
        }

        // Write the pixel data into our bitmap
        depthBitmap.WritePixels(
            new Int32Rect(0, 0, this.depthBitmap.PixelWidth, this.depthBitmap.PixelHeight),
            depthdata,
            depthBitmap.PixelWidth * sizeof(int),
            0);
    }
}
// Forwards each depth frame's raw data to the interaction stream.
private void sensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    //onRaisedVoiceCommand(new VoiceCommandEventArgs("I'm in state" + state + " and I got depth data"));
    using (var depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame == null)
        {
            return;
        }

        inStream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
    }
}
// depth image: renders the raw depth data and updates the FPS readout.
private void KinectDevice_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    // Dispose the frame when done — the original never disposed it,
    // leaking a frame from the sensor's pool on every event.
    using (DepthImageFrame frame = e.OpenDepthImageFrame())
    {
        if (frame != null)
        {
            frame.CopyPixelDataTo(_DepthImagePixelData);
            _RawDepthImage.WritePixels(_RawDepthImageRect, _DepthImagePixelData, _RawDepthImageStride, 0);
        }
    }

    // Average frames per second since _StartFrameTime.
    FramesPerSecondElement.Text = string.Format(
        "{0:0} fps",
        (this._TotalFrames++ / DateTime.Now.Subtract(this._StartFrameTime).TotalSeconds));
}
// Copies each depth frame into a fresh buffer and renders it.
private void Kinect_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame frame = e.OpenDepthImageFrame())
    {
        if (frame == null)
        {
            return;
        }

        var pixelData = new short[frame.PixelDataLength];
        frame.CopyPixelDataTo(pixelData);
        this._DepthImageBitmap.WritePixels(this._DepthImageBitmapRect, pixelData, this._DepthImageStride, 0);
    }
}
// Shows the depth frame in image1, skipping missing or empty frames.
void DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (var imageData = e.OpenDepthImageFrame())
    {
        if (imageData != null && imageData.PixelDataLength != 0)
        {
            this.image1.Source = imageData.ToBitmapSource();
        }
    }
}
/// <summary>
/// Kinect Depth Polling Method — renders only every 10th frame to
/// throttle UI updates.
/// </summary>
/// <param name="sender">originator of event</param>
/// <param name="e">event ready identifier</param>
private void DepthImageReady(object sender, DepthImageFrameReadyEventArgs e)
{
    // Render on the first frame of each 10-frame cycle.
    if (count == 0)
    {
        this.kinectImager.Source = kinectInterp.DepthImageReady(sender, e);
    }

    // Advance the cycle counter, wrapping back to 0 after the 10th frame.
    count++;
    if (count > 9)
    {
        count = 0;
    }
}
// Publishes each depth frame's pixel count on the sensor's topic.
private void SensorDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame == null)
        {
            return;
        }

        var buffer = new short[depthFrame.PixelDataLength];
        depthFrame.CopyPixelDataTo(buffer);
        Publish("/i5/kinect/" + Index, $"pixelCount:{buffer.Length}");
    }
}
// Passes each non-null depth frame to the data manager.
void kinect_depthframe_ready(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame frame = e.OpenDepthImageFrame())
    {
        if (frame != null)
        {
            kinect_data_manager.UpdateDepthData(frame);
        }
    }
}
// Feeds raw depth data into the interaction stream.
void kinect_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (var depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame != null)
        {
            interStream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
        }
    }
}
// Copies depth pixels into a reusable buffer and renders them.
void mykinect_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame frame = e.OpenDepthImageFrame())
    {
        if (frame == null)
        {
            return;
        }

        // Reallocate only when the frame size changes; the original
        // allocated a fresh array on every frame, generating avoidable
        // garbage at the sensor's frame rate.
        if (depthpixelData == null || depthpixelData.Length != frame.PixelDataLength)
        {
            depthpixelData = new short[frame.PixelDataLength];
        }

        frame.CopyPixelDataTo(depthpixelData);
        _DepthImageBitmap.WritePixels(_DepthImageBitmapRect, depthpixelData, _DepthImageStride, 0);
    }
}
/// <summary>
/// Event handler for Kinect sensor's DepthFrameReady event.
/// Stores the frame as a 2D array in TableVision, renders a grayscale
/// preview into depthBitmap, and overlays the crop box when one is set.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void SensorDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame == null)
        {
            return;
        }

        // Copy the pixel data from the image to the reusable buffer.
        depthFrame.CopyDepthImagePixelDataTo(this.depthPixels);

        // Reliable depth range for this frame.
        int minDepth = depthFrame.MinDepth;
        int maxDepth = depthFrame.MaxDepth;

        // Flatten depth values into a 1D array...
        short[] depthArray1D = new short[this.depthPixels.Length];
        for (int i = 0; i < this.depthPixels.Length; ++i)
        {
            depthArray1D[i] = depthPixels[i].Depth;
        }

        // ...then reshape into the 2D layout the vision code consumes.
        TableVision.DepthArray2D = make2DArray(depthArray1D, this.depthBitmap.PixelHeight, this.depthBitmap.PixelWidth);

        // Convert each depth sample to a gray BGR32 pixel; out-of-range
        // samples become 0 (black), in-range depths truncate to a byte.
        int colorPixelIndex = 0;
        for (int row = 0; row <= TableVision.DepthArray2D.GetUpperBound(0); row++)
        {
            for (int col = 0; col <= TableVision.DepthArray2D.GetUpperBound(1); col++)
            {
                short depth = TableVision.DepthArray2D[row, col];
                byte intensity = (depth >= minDepth && depth <= maxDepth) ? (byte)depth : (byte)0;

                this.renderedDepthPixels[colorPixelIndex++] = intensity; // blue
                this.renderedDepthPixels[colorPixelIndex++] = intensity; // green
                this.renderedDepthPixels[colorPixelIndex++] = intensity; // red
                ++colorPixelIndex;                                       // unused BGR32 byte
            }
        }

        // Write the rendered pixels into the preview bitmap.
        this.depthBitmap.WritePixels(
            new Int32Rect(0, 0, this.depthBitmap.PixelWidth, this.depthBitmap.PixelHeight),
            this.renderedDepthPixels,
            this.depthBitmap.PixelWidth * sizeof(int),
            0);

        // A TLX of -1 marks "no crop box"; otherwise draw it in red.
        if (this.cropBox.TLX != -1)
        {
            this.depthBitmap.DrawRectangle(
                this.cropBox.TLX, this.cropBox.TLY,
                this.cropBox.BRX, this.cropBox.BRY,
                System.Windows.Media.Color.FromRgb(255, 0, 0));
        }
    }
}
// Converts each depth frame to a Gray16 bitmap and displays it.
void depth_ready(object sensor, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame depthImage = e.OpenDepthImageFrame())
    {
        if (depthImage == null)
        {
            return;
        }

        pixelData = new short[depthImage.PixelDataLength];
        depthImage.CopyPixelDataTo(pixelData);

        // 16-bit grayscale at 96 DPI; stride = width * bytes-per-pixel.
        depthshow.Source = BitmapSource.Create(
            depthImage.Width, depthImage.Height,
            96, 96,
            PixelFormats.Gray16, null,
            pixelData,
            depthImage.Width * depthImage.BytesPerPixel);
    }
}
// Delegates each non-null depth frame to ProcessDepthFrame.
void kinectSensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (var frame = e.OpenDepthImageFrame())
    {
        if (frame != null)
        {
            ProcessDepthFrame(frame);
        }
    }
}
// Calibrate sensor with a single frame of the sensor: compute the min,
// max, and average pixel depth, then derive the wall-distance threshold.
private void CalibrationDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame == null || !this.calibrationNeeded)
        {
            return;
        }

        Console.WriteLine("Calibrating...");

        // Toggle Calibration image visibility
        this.Image.Visibility = this.Image.Visibility == Visibility.Collapsed
            ? Visibility.Visible
            : Visibility.Collapsed;

        // Copy the pixel data from the image to the calibration buffer.
        depthFrame.CopyDepthImagePixelDataTo(this.depthPixelsCalibration);
        this.calibrationNeeded = false;
        this.calibrated = true;

        for (int i = 0; i < this.depthPixelsCalibration.Length; ++i)
        {
            // BUG FIX: read from the calibration buffer that was just
            // filled above — the original read the unrelated depthPixels
            // array, so calibration used stale/wrong data.
            short depth = depthPixelsCalibration[i].Depth;

            // Track the smallest non-zero depth (0 means no reading).
            if (this.minPixelDepth == 0)
            {
                this.minPixelDepth = depth;
            }
            else if (depth < this.minPixelDepth && depth != 0)
            {
                this.minPixelDepth = depth;
            }

            if (depth > maxPixelDepth)
            {
                this.maxPixelDepth = depth;
            }

            this.avgPixelDepth += depth;
        }

        // Calculate calibrated wall threshold: 80% of the spread between
        // the farthest pixel and the average.
        this.avgPixelDepth = this.avgPixelDepth / this.depthPixelsCalibration.Length;
        this.wallDistThresh = (int)((this.maxPixelDepth - this.avgPixelDepth) * .80);
        Console.WriteLine("Pixel: " + minPixelDepth.ToString() + " : " + maxPixelDepth.ToString() + " : " + avgPixelDepth.ToString() + " : " + this.wallDistThresh.ToString());
    }
}
// Shows each depth frame in the preview image.
void DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    // The original never disposed the frame, leaking one per event;
    // "using" releases it once the bitmap has been produced.
    using (DepthImageFrame imageFrame = e.OpenDepthImageFrame())
    {
        if (imageFrame != null)
        {
            dephPreview.Source = DepthToBitmapSource(imageFrame);
        }
    }
}
// Publishes each depth frame to subscribers via the event aggregator.
// NOTE(review): the frame is disposed when this method returns, so
// subscribers must finish with it synchronously — confirm Publish does
// not defer delivery.
private void ActiveSensorOnDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (var frame = e.OpenDepthImageFrame())
    {
        if (frame != null)
        {
            _eventAggregator.GetEvent<KinectDepthImageFrameReadyEvent>().Publish(frame);
        }
    }
}
// Copies each depth frame into a fresh buffer and renders it.
void kinectDevice_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (var depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame == null)
        {
            return;
        }

        var pixels = new short[depthFrame.PixelDataLength];
        depthFrame.CopyPixelDataTo(pixels);
        depthImageBitMap.WritePixels(depthImageBitmapRect, pixels, depthImageStride, 0);
    }
}
// Renders the depth frame using the colored depth visualization.
private void OnDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame frame = e.OpenDepthImageFrame())
    {
        if (frame != null)
        {
            depthImage.Source = frame.ToBitmap(DepthImageMode.Colors);
        }
    }
}
// Renders the depth frame, but only while the viewer is in depth mode.
void Sensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (var frame = e.OpenDepthImageFrame())
    {
        if (frame == null || _mode != Mode.Depth)
        {
            return;
        }

        camera.Source = frame.ToBitmap();
    }
}
/// <summary>
/// Processes each depth frame: converts it to colored bytes and notifies
/// the subject's observers.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
void _nui_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame frame = e.OpenDepthImageFrame())
    {
        if (frame != null)
        {
            // Color-code the raw depth, then hand it off for processing.
            byte[] pixels = GenerateColoredBytes(frame);
            _subject.Notify(pixels, frame.Width, frame.Height);
        }
    }
}
// Generates color-coded depth pixels and draws them into the depth image.
void kinectSensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (var depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame != null)
        {
            DrawKinectImage(depthFrame, GenerateDepthColorPixels(depthFrame), KinectDepthImage);
        }
    }
}
/// <summary>
/// Handler for the Kinect sensor's DepthFrameReady event.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="depthImageFrameReadyEventArgs">event arguments</param>
private void SensorDepthFrameReady(object sender, DepthImageFrameReadyEventArgs depthImageFrameReadyEventArgs)
{
    // Even though we un-register all our event handlers when the sensor
    // changes, an event for the old sensor may still be queued due to the
    // way KinectSensor delivers events — so re-check the origin here.
    if (this.KinectSensor != sender)
    {
        return;
    }

    using (DepthImageFrame depthFrame = depthImageFrameReadyEventArgs.OpenDepthImageFrame())
    {
        if (depthFrame == null)
        {
            return;
        }

        try
        {
            // Hand data to the Interaction framework to be processed.
            this.interactionStream.ProcessDepth(depthFrame.GetRawPixelData(), depthFrame.Timestamp);
        }
        catch (InvalidOperationException)
        {
            // DepthFrame functions may throw when the sensor gets into a
            // bad state. Ignore the frame in that case.
        }
    }
}
// Writes each depth frame into the bitmap, but only while both a sensor
// and a Kinect region are available.
private void KinectSensorOnDepthFrameReady(object sender, DepthImageFrameReadyEventArgs depthImageFrameReadyEventArgs)
{
    if (this.KinectSensor == null || this.KinectRegion == null)
    {
        return;
    }

    using (var frame = depthImageFrameReadyEventArgs.OpenDepthImageFrame())
    {
        if (frame == null)
        {
            return;
        }

        try
        {
            this.WriteToBitmap(frame);
        }
        catch (InvalidOperationException)
        {
            // DepthFrame functions may throw when the sensor gets into a
            // bad state. Ignore the frame in that case.
        }
    }
}