/// <summary>
/// Depth frame handler: copies the raw distances for a frame that matches
/// our buffers, then colorizes them, builds the histogram view, and pushes
/// the pixels into the display bitmap.
/// </summary>
private void Reader_DepthFrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    // Depth range of interest (millimeters); only set when a valid frame is accepted.
    ushort minDepth = 0;
    ushort maxDepth = 0;
    bool depthFrameProcessed = false;

    // DepthFrame is IDisposable; it holds all distance data for one frame.
    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame != null)
        {
            FrameDescription depthFrameDescription = depthFrame.FrameDescription;

            // Accept the frame only if its dimensions match our buffer and bitmap.
            if (((depthFrameDescription.Width * depthFrameDescription.Height) == this.depthFrameData.Length) &&
                (depthFrameDescription.Width == this.bitmap.PixelWidth) &&
                (depthFrameDescription.Height == this.bitmap.PixelHeight))
            {
                // Copy the raw distance values out of the frame.
                depthFrame.CopyFrameDataToArray(this.depthFrameData);

                // Fixed range of interest, in millimeters.
                minDepth = 900;
                maxDepth = 4000;
                depthFrameProcessed = true;
            }
        }
    }

    if (depthFrameProcessed)
    {
        // Map distances to colors, build the histogram, and refresh the bitmap.
        ConvertDepthData(minDepth, maxDepth);
        CreateDepthHistogram(minDepth, maxDepth, this.depthFrameData);
        this.InvalidateArrange();
        depthPixels.CopyTo(this.bitmap.PixelBuffer);
        this.bitmap.Invalidate();
    }
}
/// <summary>
/// Depth reader callback: optionally records the frame and/or updates the
/// on-screen depth bitmap, depending on recorder state and display mode.
/// </summary>
private async void _depthReader_FrameArrived(DepthFrameReader sender, DepthFrameArrivedEventArgs e)
{
    bool shouldRecord = _recorder != null && _recorder.IsStarted && DepthCheckBox.IsChecked.GetValueOrDefault();
    bool shouldDisplay = _displayType == FrameTypes.Depth;
    ushort minDepth = 0, maxDepth = 0;

    if (!(shouldRecord || shouldDisplay))
    {
        return;
    }

    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            // A null frame means we were too slow to acquire it; skip display.
            shouldDisplay = false;
            System.Diagnostics.Debug.WriteLine("!!! FRAME SKIPPED (Depth in MainPage)");
        }
        else
        {
            frame.CopyFrameDataToArray(_depthData);
            minDepth = frame.DepthMinReliableDistance;
            maxDepth = frame.DepthMaxReliableDistance;

            if (shouldRecord)
            {
                _recorder.RecordFrame(frame, _depthData);
            }
        }
    }

    if (shouldDisplay)
    {
        // Await outside the using so the frame is released promptly.
        await _depthBitmap.UpdateAsync(_depthData, minDepth, maxDepth);
    }
}
/// <summary>Raised when a new depth frame arrives from the sensor.</summary>
public event EventHandler<DepthFrameArrivedEventArgs> DepthFrameReady;

/// <summary>
/// Depth-frame arrival handler; forwards the sensor event to subscribers.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
void sensor_DepthFrameReady(object sender, DepthFrameArrivedEventArgs e)
{
    // ?.Invoke snapshots the delegate, avoiding the null-dereference race the
    // original `if (x != null) x(...)` had if the last subscriber detaches
    // between the check and the call.
    this.DepthFrameReady?.Invoke(this, e);
}
/// <summary>
/// Depth frame handler: validates the buffer against the cached frame
/// description and target bitmap, converts depth to display pixels, renders.
/// </summary>
private void Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    bool processed = false;

    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame != null)
        {
            // Direct buffer access is the fastest way to read the depth data.
            using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
            {
                int expectedPixels = this.depthframdescrioption.Width * this.depthframdescrioption.Height;
                bool sizeOk = expectedPixels == (depthBuffer.Size / this.depthframdescrioption.BytesPerPixel);
                bool bitmapOk = (this.depthframdescrioption.Width == this.depthmap.PixelWidth)
                             && (this.depthframdescrioption.Height == this.depthmap.PixelHeight);

                if (sizeOk && bitmapOk)
                {
                    // Full ushort range so far-field (less reliable) depth stays visible.
                    ushort maxDepth = ushort.MaxValue;
                    this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, maxDepth);
                    processed = true;
                }
            }
        }
    }

    if (processed)
    {
        this.RenderDepthPixels();
    }
}
/// <summary>
/// Handles the depth frame data arriving from the sensor: copies the raw
/// values, renders them, and maintains recording / level-average bookkeeping.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    bool processed = false;

    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame != null)
        {
            depthFrame.CopyFrameDataToArray(this.depthValues);
            ProcessDepthFrameData(
                depthFrame.FrameDescription.LengthInPixels,
                depthFrame.DepthMinReliableDistance,
                depthFrame.DepthMaxReliableDistance);
            processed = true;
        }
    }

    if (processed)
    {
        this.RenderDepthPixels();
    }

    // Persist every third frame while recording.
    if (FrameCounter % 3 == 0 && IsRecording)
    {
        WriteBinFrame();
    }

    // Accumulate the level each frame; publish the 30-frame average periodically.
    levelAvg += CalculateLevel();
    if (FrameCounter % 30 == 0)
    {
        levelAvg /= 30;
        Degree.Text = levelAvg.ToString("F3");
        levelAvg = 0;
    }

    FrameCounter++;
}
/// <summary>
/// Depth frame handler: processes the raw buffer and blits the resulting
/// pixel data into the depth bitmap.
/// </summary>
private void Reader_DepthFrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    bool processed = false;

    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame != null)
        {
            using (KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
            {
                // Only process when the buffer size matches the expected pixel count.
                bool sizeMatches =
                    (depthFrameDescription.Width * depthFrameDescription.Height)
                    == (depthBuffer.Size / depthFrameDescription.BytesPerPixel);

                if (sizeMatches)
                {
                    // Full ushort range so far-field depth remains visible.
                    ushort maxDepth = ushort.MaxValue;
                    ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, maxDepth);
                    processed = true;
                }
            }
        }
    }

    if (processed)
    {
        depthBitmap.WritePixels(
            new Int32Rect(0, 0, depthBitmap.PixelWidth, depthBitmap.PixelHeight),
            depthPixels,
            depthBitmap.PixelWidth,
            0);
    }
}
/// <summary>
/// Depth frame handler: copies the raw frame straight into the display
/// bitmap's back buffer and marks the whole frame dirty.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void DepthFrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    if (null == e.FrameReference)
    {
        return;
    }

    // If you do not dispose of the frame, you never get another one...
    using (DepthFrame _DepthFrame = e.FrameReference.AcquireFrame())
    {
        if (null == _DepthFrame)
        {
            return;
        }

        BitmapToDisplay.Lock();
        try
        {
            _DepthFrame.CopyFrameDataToIntPtr(
                BitmapToDisplay.BackBuffer,
                Convert.ToUInt32(BitmapToDisplay.BackBufferStride * BitmapToDisplay.PixelHeight));
            BitmapToDisplay.AddDirtyRect(
                new Int32Rect(
                    0,
                    0,
                    _DepthFrame.FrameDescription.Width,
                    _DepthFrame.FrameDescription.Height));
        }
        finally
        {
            // Bug fix: always unlock, even if the copy throws — otherwise
            // the bitmap stays locked and the UI render path is stuck for good.
            BitmapToDisplay.Unlock();
        }
    }
}
/// <summary>
/// Depth reader callback: stores the newest displayable bitmap in a
/// single-slot back buffer and schedules a UI-thread render pass that drains it.
/// </summary>
private void DepthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    _depthCameraIntrinsics = e.CameraIntrinsics;

    var bitmap = e.GetDisplayableBitmap();
    // Swap the new bitmap into the back buffer; dispose whatever it replaced
    // (a frame that was produced but never rendered).
    bitmap = Interlocked.Exchange(ref _depthBackBuffer, bitmap);
    bitmap?.Dispose();

#pragma warning disable CS4014 // Because this call is not awaited, execution of the current method continues before the call is completed
    DepthOutput.Dispatcher.RunAsync(
        Windows.UI.Core.CoreDispatcherPriority.Normal,
        async() =>
        {
            // _isRenderingDepth acts as a non-blocking mutex: only one render
            // loop runs at a time; concurrently dispatched callbacks bail out.
            if (Interlocked.CompareExchange(ref _isRenderingDepth, 1, 0) == 0)
            {
                try
                {
                    SoftwareBitmap availableFrame = null;
                    // Drain the back buffer: new frames may arrive while rendering.
                    while ((availableFrame = Interlocked.Exchange(ref _depthBackBuffer, null)) != null)
                    {
                        await((SoftwareBitmapSource)DepthOutput.Source).SetBitmapAsync(availableFrame);
                        availableFrame.Dispose();
                    }
                }
                finally
                {
                    // Release the render "lock" even if SetBitmapAsync throws.
                    Interlocked.Exchange(ref _isRenderingDepth, 0);
                }
            }
        });
#pragma warning restore CS4014 // Because this call is not awaited, execution of the current method continues before the call is completed
}
/// <summary>
/// Depth reader callback: refreshes the depth bitmap, but only while the
/// depth view is the active display mode.
/// </summary>
private void _depthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    if (_displayType != FrameTypes.Depth)
    {
        return;
    }

    _depthBitmap.Update(e.FrameReference);
}
/// <summary>
/// Depth frame handler: optionally halves the effective frame rate by
/// discarding every other frame, then processes and publishes the pixels.
/// </summary>
void DepthFrameReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame == null)
        {
            return;
        }

        // In 15 fps mode, alternate between discarding and keeping frames.
        if (mIs15Fps)
        {
            if (mDiscardFrame)
            {
                mDiscardFrame = false;
                return;
            }

            mDiscardFrame = true;
        }

        using (KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
        {
            // Only process when the buffer size matches the expected pixel count.
            if ((mFrameDescription.Width * mFrameDescription.Height) == (depthBuffer.Size / mFrameDescription.BytesPerPixel))
            {
                ProcessDepthFrameData(depthBuffer);
                mFeedPixels(mDepthPixels);
            }
        }
    }
}
/// <summary>
/// Unsafe depth handler: processes the raw buffer, then uses getMin to find
/// the closest pixel in the left ('l') and right ('r') halves of the frame
/// and shows its value and (x, y) coordinates in the UI, throttled by frameCount.
/// </summary>
private unsafe void Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    bool depthFrameProcessed = false; // NOTE(review): set but never read afterwards — candidate for removal

    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame != null)
        {
            // the fastest way to process the body index data is to directly access
            // the underlying buffer
            using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
            {
                // verify data and write the color data to the display bitmap
                if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)))
                {
                    // Note: In order to see the full range of depth (including the less reliable far field depth)
                    // we are setting maxDepth to the extreme potential depth threshold
                    ushort maxDepth = ushort.MaxValue;

                    // If you wish to filter by reliable depth distance, uncomment the following line:
                    //// maxDepth = depthFrame.DepthMaxReliableDistance

                    this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, maxDepth);

                    frameCount += 3;

                    // Flat indices of the closest pixel on each half of the frame,
                    // written by getMin through the pointer parameters.
                    int left_index;
                    int right_index;
                    left_index = 0;
                    right_index = 0;
                    int left_largest = getMin(depthPixels, 'l', &left_index);
                    int right_largest = getMin(depthPixels, 'r', &right_index);

                    // Convert flat indices to (x, y) pixel coordinates.
                    int leftXpos = left_index % depthFrameDescription.Width;
                    int leftYpos = left_index / depthFrameDescription.Width;
                    int rightXpos = right_index % depthFrameDescription.Width;
                    int rightYpos = right_index / depthFrameDescription.Width;

                    // Throttle UI text updates (frameCount advances by 3 per frame).
                    if (frameCount >= 10)
                    {
                        frameCount = 0;

                        // NOTE(review): "|| true" makes this guard unconditional — debug leftover?
                        if (left_largest > 0 || true)
                        {
                            LEFT.Text = left_largest.ToString();
                            LXpos.Text = leftXpos.ToString();
                            LYpos.Text = leftYpos.ToString();
                        }

                        // NOTE(review): same unconditional guard as above.
                        if (right_largest > 0 || true)
                        {
                            RIGHT.Text = right_largest.ToString();
                            RXpos.Text = rightXpos.ToString();
                            RYpos.Text = rightYpos.ToString();
                        }
                    }

                    depthFrameProcessed = true;
                }
            }
        }
    }
}
/// <summary>
/// Depth frame handler (MonoGame): lazily allocates the depth texture ring,
/// rotates it, and uploads the newest frame into slot 0.
/// </summary>
private void DepthFrameReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    using (DepthFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        // Unbind the texture slots that will be rewritten this frame.
        GraphicsDevice.Textures[2] = null;
        GraphicsDevice.Textures[3] = null;

        // First frame: allocate the CPU buffer and the texture history ring.
        if (depth == null)
        {
            depth = new ushort[frame.FrameDescription.LengthInPixels];
            for (int i = 0; i < depthcount; i++)
            {
                depths.Add(new Texture2D(
                    GraphicsDevice,
                    frame.FrameDescription.Width,
                    frame.FrameDescription.Height,
                    false,
                    SurfaceFormat.Bgra4444));
            }
        }

        frame.CopyFrameDataToArray(depth);

        // Rotate the ring: the oldest texture moves to the front and is overwritten.
        depths.Insert(0, depths.Last());
        depths.RemoveAt(depths.Count - 1);

        // Raw ushort upload; any normalization is left to the consumer.
        depths[0].SetData(depth);
        depthframecount++;
    }
}
/// <summary>
/// Depth-frame handler (Kinect v2): copies the latest frame and renders it as
/// a 24bpp grayscale image into CurrentValue, then notifies listeners.
/// (The pre-v2 DepthImageFrame version of this handler has been removed.)
/// </summary>
unsafe void sensor_DepthFrameReady(object sender, DepthFrameArrivedEventArgs e)
{
    // Acquire the frame from the reference; DepthFrame is IDisposable.
    using (var image = e.FrameReference.AcquireFrame())
    {
        if (image == null)
        {
            return;
        }

        // Raw depth values, one unsigned 16-bit distance per pixel.
        var data = new ushort[image.FrameDescription.Width * image.FrameDescription.Height];
        image.CopyFrameDataToArray(data);

        // NOTE(review): assumes this.Width/this.Height match the frame
        // dimensions; a mismatch would misindex `data` — confirm upstream.
        BitmapData bitmapData = this.CurrentValue.LockBits(
            new System.Drawing.Rectangle(0, 0, this.Width, this.Height),
            ImageLockMode.WriteOnly,
            System.Drawing.Imaging.PixelFormat.Format24bppRgb);
        try
        {
            int pointer = 0;
            int width = this.Width;
            int height = this.Height;
            for (int y = 0; y < height; y++)
            {
                byte* pDest = (byte*)bitmapData.Scan0.ToPointer() + y * bitmapData.Stride;
                for (int x = 0; x < width; x++, pointer++, pDest += 3)
                {
                    // Depth data carries no player-index bits; map it to an
                    // inverted 8-bit intensity and write it to B, G and R.
                    int realDepth = data[pointer];
                    byte intensity = (byte)(~(realDepth >> 4));
                    pDest[0] = intensity;
                    pDest[1] = intensity;
                    pDest[2] = intensity;
                }
            }
        }
        finally
        {
            // Bug fix: always unlock the bitmap, even if the pixel loop throws.
            this.CurrentValue.UnlockBits(bitmapData);
        }

        this.OnNewDataAvailable();
    }
}
/*
 * The data for this frame is stored as 16-bit unsigned integers, where each value represents the distance in millimeters.
 * The maximum depth distance is 8 meters, although reliability starts to degrade at around 4.5 meters.
 * Developers can use the depth frame to build custom tracking algorithms in cases where the BodyFrame isn’t enough.
 */
private void updateDepthEvent(object sender, DepthFrameArrivedEventArgs e)
{
    // DepthFrame is IDisposable
    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame == null)
        {
            return;
        }

        FrameDescription description = depthFrame.FrameDescription;
        int width = description.Width;
        int height = description.Height;

        ushort[] frameData = new ushort[width * height];
        WriteableBitmap Bitmap = BitmapFactory.New(width, height);
        depthFrame.CopyFrameDataToArray(frameData);

        ushort minDepth = depthFrame.DepthMinReliableDistance;
        ushort maxDepth = depthFrame.DepthMaxReliableDistance;

        // Scale factor that maps [0, maxDepth] onto a single byte.
        int mapDepthToByte = maxDepth / 256;

        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                ushort depth = frameData[y * width + x];

                // Out-of-range depths render black; in-range ones as gray intensity.
                byte intensity = (byte)(depth >= minDepth && depth <= maxDepth ? (depth / mapDepthToByte) : 0);
                Bitmap.SetPixel(x, y, 255, intensity, intensity, intensity);
            }
        }

        SendImage(Bitmap, "kinectdepth");
    }
}
/// <summary>
/// creates a depth picture from the depthframe data package and broadcasts it
/// </summary>
/// <param name="e">the depthframe data package</param>
void CalculateDepthPicture(DepthFrameArrivedEventArgs e)
{
    using (DepthFrame df = e.FrameReference.AcquireFrame())
    {
        if (df != null)
        {
            using (Microsoft.Kinect.KinectBuffer depthBuffer = df.LockImageBuffer())
            {
                // Fresh Gray8 bitmap sized to the incoming frame (allocated per frame).
                WriteableBitmap depthBitmap = new WriteableBitmap(df.FrameDescription.Width, df.FrameDescription.Height, 96.0, 96.0, PixelFormats.Gray8, null);

                // Sanity check: buffer pixel count matches the frame description and the bitmap.
                if (((df.FrameDescription.Width * df.FrameDescription.Height) == (depthBuffer.Size / df.FrameDescription.BytesPerPixel)) && (df.FrameDescription.Width == depthBitmap.PixelWidth) && (df.FrameDescription.Height == depthBitmap.PixelHeight))
                {
                    // Convert raw depth into picture bytes plus the depth value array.
                    depthReturnStruc dd = ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, df);
                    byte[] depthPixels = dd.pictureData;

                    depthBitmap.WritePixels(
                        new Int32Rect(0, 0, depthBitmap.PixelWidth, depthBitmap.PixelHeight),
                        depthPixels,
                        depthBitmap.PixelWidth,
                        0);

                    // Freeze so the bitmap can be handed to subscriber threads safely.
                    depthBitmap.Freeze();

                    // NOTE(review): Delegate.BeginInvoke without a matching EndInvoke
                    // leaks async results on .NET Framework and throws on .NET Core —
                    // consider Task.Run or direct invocation; verify target runtime.
                    OnDepthPictureEvent.BeginInvoke(depthBitmap, null, null);
                    OnDepthDataEvent.BeginInvoke(dd.depthData, null, null);
                }
            }
        }
    }
}
/// <summary>
/// Depth handler: maps the depth frame to camera space and publishes the
/// result through a double-buffered (read/write swap) Vector4 array.
/// </summary>
private void DepthFrameReady(object sender, DepthFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        frame.CopyFrameDataToArray(this.depthwrite);
        this.runtime.Runtime.CoordinateMapper.MapDepthFrameToCameraSpace(this.depthwrite, this.camerawrite);

        lock (m_depthlock)
        {
            const int pixels = 512 * 424;
            for (int i = 0; i < pixels; i++)
            {
                // Copy camera-space coordinates into the write buffer.
                this.colorwrite[i].X = this.camerawrite[i].X;
                this.colorwrite[i].Y = this.camerawrite[i].Y;
                this.colorwrite[i].Z = this.camerawrite[i].Z;
            }

            // Swap read/write buffers so readers always see a complete frame.
            Vector4[] swap = this.colorread;
            this.colorread = this.colorwrite;
            this.colorwrite = swap;
        }

        this.FInvalidate = true;
    }
}
/// <summary>
/// Depth frame handler: signals first arrival (startup sync), renders the
/// depth frame into a System.Drawing bitmap for display, and — while
/// recording — appends the frame to the depth file with its elapsed time.
/// </summary>
private void Reader_DepthFrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    // One-time latch used to synchronize startup across frame sources.
    if (!hasDepthArrived)
    {
        hasArrived.Signal();
        Console.WriteLine("Signal at depth");
        hasDepthArrived = true;
    }

    if (recordMode != RecordMode.Playingback)
    {
        using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
        {
            if (depthFrame != null)
            {
                try
                {
                    using (KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                    {
                        depthFrame.CopyFrameDataToArray(depthValues);

                        BitmapData bmapdata = depthBitmap.LockBits(
                            new Rectangle(0, 0, depthFrameDescription.Width, depthFrameDescription.Height),
                            ImageLockMode.WriteOnly,
                            depthBitmap.PixelFormat);
                        IntPtr ptr = bmapdata.Scan0;

                        // Scale each 16-bit depth down to a byte and replicate it
                        // across B, G and R channels (4 bytes per pixel).
                        for (int i = 0; i < depthFrameDescription.Width * depthFrameDescription.Height; i++)
                        {
                            depthValuesToByte[4 * i] = depthValuesToByte[4 * i + 1] = depthValuesToByte[4 * i + 2] = (byte)(depthValues[i] / scale);
                        }

                        Marshal.Copy(depthValuesToByte, 0, ptr, depthFrameDescription.Width * depthFrameDescription.Height * 4);
                        depthBitmap.UnlockBits(bmapdata);
                        this.depthBoard.Image = depthBitmap;
                        //this.depthBoard.Image = depthImage.Bitmap;

                        // While recording, write the frame tagged with the time
                        // elapsed since recording started.
                        if (recordMode == RecordMode.Recording && this.depthWriter != null)
                        {
                            if (tmspStartRecording.HasValue)
                            {
                                var currentTime = DateTime.Now.TimeOfDay;
                                TimeSpan elapse = currentTime - tmspStartRecording.Value;
                                WriteDepthIntoFileAsync(elapse);
                            }
                        }
                    }
                }
                catch (Exception ex)
                {
                    // Log-and-continue keeps the reader alive if one frame fails;
                    // note UnlockBits is skipped on failure between Lock/Unlock.
                    Console.WriteLine(ex);
                }
            }
        }
    }
}
/// <summary>
/// Forwards the runtime's depth-frame-ready event to this object's
/// subscribers, preserving the original sender.
/// </summary>
/// <param name="sender">original event source, passed through unchanged</param>
/// <param name="e">event arguments</param>
void Runtime_DepthFrameReady(object sender, DepthFrameArrivedEventArgs e)
{
    // ?.Invoke reads the delegate once, avoiding the null-dereference race the
    // original `if (x != null) x(...)` pattern had if the last handler
    // unsubscribes between the check and the call.
    this.DepthFrameReady?.Invoke(sender, e);
}
/// <summary>
/// Depth reader callback: converts the raw depth buffer to display pixels
/// and pushes them to observers via the DepthBytes subject.
/// </summary>
void _depthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    bool processed = false;

    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame != null)
        {
            // Direct buffer access is the fastest way to read the depth data.
            using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
            {
                // Only process when the buffer size matches the expected pixel count.
                if ((this._depthFrameDescription.Width * this._depthFrameDescription.Height)
                    == (depthBuffer.Size / this._depthFrameDescription.BytesPerPixel))
                {
                    // Full ushort range keeps the (less reliable) far field visible;
                    // use depthFrame.DepthMaxReliableDistance instead to clamp it.
                    ushort maxDepth = ushort.MaxValue;
                    this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, maxDepth);
                    processed = true;
                }
            }
        }
    }

    if (processed)
    {
        this.DepthBytes.OnNext(_depthPixels);
    }
}
/// <summary>
/// Handles the depth frame data arriving from the sensor
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    bool processed = false;

    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame != null)
        {
            // Direct buffer access is the fastest way to read the depth data.
            using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
            {
                // Accept the frame only when buffer size and bitmap dimensions agree.
                bool bufferMatches =
                    (this.depthFrameDescription.Width * this.depthFrameDescription.Height)
                    == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel);
                bool bitmapMatches =
                    this.depthFrameDescription.Width == this.depthBitmap.PixelWidth
                    && this.depthFrameDescription.Height == this.depthBitmap.PixelHeight;

                if (bufferMatches && bitmapMatches)
                {
                    // ushort.MaxValue keeps the full (even unreliable far-field) range;
                    // substitute depthFrame.DepthMaxReliableDistance to clamp it.
                    ushort maxDepth = ushort.MaxValue;
                    this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, maxDepth);
                    processed = true;
                }
            }
        }
    }

    if (processed)
    {
        this.RenderDepthPixels();
    }
}
/// <summary>
/// Depth handler (EmguCV): converts the 16-bit depth frame to an 8-bit Mat
/// and shows it in an OpenCV window, but only while the depth radio button
/// is checked.
/// </summary>
private void Reader_DepthFrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    // Bug fix: the original `!depthRadioButton.IsChecked ?? false` parsed as
    // `(!IsChecked) ?? false`, so an indeterminate (null) checkbox fell through
    // and processed frames anyway. Treat anything but `true` as disabled.
    if (depthRadioButton.IsChecked != true)
    {
        return;
    }

    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame == null)
        {
            return;
        }

        FrameDescription depthFrameDescription = depthFrame.FrameDescription;

        using (Mat depthMat = new Mat(depthFrameDescription.Height, depthFrameDescription.Width, DepthType.Cv16U, 1))
        using (Mat convertedMat = new Mat(depthFrameDescription.Height, depthFrameDescription.Width, DepthType.Cv8U, 1))
        {
            // Copy raw 16-bit depth straight into the Mat's buffer.
            depthFrame.CopyFrameDataToIntPtr(depthMat.DataPointer, depthFrameDescription.BytesPerPixel * depthFrameDescription.LengthInPixels);

            // Scale 16-bit depth down to 8 bits for display.
            depthMat.ConvertTo(convertedMat, DepthType.Cv8U, 1 / 256d);
            CvInvoke.Imshow("depth", convertedMat);
        }
    }
}
/// <summary>
/// Depth handler for recording: samples every Nth frame (N = Resolution),
/// copies it into a fresh buffer, and enqueues it unless the queue is full.
/// </summary>
private void DepthFrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    if (!isRecording)
    {
        return;
    }

    frameTick += 1;

    var props = sensor.DepthFrameSource.FrameDescription;
    if ((frameTick % Resolution) != 0)
    {
        return;
    }

    var buffer = new ushort[props.Width * props.Height];
    bool copied = false;

    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            frame.CopyFrameDataToArray(buffer);
            copied = true;
        }
    }

    // Just drop this frame if 5 frames are already enqueued
    if (copied && depthQueue.Count < 5)
    {
        depthQueue.Add(buffer);
    }
}
/// <summary>
/// Depth frame handler: processes the raw buffer clamped to the sensor's
/// reliable depth range, logs the depth limits on the first processed frame,
/// and renders the pixels. (Stale commented-out debug/histogram code removed.)
/// </summary>
private void Depth_Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    bool depthFrameProcessed = false;

    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame != null)
        {
            using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
            {
                // verify data and write the color data to the display bitmap
                if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)) &&
                    (this.depthFrameDescription.Width == this.depthBitmap.PixelWidth) &&
                    (this.depthFrameDescription.Height == this.depthBitmap.PixelHeight))
                {
                    // Clamp to the sensor's reliable maximum (the original stale
                    // comment claimed this line was still commented out).
                    ushort maxDepth = ushort.MaxValue;
                    maxDepth = depthFrame.DepthMaxReliableDistance;

                    this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, maxDepth);
                    depthFrameProcessed = true;

                    // Log the depth limits only on the first processed frame.
                    if (ctr < 1)
                    {
                        writer.WriteLine("\nmax_depth: " + depthFrame.DepthMaxReliableDistance);
                        writer.WriteLine("min_depth: " + depthFrame.DepthMinReliableDistance);
                        writer.WriteLine("Cur_max" + cur_mux);
                    }
                }
            }
        }
    }

    if (depthFrameProcessed)
    {
        ctr++;
        this.RenderDepthPixels();
    }
}
/// <summary>
/// Handles the depth frame data arriving from the sensor
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    // Delegate frame processing to the helper, then render unconditionally.
    TraitementFrameDeProfondeur(e, this.m_depthFrameDescription, this.m_depthBitmap);
    this.RenderDepthPixels();
}
/// <summary>
/// Dispatcher to update the depth image control
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Depth_ShowImage(Object sender, DepthFrameArrivedEventArgs e)
{
    // Removed leftover debug output ("Just for test !") that wrote to the
    // console on every frame. BeginInvoke marshals the render onto the UI
    // thread without blocking the sensor callback.
    this.Dispatcher.BeginInvoke(
        (Action)(() => this.RenderDepthImage(ref this.depthBitmap, this.depthImage, e)));
}
/// <summary>
/// The Depth reader event
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
protected void Reader_DepthFrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        // Convert the depth frame to a bitmap and show it.
        DepthView.Source = KinectUtils.ToBitmap(frame);
    }
}
/// <summary>
/// Wraps the SDK depth frame in the project's DepthFrame adapter and raises
/// the internal frame-ready event.
/// </summary>
private void DepthFrameReaderOnFrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    var depthFrame = e.FrameReference.AcquireFrame();
    if (depthFrame != null)
    {
        // NOTE(review): the acquired frame is never explicitly disposed here —
        // presumably the DepthFrame wrapper (or a downstream handler) takes
        // ownership and disposes it; confirm, since an undisposed Kinect frame
        // blocks delivery of subsequent frames.
        _depthFrameReady?.Invoke(this, new DepthFrameReadyEventArgs(new DepthFrame(depthFrame)));
    }
}
/// <summary>
/// Depth frame handler: allocates a fresh pixel buffer each frame (so
/// consumers holding the previous array are not overwritten) and fills it
/// from the arriving frame.
/// </summary>
private void Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    _pixels = new ushort[KinectSettings.DEPTH_HEIGHT * KinectSettings.DEPTH_WIDTH];

    // Bug fix: DepthFrame is IDisposable and was never disposed — an
    // undisposed frame prevents the sensor from delivering further frames.
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            frame.CopyFrameDataToArray(_pixels);
        }
    }
}
/// <summary>
/// Depth reader callback: converts each arriving frame to an image bitmap
/// and stores it in PixelArray.
/// </summary>
private void depthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    using (DepthFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        PixelArray = ToImageBitmap(frame);
    }
}
/// <summary>
/// Display the depth data when a frame is received
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void DepthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    DepthFrameReference frameReference = e.FrameReference;

    // Bug fix: `using` guarantees the frame is disposed even if the dispatcher
    // call or the renderer throws — an undisposed DepthFrame blocks delivery
    // of subsequent frames.
    using (DepthFrame frame = frameReference.AcquireFrame())
    {
        if (frame != null)
        {
            // Dispatcher.Invoke is synchronous, so the frame remains valid
            // for the duration of the render call.
            Dispatcher.Invoke(() => depthRenderer.RenderDepthFrame(e));
        }
    }
}
/// <summary>
/// Depth reader callback: renders each arriving frame into the Camera image control.
/// </summary>
private void DepthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    using (DepthFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        Camera.Source = frame.Bitmap();
    }
}
/// <summary>
/// Depth frame callback: processes the frame into an image source and shows it.
/// </summary>
void OnFrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        camera.Source = ProcessFrame(frame);
    }
}
/// <summary>
/// Depth frame handler: processes each frame and saves it to disk as a
/// sequentially numbered 16-bit grayscale PNG.
/// </summary>
public void Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    frameCounter++;
    Console.Write("."); // progress tick, one dot per frame

    bool processed = false;

    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame != null)
        {
            using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
            {
                // Only process when the buffer size matches the expected pixel count.
                if ((this.depthFrameDescription.Width * this.depthFrameDescription.Height)
                    == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel))
                {
                    ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size);
                    processed = true;
                }
            }
        }
    }

    if (!processed)
    {
        return;
    }

    // Wrap the processed pixels in a Gray16 bitmap...
    WriteableBitmap depthBitmap = new WriteableBitmap(this.depthFrameDescription.Width, this.depthFrameDescription.Height, 96.0, 96.0, PixelFormats.Gray16, null);
    depthBitmap.WritePixels(
        new Int32Rect(0, 0, depthBitmap.PixelWidth, depthBitmap.PixelHeight),
        this.depthPixels,
        depthBitmap.PixelWidth * (int)this.depthFrameDescription.BytesPerPixel,
        0);

    // ...and encode it as a PNG named by the zero-padded frame counter.
    BitmapEncoder encoder = new PngBitmapEncoder();
    encoder.Frames.Add(BitmapFrame.Create(depthBitmap));
    string filePath = Path.Combine(this.outputFolder, "Depth-" + frameCounter.ToString().PadLeft(10, '0') + ".png");

    try
    {
        // using as FileStream is IDisposable
        using (FileStream fs = new FileStream(filePath, FileMode.Create))
        {
            encoder.Save(fs);
        }
    }
    catch (IOException)
    {
        Console.WriteLine("Unable to save image to disk.");
    }
}
/// <summary>
/// Handles depth: loops until cancelled, dequeuing depth event args, acquiring
/// their frames, tagging them with the shared frame number, and handing them
/// off for storage. Bit 2 of allFrameFlag marks "depth done" for the current
/// frame set; mask 31 means all five streams are done.
/// </summary>
/// <param name="depthCollectedCancelTokenSource">cancelTokenSource used to stop the task</param>
private static void HandleDepth(CancellationTokenSource depthCollectedCancelTokenSource)
{
    DepthFrameArrivedEventArgs e = null;
    String frameNumber = String.Empty;
    DepthFrame depthFrame;

    // NOTE(review): this loop busy-spins (continue with no wait) whenever the
    // queue is empty or the depth bit is already set — consider a blocking
    // collection or a short sleep; verify against the producer's cadence.
    while (true)
    {
        depthFrame = null;

        if (depthCollectedCancelTokenSource.IsCancellationRequested)
        {
            break;
        }

        if (depthFrameQueue.Count != 0)
        {
            lock (FramesAndPaths.allFrameInfo)
            {
                // Depth already handled for the current frame set; wait for the others.
                if ((FramesAndPaths.allFrameInfo.allFrameFlag & 2) != 0)
                {
                    continue;
                }

                try
                {
                    e = depthFrameQueue.Dequeue();
                }
                catch (InvalidOperationException)
                {
                    // Queue emptied between the Count check and the Dequeue.
                    continue;
                }

                try
                {
                    depthFrame = e.FrameReference.AcquireFrame();
                }
                catch (NullReferenceException) {}

                if (depthFrame == null)
                {
                    continue;
                }

                frameNumber = FramesAndPaths.allFrameInfo.frameNumber;
                FramesAndPaths.allFrameInfo.allFrameFlag |= 2;

                // All five stream bits set -> this frame set is complete; reset and advance.
                if ((FramesAndPaths.allFrameInfo.allFrameFlag ^ 31) == 0)
                {
                    FramesAndPaths.allFrameInfo.allFrameFlag = 0;
                    FramesAndPaths.FrameNumberIncrement();
                    ++writtenCount;
                }
            }

            // NOTE(review): depthFrame is handed off without a using/Dispose here —
            // presumably Handle_DepthFrame disposes it; confirm.
            StoreFramesData.Handle_DepthFrame(depthFrame, frameNumber);
        }
    }
}
/// <summary>
/// Updates the depth frame: pulls the latest frame (if any) and copies its
/// raw data into depthBuffer.
/// </summary>
private void UpdateDepthFrame( DepthFrameArrivedEventArgs e )
{
    using ( var depthFrame = e.FrameReference.AcquireFrame() ) {
        if ( depthFrame == null ) {
            return;
        }

        // Copy the raw depth values out of the frame.
        depthFrame.CopyFrameDataToArray( depthBuffer );
    }
}
/// <summary>
/// Depth frame callback demonstrating the custom calibration: converts one
/// depth-image point to color-image coordinates, compares the result with
/// the Kinect SDK's CoordinateMapper, then converts back to depth space.
/// </summary>
void depthFrameReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    var depthFrame = e.FrameReference.AcquireFrame();
    if (depthFrame != null)
    {
        using (depthFrame)
        {
            // Copy the whole frame (2 bytes per pixel) into our depth image.
            depthFrame.CopyFrameDataToIntPtr(depthImage.DataIntPtr, Kinect2Calibration.depthImageWidth * Kinect2Calibration.depthImageHeight * 2);

            // convert depth image coords to color image coords
            int x = 100, y = 100;

            ushort depthImageValue = depthImage[x, y]; // depth image values are in mm
            if (depthImageValue == 0)
            {
                Console.WriteLine("Sorry, depth value input coordinates is zero");
                return;
            }

            float depth = (float)depthImageValue / 1000f; // convert to m, to match our calibration and the rest of the Kinect SDK

            double colorX, colorY;
            calibration.DepthImageToColorImage(x, y, depth, out colorX, out colorY);

            //// when converting many points, it may be faster to precompute pass in the distortion table:
            //var depthFrameToCameraSpaceTable = calibration.ComputeDepthFrameToCameraSpaceTable();
            //calibration.DepthImageToColorImage(x, y, depth, depthFrameToCameraSpaceTable, out colorX, out colorY);

            Console.WriteLine("our color coordinates: {0} {1}", colorX, colorY);

            // compare to Kinect SDK
            var depthSpacePoint = new DepthSpacePoint();
            depthSpacePoint.X = x;
            depthSpacePoint.Y = y;
            var colorSpacePoint = kinectSensor.CoordinateMapper.MapDepthPointToColorSpace(depthSpacePoint, depthImageValue);
            Console.WriteLine("SDK's color coordinates: {0} {1}", colorSpacePoint.X, colorSpacePoint.Y);

            // convert back to depth image
            Matrix depthPoint;
            double depthX, depthY;
            calibration.ColorImageToDepthImage(colorX, colorY, depthImage, out depthPoint, out depthX, out depthY);

            //// when converting many points, it may be faster to precompute and pass in the distortion table:
            //var colorFrameToCameraSapceTable = calibration.ComputeColorFrameToCameraSpaceTable();
            //calibration.ColorImageToDepthImage((int)colorX, (int)colorY, depthImage, colorFrameToCameraSapceTable, out depthPoint, out depthX, out depthY);

            Console.WriteLine("convert back to depth: {0} {1}", depthX, depthY);
        }
    }
}
/// <summary>
/// Depth reader callback: copies the frame's data into the shared frameData
/// buffer, releases the frame, and then notifies FrameReceived subscribers.
/// </summary>
void depthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    var frame = e.FrameReference.AcquireFrame();
    if (frame == null)
    {
        return;
    }

    // Bug fix: scope the frame in a `using` so it is disposed even if the
    // copy throws (the original leaked the frame on exception, which blocks
    // further frame delivery). Dispose still happens before the event fires,
    // preserving the original ordering.
    using (frame)
    {
        frame.CopyFrameDataToIntPtr(this.frameData.DataPointer, (uint)this.frameData.SizeInBytes);
    }

    if (this.FrameReceived != null)
    {
        this.FrameReceived(this, new DepthFrameDataEventArgs(this.frameData));
    }
}
/// <summary>
/// Depth frame callback: clears the overlay canvas and feeds the raw depth
/// buffer plus the tracked body into the hands controller.
/// </summary>
private void DepthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    // Remove the previous frame's hand overlays before drawing new ones.
    canvas.Children.Clear();

    using (DepthFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        // 2) Update the HandsController using the array (or pointer) of the depth depth data, and the tracked body.
        using (KinectBuffer buffer = frame.LockImageBuffer())
        {
            _handsController.Update(buffer.UnderlyingBuffer, _body);
        }
    }
}
/// <summary>
/// Kinect depth callback: copies the frame's depth values into a fresh array
/// and republishes them via the FrameArrived event.
/// </summary>
void KinnectFrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        int width = frame.FrameDescription.Width;
        int height = frame.FrameDescription.Height;

        var depths = new ushort[width * height];
        frame.CopyFrameDataToArray(depths);

        FrameArrived(sender, new FrameArrivedArgs(depths, width, height));
    }
}
/// <summary>
/// Copies the latest depth frame into depthBuffer, rescales each sample from
/// the 0–8000 mm range to the full 16-bit range, and forwards the buffer via
/// OnDataReceived.
/// </summary>
public void DepthFrame_Arrived(object sender, DepthFrameArrivedEventArgs e)
{
    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        // No frame available this tick — nothing to do.
        if (depthFrame == null) return;

        // (Removed: an unused int[] of depthBuffer.Length was allocated here
        // on every frame and never read.)
        depthFrame.CopyFrameDataToArray(this.depthBuffer);

        // Stretch 0..8000 to 0..65535 (16 bit). Assumes samples stay <= 8000;
        // larger values would wrap in the ushort cast — TODO confirm sensor range.
        for (int i = 0; i < depthBuffer.Length; i++)
        {
            depthBuffer[i] = (ushort)(depthBuffer[i] * 65535 / 8000);
        }

        OnDataReceived(depthBuffer);
    }
}
// Streams the current depth frame to 'writer' as 3 bytes per pixel:
// depth high byte, depth low byte, then a zero pad byte.
// NOTE(review): unlike the otherwise-identical handler that calls
// writer.Seek(0, ...) first, this version appends each frame to the stream —
// confirm that append (rather than overwrite) is intended here.
unsafe private static void Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    using (DepthFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            using (var buffer = frame.LockImageBuffer())
            {
                // Raw buffer viewed as 512x424 16-bit depth samples.
                ushort* data = (ushort*)buffer.UnderlyingBuffer;
                for (int i = 0; i < 424 * 512; ++i)
                {
                    byte lsb = (byte)(data[i] & 0xFFu);
                    byte msb = (byte)((data[i] >> 8) & 0xFFu);
                    writer.Write(msb); // big-endian order: high byte first
                    writer.Write(lsb);
                    writer.Write((byte)0); // pad to 3 bytes per pixel
                }
                writer.Flush();
            }
        }
    }
}
// Writes the depth frame to 'writer' as big-endian 16-bit samples padded to
// three bytes per pixel, rewinding the stream first so each frame overwrites
// the previous one.
unsafe private static void Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    using (DepthFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        using (var buffer = frame.LockImageBuffer())
        {
            ushort* samples = (ushort*)buffer.UnderlyingBuffer;

            // Start every frame at the beginning of the stream.
            writer.Seek(0, SeekOrigin.Begin);

            const int pixelCount = 424 * 512;
            for (int i = 0; i < pixelCount; ++i)
            {
                ushort sample = samples[i];
                writer.Write((byte)((sample >> 8) & 0xFF)); // most-significant byte first
                writer.Write((byte)(sample & 0xFF));        // then least-significant byte
                // to be put in a WebGL texture read as ArrayBuffer using a Int8Array view
                writer.Write((byte)0);
            }

            writer.Flush();
        }
    }
}
// Renders the latest depth frame: values farther than the Distance threshold
// are zeroed, the rest are converted to pixel bytes, and the result is shown
// in DepthImage as a new Bgr32 bitmap.
private void depthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        var description = frame.FrameDescription;
        int frameWidth = description.Width;
        int frameHeight = description.Height;

        var depthValues = new ushort[frameWidth * frameHeight];
        frame.CopyFrameDataToArray(depthValues);

        // Clip everything beyond the configured threshold to 0.
        for (int i = 0; i < depthValues.Length; ++i)
        {
            if (depthValues[i] > Distance)
            {
                depthValues[i] = 0;
            }
        }

        var pixelBytes = this.ConvertToByteArray(depthValues, frame.DepthMinReliableDistance, frame.DepthMaxReliableDistance);
        int stride = frameWidth * PixelFormats.Bgr32.BitsPerPixel / 8;
        DepthImage.Source = BitmapSource.Create(frameWidth, frameHeight, 96, 96, PixelFormats.Bgr32, BitmapPalettes.WebPalette, pixelBytes, stride);
    }
}
// Depth-frame callback: pull the new frame's data, then redraw the depth view.
void depthFrameReader_FrameArrived( object sender, DepthFrameArrivedEventArgs e )
{
    UpdateDepthFrame(e); // ingest the incoming frame
    DrawDepthFrame();    // present the updated data
}
// Copies each new depth frame into depthShortBuffer and signals every
// registered AutoResetEvent so waiting consumer threads can pick it up.
// NOTE(review): depthFrameReady.Count is read here without holding the
// depthFrameReady lock taken below — confirm the subscriber list cannot
// change concurrently, or move the count check under the lock.
void depthFrameReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    var depthFrame = e.FrameReference.AcquireFrame();
    if (depthFrame != null)
    {
        using (depthFrame)
        {
            // Only bother copying when at least one consumer is waiting.
            if (depthFrameReady.Count > 0)
            {
                // Guard the shared buffer against concurrent readers.
                lock (depthShortBuffer)
                    depthFrame.CopyFrameDataToArray(depthShortBuffer);
                // Wake every consumer waiting for a fresh frame.
                lock (depthFrameReady)
                    foreach (var autoResetEvent in depthFrameReady)
                        autoResetEvent.Set();
            }
        }
    }
}
/* void Runtime_AllFramesReady(object sender, AllFramesReadyEventArgs e) { if (this.AllFrameReady != null) { this.AllFrameReady(sender, e); } }*/

/// <summary>
/// Forwards the sensor's depth-frame event to this wrapper's
/// DepthFrameReady subscribers.
/// </summary>
void Runtime_DepthFrameReady(object sender, DepthFrameArrivedEventArgs e)
{
    // Copy the delegate to a local first: checking this.DepthFrameReady and
    // then invoking the field directly races with a subscriber detaching in
    // between (NullReferenceException). The local-copy pattern is safe.
    var handler = this.DepthFrameReady;
    if (handler != null)
    {
        handler(sender, e);
    }
}
// Marshals each depth frame onto the UI thread so depthRenderer2 can draw it.
private void DepthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    DepthFrameReference frameReference = e.FrameReference;
    try
    {
        // DepthFrame is IDisposable; 'using' handles a null frame gracefully.
        using (DepthFrame frame = frameReference.AcquireFrame())
        {
            if (frame == null)
            {
                return;
            }

            // Rendering touches UI elements, so hop to the dispatcher thread.
            // Dispatcher.Invoke is synchronous, so the frame stays alive
            // (inside this using) until the render delegate completes.
            this.Dispatcher.Invoke(new Action(delegate()
            {
                depthRenderer2.RenderDepthFrame(e);
            }));
        }
    }
    catch (Exception)
    {
        //ignore if the frame is no longer available
    }
}
// Converts each incoming depth frame to an image and displays it in ImageDepth.
void depthReader_FrameArrived( object sender, DepthFrameArrivedEventArgs e )
{
    using ( var depthFrame = e.FrameReference.AcquireFrame() )
    {
        // Nothing to show when no frame could be acquired.
        if ( depthFrame == null )
        {
            return;
        }

        ImageDepth.Source = ConverDepthToImage( depthFrame );
    }
}
// Depth data
/// <summary>
/// Copies the latest depth frame into depthBuffer and publishes it through
/// the shared package object.
/// </summary>
void DepthFrame_Arrived(object sender, DepthFrameArrivedEventArgs e)
{
    // 'using' replaces the manual Dispose() call so the frame is released
    // even if CopyFrameDataToArray throws; a leaked frame stalls the sensor.
    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        // No frame available — nothing to publish.
        if (depthFrame == null) return;

        // (Removed: an unused int[] of depthBuffer.Length was allocated here
        // on every frame and never read.)
        depthFrame.CopyFrameDataToArray(this.depthBuffer);
        this.package.K_DdepthBuffer = this.depthBuffer;
    }
}
// Depth frames only drive the mouse readout here: refresh it unless the
// cursor has left the image (indicated by the "No Mouse" text).
private void Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    if (MouseInfoText.Text == "No Mouse")
    {
        return;
    }

    updateMouseInfo(MousePointOnImage);
}
// Copies the raw depth frame (512x424 pixels, 2 bytes each) into depthData,
// then maps every color-frame pixel (1920x1080, 8 bytes of output each —
// presumably one DepthSpacePoint of two floats per pixel; confirm against the
// colpoints allocation) into depth space.
// NOTE(review): FInvalidate is set outside m_depthlock — confirm consumers of
// depthData/colpoints take the same lock before reading.
private void DepthFrameReady(object sender, DepthFrameArrivedEventArgs e)
{
    DepthFrame frame = e.FrameReference.AcquireFrame();
    if (frame != null)
    {
        using (frame)
        {
            lock (m_depthlock)
            {
                // 512*424*2 = depth frame size in bytes.
                frame.CopyFrameDataToIntPtr(depthData, 512 * 424 * 2);
                this.runtime.Runtime.CoordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(depthData, 512 * 424 * 2, colpoints, 1920 * 1080 * 8);
            }
            // Signal downstream that fresh data is ready.
            this.FInvalidate = true;
        }
    }
}
// Public entry point that forwards an externally supplied depth-frame event
// into the internal DepthReader_FrameArrived handler.
public void RenderDepthFrame(DepthFrameArrivedEventArgs e)
{
    DepthReader_FrameArrived(e);
}
// Grabs the newest depth frame, stores it in depthData, and precomputes the
// depth-to-color mapping into colpoints for later rendering.
private void DepthFrameReady(object sender, DepthFrameArrivedEventArgs e)
{
    DepthFrame frame = e.FrameReference.AcquireFrame();
    if (frame == null)
    {
        return;
    }

    using (frame)
    {
        // Both buffers are shared with the renderer; update them atomically.
        lock (m_depthlock)
        {
            frame.CopyFrameDataToArray(depthData);
            this.runtime.Runtime.CoordinateMapper.MapDepthFrameToColorSpace(this.depthData, this.colpoints);
        }

        // Flag the renderer that new data is available.
        this.FInvalidate = true;
    }
}
// Depth-frame callback (typed-sender overload): refresh the stored frame
// data, then redraw the depth visualization.
void depthFrameReader_FrameArrived( DepthFrameReader sender, DepthFrameArrivedEventArgs args )
{
    UpdateDepthFrame( args );
    DrawDepthFrame();
}
// Renders the averaged depth image as a red/blue/grayscale visualization,
// builds a height grid for STL export, and handles one-shot snapshot
// requests. Red = closer than NearSlider, blue = farther than FarSlider,
// grayscale (nearer = brighter) in between.
// NOTE(review): locals 'width'/'height' are computed but never used
// (frame.FrameDescription is re-read later instead).
void depth_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    using (DepthFrame frame = depth.AcquireLatestFrame())
    {
        if (frame == null) return;

        // User-configurable working range and physical model dimensions.
        int minDist = (int)NearSlider.Value;
        int maxDist = (int)FarSlider.Value;
        int modelHeight = (int)ModelHeightSlider.Value;
        int modelWidth = (int)ModelWidthSlider.Value;

        // Sensor's reliable depth range.
        ushort max = depth.DepthFrameSource.DepthMaxReliableDistance;
        ushort min = depth.DepthFrameSource.DepthMinReliableDistance;

        // Scale factors for the three color bands (beyond / before / within range).
        double maxScale = max - maxDist;
        double minScale = minDist - min;
        double scale = maxDist - minDist;
        int intScale = maxDist - minDist;

        int width = frame.FrameDescription.Width;
        int height = frame.FrameDescription.Height;

        // Lazily allocate the per-frame buffers on first arrival.
        if (depthData == null) depthData = new ushort[frame.FrameDescription.LengthInPixels];
        if (gridHeights == null) gridHeights = new ushort[frame.FrameDescription.LengthInPixels];
        frame.CopyFrameDataToArray(depthData);
        if (depthColorImage == null) depthColorImage = new byte[frame.FrameDescription.LengthInPixels * 4];

        int depthColorImagePos = 0;
        // Smoothed copy of the depth data — presumably a temporal/spatial
        // average; confirm in imageAverage.
        average = imageAverage(depthData);
        for (int i = 0; i < average.Length; i++)
        {
            int depthValue = average[i];
            // Check for the invalid values
            if (depthValue > maxDist)
            {
                // Beyond the far plane: fade to blue; excluded from the model.
                double fraction = ((double)depthValue - maxDist) / maxScale;
                byte depthByte = (byte)(255 - (255.0 * fraction));
                depthColorImage[depthColorImagePos++] = depthByte; // Blue
                depthColorImage[depthColorImagePos++] = 0; // Green
                depthColorImage[depthColorImagePos++] = 0; // Red
                gridHeights[i] = (ushort)0;
            }
            else if (depthValue < minDist)
            {
                // Closer than the near plane: fade to red; excluded from the model.
                double fraction = ((double)depthValue - min) / minScale;
                byte depthByte = (byte)(255 - (255.0 * fraction));
                depthColorImage[depthColorImagePos++] = 0; // Blue
                depthColorImage[depthColorImagePos++] = 0; // Green
                depthColorImage[depthColorImagePos++] = depthByte; // Red
                gridHeights[i] = (ushort)0;
            }
            else
            {
                // Inside the working range: grayscale (nearer = brighter) and
                // record the inverted depth as the mesh height for STL export.
                int absoluteDepth = depthValue - minDist;
                double fraction = ((double)absoluteDepth) / scale;
                byte depthByte = (byte)(255 - (255.0 * fraction));
                depthColorImage[depthColorImagePos++] = depthByte; // Blue
                depthColorImage[depthColorImagePos++] = depthByte; // Green
                depthColorImage[depthColorImagePos++] = depthByte; // Red
                gridHeights[i] = (ushort)(intScale - absoluteDepth);
            }
            // transparency
            depthColorImagePos++;
        }

        // Create the display bitmap once and bind it to the Image control.
        if (depthImageBitmap == null)
        {
            this.depthImageBitmap = new WriteableBitmap(
                frame.FrameDescription.Width,
                frame.FrameDescription.Height,
                96, // DpiX
                96, // DpiY
                PixelFormats.Bgr32,
                null);
            kinectDepthImage.Width = frame.FrameDescription.Width;
            kinectDepthImage.Height = frame.FrameDescription.Height;
            kinectDepthImage.Source = depthImageBitmap;
        }

        checkSelfie();

        // One-shot STL export requested from the UI.
        if (takeSnapShot)
        {
            takeSnapShot = false;
            string filename = FileNameTextBox.Text.Trim();
            if (filename.Length == 0)
            {
                MessageBox.Show("Please enter a filename", "File save failed");
                // NOTE(review): this return also skips WritePixels for this frame.
                return;
            }
            string fullFilename = filepath + "\\" + filename + ".stl";
            try
            {
                StoreSTLMesh(gridHeights, (int)kinectDepthImage.Height, (int)kinectDepthImage.Width, modelWidth, modelHeight, fullFilename);
                // Set the start of the screen flash
                lastSnapshotTime = DateTime.Now;
            }
            catch (Exception ex)
            {
                MessageBox.Show("File name: " + fullFilename + " not written.\n" + ex.Message, "File save failed");
            }
        }

        DateTime now = DateTime.Now;
        // Invert the screen for 200 milliseconds after taking a picture to show it has done something
        if ((now - lastSnapshotTime).TotalMilliseconds < 200)
        {
            for (int i = 0; i < depthColorImage.Length; i++)
            {
                depthColorImage[i] ^= 255;
            }
        }

        this.depthImageBitmap.WritePixels(
            new Int32Rect(0, 0, frame.FrameDescription.Width, frame.FrameDescription.Height),
            depthColorImage, // video data
            frame.FrameDescription.Width * 4, // stride,
            0 // offset into the array - start at 0
            );
    }
}
// Double-buffered depth capture: writes the new frame into the back buffer,
// then swaps the read/write pointers under the lock so readers always see a
// complete frame.
private void DepthFrameReady(object sender, DepthFrameArrivedEventArgs e)
{
    var frame = e.FrameReference.AcquireFrame();
    if (frame == null)
    {
        return;
    }

    using (frame)
    {
        lock (m_lock)
        {
            // 512*424 pixels at 2 bytes per sample.
            frame.CopyFrameDataToIntPtr(this.depthwrite, 512 * 424 * 2);

            // Exchange buffers: the frame just written becomes readable.
            IntPtr previousRead = this.depthread;
            this.depthread = this.depthwrite;
            this.depthwrite = previousRead;
        }

        this.FInvalidate = true;
        this.frameindex = frame.RelativeTime.Ticks;
    }
}
/// <summary>
/// Packages each incoming depth frame into a KinectBase.DepthFrameEventArgs
/// (Gray16 samples stored in the blue/green channels of a bgr32 layout) and
/// raises OnDepthFrameReceived.
/// </summary>
void depthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame != null)
        {
            FrameDescription desc = depthFrame.FrameDescription;

            KinectBase.DepthFrameEventArgs depthE = new KinectBase.DepthFrameEventArgs();
            depthE.bytesPerPixel = 2; //This is fixed to 2 because we are using a ushort to hold the depth image
            depthE.perPixelExtra = 2; //We always have an extra two bytes per pixel because we are storing a Gray16 in a bgr32 format
            depthE.height = desc.Height;
            depthE.width = desc.Width;
            depthE.kinectID = kinectID;
            depthE.timeStamp = depthFrame.RelativeTime;
            // Normalize the reliable range to a fraction of the full ushort span.
            depthE.reliableMin = (float)depthFrame.DepthMinReliableDistance / (float)ushort.MaxValue;
            depthE.reliableMax = (float)depthFrame.DepthMaxReliableDistance / (float)ushort.MaxValue;

            //Get all the data for the depth, and store the bytes for the Gray16 in the blue and green channels of a bgr32
            IntPtr depthImagePtr = Marshal.AllocHGlobal((int)(depthE.bytesPerPixel * desc.LengthInPixels));
            try
            {
                depthFrame.CopyFrameDataToIntPtr(depthImagePtr, (uint)depthE.bytesPerPixel * desc.LengthInPixels);

                depthE.image = depthImagePool.GetObject();
                unsafe
                {
                    fixed (byte* pDst = depthE.image)
                    {
                        ushort* pD = (ushort*)pDst;
                        ushort* pS = (ushort*)depthImagePtr.ToPointer();
                        // Copy each 16-bit sample, advancing the destination by
                        // two ushorts so the extra per-pixel bytes stay untouched.
                        for (int n = 0; n < desc.LengthInPixels; n++)
                        {
                            *pD = *pS;
                            pD += 2;
                            pS++;
                        }
                    }
                }
            }
            finally
            {
                // try/finally guarantees the native buffer is released even if
                // the copy or the pool access throws (the original leaked here).
                Marshal.FreeHGlobal(depthImagePtr);
            }

            OnDepthFrameReceived(depthE);
        }
    }
}
//subscribed event set during kinect initialization (called each time a depth frame is available)
private void Reader_DepthFrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame == null)
        {
            return;
        }

        using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
        {
            // Process only when the locked buffer's pixel count matches the
            // expected frame geometry; otherwise skip this frame.
            uint expectedPixels = (uint)(this.depthFrameDescription.Width * this.depthFrameDescription.Height);
            if (expectedPixels == depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)
            {
                this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, depthFrame.DepthMaxReliableDistance);
            }
        }
    }
}
// Throttled depth-frame recording: while recording is active and the
// subsampling window is open (depthFpsCounter > 0), enqueues each frame for
// the recorder and advances the counter by DepthFramerate.
// NOTE(review): the magic values -50, -125 and the reset value 100 appear to
// encode specific phases of the subsampling cycle — confirm and name them.
private void _depthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    using (var frame = e.FrameReference.AcquireFrame())
    {
        if (_isStarted && depthFpsCounter > 0)
        {
            if (frame != null)
            {
                //depthSw.Start();
                // Hand the frame to the recorder's queue (it copies what it needs).
                _recordDepthQueue.Enqueue(new RecordDepthFrame(frame));
                //depthSw.Stop();
                //depthSum += depthSw.Elapsed.TotalMilliseconds;
                //depthSw.Reset();
                TimeCheckDepth();
                depthFrames++;
                depthCounter++;
                // Console.WriteLine("Depth Enqueue time = {0}", sw.Elapsed);
                // System.Diagnostics.Debug.WriteLine("+++ Enqueued Depth Frame ({0})", _recordQueue.Count);
            }
            else
            {
                // System.Diagnostics.Debug.WriteLine("!!! FRAME SKIPPED (Depth in KinectRecorder)");
            }
            // Advance the throttle window regardless of frame availability.
            depthFpsCounter -= DepthFramerate;
        }
        else if (depthFpsCounter == -50 || depthFpsCounter == -125)
        {
            // Specific counter phases keep decrementing without recording.
            depthFpsCounter -= DepthFramerate;
        }
        else
        {
            // Window exhausted (or not recording): reset for the next cycle.
            depthFpsCounter = 100;
            //color_delay = global_delay;
            // System.Diagnostics.Debug.WriteLine("!!! FRAME SKIPPED ");
        }
    }
}
// Converts each depth frame to a grayscale BGR32 image and blits it into the
// display bitmap. Depth values outside the sensor's reliable range render as
// black; in-range values keep only their low byte, so intensity "wraps" while
// preserving fine detail.
private void DepthReader_FrameArrived(DepthFrameArrivedEventArgs e)
{
    DepthFrameReference frameReference = e.FrameReference;
    try
    {
        using (DepthFrame frame = frameReference.AcquireFrame())
        {
            if (frame == null)
            {
                return;
            }

            FrameDescription frameDescription = frame.FrameDescription;

            // Only draw when the frame geometry matches our buffers and bitmap.
            bool sizesMatch =
                frameDescription.Width * frameDescription.Height == this.frameData.Length
                && frameDescription.Width == this.bitmap.PixelWidth
                && frameDescription.Height == this.bitmap.PixelHeight;
            if (!sizesMatch)
            {
                return;
            }

            // Copy the pixel data from the image to a temporary array.
            frame.CopyFrameDataToArray(this.frameData);

            // Reliable depth range for the current frame.
            ushort minDepth = frame.DepthMinReliableDistance;
            ushort maxDepth = frame.DepthMaxReliableDistance;

            // Map each 16-bit depth sample to a gray BGR pixel (4 bytes each).
            for (int i = 0; i < this.frameData.Length; ++i)
            {
                ushort depth = this.frameData[i];
                byte intensity = (byte)(depth >= minDepth && depth <= maxDepth ? depth : 0);

                int pixelOffset = i * 4;
                this.pixels[pixelOffset] = intensity;     // blue
                this.pixels[pixelOffset + 1] = intensity; // green
                this.pixels[pixelOffset + 2] = intensity; // red
                // fourth byte (would-be alpha) is intentionally left untouched
            }

            this.bitmap.WritePixels(
                new Int32Rect(0, 0, frameDescription.Width, frameDescription.Height),
                this.pixels,
                frameDescription.Width * this.cbytesPerPixel,
                0);
        }
    }
    catch (Exception)
    {
        // ignore if the frame is no longer available
    }
}