void DepthFrameReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame != null)
        {
            if (mIs15Fps)
            {
                if (mDiscardFrame)
                {
                    mDiscardFrame = false;
                    return;
                }
                else
                {
                    mDiscardFrame = true;
                }
            }

            using (KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
            {
                if ((mFrameDescription.Width * mFrameDescription.Height) == (depthBuffer.Size / mFrameDescription.BytesPerPixel))
                {
                    ProcessDepthFrameData(depthBuffer);
                    mFeedPixels(mDepthPixels);
                }
            }
        }
    }
}
/// <summary>
/// Creates a depth picture from the depth frame data package and broadcasts it.
/// </summary>
/// <param name="e">the depth frame data package</param>
void CalculateDepthPicture(DepthFrameArrivedEventArgs e)
{
    using (DepthFrame df = e.FrameReference.AcquireFrame())
    {
        if (df != null)
        {
            using (Microsoft.Kinect.KinectBuffer depthBuffer = df.LockImageBuffer())
            {
                WriteableBitmap depthBitmap = new WriteableBitmap(df.FrameDescription.Width, df.FrameDescription.Height, 96.0, 96.0, PixelFormats.Gray8, null);

                if (((df.FrameDescription.Width * df.FrameDescription.Height) == (depthBuffer.Size / df.FrameDescription.BytesPerPixel)) &&
                    (df.FrameDescription.Width == depthBitmap.PixelWidth) && (df.FrameDescription.Height == depthBitmap.PixelHeight))
                {
                    depthReturnStruc dd = ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, df);
                    byte[] depthPixels = dd.pictureData;

                    depthBitmap.WritePixels(
                        new Int32Rect(0, 0, depthBitmap.PixelWidth, depthBitmap.PixelHeight),
                        depthPixels,
                        depthBitmap.PixelWidth,
                        0);
                    depthBitmap.Freeze();

                    OnDepthPictureEvent.BeginInvoke(depthBitmap, null, null);
                    OnDepthDataEvent.BeginInvoke(dd.depthData, null, null);
                }
            }
        }
    }
}
private void Reader_DepthFrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    bool depthFrameProcessed = false;

    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame != null)
        {
            using (KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
            {
                if ((depthFrameDescription.Width * depthFrameDescription.Height) == (depthBuffer.Size / depthFrameDescription.BytesPerPixel))
                {
                    ushort maxDepth = ushort.MaxValue;
                    ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, maxDepth);
                    depthFrameProcessed = true;
                }
            }
        }
    }

    if (depthFrameProcessed)
    {
        depthBitmap.WritePixels(
            new Int32Rect(0, 0, depthBitmap.PixelWidth, depthBitmap.PixelHeight),
            depthPixels,
            depthBitmap.PixelWidth,
            0);
    }
}
public static unsafe void Update(this DepthFrame depth, uint* color, PointCloudOfXYZRGBA cloud)
{
    var fd = depth.FrameDescription;
    var sensor = depth.DepthFrameSource.KinectSensor;
    var pixels = fd.LengthInPixels;
    MatchSize(depth, cloud);

    var pPtr = cloud.Data;
    var pSize = (uint)Marshal.SizeOf<PointXYZRGBA>() * pixels;
    using (var dBuffer = depth.LockImageBuffer())
        sensor.CoordinateMapper.MapDepthFrameToCameraSpaceUsingIntPtr(dBuffer.UnderlyingBuffer, dBuffer.Size, (IntPtr)pPtr, pSize);

    // We have the data copied raw, but it's misaligned, as the Kinect camera-space points are 12 bytes/pixel.
    // We need to 'expand' the data; copying from the back prevents data loss.
    var vptr = (Vector3*)pPtr;

    // Index as int: LengthInPixels is uint, and an unsigned loop counter would never go below zero.
    for (var i = (int)pixels - 1; i >= 0; i--)
    {
        pPtr[i].V = vptr[i];
        pPtr[i].data[3] = 1;
        // Also copy the color in the process.
        pPtr[i].RGBA = color[i];
    }
}
//private FrameDescription depthFrameDescription = null;

internal DFrame(DepthFrame depthFrame)
{
    this.frameDescriptor = depthFrame.FrameDescription;
    this.width = depthFrame.FrameDescription.Width;
    this.height = depthFrame.FrameDescription.Height;
    this.pixels = new ushort[width * height];
    // this.depthFrameDescription = this.kinectSensor.DepthFrameSource.FrameDescription;

    this.WriteableBitmap = new WriteableBitmap(this.width, this.height, 96.0, 96.0, PixelFormats.Bgr32, null);
    this.WriteableBitmapForDoorNavigation = new WriteableBitmap(this.width, this.height, 96.0, 96.0, PixelFormats.Gray8, null);

    using (KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
    {
        // ushort maxDepth = ushort.MaxValue;
        this.ProcessDepthFrameData(depthFrame, depthBuffer.UnderlyingBuffer, depthBuffer.Size);
    }

    this.ProcessBitmap(); // creates bitmap, and bitmap source
}
unsafe void runtime_DepthFrameReady(object sender, Microsoft.Kinect.DepthFrameArrivedEventArgs e)
{
    using (DepthFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame != null && !processing && this.FInEnabled[0])
        {
            this.dp = new ushort[512 * 424 * 2];

            using (var buffer = frame.LockImageBuffer())
            {
                fixed (ushort* ptr = &this.dp[0])
                {
                    memcpy(new IntPtr(ptr), buffer.UnderlyingBuffer, (int)buffer.Size);
                }
                /*Marshal.Copy(buffer.UnderlyingBuffer, 0, this.dp, 512 * 424 * 2);*/
            }

            // Copy the pixel data from the image to a temporary array
            //frame.CopyFrameDataToArray(this.dp);

            this.processing = true;
            Thread thr = new Thread(new ThreadStart(this.Run));
            thr.Priority = ThreadPriority.BelowNormal;
            thr.Start();
        }
    }
}
/// <summary>
/// Processes the Depth Image.
/// </summary>
/// <param name="frame">The frame.</param>
private void ProcessDepthImage(DepthFrame frame)
{
    if (updateDepthTexture)
    {
        return;
    }

    if (frame != null)
    {
        FrameDescription frameDescription = frame.FrameDescription;

        // the fastest way to process the depth data is to directly access
        // the underlying buffer
        using (KinectBuffer buffer = frame.LockImageBuffer())
        {
            // verify data and write the depth data to the texture
            if (((frameDescription.Width * frameDescription.Height) == (buffer.Size / frameDescription.BytesPerPixel)) &&
                (frameDescription.Width == this.depthTexture.Width) && (frameDescription.Height == this.depthTexture.Height))
            {
                this.ProcessGrayFrameData(buffer.UnderlyingBuffer, buffer.Size, this.depthMinReliableDistance, this.depthMaxReliableDistance, frameDescription.BytesPerPixel, this.depthData);
                this.updateDepthTexture = true;
            }
        }
    }
}
/// <summary>
/// Handles the depth frame data arriving from the sensor
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    bool depthFrameProcessed = false;

    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame != null)
        {
            // the fastest way to process the depth data is to directly access
            // the underlying buffer
            using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
            {
                // verify data and write the depth data to the display bitmap
                if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)) &&
                    (this.depthFrameDescription.Width == this.depthBitmap.PixelWidth) && (this.depthFrameDescription.Height == this.depthBitmap.PixelHeight))
                {
                    // Note: In order to see the full range of depth (including the less reliable far field depth)
                    // we are setting maxDepth to the extreme potential depth threshold
                    ushort maxDepth = ushort.MaxValue;

                    // If you wish to filter by reliable depth distance, uncomment the following line:
                    //// maxDepth = depthFrame.DepthMaxReliableDistance

                    this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, maxDepth);
                    depthFrameProcessed = true;
                }
            }
        }
    }

    if (depthFrameProcessed)
    {
        this.RenderDepthPixels();
    }
}
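Several of the handlers above call a ProcessDepthFrameData helper that is never shown in these snippets. For context, here is a minimal sketch modeled on the SDK's DepthBasics-WPF sample; the depthPixels byte array, the depthFrameDescription field, and the MapDepthToByte divisor (8000 / 256 in that sample) are assumptions taken from the sample, not from the code above.

// Minimal sketch of the unseen helper, modeled on the DepthBasics-WPF sample.
// Assumed fields (not shown in the snippets above):
//   byte[] depthPixels; FrameDescription depthFrameDescription; const int MapDepthToByte = 8000 / 256;
private unsafe void ProcessDepthFrameData(IntPtr depthFrameData, uint depthFrameDataSize, ushort minDepth, ushort maxDepth)
{
    // Depth frame data is one 16-bit value per pixel.
    ushort* frameData = (ushort*)depthFrameData;

    int pixelCount = (int)(depthFrameDataSize / this.depthFrameDescription.BytesPerPixel);
    for (int i = 0; i < pixelCount; ++i)
    {
        ushort depth = frameData[i];

        // Map the reliable depth range to a byte intensity; out-of-range pixels become 0 (black).
        this.depthPixels[i] = (byte)(depth >= minDepth && depth <= maxDepth ? (depth / MapDepthToByte) : 0);
    }
}

The resulting depthPixels array is what RenderDepthPixels then writes into the display WriteableBitmap.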
public void ParseToBitmaps(DepthFrame depthFrame, out Bitmap bitmap, out WriteableBitmap writeableBitmap)
{
    int width = depthFrame.FrameDescription.Width;
    int height = depthFrame.FrameDescription.Height;

    Bitmap depthBitmap = new Bitmap(width, height, System.Drawing.Imaging.PixelFormat.Format8bppIndexed);
    WriteableBitmap depthWBitmap = new WriteableBitmap(width, height, 96.0, 96.0, PixelFormats.Gray8, null);

    using (KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
    {
        ushort maxDepth = ushort.MaxValue;
        //maxDepth = depthFrame.DepthMaxReliableDistance; //TODO: make this a setting?

        byte[] depthPixels = ConvertDepthFrameData(depthFrame.FrameDescription, depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, maxDepth);

        // Create writeable bitmap
        depthWBitmap.WritePixels(new Int32Rect(0, 0, depthWBitmap.PixelWidth, depthWBitmap.PixelHeight), depthPixels, depthWBitmap.PixelWidth, 0);
        writeableBitmap = depthWBitmap;

        // Create bitmap
        BitmapData bitmapData = depthBitmap.LockBits(new Rectangle(0, 0, width, height), ImageLockMode.WriteOnly, depthBitmap.PixelFormat);
        IntPtr intPointer = bitmapData.Scan0;
        Marshal.Copy(depthPixels, 0, intPointer, depthPixels.Length);
        depthBitmap.UnlockBits(bitmapData);
        bitmap = depthBitmap;
    }
}
public void PollMostRecentDepthFrame()
{
    MultiSourceFrame multiFrame = _reader.AcquireLatestFrame();
    if (multiFrame == null)
    {
        return;
    }

    using (DepthFrame frame = multiFrame.DepthFrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return; // Could not find multi-frame or depth-frame
        }

        using (KinectBuffer buffer = frame.LockImageBuffer())
        {
            if (DepthFrameDescription.Width * DepthFrameDescription.Height == buffer.Size / DepthFrameDescription.BytesPerPixel)
            {
                ProcessDepthFrameData(
                    buffer.UnderlyingBuffer,
                    buffer.Size,
                    frame.DepthMinReliableDistance,
                    ushort.MaxValue);
            }
        }
    }
}
private void UpdateDepthData(DepthFrame depthFrame)
{
    var depthFrameDescription = depthFrame.FrameDescription;

    if (this.depthWidth == depthFrameDescription.Width && this.depthHeight == depthFrameDescription.Height)
    {
        var stopwatch = Stopwatch.StartNew();
        depthFrame.CopyFrameDataToArray(this.depthData);

        using (var depthBuffer = depthFrame.LockImageBuffer())
        {
            this.depthBitmap.Lock();
            this.depthBitmap.WritePixels(
                new Int32Rect(0, 0, this.depthBitmap.PixelWidth, this.depthBitmap.PixelHeight),
                Array.ConvertAll(this.depthData, d => MapDepthToByte(d, depthFrame.DepthMinReliableDistance, depthFrame.DepthMaxReliableDistance)),
                this.depthBitmap.PixelWidth,
                0);
            this.depthBitmap.Unlock();
        }

        logger.Trace("DepthFrame updated. Spent: {0}ms", stopwatch.ElapsedMilliseconds);
    }
    else
    {
        logger.Error(
            "Size of DepthFrame does not match. Expected: {0}x{1}, Actual: {2}x{3}",
            this.depthWidth,
            this.depthHeight,
            depthFrameDescription.Width,
            depthFrameDescription.Height);
    }
}
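UpdateDepthData above relies on a MapDepthToByte helper that is not part of the snippet. A plausible implementation, assuming the usual convention of scaling the reliable depth range onto 0..255 for a Gray8 bitmap, might look like this (purely illustrative):

// Hypothetical helper assumed by UpdateDepthData above: scales a raw depth sample into a byte
// for the Gray8 display bitmap; values outside [minDepth, maxDepth] render black.
private static byte MapDepthToByte(ushort depth, ushort minDepth, ushort maxDepth)
{
    if (depth < minDepth || depth > maxDepth)
    {
        return 0;
    }

    return (byte)(255 * (depth - minDepth) / (maxDepth - minDepth));
}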
private bool aquireBodyDataAndBuffers(
    BodyFrame bodyFrame,
    DepthFrame depthFrame,
    BodyIndexFrame bodyIndexFrame,
    out Body body,
    out KinectBuffer depthBuffer,
    out KinectBuffer bodyIndexBuffer)
{
    depthBuffer = null;
    bodyIndexBuffer = null;

    body = getActiveBody(bodyFrame);
    if (body == null)
    {
        return false;
    }

    depthBuffer = depthFrame.LockImageBuffer();
    var width = depthFrameDescription.Width;
    var height = depthFrameDescription.Height;
    if (depthBuffer == null || (width * height) != (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel))
    {
        return false;
    }

    // Body index data is 1 byte per pixel, depth data is 2 bytes per pixel, so the sizes must match 2:1.
    bodyIndexBuffer = bodyIndexFrame.LockImageBuffer();
    if (bodyIndexBuffer == null || bodyIndexBuffer.Size * 2 != depthBuffer.Size)
    {
        depthBuffer.Dispose();
        return false;
    }

    return true;
}
private void Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    bool depthFrameProcessed = false;

    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame != null)
        {
            using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
            {
                if (((this.depthframdescrioption.Width * this.depthframdescrioption.Height) == (depthBuffer.Size / this.depthframdescrioption.BytesPerPixel)) &&
                    (this.depthframdescrioption.Width == this.depthmap.PixelWidth) && (this.depthframdescrioption.Height == this.depthmap.PixelHeight))
                {
                    ushort maxDepth = ushort.MaxValue;
                    this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, maxDepth);
                    depthFrameProcessed = true;
                }
            }
        }
    }

    if (depthFrameProcessed)
    {
        this.RenderDepthPixels();
    }
}
private void Depth_Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    bool depthFrameProcessed = false;
    //chart1.Series[0].Points.Clear();

    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame != null)
        {
            using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
            {
                // verify data and write the depth data to the display bitmap
                if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)) &&
                    (this.depthFrameDescription.Width == this.depthBitmap.PixelWidth) && (this.depthFrameDescription.Height == this.depthBitmap.PixelHeight))
                {
                    // Note: To see the full range of depth (including the less reliable far field depth),
                    // maxDepth would be left at the extreme potential depth threshold.
                    ushort maxDepth = ushort.MaxValue;

                    // Here we filter by reliable depth distance instead:
                    maxDepth = depthFrame.DepthMaxReliableDistance;

                    this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, maxDepth);
                    depthFrameProcessed = true;

                    if (ctr < 1)
                    {
                        writer.WriteLine("\nmax_depth: " + depthFrame.DepthMaxReliableDistance);
                        writer.WriteLine("min_depth: " + depthFrame.DepthMinReliableDistance);
                        writer.WriteLine("Cur_max" + cur_mux);
                    }
                }
            }
        }
    }

    if (depthFrameProcessed)
    {
        // if (ctr < 1)
        // {
        //     writer.WriteLine("Depth_length:" + this.depthPixels.Length);
        //     int sum = 0;
        //     for (int i = 0; i < this.depthPixels.Length; i++)
        //     {
        //         //Console.Write("Depth[" + i + "] = " + depthPixels[i] + "\n");
        //         sum += hist[i];
        //         writer.Write(hist[i] + "\t");
        //     }
        //     writer.Write("\n\nsum= \t" + sum);
        // }

        //for (int i = 0; i < this.depthPixels.Length; i++)
        //    chart1.Series[0].Points.Add(hist[i]);
        //chart1.SaveImage(hist_display.Source);

        ctr++;
        this.RenderDepthPixels();
    }
}
private static void Reader_FrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    if (e.FrameReference != null)
    {
        MultiSourceFrame multiFrame = e.FrameReference.AcquireFrame();

        if (multiFrame.DepthFrameReference != null)
        {
            try
            {
                using (DepthFrame depthFrame = multiFrame.DepthFrameReference.AcquireFrame())
                {
                    if (depthFrame != null)
                    {
                        using (KinectBuffer buffer = depthFrame.LockImageBuffer())
                        {
                            depthFrameDescription = depthFrame.FrameDescription;
                            depthWidth = depthFrameDescription.Width;
                            depthHeight = depthFrameDescription.Height;
                            depthFrameData = new ushort[depthWidth * depthHeight];
                            depthFrame.CopyFrameDataToArray(depthFrameData);
                        }
                    }
                }
            }
            catch (Exception)
            {
                return;
            }
        }

        if (multiFrame.ColorFrameReference != null)
        {
            try
            {
                using (ColorFrame colorFrame = multiFrame.ColorFrameReference.AcquireFrame())
                {
                    if (colorFrame != null)
                    {
                        colorFrameDescription = colorFrame.FrameDescription;
                        colorWidth = colorFrameDescription.Width;
                        colorHeight = colorFrameDescription.Height;
                        colorFrameData = new byte[colorWidth * colorHeight * bytesForPixelColor]; // 4 == bytes per color

                        using (KinectBuffer buffer = colorFrame.LockRawImageBuffer())
                        {
                            if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                            {
                                colorFrame.CopyRawFrameDataToArray(colorFrameData);
                            }
                            else
                            {
                                colorFrame.CopyConvertedFrameDataToArray(colorFrameData, ColorImageFormat.Bgra);
                            }
                        }
                    }
                }
            }
            catch (Exception)
            {
                return;
            }
        }
    }
}
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    var mSourceFrame = e.FrameReference.AcquireFrame();

    // If the Frame has expired by the time we process this event, return.
    if (mSourceFrame == null)
    {
        return;
    }

    DepthFrame depthFrame = null;
    ColorFrame colorFrame = mSourceFrame.ColorFrameReference.AcquireFrame();

    using (colorFrame)
    {
        if (colorFrame != null)
        {
            cameraIMG.Source = KinectExtensions.ToBitmap(colorFrame);
            colorFrame.CopyConvertedFrameDataToArray(lastNotNullColorData, ColorImageFormat.Bgra);
            (Parent as MainWindow).PlaygroundWindow.ShowImage(cameraIMG.Source);
        }

        try
        {
            depthFrame = mSourceFrame.DepthFrameReference.AcquireFrame();

            if (depthFrame != null)
            {
                // Access the depth frame data directly via LockImageBuffer to avoid making a copy
                using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
                {
                    kinectSensor.CoordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                        depthFrameData.UnderlyingBuffer,
                        depthFrameData.Size,
                        colorMappedToDepthSpace);

                    depthFrame.CopyFrameDataToArray(lastNotNullDepthData);
                }

                // We're done with the DepthFrame
                depthFrame.Dispose();
                depthFrame = null;
            }
        }
        finally
        {
            if (depthFrame != null)
            {
                depthFrame.Dispose();
            }

            if (colorFrame != null)
            {
                colorFrame.Dispose();
            }
        }
    }
}
// Get xyz physical coordinates for bounce location, add to Bounce list - not very good right now
private DataPoint BounceLocation(DepthFrame depthFrame, int xavg, int yavg)
{
    DataPoint bounceLocn = new TableTennisTracker.DataPoint(0, 0, 0, 0);

    if (depthFrame == null)
    {
        return bounceLocn;
    }

    using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
    {
        CameraSpacePoint[] camSpacePoints = new CameraSpacePoint[1920 * 1080];
        this.coordinateMapper.MapColorFrameToCameraSpaceUsingIntPtr(depthFrameData.UnderlyingBuffer, depthFrameData.Size, camSpacePoints);

        List<float> xvals = new List<float>();
        List<float> yvals = new List<float>();
        List<float> zvals = new List<float>();
        int Vcount = 0;

        // Find ball in camera space
        for (int i = -40; i < 40; i++)
        {
            for (int j = -40; j < 40; j++)
            {
                if (yavg + i > tableLevel)
                {
                    int tempIndex = (yavg + i) * 1920 + xavg + j;
                    int arrVal = 4 * tempIndex;
                    if (arrVal > 4 * 1920 * 1080 || arrVal < 0)
                    {
                        arrVal = 4;
                    }

                    if (camSpacePoints[tempIndex].Z > 1 && camSpacePoints[tempIndex].Z < 3.5)
                    {
                        xvals.Add(camSpacePoints[tempIndex].X);
                        yvals.Add(camSpacePoints[tempIndex].Y);
                        zvals.Add(camSpacePoints[tempIndex].Z);
                        Vcount++;
                    }
                }
            }
        }

        if (Vcount > 0)
        {
            bounceLocn.X = FindMedian(xvals);
            bounceLocn.Y = FindMedian(yvals);
            bounceLocn.Z = FindMedian(zvals);
        }
        else
        {
            bounceLocn.X = 0;
            bounceLocn.Y = 0;
            bounceLocn.Z = 0;
        }

        return bounceLocn;
    }
}
void DepthSection(DepthFrame frame)
{
    using (KinectBuffer depthFrameData = frame.LockImageBuffer())
    {
        this.coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
            depthFrameData.UnderlyingBuffer,
            depthFrameData.Size,
            this.colorMappedToDepthPoints);

        //PictureBox2.Source = getImageSourceFromBitmap(DepthFrameToBitmap(frame));
    }
}
private static void Reader_FrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    if (e.FrameReference != null)
    {
        MultiSourceFrame multiFrame = e.FrameReference.AcquireFrame();

        if (multiFrame.ColorFrameReference != null && multiFrame.DepthFrameReference != null)
        {
            try
            {
                using (DepthFrame depthFrame = multiFrame.DepthFrameReference.AcquireFrame())
                {
                    using (ColorFrame colorFrame = multiFrame.ColorFrameReference.AcquireFrame())
                    {
                        if (depthFrame != null && colorFrame != null)
                        {
                            colorFrameDescription = colorFrame.FrameDescription;
                            colorWidth = colorFrameDescription.Width;
                            colorHeight = colorFrameDescription.Height;
                            colorFrameData = new byte[colorWidth * colorHeight * bytesPerPixel];

                            if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                            {
                                colorFrame.CopyRawFrameDataToArray(colorFrameData);
                            }
                            else
                            {
                                colorFrame.CopyConvertedFrameDataToArray(colorFrameData, ColorImageFormat.Bgra);
                            }

                            using (KinectBuffer buffer = depthFrame.LockImageBuffer())
                            {
                                depthFrameDescription = depthFrame.FrameDescription;
                                depthWidth = depthFrame.FrameDescription.Width;
                                depthHeight = depthFrame.FrameDescription.Height;
                                depthFrameData = new ushort[depthWidth * depthHeight];
                                cameraSpacePoints = new CameraSpacePoint[depthWidth * depthHeight];
                                colorSpacePoints = new ColorSpacePoint[depthWidth * depthHeight];

                                depthFrame.CopyFrameDataToArray(depthFrameData);

                                //coordinateMapper.MapDepthFrameToColorSpace(depthFrameData, colorSpacePoints);
                                //coordinateMapper.MapDepthFrameToCameraSpace(depthFrameData, cameraSpacePoints);
                                coordinateMapper.MapDepthFrameToColorSpaceUsingIntPtr(buffer.UnderlyingBuffer, buffer.Size, colorSpacePoints);
                                coordinateMapper.MapDepthFrameToCameraSpaceUsingIntPtr(buffer.UnderlyingBuffer, buffer.Size, cameraSpacePoints);
                            }
                        }
                    }
                }
            }
            catch (Exception)
            {
                return;
            }
        }
    }
}
// Get xyz physical coordinates
private BallCoords XYZLocation(DepthFrame depthFrame, int xavg, int yavg)
{
    BallCoords ballLocn = new BallCoords();

    if (depthFrame == null)
    {
        return ballLocn;
    }

    using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
    {
        CameraSpacePoint[] camSpacePoints = new CameraSpacePoint[1920 * 1080];
        this.coordinateMapper.MapColorFrameToCameraSpaceUsingIntPtr(depthFrameData.UnderlyingBuffer, depthFrameData.Size, camSpacePoints);

        List<float> xvals = new List<float>();
        List<float> yvals = new List<float>();
        List<float> zvals = new List<float>();
        int Vcount = 0;

        // Find ball in camera space
        for (int i = -40; i < 40; i++)
        {
            for (int j = -40; j < 40; j++)
            {
                if (yavg + i > tableLevel)
                {
                    int tempIndex = (yavg + i) * 1920 + xavg + j;

                    if (camSpacePoints[tempIndex].Z > GlobalClass.minZ && camSpacePoints[tempIndex].Z < 3.6)
                    {
                        xvals.Add(camSpacePoints[tempIndex].X);
                        yvals.Add(camSpacePoints[tempIndex].Y);
                        zvals.Add(camSpacePoints[tempIndex].Z);
                        Vcount++;
                    }
                }
            }
        }

        if (Vcount > 0)
        {
            ballLocn.X = FindMedian(xvals);
            ballLocn.Y = FindMedian(yvals);
            ballLocn.Z = FindMedian(zvals);
            ballLocn.Time = DateTime.Now;
        }
        else
        {
            ballLocn.X = 0;
            ballLocn.Y = 0;
            ballLocn.Z = 0;
        }

        return ballLocn;
    }
}
/// <summary>
/// Store depth image
/// </summary>
/// <param name="depthFrame">depth frame to be stored</param>
/// <param name="frameNumber">frame number</param>
public static void Handle_DepthFrame(DepthFrame depthFrame, String frameNumber)
{
    using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
    {
        BitmapSource bitmapSource = BitmapSource.Create(depthWidth, depthHeight, 96.0, 96.0, PixelFormats.Gray16, null,
            depthBuffer.UnderlyingBuffer, (int)depthBuffer.Size, depthWidth << 1);

        String depthPath = FramesAndPaths.GetImageFilePath(FramesAndPaths.FileType.DepthImage, frameNumber);
        bitmapSource.Save(depthPath + ".png", ImageFormat.Png);
    }

    // Release depthFrame
    depthFrame.Dispose();
}
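BitmapSource has no built-in Save method, so Handle_DepthFrame presumably relies on an extension method defined elsewhere in that project. A sketch of what such an extension might look like is below; it is an assumption (only the PNG case is handled, matching the ImageFormat.Png argument), and it requires System.IO and System.Windows.Media.Imaging.

// Hypothetical extension assumed by Handle_DepthFrame above; the project's real helper is not shown.
// Encodes the BitmapSource with a WPF PNG encoder and writes it to the given path.
public static void Save(this BitmapSource source, string path, System.Drawing.Imaging.ImageFormat format)
{
    BitmapEncoder encoder = new PngBitmapEncoder(); // only PNG is handled in this sketch
    encoder.Frames.Add(BitmapFrame.Create(source));

    using (var stream = new FileStream(path, FileMode.Create))
    {
        encoder.Save(stream);
    }
}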
private void Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    bool depthFrameProcessed = false;

    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame != null)
        {
            if (recording && DateTime.Now.Ticks > (lastFrameRecorded + (1e7 / framesPerSecond)))
            {
                ushort[] depthPoints = new ushort[this.depthFrameDescription.Width * this.depthFrameDescription.Height];
                depthFrame.CopyFrameDataToArray(depthPoints);

                CameraSpacePoint[] cameraPtsArray = new CameraSpacePoint[this.depthFrameDescription.Width * this.depthFrameDescription.Height];
                kinectSensor.CoordinateMapper.MapDepthFrameToCameraSpace(depthPoints, cameraPtsArray);

                this.ProcessDepthFrameDataToFile(cameraPtsArray, (ushort)(minDepth), (ushort)(maxDepth),
                    "S" + participantNumber.ToString("00") + "_" + globalFrameNumber.ToString("00000") + "_" + fileName + "Frame" + imageCount.ToString("0000") + ".pcd");

                lastFrameRecorded = DateTime.Now.Ticks;
                imageCount++;
                globalFrameNumber++;
            }

            // the fastest way to process the depth data is to directly access
            // the underlying buffer
            using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
            {
                // verify data and write the depth data to the display bitmap
                if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)) &&
                    (this.depthFrameDescription.Width == this.depthBitmap.PixelWidth) && (this.depthFrameDescription.Height == this.depthBitmap.PixelHeight))
                {
                    // Note: In order to see the full range of depth (including the less reliable far field depth)
                    // we could set maxDepth to the extreme potential depth threshold:
                    //ushort maxDepth = ushort.MaxValue;
                    // If you wish to filter by reliable depth distance, uncomment the following line:
                    //maxDepth = depthFrame.DepthMaxReliableDistance;

                    this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, (ushort)(minDepth * 1000), (ushort)(maxDepth * 1000));
                    depthFrameProcessed = true;

                    //this.ProcessDepthFrameDataToFile(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, (ushort)(maxDepth * 1000));
                }
            }
        }
    }

    if (depthFrameProcessed)
    {
        this.RenderDepthPixels();
    }
}
private void DepthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    if (recording)
    {
        // record each gesture frame
        SaveBitmap(gesturepath + "\\" + counter.ToString() + ".png");
        ++counter;

        // reset timer if 3 seconds reached
        if (timer.Elapsed.TotalSeconds > 3)
        {
            timer.Reset();
            recording = false;
            counter = 0;

            // kill Myo and Leap processes
            var myo = Process.GetProcesses().Where(pr => (pr.ProcessName == "MyoDataCapture") || (pr.ProcessName == "python"));
            foreach (var process in myo)
            {
                process.Kill();
            }
        }
    }
    // update gesture information
    else
    {
        if (gestureindex < gestures.Count)
        {
            textBox1.Text = "Next gesture: " + gestures[gestureindex];
            gestureImage.Source = new BitmapImage(new Uri(Directory.GetCurrentDirectory() + "\\images\\" + gestures[gestureindex] + ".gif"));
        }
        else
        {
            textBox1.Text = "Completed round!";
            gestureImage.Source = new BitmapImage(new Uri(Directory.GetCurrentDirectory() + "\\images\\blank.png"));
        }
    }

    canvas.Children.Clear();

    using (DepthFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            // 2) Update the HandsController using the array (or pointer) of the depth data, and the tracked body.
            using (KinectBuffer buffer = frame.LockImageBuffer())
            {
                _handsController.Update(buffer.UnderlyingBuffer, _body);
            }
        }
    }
}
private void Reader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame != null)
        {
            using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
            {
                depthFrame.CopyFrameDataToArray(uDepthPixels);
            }
        }
    }

    DoMapping();
}
// every frame
private void DepthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    canvas.Children.Clear();

    using (DepthFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame != null)
        {
            using (KinectBuffer buffer = frame.LockImageBuffer())
            {
                _handsController.Update(buffer.UnderlyingBuffer, _body);
            }
        }
    }
}
private void DepthFrameReader_FrameArrived(DepthFrame depthFrame)
{
    if (depthFrame != null)
    {
        FrameDescription depthFrameDescription = depthFrame.FrameDescription;

        using (KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
        {
            using (var dest = ImagePool.GetOrCreate(depthFrameDescription.Width, depthFrameDescription.Height, Imaging.PixelFormat.Gray_16bpp))
            {
                depthFrame.CopyFrameDataToIntPtr(dest.Resource.ImageData, (uint)(depthFrameDescription.Width * depthFrameDescription.Height * 2));
                var time = this.pipeline.GetCurrentTimeFromElapsedTicks(depthFrame.RelativeTime.Ticks);
                this.DepthImage.Post(dest, time);
            }
        }
    }
}
// subscribed event set during kinect initialization (called each time a depth frame is available)
private void Reader_DepthFrameArrived(object sender, DepthFrameArrivedEventArgs e)
{
    using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
    {
        if (depthFrame != null)
        {
            using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
            {
                // verify data and begin processing the data
                if ((this.depthFrameDescription.Width * this.depthFrameDescription.Height) == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel))
                {
                    this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, depthFrame.DepthMaxReliableDistance);
                }
            }
        }
    }
}
public void DepthFrameArrival(DepthFrame df, ref bool frameProcessed, double fps, WriteableBitmap depthBitmap)
{
    // the fastest way to process the depth data is to directly access
    // the underlying buffer
    using (Microsoft.Kinect.KinectBuffer depthBuffer = df.LockImageBuffer())
    {
        // verify data and write the depth data to the display bitmap
        if (((df.FrameDescription.Width * df.FrameDescription.Height) == (depthBuffer.Size / getBPP())) &&
            (df.FrameDescription.Width == depthBitmap.PixelWidth) && (df.FrameDescription.Height == depthBitmap.PixelHeight))
        {
            // Note: To see the full range of depth (including the less reliable far field depth),
            // maxDepth could be set to the extreme potential depth threshold:
            //ushort maxDepth = ushort.MaxValue;

            // Here we filter by reliable depth distance instead:
            ushort maxDepth = df.DepthMaxReliableDistance;
            ushort minDepth = df.DepthMinReliableDistance;

            ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, minDepth, maxDepth);
            frameProcessed = true;

            // depthFrame.CopyFrameDataToArray(this.depthPixelBuffer); done in processing function
            if (depthRecording)
            {
                garbageCount++;
                this.depthBinaryBuffer.Enqueue((byte[])(depthPixelBuffer.Clone()));
                this.frameCount++;

                if (fps < 16.0)
                {
                    garbageCount++;
                    Console.WriteLine("fps drop occurred");
                    this.depthBinaryBuffer.Enqueue((byte[])(depthPixelBuffer.Clone()));
                    this.frameCount++;
                }

                /*if (garbageCount > 500)
                {
                    System.GC.Collect();
                    garbageCount = 0;
                }*/
            }
        }
    }
}
void GreenScreenMappingDepthToColorSplats(ref DepthFrame depthFrame, ref ColorFrame colorFrame, ref BodyIndexFrame bodyIndexFrame, int depthWidth, int depthHeight, int colorWidth, int colorHeight)
{
    m_stopwatch.Restart();

    using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
    {
        // Need to know the color space point for each depth space point, but this is much less data
        // and much faster to compute than mapping the other way
        m_coordinateMapper.MapDepthFrameToColorSpaceUsingIntPtr(
            depthFrameData.UnderlyingBuffer,
            depthFrameData.Size,
            m_depthToColorSpacePoints);
    }

    m_depthMapTimer.Update(m_stopwatch.ElapsedMilliseconds);
    m_stopwatch.Restart();

    // We're done with the DepthFrame
    depthFrame.Dispose();
    depthFrame = null;

    lock (m_displayPixels)
    {
        // [KinectThread] avoid racing display buffer refresh with render (can cause missing images)
        // have to clear the display pixels so we can copy only the BGRA image of the player(s)
        Array.Clear(m_displayPixels, 0, m_displayPixels.Length);

        unsafe
        {
            fixed (byte* colorFrameDataPtr = &m_colorFrameData[0])
            {
                colorFrame.CopyConvertedFrameDataToIntPtr(new IntPtr(colorFrameDataPtr), (uint)m_colorFrameData.Length, ColorImageFormat.Bgra);
            }
        }

        // done with the colorFrame
        colorFrame.Dispose();
        colorFrame = null;

        m_colorCopyTimer.Update(m_stopwatch.ElapsedMilliseconds);
        m_stopwatch.Restart();

        // We'll access the body index data directly to avoid a copy
        using (KinectBuffer bodyIndexData = bodyIndexFrame.LockImageBuffer())
        {
            unsafe
            {
                byte* bodyIndexDataPointer = (byte*)bodyIndexData.UnderlyingBuffer;
                uint bodyIndexDataLength = bodyIndexData.Size;
                int colorMappedToDepthPointCount = m_colorToDepthSpacePoints.Length;

                fixed (ColorSpacePoint* depthMappedToColorPointsPointer = m_depthToColorSpacePoints)
                {
                    fixed (byte* bitmapPixelsBytePointer = &m_displayPixels[0])
                    {
                        fixed (byte* sourcePixelsBytePointer = &m_colorFrameData[0])
                        {
                            uint* bitmapPixelsPointer = (uint*)bitmapPixelsBytePointer;
                            uint* sourcePixelsPointer = (uint*)sourcePixelsBytePointer;

                            // We don't go all the way to the edge of the depth buffer, to eliminate a chance
                            // that a splat will go outside the edge of the color buffer when mapped to color
                            // space. In the x direction this will never happen anyway since the depth FOV
                            // is so much narrower than the color FOV.
                            const int Margin = 2;
                            for (int y = Margin; y < depthHeight - Margin; y++)
                            {
                                for (int x = 0; x < depthWidth; x++)
                                {
                                    // Scan forwards until we find a non-0xff value in the body index data.
                                    int depthIndex = y * depthWidth + x;
                                    if (bodyIndexDataPointer[depthIndex] != 0xff)
                                    {
                                        int depthIndex2 = depthIndex;

                                        // We found the beginning of a horizontal run of player pixels.
                                        // Scan to the end.
                                        int runWidth;
                                        for (runWidth = 1; runWidth + x < depthWidth; runWidth++)
                                        {
                                            depthIndex2++;
                                            if (bodyIndexDataPointer[depthIndex2] == 0xff)
                                            {
                                                break;
                                            }
                                        }

                                        // Now splat from (x, y) to (x + runWidth, y)
                                        float depthMappedToColorLeftX = depthMappedToColorPointsPointer[depthIndex].X;
                                        float depthMappedToColorLeftY = depthMappedToColorPointsPointer[depthIndex].Y;
                                        float depthMappedToColorRightX = depthMappedToColorPointsPointer[depthIndex2 - 1].X;
                                        float depthMappedToColorRightY = depthMappedToColorPointsPointer[depthIndex2 - 1].Y;

                                        // Now copy color pixels along that rectangle.
                                        const int splatHMargin = 2; // X margin of splat rectangle in color pixels
                                        const int splatVMargin = 3; // Y margin of splat rectangle in color pixels
                                        int minX = (int)Math.Min(depthMappedToColorLeftX, depthMappedToColorRightX) - splatHMargin;
                                        int minY = (int)Math.Min(depthMappedToColorLeftY, depthMappedToColorRightY) - splatVMargin;
                                        int maxX = (int)Math.Max(depthMappedToColorLeftX, depthMappedToColorRightX) + splatHMargin;
                                        int maxY = (int)Math.Max(depthMappedToColorLeftY, depthMappedToColorRightY) + splatVMargin;

                                        // Some edge of screen situations can result in color space points that are negative
                                        // or otherwise actually outside the color space coordinate range.
                                        Clamp(ref minX, colorWidth - 1);
                                        Clamp(ref minY, colorHeight - 1);
                                        Clamp(ref maxX, colorWidth - 1);
                                        Clamp(ref maxY, colorHeight - 1);

                                        for (int colorY = minY; colorY < maxY; colorY++)
                                        {
                                            int colorIndex = colorY * colorWidth + minX;
                                            for (int colorX = minX; colorX < maxX; colorX++)
                                            {
                                                bitmapPixelsPointer[colorIndex] = sourcePixelsPointer[colorIndex];
                                                colorIndex++;
                                            }
                                        }

                                        x += runWidth;
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }

        // Done with bodyIndexFrame
        bodyIndexFrame.Dispose();
        bodyIndexFrame = null;
    }

    m_colorScanTimer.Update(m_stopwatch.ElapsedMilliseconds);
    m_stopwatch.Restart();

    m_displayTexture.SetData(m_displayPixels);

    m_textureSetDataTimer.Update(m_stopwatch.ElapsedMilliseconds);
    m_stopwatch.Restart();

    Spam.TopLine1 = string.Format(
        "depth map: {0} msec; color copy: {1} msec; color scan: {2} msec; texture set: {3} msec",
        m_depthMapTimer.Average,
        m_colorCopyTimer.Average,
        m_colorScanTimer.Average,
        m_textureSetDataTimer.Average);
}
private void processDepthFrame(DepthFrame depthFrame, int depthWidth, int depthHeight)
{
    FrameDescription depthFrameDescription = depthFrame.FrameDescription;
    depthWidth = depthFrameDescription.Width;
    depthHeight = depthFrameDescription.Height;

    // Access the depth frame data directly via LockImageBuffer to avoid making a copy
    using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
    {
        this.coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
            depthFrameData.UnderlyingBuffer,
            depthFrameData.Size,
            this.colorMappedToDepthPoints);
    }

    // We're done with the DepthFrame
    depthFrame.Dispose();
    depthFrame = null;
}