/// <summary>
/// Handles the body index frame data arriving from the sensor.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_FrameArrived(object sender, BodyIndexFrameArrivedEventArgs e)
{
    bool frameWasProcessed = false;

    using (BodyIndexFrame bodyIndexFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyIndexFrame != null)
        {
            // Access the underlying buffer directly — the fastest way to read the body index data.
            using (Microsoft.Kinect.KinectBuffer bodyIndexBuffer = bodyIndexFrame.LockImageBuffer())
            {
                int expectedPixelCount = this._bodyIndexFrameDescription.Width * this._bodyIndexFrameDescription.Height;
                bool bufferSizeMatches = expectedPixelCount == bodyIndexBuffer.Size;
                bool bitmapSizeMatches =
                    (this._bodyIndexFrameDescription.Width == this._bodyIndexBitmap.PixelWidth) &&
                    (this._bodyIndexFrameDescription.Height == this._bodyIndexBitmap.PixelHeight);

                // Only process when the buffer matches the display bitmap exactly.
                if (bufferSizeMatches && bitmapSizeMatches)
                {
                    this.ProcessBodyIndexFrameData(bodyIndexBuffer.UnderlyingBuffer, bodyIndexBuffer.Size);
                    frameWasProcessed = true;
                }
            }
        }
    }

    if (frameWasProcessed)
    {
        this.RenderBodyIndexPixels();
    }
}
/// <summary>
/// Polls the reader for the most recent multi-source frame and, when a body
/// index frame is available with the expected size, forwards its raw buffer
/// to the silhouette processor.
/// </summary>
public void PollMostRecentSilhouetteFrame()
{
    MultiSourceFrame latestFrame = _reader.AcquireLatestFrame();
    if (latestFrame == null)
    {
        // No multi-source frame available right now.
        return;
    }

    using (BodyIndexFrame bodyIndexFrame = latestFrame.BodyIndexFrameReference.AcquireFrame())
    {
        if (bodyIndexFrame == null)
        {
            // The body index frame has already expired.
            return;
        }

        using (KinectBuffer pixelBuffer = bodyIndexFrame.LockImageBuffer())
        {
            // Body index data is one byte per pixel; validate before processing.
            int expectedPixelCount = SilhouetteFrameDescription.Width * SilhouetteFrameDescription.Height;
            if (expectedPixelCount == pixelBuffer.Size)
            {
                ProcessSilhouetteData(pixelBuffer.UnderlyingBuffer, pixelBuffer.Size);
            }
        }
    }
}
/// <summary>
/// Acquires the active body plus locked depth and body-index buffers in one step,
/// validating both buffers against the expected depth-frame dimensions.
/// (Name kept as-is — including the "aquire" typo — for caller compatibility.)
/// </summary>
/// <param name="bodyFrame">frame used to look up the active body</param>
/// <param name="depthFrame">frame whose depth buffer is locked</param>
/// <param name="bodyIndexFrame">frame whose body-index buffer is locked</param>
/// <param name="body">the active body, or null when none is tracked</param>
/// <param name="depthBuffer">locked depth buffer on success; null on failure</param>
/// <param name="bodyIndexBuffer">locked body-index buffer on success; null on failure</param>
/// <returns>true when the body and both validated buffers were acquired</returns>
private bool aquireBodyDataAndBuffers(
    BodyFrame bodyFrame,
    DepthFrame depthFrame,
    BodyIndexFrame bodyIndexFrame,
    out Body body,
    out KinectBuffer depthBuffer,
    out KinectBuffer bodyIndexBuffer)
{
    depthBuffer = null;
    bodyIndexBuffer = null;

    body = getActiveBody(bodyFrame);
    if (body == null)
    {
        return false;
    }

    depthBuffer = depthFrame.LockImageBuffer();

    var width = depthFrameDescription.Width;
    var height = depthFrameDescription.Height;

    // The depth buffer stores BytesPerPixel bytes per depth pixel, so the pixel
    // count must equal Size / BytesPerPixel.
    if (depthBuffer == null || (width * height) != (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel))
    {
        // BUGFIX: the locked depth buffer used to leak here, and the out
        // parameter kept referencing it after failure.
        if (depthBuffer != null)
        {
            depthBuffer.Dispose();
        }

        depthBuffer = null;
        return false;
    }

    bodyIndexBuffer = bodyIndexFrame.LockImageBuffer();

    // Body index data is 1 byte per pixel and depth is 2, so the depth buffer
    // must be exactly twice the size of the body index buffer.
    if (bodyIndexBuffer == null || bodyIndexBuffer.Size * 2 != depthBuffer.Size)
    {
        // BUGFIX: dispose everything we locked and clear both out parameters so
        // callers never observe a disposed (or wrong-sized, leaked) buffer.
        if (bodyIndexBuffer != null)
        {
            bodyIndexBuffer.Dispose();
        }

        bodyIndexBuffer = null;

        depthBuffer.Dispose();
        depthBuffer = null;
        return false;
    }

    return true;
}
/// <summary>
/// Store body index image
/// </summary>
/// <param name="bodyIndexFrame">body index frame to be stored</param>
/// <param name="frameNumber">frame number</param>
public static void Handle_BodyIndexFrame(BodyIndexFrame bodyIndexFrame, String frameNumber)
{
    using (Microsoft.Kinect.KinectBuffer bodyIndexBuffer = bodyIndexFrame.LockImageBuffer())
    {
        // Gray8 is one byte per pixel, so the stride equals the frame width.
        int stride = bodyIndexWidth * 1;

        BitmapSource bitmapSource = BitmapSource.Create(
            bodyIndexWidth,
            bodyIndexHeight,
            96.0,
            96.0,
            PixelFormats.Gray8,
            null,
            bodyIndexBuffer.UnderlyingBuffer,
            (int)bodyIndexBuffer.Size,
            stride);

        String targetPath = FramesAndPaths.GetImageFilePath(FramesAndPaths.FileType.BodyIndexImage, frameNumber);
        bitmapSource.Save(targetPath + ".jpg", ImageFormat.Jpeg);
    }

    // Release bodyIndexFrame — this method takes ownership of the frame.
    bodyIndexFrame.Dispose();
}
/// <summary>
/// Processes one body index frame: renders it into the supplied bitmap and,
/// while recording, queues an 8-bit indexed bitmap copy of the frame
/// (duplicated on fps drops so the recorded clip keeps its nominal rate).
/// </summary>
/// <param name="bif">body index frame to process</param>
/// <param name="frameProcessed">set to true when the frame was rendered</param>
/// <param name="fps">current capture rate, used to detect frame drops</param>
/// <param name="bodyIndexBitmap">target bitmap the frame is rendered into</param>
public void BodyIndexFrameArrival(BodyIndexFrame bif, ref bool frameProcessed, double fps, WriteableBitmap bodyIndexBitmap)
{
    // Access the underlying buffer directly — the fastest way to read the data.
    using (Microsoft.Kinect.KinectBuffer bodyIndexBuffer = bif.LockImageBuffer())
    {
        int frameWidth = bif.FrameDescription.Width;
        int frameHeight = bif.FrameDescription.Height;

        // Only render when the buffer matches the display bitmap exactly.
        bool dimensionsValid = ((frameWidth * frameHeight) == bodyIndexBuffer.Size)
            && (frameWidth == bodyIndexBitmap.PixelWidth)
            && (frameHeight == bodyIndexBitmap.PixelHeight);

        if (dimensionsValid)
        {
            ProcessBodyIndexFrameData(bodyIndexBuffer.UnderlyingBuffer, bodyIndexBuffer.Size);
            frameProcessed = true;
        }

        if (bodyRecording)
        {
            Bitmap recordedFrame;
            try
            {
                recordedFrame = new Bitmap(frameWidth, frameHeight, System.Drawing.Imaging.PixelFormat.Format8bppIndexed);
            }
            catch (Exception e)
            {
                // Allocation failed — log it, force a collection, and retry once.
                Console.WriteLine("Body Exception");
                Console.WriteLine(e);
                System.GC.Collect();
                recordedFrame = new Bitmap(frameWidth, frameHeight, System.Drawing.Imaging.PixelFormat.Format8bppIndexed);
            }

            UtilityClass.ByteArrayToBitmap(ref recordedFrame, bodyPixelBuffer, frameWidth, frameHeight);
            bBitmap = recordedFrame;
            bodyBitmapBuffer.Enqueue(bBitmap);
            frameCount++;

            // On an fps drop, enqueue the same frame a second time to compensate.
            if (fps < 16.0)
            {
                Console.WriteLine("fps drop yaşandı");
                bodyBitmapBuffer.Enqueue(bBitmap);
                frameCount++;
            }
        }
    }
}
/// <summary>
/// Renders a body index frame into the displayed bitmap and pushes it to the
/// on-screen image feed.
/// </summary>
/// <param name="bif">body index frame to display</param>
private void updateDisplayedBitmap(BodyIndexFrame bif)
{
    using (Microsoft.Kinect.KinectBuffer bodyIndexBuffer = bif.LockImageBuffer())
    {
        var frameDescription = sensor.getBodyIndexFrameDescription();

        // Verify the frame matches the target bitmap exactly before writing
        // pixels (sanity check recommended by the SDK samples).
        bool bufferSizeMatches = (frameDescription.Width * frameDescription.Height) == bodyIndexBuffer.Size;
        bool bitmapSizeMatches = (frameDescription.Width == this.displayedBitmap.PixelWidth)
            && (frameDescription.Height == this.displayedBitmap.PixelHeight);

        if (bufferSizeMatches && bitmapSizeMatches)
        {
            uint[] pixelData = processBIF(bodyIndexBuffer.UnderlyingBuffer, bodyIndexBuffer.Size);

            displayedBitmap.WritePixels(
                new Int32Rect(0, 0, displayedBitmap.PixelWidth, displayedBitmap.PixelHeight),
                pixelData,
                this.displayedBitmap.PixelWidth * BytesPerPixel,
                0);

            bitmap_feed.Source = displayedBitmap;
        }
    }
}
/// <summary>
/// Handles a body index frame arriving from the sensor.
/// (Translated from the original Chinese doc comment.)
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
public void Reader_BodyIndexFrameArrived(Object sender, BodyIndexFrameArrivedEventArgs e)
{
    using (BodyIndexFrame bodyIndexFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyIndexFrame == null)
        {
            // Frame expired before this event was processed.
            return;
        }

        // Access the underlying buffer directly — the fastest way to read the data.
        using (Microsoft.Kinect.KinectBuffer bodyIndexBuffer = bodyIndexFrame.LockImageBuffer())
        {
            // Body index data is one byte per pixel; process only when the
            // buffer size matches the expected pixel count.
            int expectedPixelCount = this.bodyIndexFrameDescription.Width * this.bodyIndexFrameDescription.Height;
            if (expectedPixelCount == bodyIndexBuffer.Size)
            {
                this.ProcessBodyIndexFrameData(bodyIndexBuffer.UnderlyingBuffer, bodyIndexBuffer.Size);
            }
        }
    }
}
/// <summary>
/// Handles the body index frame data arriving from the sensor. When a new target
/// is detected (checkFrame sets <c>found</c>) the sentry is fired once via an
/// external Python script, a photo is taken, and the photo is tweeted.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_FrameArrived(object sender, BodyIndexFrameArrivedEventArgs e)
{
    bool bodyIndexFrameProcessed = false;

    using (BodyIndexFrame bodyIndexFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyIndexFrame != null)
        {
            // the fastest way to process the body index data is to directly access
            // the underlying buffer
            using (Microsoft.Kinect.KinectBuffer bodyIndexBuffer = bodyIndexFrame.LockImageBuffer())
            {
                // verify data and write the color data to the display bitmap
                if (((this.bodyIndexFrameDescription.Width * this.bodyIndexFrameDescription.Height) == bodyIndexBuffer.Size) &&
                    (this.bodyIndexFrameDescription.Width == this.bodyIndexBitmap.PixelWidth) &&
                    (this.bodyIndexFrameDescription.Height == this.bodyIndexBitmap.PixelHeight))
                {
                    // checks each pixel to see if it is part of the player or not
                    this.ProcessBodyIndexFrameData(bodyIndexBuffer.UnderlyingBuffer, bodyIndexBuffer.Size);

                    // check if the frame is the same as before or different
                    checkFrame(bodyIndexBuffer.UnderlyingBuffer, bodyIndexBuffer.Size);

                    // Fire only once per detection (the 'fired' latch prevents re-triggering).
                    if (found && !fired)
                    {
                        fired = true;
                        frameNum++;
                        Console.WriteLine("Shot " + frameNum);

                        // fire gun
                        RunPythonScript("activateSentry.py");
                        Console.WriteLine("Fired" + DateTime.Now);

                        // take photo of victim
                        takePhoto();
                        Console.WriteLine("Photo Taken " + DateTime.Now);

                        // tweet current photo
                        RunPythonScript("tweetphoto.py");
                    }

                    bodyIndexFrameProcessed = true;
                }
            }
        }
    }

    if (bodyIndexFrameProcessed)
    {
        // this displays the new body but for some reason this constantly executes
        this.RenderBodyIndexPixels();
    }
}

/// <summary>
/// Runs a Python script located in the user's Documents folder, blocking until
/// its standard output is drained, and echoes that output to the console.
/// BUGFIX: the Process instances were previously never disposed (leaked OS handles);
/// the duplicated launch code is now shared here inside a using block.
/// </summary>
/// <param name="scriptName">script file name, relative to MyDocuments</param>
private void RunPythonScript(string scriptName)
{
    using (Process proc = new System.Diagnostics.Process())
    {
        proc.StartInfo.WindowStyle = System.Diagnostics.ProcessWindowStyle.Hidden;
        proc.StartInfo.FileName = "C:/Python27/python.exe";
        proc.StartInfo.Arguments = scriptName;
        proc.StartInfo.WorkingDirectory = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments);
        proc.StartInfo.UseShellExecute = false;
        proc.StartInfo.RedirectStandardOutput = true;
        proc.Start();

        while (!proc.StandardOutput.EndOfStream)
        {
            Console.WriteLine(proc.StandardOutput.ReadLine());
        }
    }
}
/// <summary>
/// Handles the multi-source (color/depth/body/body-index) frame data arriving
/// from the sensor: copies every stream into its reusable buffer, maps the depth
/// frame onto the color frame, optionally detects the closest face, notifies the
/// frame handler and, while recording, snapshots the raw data into the container.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // All frame counter; only every framesToCapture-th frame is processed.
    this.frameCount++;
    if (this.frameCount % this.framesToCapture != 0)
    {
        return;
    }

    ColorFrame colorFrame = null;
    DepthFrame depthFrame = null;
    BodyFrame bodyFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    Body body = null;
    SkeletonOfBody skel_up = new SkeletonOfBody(Constants.SKEL_UP_TOTAL_JOINTS);

    try
    {
        var frameReference = e.FrameReference.AcquireFrame();

        // BUGFIX: the multi-source frame may already have expired by the time
        // this event is processed, in which case AcquireFrame() returns null;
        // dereferencing it unchecked threw NullReferenceException.
        if (frameReference == null)
        {
            return;
        }

        colorFrame = frameReference.ColorFrameReference.AcquireFrame();
        depthFrame = frameReference.DepthFrameReference.AcquireFrame();
        bodyFrame = frameReference.BodyFrameReference.AcquireFrame();
        bodyIndexFrame = frameReference.BodyIndexFrameReference.AcquireFrame();

        // All four streams are required; the finally block disposes any acquired so far.
        if (colorFrame == null || depthFrame == null || bodyFrame == null || bodyIndexFrame == null)
        {
            return;
        }

        //--------------------------------------------
        // Get the color frame
        //--------------------------------------------
        using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
        {
            colorFrame.CopyConvertedFrameDataToArray(this.buffColor32, ColorImageFormat.Bgra);
        } //End ColorFrame

        //--------------------------------------------
        // Get the depth frame
        //--------------------------------------------
        using (KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
        {
            depthFrame.CopyFrameDataToArray(this.buffDepth16);
        } //End DepthFrame

        //--------------------------------------------
        // Get the body index frame
        //--------------------------------------------
        using (KinectBuffer bodyIndexBuffer = bodyIndexFrame.LockImageBuffer())
        {
            bodyIndexFrame.CopyFrameDataToArray(this.buffBodyIndex8);
        }

        //--------------------------------------------
        // Get the body frame
        //--------------------------------------------
        bodyFrame.GetAndRefreshBodyData(this.listBodies);

        //--------------------------------------------
        // Map the depth frame to its color frame
        //--------------------------------------------
        {
            Array.Clear(this.buffColorSpacePoints, 0, this.buffColorSpacePoints.Length);
            Array.Clear(this.buffMapDepthToColor32, 0, this.buffMapDepthToColor32.Length);

            // Coordinate mapping
            this.coordinateMapper.MapDepthFrameToColorSpace(this.buffDepth16, this.buffColorSpacePoints);

            unsafe
            {
                fixed(ColorSpacePoint *depthMappedToColorPointsPointer = buffColorSpacePoints)
                {
                    // For every depth pixel that belongs to a tracked body, copy
                    // the corresponding color pixel (BGR) into the mapped buffer.
                    for (int idxDepth = 0; idxDepth < buffColorSpacePoints.Length; ++idxDepth)
                    {
                        float depthMappedToColorX = depthMappedToColorPointsPointer[idxDepth].X;
                        float depthMappedToColorY = depthMappedToColorPointsPointer[idxDepth].Y;

                        // The sentinel value is -inf, -inf, meaning that no color pixel corresponds to this depth pixel.
                        if (!float.IsNegativeInfinity(depthMappedToColorX) &&
                            !float.IsNegativeInfinity(depthMappedToColorY))
                        {
                            // Round to the nearest color pixel.
                            int colorX = (int)(depthMappedToColorX + 0.5f);
                            int colorY = (int)(depthMappedToColorY + 0.5f);

                            // If the point is not valid, there is no body index there.
                            if ((colorX >= 0) && (colorX < this.colorImageSize.Width) &&
                                (colorY >= 0) && (colorY < this.colorImageSize.Height))
                            {
                                int idxColor = (colorY * this.colorImageSize.Width) + colorX;

                                // If we are tracking a body for the current pixel, save the color data
                                if (this.buffBodyIndex8[idxDepth] != 0xff)
                                {
                                    this.buffMapDepthToColor32[idxDepth * 4] = this.buffColor32[idxColor * 4];         // B
                                    this.buffMapDepthToColor32[idxDepth * 4 + 1] = this.buffColor32[idxColor * 4 + 1]; // G
                                    this.buffMapDepthToColor32[idxDepth * 4 + 2] = this.buffColor32[idxColor * 4 + 2]; // R
                                }
                            }
                        }
                    }
                }
            } //End Unsafe
        } //End Mapping

        //--------------------------------------------
        // Process the face of the default body
        //--------------------------------------------
        // Reset the detected face parameters on every processed frame.
        this.faceData = new FaceData(new BoxFace(0, 0, 0, 0), new BoxFace(0, 0, 0, 0));

#if FACE_DETECTION
        if (this.faceFrameSource.IsActive)
        {
            // In our experiment we track the closest body.
            body = Util.GetClosestBody(this.listBodies);

            if (body != null && body.IsTracked)
            {
                // Get the upper-body skeleton.
                skel_up = Util.GetSkeletonUpperBody(this.Mapper, body);

                // Draw skeleton joints
                if (this.drawingDepthMarkups)
                {
                    Util.WriteSkeletonOverFrame(this, VisTypes.Depth, skel_up, 2, ref this.buffMapDepthToColor32);
                }

                // Assign a tracking ID to the face source
                this.faceFrameSource.TrackingId = body.TrackingId;

                if (this.faceFrameResults != null)
                {
                    var boxColor = this.faceFrameResults.FaceBoundingBoxInColorSpace;
                    var boxDepth = this.faceFrameResults.FaceBoundingBoxInInfraredSpace;

                    // If there are face results, save them as rectangles [x, y, width, height].
                    this.faceData.boxColor = new BoxFace(boxColor.Left, boxColor.Top, (boxColor.Right - boxColor.Left), (boxColor.Bottom - boxColor.Top));
                    this.faceData.boxDepth = new BoxFace(boxDepth.Left, boxDepth.Top, (boxDepth.Right - boxDepth.Left), (boxDepth.Bottom - boxDepth.Top));

                    // Draw the face
                    if (this.drawingDepthMarkups)
                    {
                        Util.WriteFaceOverFrame(this, VisTypes.Depth, faceData.boxDepth, 1, ref this.buffMapDepthToColor32);
                    } //End Drawing
                } //End FaceResult
            } //End Body
        }
#endif

        // Update the data handler
        this.frameHandler(
            this.buffColor32,
            this.buffDepth16,
            this.buffBodyIndex8,
            this.buffMapDepthToColor32,
            this.listBodies,
            this.faceData);

        //--------------------------------------------
        // Record the data
        //--------------------------------------------
        if (this.stateOfRecording)
        {
            // Discard frames where a hand is not correctly tracked — a zero
            // coordinate indicates a hand-tracking failure.
            if (skel_up.jointDepthSpace[(int)JointUpType.HandLeft].X == 0 ||
                skel_up.jointDepthSpace[(int)JointUpType.HandLeft].Y == 0 ||
                skel_up.jointDepthSpace[(int)JointUpType.HandRight].X == 0 ||
                skel_up.jointDepthSpace[(int)JointUpType.HandRight].Y == 0)
            {
                Console.WriteLine("Neglect frame {0}", this.recordedFrames);
                return;
            }

            // Snapshot the raw data into fresh arrays so the container owns them.
            byte[] _colorData = new byte[this.buffColor32.Length];
            ushort[] _depthData = new ushort[this.buffDepth16.Length];
            byte[] _bodyIndexData = new byte[this.buffBodyIndex8.Length];
            IList<Body> _bodies = new Body[this.listBodies.Count];

            colorFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Bgra);
            depthFrame.CopyFrameDataToArray(_depthData);
            bodyIndexFrame.CopyFrameDataToArray(_bodyIndexData);
            bodyFrame.GetAndRefreshBodyData(_bodies);

            // Increase the counter
            this.recordedFrames++;

            this.dataContainer.AddColor = _colorData;
            this.dataContainer.AddDepth = _depthData;
            this.dataContainer.AddBodyIndex = _bodyIndexData;
            this.dataContainer.AddListOfBodies = _bodies;
            this.dataContainer.AddFaceData = this.faceData;

            // If the user only requires a fixed number of frames, stop recording once reached.
            if (this.fixedFrames == this.recordedFrames)
            {
                this.stateOfRecording = false;
            }
        }

        // Update Frame Rate
        UpdateGrabberFrameRate();
    }
    finally
    {
        // Keep the frame counter bounded.
        if (this.frameCount > 100000000)
        {
            this.frameCount = 0;
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (bodyFrame != null)
        {
            bodyFrame.Dispose();
        }

        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }
    }
}
/// <summary>
/// Handles the depth/color/body index frame data arriving from the sensor.
/// Produces a "green screen" effect: color pixels that do not map to a tracked
/// body in the depth/body-index data are zeroed out in the display bitmap.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    int depthWidth = 0;
    int depthHeight = 0;

    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    bool isBitmapLocked = false;

    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }

    // We use a try/finally to ensure that we clean up before we exit the function.
    // This includes calling Dispose on any Frame objects that we may have and unlocking the bitmap back buffer.
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();

        // If any frame has expired by the time we process this event, return.
        // The "finally" statement will Dispose any that are not null.
        if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
        {
            return;
        }

        // Process Depth
        FrameDescription depthFrameDescription = depthFrame.FrameDescription;

        depthWidth = depthFrameDescription.Width;
        depthHeight = depthFrameDescription.Height;

        // Access the depth frame data directly via LockImageBuffer to avoid making a copy
        using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
        {
            // For every color pixel, compute the depth-space coordinate it maps to.
            this.coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                depthFrameData.UnderlyingBuffer,
                depthFrameData.Size,
                this.colorMappedToDepthPoints);
        }

        // We're done with the DepthFrame; dispose eagerly so the sensor can recycle it.
        depthFrame.Dispose();
        depthFrame = null;

        // Process Color

        // Lock the bitmap for writing
        this.bitmap.Lock();
        isBitmapLocked = true;

        colorFrame.CopyConvertedFrameDataToIntPtr(this.bitmap.BackBuffer, this.bitmapBackBufferSize, ColorImageFormat.Bgra);

        // We're done with the ColorFrame
        colorFrame.Dispose();
        colorFrame = null;

        // We'll access the body index data directly to avoid a copy
        using (KinectBuffer bodyIndexData = bodyIndexFrame.LockImageBuffer())
        {
            unsafe
            {
                byte *bodyIndexDataPointer = (byte *)bodyIndexData.UnderlyingBuffer;

                int colorMappedToDepthPointCount = this.colorMappedToDepthPoints.Length;

                fixed(DepthSpacePoint *colorMappedToDepthPointsPointer = this.colorMappedToDepthPoints)
                {
                    // Treat the color data as 4-byte pixels
                    uint *bitmapPixelsPointer = (uint *)this.bitmap.BackBuffer;

                    // Loop over each row and column of the color image
                    // Zero out any pixels that don't correspond to a body index
                    for (int colorIndex = 0; colorIndex < colorMappedToDepthPointCount; ++colorIndex)
                    {
                        float colorMappedToDepthX = colorMappedToDepthPointsPointer[colorIndex].X;
                        float colorMappedToDepthY = colorMappedToDepthPointsPointer[colorIndex].Y;

                        // The sentinel value is -inf, -inf, meaning that no depth pixel corresponds to this color pixel.
                        if (!float.IsNegativeInfinity(colorMappedToDepthX) &&
                            !float.IsNegativeInfinity(colorMappedToDepthY))
                        {
                            // Make sure the depth pixel maps to a valid point in color space
                            int depthX = (int)(colorMappedToDepthX + 0.5f);
                            int depthY = (int)(colorMappedToDepthY + 0.5f);

                            // If the point is not valid, there is no body index there.
                            if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                            {
                                int depthIndex = (depthY * depthWidth) + depthX;

                                // If we are tracking a body for the current pixel, do not zero out the pixel
                                if (bodyIndexDataPointer[depthIndex] != 0xff)
                                {
                                    continue;
                                }
                            }
                        }

                        // Background pixel: clear it (all four BGRA bytes at once).
                        bitmapPixelsPointer[colorIndex] = 0;
                    }
                }

                this.bitmap.AddDirtyRect(new Int32Rect(0, 0, this.bitmap.PixelWidth, this.bitmap.PixelHeight));
            }
        }
    }
    finally
    {
        if (isBitmapLocked)
        {
            this.bitmap.Unlock();
        }

        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }
    }
}
/// <summary>
/// Handles the depth/color/body index frame data arriving from the sensor and
/// drives the projector-calibration state machine:
/// capture depth -> filter depth -> screenshot depth map -> project patterns
/// (two phases, with camera screenshots per pattern) -> calculate.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Calibration state machine: each nested check advances at most one stage per event.
    if (this.runningCalibration)
    {
        //Console.WriteLine("runningCalibration");
        if (this.depthFrameCaptured)
        {
            //Console.WriteLine("depthFrameCaptured");
            if (this.depthFrameFiltered)
            {
                //Console.WriteLine("this.depthFrameFiltered");
                if (this.depthScreenshotTaken)
                {
                    //Console.WriteLine("this.depthScreenshotTaken");
                    if (this.horizontalProjectingDone && this.verticalProjectingDone)
                    {
                        this.StatusText = "this.horizontalProjectingDone && this.verticalProjectingDone";
                        if (this.calculatingStarted)
                        {
                            this.StatusText = "calculatingFinished";
                        }
                        else
                        {
                            this.StatusText = "calculating";
                            this.startCalculating();
                        }
                    }
                    else
                    {
                        //this.StatusText = "projecting Patterns";
                        // Request that the next color frame be stored as the reference image,
                        // which also kicks off pattern projection further below.
                        this.writeImageToFirstImageBuffer = true;
                    }
                    //} /*else {
                    // * this.StatusText = "projecting Patterns";
                    // * this.writeImageToFirstImageBuffer = true;
                    // * }*/
                }
                else
                {
                    this.StatusText = "making Screenshot of DepthMap";
                    this.makeScreenshotOfDepthMap();
                }
            }
            else
            {
                this.StatusText = "filtering Depth";
                this.filterDepth2(5);
            }
        }
    }

    int depthWidth = 0;
    int depthHeight = 0;

    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    bool isBitmapLocked = false;

    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }

    // We use a try/finally to ensure that we clean up before we exit the function.
    // This includes calling Dispose on any Frame objects that we may have and unlocking the bitmap back buffer.
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();

        // If any frame has expired by the time we process this event, return.
        // The "finally" statement will Dispose any that are not null.
        if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
        {
            return;
        }

        // Process Depth
        FrameDescription depthFrameDescription = depthFrame.FrameDescription;

        depthWidth = depthFrameDescription.Width;
        depthHeight = depthFrameDescription.Height;

        // Access the depth frame data directly via LockImageBuffer to avoid making a copy
        using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
        {
            this.coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                depthFrameData.UnderlyingBuffer,
                depthFrameData.Size,
                this.colorMappedToDepthPoints);

            // One-shot capture of the raw depth frame plus reliable range for calibration.
            if (this.captureDepthFrame)
            {
                unsafe
                {
                    depthFrame.CopyFrameDataToArray(this.globalDepth);
                    //for (int i = 2000; i < 2020;i++ )
                    //{
                    this.minDepthRange = depthFrame.DepthMinReliableDistance;
                    this.maxDepthRange = depthFrame.DepthMaxReliableDistance;
                    //Console.WriteLine("DEPTHFRAME: {0}", frameData[512*424-1]);
                    //}
                }
                this.captureDepthFrame = false;
                this.depthFrameCaptured = true;
            }
        }

        // We're done with the DepthFrame
        depthFrame.Dispose();
        depthFrame = null;

        // Process Color

        // Lock the bitmap for writing
        this.bitmap.Lock();
        isBitmapLocked = true;

        colorFrame.CopyConvertedFrameDataToIntPtr(this.bitmap.BackBuffer, this.bitmapBackBufferSize, ColorImageFormat.Bgra);

        // First projection phase.
        // NOTE(review): this branch screenshots with the label "horizontal" and fills
        // horizontalImages, yet its completion flag is verticalProjectingDone (and the
        // second phase is the mirror image) — the naming looks swapped; confirm intent.
        if (this.projectingStarted && !this.verticalProjectingDone)
        {
            Boolean newPattern = false;
            bool timeToTakeScreenshot = false;

            // Screenshot mid-cycle (frame 10); advance to the next pattern at frame 15.
            if (this.frameCounter == 10)
            {
                timeToTakeScreenshot = true;
            }
            if (this.frameCounter >= 15)
            {
                newPattern = true;
                this.projectionImageCounter++;
                this.frameCounter = 0;
            }
            else
            {
                this.frameCounter++;
            }
            if (timeToTakeScreenshot)
            {
                //int size = this.verticalImages.Count();
                // The bitmap must be unlocked while the screenshot is taken, then re-locked.
                this.bitmap.Unlock();
                this.screenshot(this.projectionImageCounter, "horizontal");
                this.bitmap.Lock();
                //this.verticalImages[size]
                timeToTakeScreenshot = false;
            }
            if (newPattern)
            {
                ProjectablePattern pp = new ProjectablePattern(this.projectorWidth, this.projectorHeight, true, this.projectionImageCounter, this.myProjector);
                bool success = pp.projectPattern();
                if (!success)
                {
                    // No more patterns to project: reset counters and move to the next phase.
                    this.frameCounter = 0;
                    this.projectionImageCounter = 0;
                    this.verticalProjectingDone = true;
                }
                else
                {
                    this.horizontalImages.Add(pp);
                }
            }
        }

        // Second projection phase (runs only after the first phase has finished).
        if (this.projectingStarted && this.verticalProjectingDone && !this.horizontalProjectingDone)
        {
            Boolean newPattern = false;
            bool timeToTakeScreenshot = false;
            if (this.frameCounter == 10)
            {
                timeToTakeScreenshot = true;
            }
            if (this.frameCounter >= 15)
            {
                newPattern = true;
                this.projectionImageCounter++;
                this.frameCounter = 0;
            }
            else
            {
                this.frameCounter++;
            }
            if (timeToTakeScreenshot)
            {
                //int size = this.verticalImages.Count();
                this.bitmap.Unlock();
                this.screenshot(this.projectionImageCounter, "vertical");
                this.bitmap.Lock();
                //this.verticalImages[size]
                timeToTakeScreenshot = false;
            }
            if (newPattern)
            {
                ProjectablePattern pp = new ProjectablePattern(this.projectorWidth, this.projectorHeight, false, this.projectionImageCounter, this.myProjector);
                bool success = pp.projectPattern();
                if (!success)
                {
                    // All patterns projected: calibration projection is complete.
                    this.horizontalProjectingDone = true;
                    this.projectButton.IsEnabled = false;
                }
                else
                {
                    this.verticalImages.Add(pp);
                }
            }
        }

        // Store the current color frame as the reference ("first") image and start projecting.
        if (this.writeImageToFirstImageBuffer)
        {
            colorFrame.CopyConvertedFrameDataToIntPtr(this.firstImage.BackBuffer, this.bitmapBackBufferSize, ColorImageFormat.Bgra);
            this.writeImageToFirstImageBuffer = false;
            this.projectingStarted = true;
        }

        // We're done with the ColorFrame
        colorFrame.Dispose();
        colorFrame = null;

        // We'll access the body index data directly to avoid a copy
        using (KinectBuffer bodyIndexData = bodyIndexFrame.LockImageBuffer())
        {
            unsafe
            {
                byte *bodyIndexDataPointer = (byte *)bodyIndexData.UnderlyingBuffer;

                int colorMappedToDepthPointCount = this.colorMappedToDepthPoints.Length;

                fixed(DepthSpacePoint *colorMappedToDepthPointsPointer = this.colorMappedToDepthPoints)
                {
                    // Treat the color data as 4-byte pixels
                    uint *bitmapPixelsPointer = (uint *)this.bitmap.BackBuffer;
                    // NOTE(review): firstImagePixelsPointer is declared but never used below.
                    uint *firstImagePixelsPointer = (uint *)this.firstImage.BackBuffer;

                    // NOTE(review): only colorIndex == 600 is inspected and the zero-out
                    // below is commented away, so this loop currently has no visible
                    // effect — it appears to be leftover debugging code.
                    for (int colorIndex = 0; colorIndex < colorMappedToDepthPointCount; ++colorIndex)
                    {
                        float colorMappedToDepthX = colorMappedToDepthPointsPointer[colorIndex].X;
                        float colorMappedToDepthY = colorMappedToDepthPointsPointer[colorIndex].Y;

                        if (colorIndex == 600)
                        {
                            // The sentinel value is -inf, -inf, meaning that no depth pixel corresponds to this color pixel.
                            if (!float.IsNegativeInfinity(colorMappedToDepthX) &&
                                !float.IsNegativeInfinity(colorMappedToDepthY))
                            {
                                /*Console.WriteLine("colorMappedToDepthX: {0}, colorMappedToDepthY {1} ,colorIndex: {2}", colorMappedToDepthX, colorMappedToDepthY, colorIndex);*/

                                // Make sure the depth pixel maps to a valid point in color space
                                int depthX = (int)(colorMappedToDepthX + 0.5f);
                                int depthY = (int)(colorMappedToDepthY + 0.5f);

                                // If the point is not valid, there is no body index there.
                                if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                                {
                                    int depthIndex = (depthY * depthWidth) + depthX;

                                    // If we are tracking a body for the current pixel, do not zero out the pixel
                                    if (bodyIndexDataPointer[depthIndex] != 0xff)
                                    {
                                        continue;
                                    }
                                }
                            }
                        } //(colorIndex<200)
                        //bitmapPixelsPointer[colorIndex] = 0;
                    }
                }

                this.bitmap.AddDirtyRect(new Int32Rect(0, 0, this.bitmap.PixelWidth, this.bitmap.PixelHeight));
            }
        }
    }
    finally
    {
        if (isBitmapLocked)
        {
            this.bitmap.Unlock();
        }

        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }
    }
}
/// <summary>
/// Handles the depth/color/body index frame data arriving from the sensor.
/// WinRT (IBuffer) variant of the green-screen effect: color pixels that do not
/// map to a tracked body are zeroed out. COM references to the locked IBuffers
/// are explicitly released via Marshal.ReleaseComObject.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    int depthWidth = 0;
    int depthHeight = 0;

    DepthFrame depthFrame = null;
    IBuffer depthFrameData = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    IBuffer bodyIndexFrameData = null;
    IBufferByteAccess bodyIndexByteAccess = null;

    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }

    // We use a try/finally to ensure that we clean up before we exit the function.
    // This includes calling Dispose on any Frame objects that we may have and unlocking the bitmap back buffer.
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();

        // If any frame has expired by the time we process this event, return.
        // The "finally" statement will Dispose any that are not null.
        if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
        {
            return;
        }

        // Process Depth
        FrameDescription depthFrameDescription = depthFrame.FrameDescription;

        depthWidth = depthFrameDescription.Width;
        depthHeight = depthFrameDescription.Height;

        // Access the depth frame data directly via LockImageBuffer to avoid making a copy
        depthFrameData = depthFrame.LockImageBuffer();

        // For every color pixel, compute the depth-space coordinate it maps to.
        this.coordinateMapper.MapColorFrameToDepthSpaceUsingIBuffer(depthFrameData, this.colorMappedToDepthPoints);

        // We're done with the DepthFrame
        depthFrame.Dispose();
        depthFrame = null;

        // We must force a release of the IBuffer in order to ensure that we have dropped all references to it.
        System.Runtime.InteropServices.Marshal.ReleaseComObject(depthFrameData);
        depthFrameData = null;

        // Process Color
        colorFrame.CopyConvertedFrameDataToBuffer(this.bitmap.PixelBuffer, ColorImageFormat.Bgra);

        // We're done with the ColorFrame
        colorFrame.Dispose();
        colorFrame = null;

        // NOTE(review): bodyIndexFrameDescription is declared but unused below.
        FrameDescription bodyIndexFrameDescription = bodyIndexFrame.FrameDescription;

        // Access the body index frame data directly via LockImageBuffer to avoid making a copy
        bodyIndexFrameData = bodyIndexFrame.LockImageBuffer();

        int colorMappedToDepthPointCount = this.colorMappedToDepthPoints.Length;

        unsafe
        {
            bodyIndexByteAccess = (IBufferByteAccess)bodyIndexFrameData;
            byte *bodyIndexBytes = null;
            bodyIndexByteAccess.Buffer(out bodyIndexBytes);

            fixed(DepthSpacePoint *colorMappedToDepthPointsPointer = this.colorMappedToDepthPoints)
            {
                // NOTE(review): unlike the other COM references in this method, this
                // IBufferByteAccess is never passed to Marshal.ReleaseComObject —
                // confirm whether that is intentional.
                IBufferByteAccess bitmapBackBufferByteAccess = (IBufferByteAccess)this.bitmap.PixelBuffer;
                byte *bitmapBackBufferBytes = null;
                bitmapBackBufferByteAccess.Buffer(out bitmapBackBufferBytes);

                // Treat the color data as 4-byte pixels
                uint *bitmapPixelsPointer = (uint *)bitmapBackBufferBytes;

                // Loop over each row and column of the color image
                // Zero out any pixels that don't correspond to a body index
                for (int colorIndex = 0; colorIndex < colorMappedToDepthPointCount; ++colorIndex)
                {
                    float colorMappedToDepthX = colorMappedToDepthPointsPointer[colorIndex].X;
                    float colorMappedToDepthY = colorMappedToDepthPointsPointer[colorIndex].Y;

                    // The sentinel value is -inf, -inf, meaning that no depth pixel corresponds to this color pixel.
                    if (!float.IsNegativeInfinity(colorMappedToDepthX) &&
                        !float.IsNegativeInfinity(colorMappedToDepthY))
                    {
                        // Make sure the depth pixel maps to a valid point in color space
                        int depthX = (int)(colorMappedToDepthX + 0.5f);
                        int depthY = (int)(colorMappedToDepthY + 0.5f);

                        // If the point is not valid, there is no body index there.
                        if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                        {
                            int depthIndex = (depthY * depthWidth) + depthX;

                            // If we are tracking a body for the current pixel, do not zero out the pixel
                            if (bodyIndexBytes[depthIndex] != 0xff)
                            {
                                continue;
                            }
                        }
                    }

                    // Background pixel: clear it (all four BGRA bytes at once).
                    bitmapPixelsPointer[colorIndex] = 0;
                }
            }

            this.bitmap.Invalidate();
        }
    }
    finally
    {
        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (depthFrameData != null)
        {
            // We must force a release of the IBuffer in order to ensure that we have dropped all references to it.
            System.Runtime.InteropServices.Marshal.ReleaseComObject(depthFrameData);
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }

        if (bodyIndexFrameData != null)
        {
            // We must force a release of the IBuffer in order to ensure that we have dropped all references to it.
            System.Runtime.InteropServices.Marshal.ReleaseComObject(bodyIndexFrameData);
        }

        if (bodyIndexByteAccess != null)
        {
            // We must force a release of the IBuffer in order to ensure that we have dropped all references to it.
            System.Runtime.InteropServices.Marshal.ReleaseComObject(bodyIndexByteAccess);
        }
    }
}
/// <summary>
/// Handles the multi-source frame data (body index, color, body, depth) arriving from
/// the sensor. The streams are currently processed sequentially; they could be processed
/// concurrently later to improve performance.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // FIX: the multi-source frame can expire before this event is handled; previously a
    // null frame caused a NullReferenceException on the first FrameReference access below.
    if (multiSourceFrame == null)
    {
        return;
    }

    bool bodyIndexFrameProcessed = false;

    // Process the body index stream.
    using (BodyIndexFrame bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame())
    {
        if (bodyIndexFrame != null)
        {
            // The fastest way to process the body index data is to directly access
            // the underlying buffer.
            using (Microsoft.Kinect.KinectBuffer bodyIndexBuffer = bodyIndexFrame.LockImageBuffer())
            {
                // Verify data and write the color data to the display bitmap.
                if (((this.bodyIndexFrameDescription.Width * this.bodyIndexFrameDescription.Height) == bodyIndexBuffer.Size) &&
                    (this.bodyIndexFrameDescription.Width == this.bodyIndexBitmap.PixelWidth) &&
                    (this.bodyIndexFrameDescription.Height == this.bodyIndexBitmap.PixelHeight))
                {
                    this.ProcessBodyIndexFrameData(bodyIndexBuffer.UnderlyingBuffer, bodyIndexBuffer.Size);

                    // Keep a managed copy of the raw body index bytes for later use.
                    this.bodyIndexBytes = new byte[bodyIndexBuffer.Size];
                    Marshal.Copy(bodyIndexBuffer.UnderlyingBuffer, this.bodyIndexBytes, 0, (int)bodyIndexBuffer.Size);
                    bodyIndexFrameProcessed = true;
                }
            }
        }
    }

    if (bodyIndexFrameProcessed)
    {
        this.RenderBodyIndexPixels();
    }

    // Process the color image.
    using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
    {
        if (colorFrame != null)
        {
            FrameDescription colorFrameDescription = colorFrame.FrameDescription;

            using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
            {
                this.colorBitmap.Lock();

                // Verify data and write the new color frame data to the display bitmap.
                if ((colorFrameDescription.Width == this.colorBitmap.PixelWidth) &&
                    (colorFrameDescription.Height == this.colorBitmap.PixelHeight))
                {
                    // 4 bytes per pixel: the frame is converted to BGRA.
                    colorFrame.CopyConvertedFrameDataToIntPtr(
                        this.colorBitmap.BackBuffer,
                        (uint)(colorFrameDescription.Width * colorFrameDescription.Height * 4),
                        ColorImageFormat.Bgra);

                    this.colorBitmap.AddDirtyRect(new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight));
                }

                this.colorBitmap.Unlock();
            }
        }
    }

    // Process the body stream (skeleton data).
    using (BodyFrame bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
    {
        if (bodyFrame != null)
        {
            if (this.bodies == null)
            {
                this.bodies = new Body[bodyFrame.BodyCount];
            }

            bodyFrame.GetAndRefreshBodyData(this.bodies);
        }
    }

    // Process the depth image.
    using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
    {
        if (depthFrame != null)
        {
            depthFrameDescription = depthFrame.FrameDescription;

            using (KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
            {
                // NOTE(review): assumes this.depthBytes was pre-allocated to at least
                // depthBuffer.Size bytes — confirm against the initialization code.
                Marshal.Copy(depthBuffer.UnderlyingBuffer, this.depthBytes, 0, (int)depthBuffer.Size);
            }
        }
    }
}
/// <summary>
/// Handles the body index frame data arriving from the sensor. While the frame counter
/// is inside the [min_, max_) capture window, the frame is processed, rendered, and
/// saved to disk as "bodyIndexN.png" under the configured capture directory.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_FrameArrived(object sender, BodyIndexFrameArrivedEventArgs e)
{
    if (frameCount >= min_ && frameCount < max_)
    {
        bool bodyIndexFrameProcessed = false;

        using (BodyIndexFrame bodyIndexFrame = e.FrameReference.AcquireFrame())
        {
            if (bodyIndexFrame != null)
            {
                // The fastest way to process the body index data is to directly access
                // the underlying buffer.
                using (Microsoft.Kinect.KinectBuffer bodyIndexBuffer = bodyIndexFrame.LockImageBuffer())
                {
                    // Verify data and write the color data to the display bitmap.
                    if (((this.bodyIndexFrameDescription.Width * this.bodyIndexFrameDescription.Height) == bodyIndexBuffer.Size) &&
                        (this.bodyIndexFrameDescription.Width == this.bodyIndexBitmap.PixelWidth) &&
                        (this.bodyIndexFrameDescription.Height == this.bodyIndexBitmap.PixelHeight))
                    {
                        this.ProcessBodyIndexFrameData(bodyIndexBuffer.UnderlyingBuffer, bodyIndexBuffer.Size);
                        bodyIndexFrameProcessed = true;
                    }
                }
            }
        }

        if (bodyIndexFrameProcessed)
        {
            this.RenderBodyIndexPixels();

            if (this.bodyIndexBitmap != null)
            {
                // Create a PNG bitmap encoder which knows how to save a .png file,
                // and add a frame created from the writable bitmap.
                BitmapEncoder encoder = new PngBitmapEncoder();
                encoder.Frames.Add(BitmapFrame.Create(this.bodyIndexBitmap));

                // FIX: build the path with Path.Combine(directory, fileName). The previous
                // code concatenated directory and file name by hand and then passed the
                // result through a single-argument Path.Combine, which is a no-op.
                string path = Path.Combine(path_, "bodyIndex" + frameCount.ToString() + ".png");

                // Write the new file to disk.
                try
                {
                    // FileStream is IDisposable.
                    using (FileStream fs = new FileStream(path, FileMode.Create))
                    {
                        encoder.Save(fs);
                    }
                }
                catch (IOException)
                {
                    // Best-effort save: a failed write for one frame is ignored so the
                    // capture can continue. NOTE(review): consider logging the path.
                }
            }

            // Only advance the counter when a frame was actually processed, so the
            // capture window yields the requested number of saved frames.
            frameCount++;
        }
    }
    else
    {
        frameCount++;
        return;
    }
}
/// <summary>
/// Produces an <c>IImageData</c> view over the current frame by pairing the frame's
/// description with its locked underlying Kinect image buffer.
/// </summary>
/// <returns>Image data backed by the frame's locked buffer.</returns>
public IImageData GetData()
{
    var description = frame.FrameDescription;
    var lockedBuffer = frame.LockImageBuffer();
    return new KinectBufferImageData(description, lockedBuffer);
}
/// <summary>
/// Handles the depth/color/body-index frames arriving from the sensor: maps the color
/// frame into depth space, copies the converted BGRA color data into the back buffer of
/// <c>bitmapBody</c>, then zeroes out every color pixel that does not land on a tracked
/// body pixel (green-screen effect), and finally shows the result in <c>kinectImage</c>.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
public void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    int depthWidth = 0;
    int depthHeight = 0;

    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    bool isBitmapLocked = false;

    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }

    // try/finally guarantees the frames are disposed and the bitmap back buffer is
    // unlocked on every exit path, including early returns and exceptions.
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();

        // If any frame has expired by the time we process this event, return.
        // The "finally" statement will Dispose any that are not null.
        if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
        {
            return;
        }

        // Process Depth.
        FrameDescription depthFrameDescription = depthFrame.FrameDescription;
        depthWidth = depthFrameDescription.Width;
        depthHeight = depthFrameDescription.Height;

        // Access the depth frame data directly via LockImageBuffer to avoid making a copy.
        using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
        {
            this.coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                depthFrameData.UnderlyingBuffer,
                depthFrameData.Size,
                this.colorMappedToDepthPoints);
        }

        // We're done with the DepthFrame.
        depthFrame.Dispose();
        depthFrame = null;

        // Process Color: lock the bitmap for writing and copy the converted BGRA data in.
        this.bitmapBody.Lock();
        isBitmapLocked = true;

        colorFrame.CopyConvertedFrameDataToIntPtr(this.bitmapBody.BackBuffer, this.bitmapBackBufferSize, ColorImageFormat.Bgra);

        // We're done with the ColorFrame.
        colorFrame.Dispose();
        colorFrame = null;

        // We'll access the body index data directly to avoid a copy.
        using (KinectBuffer bodyIndexData = bodyIndexFrame.LockImageBuffer())
        {
            unsafe
            {
                byte* bodyIndexDataPointer = (byte*)bodyIndexData.UnderlyingBuffer;
                int colorMappedToDepthPointCount = this.colorMappedToDepthPoints.Length;

                fixed (DepthSpacePoint* colorMappedToDepthPointsPointer = this.colorMappedToDepthPoints)
                {
                    // Treat the color data as 4-byte pixels.
                    uint* bitmapPixelsPointer = (uint*)this.bitmapBody.BackBuffer;

                    // Loop over each row and column of the color image;
                    // zero out any pixels that don't correspond to a body index.
                    for (int colorIndex = 0; colorIndex < colorMappedToDepthPointCount; ++colorIndex)
                    {
                        float colorMappedToDepthX = colorMappedToDepthPointsPointer[colorIndex].X;
                        float colorMappedToDepthY = colorMappedToDepthPointsPointer[colorIndex].Y;

                        // The sentinel value is -inf, -inf, meaning that no depth pixel
                        // corresponds to this color pixel.
                        if (!float.IsNegativeInfinity(colorMappedToDepthX) &&
                            !float.IsNegativeInfinity(colorMappedToDepthY))
                        {
                            // Round to the nearest depth pixel.
                            int depthX = (int)(colorMappedToDepthX + 0.5f);
                            int depthY = (int)(colorMappedToDepthY + 0.5f);

                            // If the point is not valid, there is no body index there.
                            if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                            {
                                int depthIndex = (depthY * depthWidth) + depthX;

                                // If we are tracking a body for the current pixel, do not zero out the pixel.
                                if (bodyIndexDataPointer[depthIndex] != 0xff)
                                {
                                    continue;
                                }
                            }
                        }

                        bitmapPixelsPointer[colorIndex] = 0;
                    }
                }

                this.bitmapBody.AddDirtyRect(new Int32Rect(0, 0, this.bitmapBody.PixelWidth, this.bitmapBody.PixelHeight));
            }
        }
    }
    finally
    {
        if (isBitmapLocked)
        {
            this.bitmapBody.Unlock();
        }

        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }
    }

    // Show the composited frame. (Removed a large block of commented-out legacy code
    // that duplicated this pipeline using managed array copies.)
    kinectImage.Source = bitmapBody;
}
/// <summary>
/// Legacy background-removal pass: maps the color frame into depth space, copies the
/// converted BGRA color data into <c>_bitmap</c>'s back buffer, and zeroes out every
/// color pixel that does not land on a tracked body pixel.
/// Disposes <paramref name="depthFrame"/> and <paramref name="colorFrame"/> itself.
/// </summary>
/// <param name="depthFrame">depth frame; disposed by this method</param>
/// <param name="colorFrame">color frame; disposed by this method</param>
/// <param name="bodyIndexFrame">body index frame; its buffer is locked only for the duration of this call</param>
public void ProcessBackgroundOld(DepthFrame depthFrame, ColorFrame colorFrame, BodyIndexFrame bodyIndexFrame)
{
    // Process Depth. NOTE(review): assumes the display size equals the depth frame
    // size — confirm _displayWidth/_displayHeight come from the depth FrameDescription.
    var depthWidth = _displayWidth;
    var depthHeight = _displayHeight;

    // Access the depth frame data directly via LockImageBuffer to avoid making a copy.
    using (var depthFrameData = depthFrame.LockImageBuffer())
    {
        _coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
            depthFrameData.UnderlyingBuffer,
            depthFrameData.Size,
            _colorMappedToDepthPoints);
    }

    // We're done with the DepthFrame.
    depthFrame.Dispose();

    // Lock the bitmap for writing. NOTE(review): this method never calls
    // _bitmap.Unlock(); the caller is expected to unlock based on _isBitmapLocked.
    _bitmap.Lock();
    _isBitmapLocked = true;

    colorFrame.CopyConvertedFrameDataToIntPtr(_bitmap.BackBuffer, _bitmapBackBufferSize, ColorImageFormat.Bgra);

    // We're done with the ColorFrame.
    colorFrame.Dispose();

    // We'll access the body index data directly to avoid a copy.
    using (var bodyIndexData = bodyIndexFrame.LockImageBuffer())
    {
        unsafe
        {
            var bodyIndexDataPointer = (byte*)bodyIndexData.UnderlyingBuffer;
            var colorMappedToDepthPointCount = _colorMappedToDepthPoints.Length;

            fixed (DepthSpacePoint* colorMappedToDepthPointsPointer = _colorMappedToDepthPoints)
            {
                // Treat the color data as 4-byte pixels.
                var bitmapPixelsPointer = (uint*)_bitmap.BackBuffer;

                // Loop over each row and column of the color image;
                // zero out any pixels that don't correspond to a body index.
                for (var colorIndex = 0; colorIndex < colorMappedToDepthPointCount; ++colorIndex)
                {
                    var colorMappedToDepthX = colorMappedToDepthPointsPointer[colorIndex].X;
                    var colorMappedToDepthY = colorMappedToDepthPointsPointer[colorIndex].Y;

                    // The sentinel value is -inf, -inf, meaning that no depth pixel
                    // corresponds to this color pixel.
                    if (!float.IsNegativeInfinity(colorMappedToDepthX) &&
                        !float.IsNegativeInfinity(colorMappedToDepthY))
                    {
                        // FIX: round to the nearest depth pixel (+0.5f) instead of
                        // truncating, matching every other mapping loop in this file.
                        var depthX = (int)(colorMappedToDepthX + 0.5f);
                        var depthY = (int)(colorMappedToDepthY + 0.5f);

                        // If the point is not valid, there is no body index there.
                        if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                        {
                            var depthIndex = (depthY * depthWidth) + depthX;

                            // If we are tracking a body for the current pixel, do not zero out the pixel.
                            if (bodyIndexDataPointer[depthIndex] != 0xff)
                            {
                                continue;
                            }
                        }
                    }

                    bitmapPixelsPointer[colorIndex] = 0;
                }
            }

            _bitmap.AddDirtyRect(new Int32Rect(0, 0, _bitmap.PixelWidth, _bitmap.PixelHeight));
        }
    }
}
/// <summary>
/// Handles the depth/color/body index frame data arriving from the sensor.
/// Every 10th frame, accumulates a running per-pixel average of where bodies have been
/// seen ("heat map"); depending on <c>showAverage</c>, either displays that normalized
/// average alone or overlays it (bitwise OR) onto the live color image.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    int depthWidth = 0;
    int depthHeight = 0;

    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    bool isBitmapLocked = false;

    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }

    // We use a try/finally to ensure that we clean up before we exit the function.
    // This includes calling Dispose on any Frame objects that we may have and unlocking the bitmap back buffer.
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();

        // If any frame has expired by the time we process this event, return.
        // The "finally" statement will Dispose any that are not null.
        if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
        {
            return;
        }

        // Process Depth
        FrameDescription depthFrameDescription = depthFrame.FrameDescription;
        depthWidth = depthFrameDescription.Width;
        depthHeight = depthFrameDescription.Height;

        // In average-only mode the color mapping is skipped entirely; the bitmap is
        // never locked, so the finally block's isBitmapLocked guard stays false.
        if (!showAverage)
        {
            // Access the depth frame data directly via LockImageBuffer to avoid making a copy
            using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
            {
                //Map depth data to color space
                this.coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                    depthFrameData.UnderlyingBuffer,
                    depthFrameData.Size,
                    this.colorMappedToDepthPoints);
            }
        }

        // We're done with the DepthFrame
        depthFrame.Dispose();
        depthFrame = null;

        // Process Color
        if (!showAverage)
        {
            // Lock the bitmap for writing
            this.bitmap.Lock();
            isBitmapLocked = true;

            colorFrame.CopyConvertedFrameDataToIntPtr(this.bitmap.BackBuffer, this.bitmapBackBufferSize, ColorImageFormat.Bgra);

            // We're done with the ColorFrame
            colorFrame.Dispose();
            colorFrame = null;
        }

        // We'll access the body index data directly to avoid a copy
        using (KinectBuffer bodyIndexData = bodyIndexFrame.LockImageBuffer())
        {
            unsafe
            {
                byte *bodyIndexDataPointer = (byte *)bodyIndexData.UnderlyingBuffer;

                // Only sample every 10th frame into the running average to keep cost down.
                currentFrame++;
                if (currentFrame % 10 == 0)
                {
                    bool bodyFound = false;

                    //Story body data in a temporary array, where 1 corresponds to a location with body in it and 0 corresponds to a location without a body in it
                    for (int i = 0; i < (int)bodyIndexData.Size; ++i)
                    {
                        bool isBodyPixel = bodyIndexDataPointer[i] != 0xff;

                        if (isBodyPixel)
                        {
                            bodyFound = true;
                            tempPixels[i] = 1;
                        }
                        else
                        {
                            tempPixels[i] = 0;
                        }
                    }

                    if (bodyFound)
                    {
                        //We need to compute non normalized body intensities first - per pixel average over the entire lifespan of the program
                        // NOTE(review): the incremental mean divides by currentFrame, which counts
                        // every frame, not just the sampled (every-10th) ones — confirm intended.
                        float greatestValue = 0;
                        for (int i = 0; i < (int)bodyIndexData.Size; ++i)
                        {
                            nonNormalizeAveragePixels[i] = (nonNormalizeAveragePixels[i] * (currentFrame - 1) + tempPixels[i]) / currentFrame;

                            if (greatestValue < nonNormalizeAveragePixels[i])
                            {
                                greatestValue = nonNormalizeAveragePixels[i];
                            }
                        }

                        //Now we will create a normalized (0-255) image
                        // (greatestValue > 0 here because bodyFound guarantees at least one 1 sample)
                        for (int i = 0; i < (int)bodyIndexData.Size; ++i)
                        {
                            averagePixels[i] = (uint)(nonNormalizeAveragePixels[i] / greatestValue * 0xFF);
                        }

                        if (showAverage)
                        {
                            this.humanAverageBitmap.WritePixels(
                                new Int32Rect(0, 0, this.humanAverageBitmap.PixelWidth, this.humanAverageBitmap.PixelHeight),
                                this.averagePixels,
                                this.humanAverageBitmap.PixelWidth * (int)bytesPerPixel,
                                0);
                        }
                    }
                }

                // Average-only mode: nothing else to composite; the finally block still runs.
                if (showAverage)
                {
                    return;
                }

                int colorMappedToDepthPointCount = this.colorMappedToDepthPoints.Length;

                fixed (DepthSpacePoint *colorMappedToDepthPointsPointer = this.colorMappedToDepthPoints)
                {
                    // Treat the color data as 4-byte pixels
                    uint *bitmapPixelsPointer = (uint *)this.bitmap.BackBuffer;

                    // Loop over each row and column of the color image
                    // Zero out any pixels that don't correspond to a body index
                    for (int colorIndex = 0; colorIndex < colorMappedToDepthPointCount; ++colorIndex)
                    {
                        float colorMappedToDepthX = colorMappedToDepthPointsPointer[colorIndex].X;
                        float colorMappedToDepthY = colorMappedToDepthPointsPointer[colorIndex].Y;

                        // The sentinel value is -inf, -inf, meaning that no depth pixel corresponds to this color pixel.
                        if (!float.IsNegativeInfinity(colorMappedToDepthX) &&
                            !float.IsNegativeInfinity(colorMappedToDepthY))
                        {
                            // Make sure the depth pixel maps to a valid point in color space
                            int depthX = (int)(colorMappedToDepthX + 0.5f);
                            int depthY = (int)(colorMappedToDepthY + 0.5f);

                            // If the point is not valid, there is no body index there.
                            if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                            {
                                int depthIndex = (depthY * depthWidth) + depthX;

                                // Overlay the heat-map intensity onto the live pixel (bitwise OR).
                                bitmapPixelsPointer[colorIndex] = bitmapPixelsPointer[colorIndex] | averagePixels[depthIndex];
                            }
                        }
                    }
                }

                this.bitmap.AddDirtyRect(new Int32Rect(0, 0, this.bitmap.PixelWidth, this.bitmap.PixelHeight));
            }
        }
    }
    finally
    {
        if (isBitmapLocked)
        {
            this.bitmap.Unlock();
        }

        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }
    }
}
/// <summary>
/// Zeroes out every pixel of the bitmap's back buffer whose color-space position does
/// not map onto a tracked body pixel in the body index frame, leaving only the
/// player silhouette visible.
/// </summary>
/// <param name="bodyIndexFrame">body index frame whose buffer is read directly (no copy)</param>
/// <param name="depthWidth">width of the depth frame in pixels</param>
/// <param name="depthHeight">height of the depth frame in pixels</param>
private void maskBackground(BodyIndexFrame bodyIndexFrame, int depthWidth, int depthHeight)
{
    // Access the body index data directly so we never copy the frame.
    using (KinectBuffer indexBuffer = bodyIndexFrame.LockImageBuffer())
    {
        unsafe
        {
            byte* playerIndexPtr = (byte*)indexBuffer.UnderlyingBuffer;
            int pixelCount = this.colorMappedToDepthPoints.Length;

            fixed (DepthSpacePoint* mappedPointsPtr = this.colorMappedToDepthPoints)
            {
                // View the BGRA back buffer as one uint per pixel.
                uint* outputPixels = (uint*)this.bitmap.BackBuffer;

                for (int i = 0; i < pixelCount; ++i)
                {
                    DepthSpacePoint mapped = mappedPointsPtr[i];
                    bool keepPixel = false;

                    // -inf/-inf marks color pixels that have no corresponding depth pixel.
                    if (!float.IsNegativeInfinity(mapped.X) && !float.IsNegativeInfinity(mapped.Y))
                    {
                        // Round to the nearest depth pixel.
                        int dx = (int)(mapped.X + 0.5f);
                        int dy = (int)(mapped.Y + 0.5f);

                        if (dx >= 0 && dx < depthWidth && dy >= 0 && dy < depthHeight)
                        {
                            // 0xff means "no body here"; any other value is a tracked body index.
                            keepPixel = playerIndexPtr[(dy * depthWidth) + dx] != 0xff;
                        }
                    }

                    if (!keepPixel)
                    {
                        outputPixels[i] = 0;
                    }
                }

                this.bitmap.AddDirtyRect(new Int32Rect(0, 0, this.bitmap.PixelWidth, this.bitmap.PixelHeight));
            }
        }
    }
}
/// <summary>
/// Dispatches the multi-source frame to the processing path selected by
/// <c>CurrentDisplayFrameType</c> (infrared, color, depth, body mask, body joints, or
/// background-removed). The body-mask and background-removed paths use try/finally to
/// guarantee frame disposal and COM buffer release.
/// </summary>
/// <param name="sender">the multi-source frame reader raising the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(MultiSourceFrameReader sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }

    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    InfraredFrame infraredFrame = null;
    BodyFrame bodyFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    IBuffer depthFrameDataBuffer = null;
    IBuffer bodyIndexFrameData = null;
    // Com interface for unsafe byte manipulation
    IBufferByteAccess bufferByteAccess = null;

    switch (CurrentDisplayFrameType)
    {
        case DisplayFrameType.Infrared:
            using (infraredFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame())
            {
                ShowInfraredFrame(infraredFrame);
            }
            break;

        case DisplayFrameType.Color:
            using (colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
            {
                ShowColorFrame(colorFrame);
            }
            break;

        case DisplayFrameType.Depth:
            using (depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
            {
                ShowDepthFrame(depthFrame);
            }
            break;

        case DisplayFrameType.BodyMask:
            // Put in a try catch to utilise finally() and clean up frames
            try
            {
                depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
                bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();
                colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();

                // If any frame has expired, the finally block disposes the ones we did get.
                if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
                {
                    return;
                }

                // Access the depth frame data directly via LockImageBuffer to avoid making a copy
                depthFrameDataBuffer = depthFrame.LockImageBuffer();
                this.coordinateMapper.MapColorFrameToDepthSpaceUsingIBuffer(depthFrameDataBuffer, this.colorMappedToDepthPoints);

                // Process Color
                colorFrame.CopyConvertedFrameDataToBuffer(this.bitmap.PixelBuffer, ColorImageFormat.Bgra);

                // Access the body index frame data directly via LockImageBuffer to avoid making a copy
                bodyIndexFrameData = bodyIndexFrame.LockImageBuffer();

                // NOTE(review): bufferByteAccess is passed by value and is still null here,
                // so it remains null in the finally block and its ReleaseComObject branch is
                // dead code — unless ShowMappedBodyFrame is meant to take it by ref; confirm
                // its signature.
                ShowMappedBodyFrame(depthFrame.FrameDescription.Width, depthFrame.FrameDescription.Height, bodyIndexFrameData, bufferByteAccess);
            }
            finally
            {
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                }

                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                }

                if (bodyIndexFrame != null)
                {
                    bodyIndexFrame.Dispose();
                }

                if (depthFrameDataBuffer != null)
                {
                    // We must force a release of the IBuffer in order to ensure that we have dropped all references to it.
                    System.Runtime.InteropServices.Marshal.ReleaseComObject(depthFrameDataBuffer);
                }

                if (bodyIndexFrameData != null)
                {
                    // Same forced COM release for the body index buffer.
                    System.Runtime.InteropServices.Marshal.ReleaseComObject(bodyIndexFrameData);
                }

                if (bufferByteAccess != null)
                {
                    // See NOTE(review) above — this branch appears unreachable as written.
                    System.Runtime.InteropServices.Marshal.ReleaseComObject(bufferByteAccess);
                }
            }
            break;

        case DisplayFrameType.BodyJoints:
            using (bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
            {
                ShowBodyJoints(bodyFrame);
            }
            break;

        case DisplayFrameType.BackgroundRemoved:
            // Put in a try catch to utilise finally() and clean up frames
            try
            {
                depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
                colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();

                if ((depthFrame == null) || (colorFrame == null))
                {
                    return;
                }

                // This path copies the depth data into a managed array instead of locking the buffer.
                depthFrame.CopyFrameDataToArray(depthFrameData);
                this.coordinateMapper.MapColorFrameToDepthSpace(depthFrameData, this.colorMappedToDepthPoints);

                // Process Color.
                colorFrame.CopyConvertedFrameDataToBuffer(this.bitmap.PixelBuffer, ColorImageFormat.Bgra);

                ShowMappedColorBackgroundRemoved(colorMappedToDepthPoints, depthFrameData, depthFrame.FrameDescription);
            }
            finally
            {
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                }

                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                }
            }
            break;

        default:
            break;
    }
}
/// <summary>
/// Handles the depth/color/body-index/body frames arriving from the sensor. Refreshes
/// body tracking state (starting the capture countdown when a tracked body appears and
/// no capture is in progress), then composites a green-screen image into
/// <c>liveBitmap</c> by zeroing color pixels that do not map onto a tracked body.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    int depthWidth = 0;
    int depthHeight = 0;

    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    BodyFrame bodyFrame = null;
    bool isBitmapLocked = false;

    try
    {
        MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

        // FIX: the multi-source frame can expire before this event is handled;
        // previously a null frame caused a NullReferenceException below.
        if (multiSourceFrame == null)
        {
            return;
        }

        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();
        bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();

        // If any frame has expired, the finally block disposes the ones we did get.
        if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null) || (bodyFrame == null))
        {
            return;
        }

        if (this.bodies == null)
        {
            this.bodies = new Body[bodyFrame.BodyCount];
        }

        bodyFrame.GetAndRefreshBodyData(this.bodies);
        bodyFrame.Dispose();
        bodyFrame = null;

        // FIX: stop scanning once a tracked body is found (the loop previously used
        // "continue", pointlessly walking the remaining bodies).
        bool isBodyTracked = false;
        foreach (Body body in this.bodies)
        {
            if (body.IsTracked)
            {
                isBodyTracked = true;
                break;
            }
        }

        hasTrackedBodies = isBodyTracked;
        if (hasTrackedBodies && !isCapturing)
        {
            BeginCountdown();
        }

        // Process Depth: map the color frame into depth space without copying the buffer.
        FrameDescription depthFrameDescription = depthFrame.FrameDescription;
        depthWidth = depthFrameDescription.Width;
        depthHeight = depthFrameDescription.Height;

        using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
        {
            this.coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                depthFrameData.UnderlyingBuffer,
                depthFrameData.Size,
                this.colorMappedToDepthPoints);
        }

        depthFrame.Dispose();
        depthFrame = null;

        // Process Color: lock the bitmap and copy the converted BGRA data in.
        this.liveBitmap.Lock();
        isBitmapLocked = true;

        colorFrame.CopyConvertedFrameDataToIntPtr(this.liveBitmap.BackBuffer, this.bitmapBackBufferSize, ColorImageFormat.Bgra);
        colorFrame.Dispose();
        colorFrame = null;

        // Access the body index data directly to avoid a copy.
        using (KinectBuffer bodyIndexData = bodyIndexFrame.LockImageBuffer())
        {
            unsafe
            {
                byte* bodyIndexDataPointer = (byte*)bodyIndexData.UnderlyingBuffer;
                int colorMappedToDepthPointCount = this.colorMappedToDepthPoints.Length;

                fixed (DepthSpacePoint* colorMappedToDepthPointsPointer = this.colorMappedToDepthPoints)
                {
                    // Treat the color data as 4-byte pixels.
                    uint* bitmapPixelsPointer = (uint*)this.liveBitmap.BackBuffer;

                    // Zero out every color pixel that does not land on a tracked body pixel.
                    for (int colorIndex = 0; colorIndex < colorMappedToDepthPointCount; ++colorIndex)
                    {
                        float colorMappedToDepthX = colorMappedToDepthPointsPointer[colorIndex].X;
                        float colorMappedToDepthY = colorMappedToDepthPointsPointer[colorIndex].Y;

                        // -inf/-inf marks color pixels with no corresponding depth pixel.
                        if (!float.IsNegativeInfinity(colorMappedToDepthX) &&
                            !float.IsNegativeInfinity(colorMappedToDepthY))
                        {
                            // Round to the nearest depth pixel.
                            int depthX = (int)(colorMappedToDepthX + 0.5f);
                            int depthY = (int)(colorMappedToDepthY + 0.5f);

                            if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                            {
                                int depthIndex = (depthY * depthWidth) + depthX;

                                // 0xff means "no body"; keep pixels that are on a body.
                                if (bodyIndexDataPointer[depthIndex] != 0xff)
                                {
                                    continue;
                                }
                            }
                        }

                        bitmapPixelsPointer[colorIndex] = 0;
                    }
                }

                this.liveBitmap.AddDirtyRect(new Int32Rect(0, 0, this.liveBitmap.PixelWidth, this.liveBitmap.PixelHeight));
            }
        }
    }
    finally
    {
        if (isBitmapLocked)
        {
            this.liveBitmap.Unlock();
        }

        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }

        if (bodyFrame != null)
        {
            bodyFrame.Dispose();
        }
    }
}
/// <summary>
/// Green-screen composite using depth-to-color "splats": maps the depth frame into
/// color space, then for each horizontal run of player pixels in the body index data,
/// copies a small padded rectangle ("splat") of the source color image into the
/// display buffer. All three frames are disposed by this method (hence the ref
/// parameters, which are nulled after disposal). Per-stage timings are accumulated
/// and published via Spam.TopLine1.
/// </summary>
/// <param name="depthFrame">depth frame; disposed and set to null</param>
/// <param name="colorFrame">color frame; disposed and set to null</param>
/// <param name="bodyIndexFrame">body index frame; disposed and set to null</param>
/// <param name="depthWidth">depth frame width in pixels</param>
/// <param name="depthHeight">depth frame height in pixels</param>
/// <param name="colorWidth">color frame width in pixels</param>
/// <param name="colorHeight">color frame height in pixels</param>
void GreenScreenMappingDepthToColorSplats(ref DepthFrame depthFrame, ref ColorFrame colorFrame, ref BodyIndexFrame bodyIndexFrame, int depthWidth, int depthHeight, int colorWidth, int colorHeight)
{
    m_stopwatch.Restart();

    using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
    {
        // Need to know the color space point for each depth space point, but this is much less data
        // and much faster to compute than mapping the other way
        m_coordinateMapper.MapDepthFrameToColorSpaceUsingIntPtr(
            depthFrameData.UnderlyingBuffer,
            depthFrameData.Size,
            m_depthToColorSpacePoints);
    }

    m_depthMapTimer.Update(m_stopwatch.ElapsedMilliseconds);
    m_stopwatch.Restart();

    // We're done with the DepthFrame
    depthFrame.Dispose();
    depthFrame = null;

    lock (m_displayPixels)
    {
        // [KinectThread] avoid racing display buffer refresh with render (can cause missing images)
        // have to clear the display pixels so we can copy only the BGRA image of the player(s)
        Array.Clear(m_displayPixels, 0, m_displayPixels.Length);

        unsafe
        {
            fixed (byte* colorFrameDataPtr = &m_colorFrameData[0])
            {
                colorFrame.CopyConvertedFrameDataToIntPtr(new IntPtr(colorFrameDataPtr), (uint)m_colorFrameData.Length, ColorImageFormat.Bgra);
            }
        }

        // done with the colorFrame
        colorFrame.Dispose();
        colorFrame = null;

        m_colorCopyTimer.Update(m_stopwatch.ElapsedMilliseconds);
        m_stopwatch.Restart();

        // We'll access the body index data directly to avoid a copy
        using (KinectBuffer bodyIndexData = bodyIndexFrame.LockImageBuffer())
        {
            unsafe
            {
                byte* bodyIndexDataPointer = (byte*)bodyIndexData.UnderlyingBuffer;
                uint bodyIndexDataLength = bodyIndexData.Size;
                int colorMappedToDepthPointCount = m_colorToDepthSpacePoints.Length;

                fixed (ColorSpacePoint* depthMappedToColorPointsPointer = m_depthToColorSpacePoints)
                {
                    fixed (byte* bitmapPixelsBytePointer = &m_displayPixels[0])
                    {
                        fixed (byte* sourcePixelsBytePointer = &m_colorFrameData[0])
                        {
                            // View both BGRA byte buffers as one uint per pixel.
                            uint* bitmapPixelsPointer = (uint*)bitmapPixelsBytePointer;
                            uint* sourcePixelsPointer = (uint*)sourcePixelsBytePointer;

                            // We don't go all the way to the edge of the depth buffer, to eliminate a chance
                            // that a splat will go outside the edge of the color buffer when mapped to color
                            // space. In the x direction this will never happen anyway since the depth FOV
                            // is so much narrower than the color FOV.
                            const int Margin = 2;
                            for (int y = Margin; y < depthHeight - Margin; y++)
                            {
                                for (int x = 0; x < depthWidth; x++)
                                {
                                    // Scan forwards until we find a non-0xff value in the body index data.
                                    int depthIndex = y * depthWidth + x;
                                    if (bodyIndexDataPointer[depthIndex] != 0xff)
                                    {
                                        int depthIndex2 = depthIndex;

                                        // We found the beginning of a horizontal run of player pixels.
                                        // Scan to the end.
                                        int runWidth;
                                        for (runWidth = 1; runWidth + x < depthWidth; runWidth++)
                                        {
                                            depthIndex2++;
                                            if (bodyIndexDataPointer[depthIndex2] == 0xff)
                                            {
                                                break;
                                            }
                                        }

                                        // Now splat from (x, y) to (x + runWidth, y)
                                        // (depthIndex2 - 1 is the last player pixel of the run, whether the
                                        // scan ended on a 0xff pixel or at the row edge).
                                        float depthMappedToColorLeftX = depthMappedToColorPointsPointer[depthIndex].X;
                                        float depthMappedToColorLeftY = depthMappedToColorPointsPointer[depthIndex].Y;
                                        float depthMappedToColorRightX = depthMappedToColorPointsPointer[depthIndex2 - 1].X;
                                        float depthMappedToColorRightY = depthMappedToColorPointsPointer[depthIndex2 - 1].Y;

                                        // Now copy color pixels along that rectangle.
                                        const int splatHMargin = 2; // X margin of splat rectangle in color pixels
                                        const int splatVMargin = 3; // Y margin of splat rectangle in color pixels

                                        int minX = (int)Math.Min(depthMappedToColorLeftX, depthMappedToColorRightX) - splatHMargin;
                                        int minY = (int)Math.Min(depthMappedToColorLeftY, depthMappedToColorRightY) - splatVMargin;
                                        int maxX = (int)Math.Max(depthMappedToColorLeftX, depthMappedToColorRightX) + splatHMargin;
                                        int maxY = (int)Math.Max(depthMappedToColorLeftY, depthMappedToColorRightY) + splatVMargin;

                                        // Some edge of screen situations can result in color space points that are negative or otherwise
                                        // actually outside the color space coordinate range.
                                        Clamp(ref minX, colorWidth - 1);
                                        Clamp(ref minY, colorHeight - 1);
                                        Clamp(ref maxX, colorWidth - 1);
                                        Clamp(ref maxY, colorHeight - 1);

                                        // Copy the splat rectangle from the source color image to the display buffer.
                                        for (int colorY = minY; colorY < maxY; colorY++)
                                        {
                                            int colorIndex = colorY * colorWidth + minX;
                                            for (int colorX = minX; colorX < maxX; colorX++)
                                            {
                                                bitmapPixelsPointer[colorIndex] = sourcePixelsPointer[colorIndex];
                                                colorIndex++;
                                            }
                                        }

                                        // Skip past the run we just handled; the outer loop's x++ then
                                        // resumes at the pixel after the run.
                                        x += runWidth;
                                    }
                                }
                            }
                        }
                    }
                }
            }

            // Done with bodyIndexFrame
            bodyIndexFrame.Dispose();
            bodyIndexFrame = null;
        }

        m_colorScanTimer.Update(m_stopwatch.ElapsedMilliseconds);
        m_stopwatch.Restart();

        m_displayTexture.SetData(m_displayPixels);

        m_textureSetDataTimer.Update(m_stopwatch.ElapsedMilliseconds);
        m_stopwatch.Restart();
    }

    Spam.TopLine1 = string.Format("depth map: {0} msec; color copy: {1} msec; color scan: {2} msec; texture set: {3} msec",
        m_depthMapTimer.Average,
        m_colorCopyTimer.Average,
        m_colorScanTimer.Average,
        m_textureSetDataTimer.Average);
}
/// <summary>
/// Handles the depth/color/body index frame data arriving from the sensor.
/// Throttles processing to MAX_FPS, maps depth into color space, overlays
/// Canny edges and per-body bounding boxes on the color bitmap, estimates a
/// floor plane from fixed sample points, and reports detected body locations
/// through locationUpdated.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Throttle to MAX_FPS. BUGFIX: the original used TimeSpan.Milliseconds,
    // which is only the 0-999 ms *component* of the interval — for elapsed
    // times of one second or more it under-reports (and can be 0, making the
    // fps division meaningless). TotalMilliseconds is the real elapsed time.
    TimeSpan elapsedSpan = new TimeSpan(DateTime.Now.Ticks - this.timestamp.Ticks);
    if (elapsedSpan.TotalMilliseconds < (1000f / MAX_FPS))
    {
        return;
    }

    double fps = 1000f / elapsedSpan.TotalMilliseconds;
    this.timestamp = DateTime.Now;

    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    bool isBitmapLocked = false;

    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }

    // try/finally ensures we dispose any Frame objects we acquired and unlock
    // the bitmap back buffer on every exit path.
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();

        // If any frame has expired by the time we process this event, return.
        // The "finally" statement will Dispose any that are not null.
        if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
        {
            return;
        }

        // Process Depth: access the depth frame data directly via
        // LockImageBuffer to avoid making a copy while building both
        // coordinate mappings.
        using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
        {
            this.coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                depthFrameData.UnderlyingBuffer,
                depthFrameData.Size,
                this.colorMappedToDepthPoints);

            this.coordinateMapper.MapDepthFrameToColorSpaceUsingIntPtr(
                depthFrameData.UnderlyingBuffer,
                depthFrameData.Size,
                this.depthMappedToColorPoints);
        }

        // A managed copy of the raw depth values is still needed for
        // transformToLocation below.
        ushort[] depthData = new ushort[depthWidth * depthHeight];
        depthFrame.CopyFrameDataToArray(depthData);

        // We're done with the DepthFrame
        depthFrame.Dispose();
        depthFrame = null;

        // Process Color: lock the bitmap for writing and copy the converted
        // BGRA pixels straight into its back buffer.
        this.bitmap.Lock();
        isBitmapLocked = true;

        colorFrame.CopyConvertedFrameDataToIntPtr(this.bitmap.BackBuffer, this.bitmapBackBufferSize, ColorImageFormat.Bgra);

        // We're done with the ColorFrame
        colorFrame.Dispose();
        colorFrame = null;

        // We'll access the body index data directly to avoid a copy.
        using (KinectBuffer bodyIndexData = bodyIndexFrame.LockImageBuffer())
        {
            unsafe
            {
                byte* bodyIndexDataPointer = (byte*)bodyIndexData.UnderlyingBuffer;

                // One bounding rectangle per trackable body, initialized to an
                // "empty" sentinel (top/left at int.MaxValue, bottom/right 0).
                BodyRect[] bodys = new BodyRect[BODY_MAX_NUMBER];
                for (byte i = 0; i < BODY_MAX_NUMBER; ++i)
                {
                    bodys[i] = new BodyRect(int.MaxValue, 0, 0, int.MaxValue);
                }

                // Edge-detect the body index image; timed for the status line.
                Stopwatch watch = Stopwatch.StartNew();
                byte[] result = EdgeDetection.Canny(bodyIndexData.UnderlyingBuffer, depthWidth, depthHeight);
                watch.Stop();

                // Sample a fixed set of depth pixels and fit a plane through
                // them — presumably floor points used to estimate the sensor's
                // tilt and height. NOTE(review): the hard-coded coordinates
                // look installation-specific; confirm against the setup.
                List<Location> points = new List<Location>();
                points.Add(transformToLocation(440, 200, depthData));
                points.Add(transformToLocation(420, 230, depthData));
                points.Add(transformToLocation(470, 220, depthData));
                points.Add(transformToLocation(460, 300, depthData));
                points.Add(transformToLocation(400, 280, depthData));
                points.Add(transformToLocation(370, 280, depthData));
                Plane plane = PlaneDetection.calc(points);

                double angle = Math.Abs(Math.Atan(-1 / plane.k));
                double distance = plane.b * Math.Sin(angle);

                // Status string kept verbatim (including the "angel" label) so
                // any consumer parsing the status text keeps working.
                string output = string.Format("fps: {0:F}, It takes {1:F} ms for Canny. k: {2:F}, b: {3:F}, angel: {4:F}, distance: {5:F}",
                                              fps, watch.ElapsedMilliseconds, plane.k, plane.b, angle * 180 / Math.PI, distance);
                processStatusUpdated(output);

                fixed (byte* cannyResult = &result[0])
                fixed (DepthSpacePoint* colorMappedToDepthPointsPointer = this.colorMappedToDepthPoints)
                {
                    // Treat the color data as 4-byte pixels
                    uint* bitmapPixelsPointer = (uint*)this.bitmap.BackBuffer;

                    // Loop over each row and column of the color image
                    for (int y = 0; y < colorHeight; ++y)
                    {
                        for (int x = 0; x < colorWidth; ++x)
                        {
                            DepthSpacePoint depthPoint = getDepthPoint(x, y);
                            float colorMappedToDepthX = depthPoint.X;
                            float colorMappedToDepthY = depthPoint.Y;

                            // The sentinel value is -inf, -inf, meaning that no
                            // depth pixel corresponds to this color pixel.
                            if (!float.IsNegativeInfinity(colorMappedToDepthX) &&
                                !float.IsNegativeInfinity(colorMappedToDepthY))
                            {
                                // Make sure the depth pixel maps to a valid point in depth space
                                int depthX = (int)(colorMappedToDepthX + 0.5f);
                                int depthY = (int)(colorMappedToDepthY + 0.5f);

                                // If the point is not valid, there is no body index there.
                                if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                                {
                                    int depthIndex = (depthY * depthWidth) + depthX;

                                    // Paint detected edges opaque red (BGRA 0xffff0000).
                                    if (cannyResult[depthIndex] > 0)
                                    {
                                        bitmapPixelsPointer[y * colorWidth + x] = 0xffff0000;
                                    }

                                    // If we are tracking a body for the current pixel, ...
                                    int bodyIndex = bodyIndexDataPointer[depthIndex];
                                    if (bodyIndex != 0xff)
                                    {
                                        // Three-neighbor vote to reject isolated noise pixels.
                                        // BUGFIX: the original sampled the neighbors
                                        // unconditionally, reading outside the locked buffer for
                                        // pixels on the depth-frame border; border pixels are now
                                        // simply excluded from the vote.
                                        if (depthX >= 1 && depthX + 1 < depthWidth && depthY >= 1 && depthY + 1 < depthHeight)
                                        {
                                            uint count = 0;
                                            if (bodyIndex == bodyIndexDataPointer[(depthY - 1) * depthWidth + (depthX - 1)])
                                            {
                                                count++;
                                            }

                                            if (bodyIndex == bodyIndexDataPointer[depthY * depthWidth + (depthX + 1)])
                                            {
                                                count++;
                                            }

                                            if (bodyIndex == bodyIndexDataPointer[(depthY + 1) * depthWidth + depthX])
                                            {
                                                count++;
                                            }

                                            // All three neighbors agree: grow this body's
                                            // bounding box in color-space coordinates.
                                            if (count == 3)
                                            {
                                                bodys[bodyIndex].top = Math.Min(bodys[bodyIndex].top, y);
                                                bodys[bodyIndex].bottom = Math.Max(bodys[bodyIndex].bottom, y);
                                                bodys[bodyIndex].left = Math.Min(bodys[bodyIndex].left, x);
                                                bodys[bodyIndex].right = Math.Max(bodys[bodyIndex].right, x);
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }

                    // Visualize the plane-fit sample points on the bitmap.
                    for (int i = 0; i < points.Count; ++i)
                    {
                        ColorSpacePoint point = getColorPoint(points[i].depthFrameX, points[i].depthFrameY);
                        if (point.X != Double.NegativeInfinity)
                        {
                            DrawPoint(this.bitmap, point);
                        }
                    }

                    // Draw a rectangle around every body whose bounding box was
                    // touched at least once, and report its estimated location.
                    List<Location> locations = new List<Location>();
                    BodyRect initialBody = new BodyRect(int.MaxValue, 0, 0, int.MaxValue);
                    for (uint i = 0; i < BODY_MAX_NUMBER; i++)
                    {
                        BodyRect body = bodys[i];
                        if (!body.Equals(initialBody))
                        {
                            DrawRect(this.bitmap, body.top, body.right, body.bottom, body.left);

                            // Use the rectangle's center to estimate this body's
                            // real-world location from the raw depth data.
                            DepthSpacePoint point =
                                getDepthPoint((body.right + body.left) / 2, (body.bottom + body.top) / 2);
                            Location loc = transformToLocation(point, depthData, true);
                            if (loc.depth > 0)
                            {
                                locations.Add(loc);
                            }
                        }
                    }

                    locationUpdated(locations.ToArray());

                    this.bitmap.AddDirtyRect(new Int32Rect(0, 0, this.bitmap.PixelWidth, this.bitmap.PixelHeight));
                }
            }
        }
    }
    finally
    {
        if (isBitmapLocked)
        {
            this.bitmap.Unlock();
        }

        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }
    }
}