/// <summary>
/// Event handler invoked when the Kinect delivers a set of frames from multiple sources.
/// Renders the depth/body-index composite into the canvas background.
/// </summary>
/// <param name="sender">The object that raised the event (here, the Kinect reader).</param>
/// <param name="e">Event data supplied when the event fires.</param>
void MultiSourceFrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrame frames = this.multiSourceFrameReader.AcquireLatestFrame();
    if (frames == null)
    {
        return;
    }

    // "using" guarantees both frames are disposed on every path, including when
    // GetBitmapSource throws (the original leaked the frames in that case).
    using (DepthFrame depthFrame = frames.DepthFrameReference.AcquireFrame())
    {
        if (depthFrame == null)
        {
            return;
        }

        using (BodyIndexFrame bodyIndexFrame = frames.BodyIndexFrameReference.AcquireFrame())
        {
            if (bodyIndexFrame == null)
            {
                return;
            }

            this.canvas.Background = new ImageBrush(
                GetBitmapSource(depthFrame, bodyIndexFrame,
                    this.depthFrameDescription, this.bodyIndexFrameDescription));
        }
    }
}
// *************************************************************************************************************************
/// <summary>
/// Copies body, depth, color and body-index data from a multi-source frame into the cached arrays.
/// </summary>
/// <param name="_frame">The multi-source frame to read; may be null if it has expired.</param>
/// <returns>true when all four sub-frames were available and their data was copied; false otherwise.</returns>
bool RefreshFrame(MultiSourceFrame _frame) // update frame to our array
{
    if (_frame == null)
    {
        return false;
    }

    try
    {
        bodyframe = _frame.BodyFrameReference.AcquireFrame();
        depthFrame = _frame.DepthFrameReference.AcquireFrame();
        colorFrame = _frame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = _frame.BodyIndexFrameReference.AcquireFrame();

        // If any frame has expired by the time we process this event, return.
        // The "finally" block will dispose any that are not null.
        if (depthFrame == null || colorFrame == null || bodyIndexFrame == null || bodyframe == null)
        {
            return false;
        }

        if (BodyData == null)
        {
            BodyData = new Body[_Sensor.BodyFrameSource.BodyCount];
        }

        // Copy every sub-frame into our cached arrays/points.
        bodyframe.GetAndRefreshBodyData(BodyData);
        colorFrame.CopyConvertedFrameDataToArray(colorData, ColorImageFormat.Rgba);
        depthFrame.CopyFrameDataToArray(depthData);
        bodyIndexFrame.CopyFrameDataToArray(bodyIndexData);
        return true;
    }
    finally
    {
        // FIX: dispose AND null the fields. The original disposed them but left the
        // fields pointing at dead objects, so any later use of e.g. "bodyframe"
        // outside this method would hit an ObjectDisposedException instead of a null check.
        if (bodyframe != null) { bodyframe.Dispose(); bodyframe = null; }
        if (depthFrame != null) { depthFrame.Dispose(); depthFrame = null; }
        if (colorFrame != null) { colorFrame.Dispose(); colorFrame = null; }
        if (bodyIndexFrame != null) { bodyIndexFrame.Dispose(); bodyIndexFrame = null; }
    }
}
/// <summary>
/// Handles the depth/color/body index frame data arriving from the sensor.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    BodyFrame bodyFrame = null;

    _isBitmapLocked = false;

    var multiSourceFrame = e.FrameReference.AcquireFrame();

    // The frame may already have expired by the time this event is processed.
    if (multiSourceFrame == null)
    {
        return;
    }

    // try/finally guarantees the bitmap gets unlocked and every acquired frame is disposed.
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();
        bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();

        // Bail out when any sub-frame expired; "finally" disposes whichever are non-null.
        bool anyExpired = depthFrame == null
            || colorFrame == null
            || bodyIndexFrame == null
            || bodyFrame == null;
        if (anyExpired)
        {
            return;
        }

        ProcessBackgroundOld(depthFrame, colorFrame, bodyIndexFrame);
        ProcessBody(bodyFrame, false);
    }
    finally
    {
        if (_isBitmapLocked)
        {
            _bitmap.Unlock();
        }

        depthFrame?.Dispose();
        colorFrame?.Dispose();
        bodyIndexFrame?.Dispose();
        bodyFrame?.Dispose();
    }
}
/// <summary>
/// Store body index image.
/// </summary>
/// <param name="bodyIndexFrame">body index frame to be stored (disposed by this method)</param>
/// <param name="frameNumber">frame number used to build the output file name</param>
public static void Handle_BodyIndexFrame(BodyIndexFrame bodyIndexFrame, String frameNumber)
{
    using (Microsoft.Kinect.KinectBuffer bodyIndexBuffer = bodyIndexFrame.LockImageBuffer())
    {
        // Body-index data is one byte per pixel (Gray8), so the stride equals the width.
        int stride = bodyIndexWidth * 1;

        BitmapSource bitmapSource = BitmapSource.Create(
            bodyIndexWidth,
            bodyIndexHeight,
            96.0,
            96.0,
            PixelFormats.Gray8,
            null,
            bodyIndexBuffer.UnderlyingBuffer,
            (int)bodyIndexBuffer.Size,
            stride);

        String bodyIndexPath =
            FramesAndPaths.GetImageFilePath(FramesAndPaths.FileType.BodyIndexImage, frameNumber);
        bitmapSource.Save(bodyIndexPath + ".jpg", ImageFormat.Jpeg);
    }

    // Release bodyIndexFrame
    bodyIndexFrame.Dispose();
}
// Event handler invoked when a body-index frame arrives.
public void BodyIndexFrame_Arrived(object sender, BodyIndexFrameArrivedEventArgs e)
#region
{
    // "using" guarantees the frame is disposed even if copying/conversion throws
    // (the original leaked the frame on any exception before Dispose()).
    using (BodyIndexFrame bodyIndexFrame = e.FrameReference.AcquireFrame())
    {
        if (bodyIndexFrame == null)
        {
            return;
        }

        // 0xff where no person is present; 0-6(?) identifies a tracked body (per original note).
        bodyIndexFrame.CopyFrameDataToArray(bodyIndexBuffer);
        this.KinectImagetoMat(this.kinectImage, this.bodyIndexBuffer);
        //this._showImageEvent();
    }
}
/// <summary>
/// Handles an arriving body-index frame: records its timestamp and copies the raw data
/// under the lock. NOTE(review): despite the name, this handler receives BodyIndexFrame
/// events, not DepthFrame events — name kept for caller compatibility.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">body-index frame event arguments</param>
private void DepthFrameReady(object sender, BodyIndexFrameArrivedEventArgs e)
{
    // "using" guarantees disposal even if CopyFrameDataToArray throws
    // (the original leaked the frame on exception).
    using (BodyIndexFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        this.FInvalidate = true;
        this.frameindex = frame.RelativeTime.Ticks;
        lock (m_lock)
        {
            frame.CopyFrameDataToArray(this.rawdepth);
        }
    }
}
/// <summary>
/// Event handler invoked when the Kinect delivers a set of frames from multiple sources.
/// Updates the color canvas and the user-mask canvas backgrounds.
/// </summary>
/// <param name="sender">The object that raised the event (here, the Kinect reader).</param>
/// <param name="e">Event data supplied when the event fires.</param>
void MultiSourceFrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrame frames = this.multiSourceFrameReader.AcquireLatestFrame();
    if (frames == null)
    {
        return;
    }

    // Nested "using" blocks guarantee every acquired frame is disposed on all paths,
    // including when a later acquisition returns null or image processing throws
    // (the original leaked frames if GetColorImage/GetUserMaskImage threw).
    using (ColorFrame colorFrame = frames.ColorFrameReference.AcquireFrame())
    {
        if (colorFrame == null)
        {
            return;
        }

        using (DepthFrame depthFrame = frames.DepthFrameReference.AcquireFrame())
        {
            if (depthFrame == null)
            {
                return;
            }

            using (BodyIndexFrame bodyIndexFrame = frames.BodyIndexFrameReference.AcquireFrame())
            {
                if (bodyIndexFrame == null)
                {
                    return;
                }

                this.colorCanvas.Background = new ImageBrush(GetColorImage(colorFrame));
                this.userMaskCanvas.Background =
                    new ImageBrush(GetUserMaskImage(colorFrame, depthFrame, bodyIndexFrame));
            }
        }
    }
}
/// <summary>
/// Converts a raw body-index frame into a 4-bytes-per-pixel display buffer using the
/// per-body color lookup table. Pixels with no mapped body index get color 0.
/// </summary>
/// <param name="bodyIndexFrame">The body-index frame to process; ignored when null. Disposed by this method.</param>
private void ProcessBodyIndexFrame(BodyIndexFrame bodyIndexFrame)
{
    if (bodyIndexFrame != null)
    {
        byte[] outBuffer = _displayableBuffers[SourceType.BODY_INDEX];
        bodyIndexFrame.CopyFrameDataToArray(_rawBodyIndexPixels);
        bodyIndexFrame.Dispose();

        int outIndex = 0;
        for (int inIndex = 0; inIndex < _rawBodyIndexPixels.Length; ++inIndex)
        {
            int bodyIndex = _rawBodyIndexPixels[inIndex];
            UInt32 thisColor = 0;
            // Unmapped indices leave thisColor at 0 (fully transparent/black pixel).
            _bodyIndexToColorMap.TryGetValue(bodyIndex, out thisColor);

            // BUG FIX: the original shifted the top byte by 32, but C# masks a uint
            // shift count to 5 bits, so ">> 32" is ">> 0" and the subsequent cast to
            // byte always produced 0 for this channel. The top byte needs ">> 24".
            outBuffer[outIndex++] = (byte)((thisColor & 0xFF000000) >> 24); // Blue channel
            outBuffer[outIndex++] = (byte)((thisColor & 0x00FF0000) >> 16); // Green channel
            outBuffer[outIndex++] = (byte)((thisColor & 0x0000FF00) >> 8);  // Red channel
            outBuffer[outIndex++] = (byte)(thisColor & 0x000000FF);         // Alpha channel
        }
    }
}
/// <summary>
/// Handles an arriving body-index frame: copies the raw data and builds a colored
/// player image (background color where no player is present, a palette color per player).
/// NOTE(review): despite the name, this handler receives BodyIndexFrame events —
/// name kept for caller compatibility.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">body-index frame event arguments</param>
private void DepthFrameReady(object sender, BodyIndexFrameArrivedEventArgs e)
{
    int bg = this.backcolor;

    // "using" guarantees disposal even if copying throws
    // (the original leaked the frame on exception).
    using (BodyIndexFrame frame = e.FrameReference.AcquireFrame())
    {
        if (frame == null)
        {
            return;
        }

        this.FInvalidate = true;
        this.frameindex = frame.RelativeTime.Ticks;
        lock (m_lock)
        {
            frame.CopyFrameDataToArray(this.rawdepth);

            // 512 x 424 is the Kinect v2 depth/body-index resolution.
            for (int i16 = 0; i16 < 512 * 424; i16++)
            {
                byte player = rawdepth[i16];
                // 0xff means "no player at this pixel" -> background color;
                // otherwise pick a palette color by player index (6 colors available).
                this.playerimage[i16] = player == 255 ? bg : this.colors[player % 6];
            }
        }
    }
}
/// <summary>
/// Releases all cached multi-source sub-frames and clears the multi-source frame reference.
/// </summary>
/// <param name="sensorData">Sensor data associated with the frames (not used by this method).</param>
public void FreeMultiSourceFrame(KinectInterop.SensorData sensorData)
{
    // Dispose each cached sub-frame and drop the reference so it cannot be reused.
    msBodyFrame?.Dispose();
    msBodyFrame = null;

    msBodyIndexFrame?.Dispose();
    msBodyIndexFrame = null;

    msColorFrame?.Dispose();
    msColorFrame = null;

    msDepthFrame?.Dispose();
    msDepthFrame = null;

    msInfraredFrame?.Dispose();
    msInfraredFrame = null;

    // Matching the original: the multi-source frame itself is only released by
    // clearing the reference, not disposed.
    multiSourceFrame = null;
}
/// <summary>
/// Handles the depth/color/body index frame data arriving from the sensor.
/// Maintains a running per-pixel average of body occupancy (a "heat map" accumulated
/// over the program's lifetime, sampled every 10th frame) and either displays that
/// average on its own (showAverage) or blends it into the live color image.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    int depthWidth = 0;
    int depthHeight = 0;

    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    bool isBitmapLocked = false;

    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }

    // We use a try/finally to ensure that we clean up before we exit the function.
    // This includes calling Dispose on any Frame objects that we may have and
    // unlocking the bitmap back buffer.
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();

        // If any frame has expired by the time we process this event, return.
        // The "finally" statement will Dispose any that are not null.
        if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
        {
            return;
        }

        // Process Depth
        FrameDescription depthFrameDescription = depthFrame.FrameDescription;

        depthWidth = depthFrameDescription.Width;
        depthHeight = depthFrameDescription.Height;

        if (!showAverage)
        {
            // Access the depth frame data directly via LockImageBuffer to avoid making a copy.
            using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
            {
                // Map depth data to color space.
                this.coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                    depthFrameData.UnderlyingBuffer,
                    depthFrameData.Size,
                    this.colorMappedToDepthPoints);
            }
        }

        // We're done with the DepthFrame (null it so "finally" does not double-dispose).
        depthFrame.Dispose();
        depthFrame = null;

        // Process Color
        if (!showAverage)
        {
            // Lock the bitmap for writing; "finally" unlocks it via isBitmapLocked.
            this.bitmap.Lock();
            isBitmapLocked = true;

            colorFrame.CopyConvertedFrameDataToIntPtr(this.bitmap.BackBuffer, this.bitmapBackBufferSize, ColorImageFormat.Bgra);

            // We're done with the ColorFrame.
            colorFrame.Dispose();
            colorFrame = null;
        }

        // We'll access the body index data directly to avoid a copy.
        using (KinectBuffer bodyIndexData = bodyIndexFrame.LockImageBuffer())
        {
            unsafe
            {
                byte *bodyIndexDataPointer = (byte *)bodyIndexData.UnderlyingBuffer;

                currentFrame++;

                // Only fold a new sample into the running average every 10th frame.
                if (currentFrame % 10 == 0)
                {
                    bool bodyFound = false;

                    // Store body data in a temporary array, where 1 corresponds to a
                    // location with a body in it and 0 to a location without one.
                    // 0xff in the body-index data means "no body at this pixel".
                    for (int i = 0; i < (int)bodyIndexData.Size; ++i)
                    {
                        bool isBodyPixel = bodyIndexDataPointer[i] != 0xff;
                        if (isBodyPixel)
                        {
                            bodyFound = true;
                            tempPixels[i] = 1;
                        }
                        else
                        {
                            tempPixels[i] = 0;
                        }
                    }

                    if (bodyFound)
                    {
                        // We need to compute non-normalized body intensities first —
                        // a per-pixel average over the entire lifespan of the program.
                        float greatestValue = 0;
                        for (int i = 0; i < (int)bodyIndexData.Size; ++i)
                        {
                            // Incremental mean: avg_n = (avg_{n-1} * (n-1) + sample) / n.
                            nonNormalizeAveragePixels[i] = (nonNormalizeAveragePixels[i] * (currentFrame - 1) + tempPixels[i]) / currentFrame;
                            if (greatestValue < nonNormalizeAveragePixels[i])
                            {
                                greatestValue = nonNormalizeAveragePixels[i];
                            }
                        }

                        // Now we will create a normalized (0-255) image.
                        for (int i = 0; i < (int)bodyIndexData.Size; ++i)
                        {
                            averagePixels[i] = (uint)(nonNormalizeAveragePixels[i] / greatestValue * 0xFF);
                        }

                        if (showAverage)
                        {
                            this.humanAverageBitmap.WritePixels(
                                new Int32Rect(0, 0, this.humanAverageBitmap.PixelWidth, this.humanAverageBitmap.PixelHeight),
                                this.averagePixels,
                                this.humanAverageBitmap.PixelWidth * (int)bytesPerPixel,
                                0);
                        }
                    }
                }

                // In average-only mode there is nothing further to draw on the color bitmap.
                if (showAverage)
                {
                    return;
                }

                int colorMappedToDepthPointCount = this.colorMappedToDepthPoints.Length;

                fixed (DepthSpacePoint *colorMappedToDepthPointsPointer = this.colorMappedToDepthPoints)
                {
                    // Treat the color data as 4-byte pixels.
                    uint *bitmapPixelsPointer = (uint *)this.bitmap.BackBuffer;

                    // Loop over each row and column of the color image.
                    for (int colorIndex = 0; colorIndex < colorMappedToDepthPointCount; ++colorIndex)
                    {
                        float colorMappedToDepthX = colorMappedToDepthPointsPointer[colorIndex].X;
                        float colorMappedToDepthY = colorMappedToDepthPointsPointer[colorIndex].Y;

                        // The sentinel value is -inf, -inf, meaning that no depth pixel
                        // corresponds to this color pixel.
                        if (!float.IsNegativeInfinity(colorMappedToDepthX) &&
                            !float.IsNegativeInfinity(colorMappedToDepthY))
                        {
                            // Make sure the depth pixel maps to a valid point in color space.
                            int depthX = (int)(colorMappedToDepthX + 0.5f);
                            int depthY = (int)(colorMappedToDepthY + 0.5f);

                            // If the point is not valid, there is no body index there.
                            if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                            {
                                int depthIndex = (depthY * depthWidth) + depthX;

                                // Blend the averaged body intensity into the color pixel
                                // with a bitwise OR (brightens where bodies have been).
                                bitmapPixelsPointer[colorIndex] = bitmapPixelsPointer[colorIndex] | averagePixels[depthIndex];
                            }
                        }
                    }

                    this.bitmap.AddDirtyRect(new Int32Rect(0, 0, this.bitmap.PixelWidth, this.bitmap.PixelHeight));
                }
            }
        }
    }
    finally
    {
        if (isBitmapLocked)
        {
            this.bitmap.Unlock();
        }

        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }
    }
}
/// <summary>
/// Handles the depth/color/body index frame data arriving from the sensor.
/// Also drives the projector-calibration state machine: capture a depth frame,
/// filter it, screenshot it, project horizontal then vertical structured-light
/// patterns (screenshotting each), then run the final calculation.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // Calibration state machine. Each boolean flag gates the next stage; the
    // per-frame work below (depth capture, pattern projection) sets these flags.
    if (this.runningCalibration)
    {
        if (this.depthFrameCaptured)
        {
            if (this.depthFrameFiltered)
            {
                if (this.depthScreenshotTaken)
                {
                    if (this.horizontalProjectingDone && this.verticalProjectingDone)
                    {
                        this.StatusText = "this.horizontalProjectingDone && this.verticalProjectingDone";
                        if (this.calculatingStarted)
                        {
                            this.StatusText = "calculatingFinished";
                        }
                        else
                        {
                            this.StatusText = "calculating";
                            this.startCalculating();
                        }
                    }
                    else
                    {
                        // Pattern projection still in progress: request that the next
                        // color frame be written into the "first image" buffer.
                        this.writeImageToFirstImageBuffer = true;
                    }
                }
                else
                {
                    this.StatusText = "making Screenshot of DepthMap";
                    this.makeScreenshotOfDepthMap();
                }
            }
            else
            {
                this.StatusText = "filtering Depth";
                this.filterDepth2(5);
            }
        }
    }

    int depthWidth = 0;
    int depthHeight = 0;

    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    bool isBitmapLocked = false;

    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }

    // We use a try/finally to ensure that we clean up before we exit the function.
    // This includes calling Dispose on any Frame objects that we may have and
    // unlocking the bitmap back buffer.
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();

        // If any frame has expired by the time we process this event, return.
        // The "finally" statement will Dispose any that are not null.
        if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
        {
            return;
        }

        // Process Depth
        FrameDescription depthFrameDescription = depthFrame.FrameDescription;

        depthWidth = depthFrameDescription.Width;
        depthHeight = depthFrameDescription.Height;

        // Access the depth frame data directly via LockImageBuffer to avoid making a copy.
        using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
        {
            this.coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                depthFrameData.UnderlyingBuffer,
                depthFrameData.Size,
                this.colorMappedToDepthPoints);

            // One-shot capture of raw depth data plus the reliable-distance range,
            // triggered by the calibration state machine above.
            if (this.captureDepthFrame)
            {
                unsafe
                {
                    depthFrame.CopyFrameDataToArray(this.globalDepth);
                    this.minDepthRange = depthFrame.DepthMinReliableDistance;
                    this.maxDepthRange = depthFrame.DepthMaxReliableDistance;
                }
                this.captureDepthFrame = false;
                this.depthFrameCaptured = true;
            }
        }

        // We're done with the DepthFrame (null it so "finally" does not double-dispose).
        depthFrame.Dispose();
        depthFrame = null;

        // Process Color
        // Lock the bitmap for writing; "finally" unlocks it via isBitmapLocked.
        this.bitmap.Lock();
        isBitmapLocked = true;

        colorFrame.CopyConvertedFrameDataToIntPtr(this.bitmap.BackBuffer, this.bitmapBackBufferSize, ColorImageFormat.Bgra);

        // Phase 1: project "horizontal" patterns. Every 15 frames a new pattern is
        // projected; at frame 10 of each cycle a screenshot of the current pattern is taken.
        if (this.projectingStarted && !this.verticalProjectingDone)
        {
            Boolean newPattern = false;
            bool timeToTakeScreenshot = false;
            if (this.frameCounter == 10)
            {
                timeToTakeScreenshot = true;
            }
            if (this.frameCounter >= 15)
            {
                newPattern = true;
                this.projectionImageCounter++;
                this.frameCounter = 0;
            }
            else
            {
                this.frameCounter++;
            }

            if (timeToTakeScreenshot)
            {
                // The bitmap must be unlocked while the screenshot is taken.
                this.bitmap.Unlock();
                this.screenshot(this.projectionImageCounter, "horizontal");
                this.bitmap.Lock();
                timeToTakeScreenshot = false;
            }

            if (newPattern)
            {
                ProjectablePattern pp = new ProjectablePattern(this.projectorWidth, this.projectorHeight, true, this.projectionImageCounter, this.myProjector);
                bool success = pp.projectPattern();
                if (!success)
                {
                    // No more patterns to project: reset counters and move to phase 2.
                    this.frameCounter = 0;
                    this.projectionImageCounter = 0;
                    this.verticalProjectingDone = true;
                }
                else
                {
                    this.horizontalImages.Add(pp);
                }
            }
        }

        // Phase 2: project "vertical" patterns (same 15-frame cadence as phase 1).
        if (this.projectingStarted && this.verticalProjectingDone && !this.horizontalProjectingDone)
        {
            Boolean newPattern = false;
            bool timeToTakeScreenshot = false;
            if (this.frameCounter == 10)
            {
                timeToTakeScreenshot = true;
            }
            if (this.frameCounter >= 15)
            {
                newPattern = true;
                this.projectionImageCounter++;
                this.frameCounter = 0;
            }
            else
            {
                this.frameCounter++;
            }

            if (timeToTakeScreenshot)
            {
                // The bitmap must be unlocked while the screenshot is taken.
                this.bitmap.Unlock();
                this.screenshot(this.projectionImageCounter, "vertical");
                this.bitmap.Lock();
                timeToTakeScreenshot = false;
            }

            if (newPattern)
            {
                ProjectablePattern pp = new ProjectablePattern(this.projectorWidth, this.projectorHeight, false, this.projectionImageCounter, this.myProjector);
                bool success = pp.projectPattern();
                if (!success)
                {
                    // All patterns done: projection phase complete.
                    this.horizontalProjectingDone = true;
                    this.projectButton.IsEnabled = false;
                }
                else
                {
                    this.verticalImages.Add(pp);
                }
            }
        }

        // One-shot copy of the current color frame into the "first image" buffer;
        // also kicks off the projection phases.
        if (this.writeImageToFirstImageBuffer)
        {
            colorFrame.CopyConvertedFrameDataToIntPtr(this.firstImage.BackBuffer, this.bitmapBackBufferSize, ColorImageFormat.Bgra);
            this.writeImageToFirstImageBuffer = false;
            this.projectingStarted = true;
        }

        // We're done with the ColorFrame.
        colorFrame.Dispose();
        colorFrame = null;

        // We'll access the body index data directly to avoid a copy.
        using (KinectBuffer bodyIndexData = bodyIndexFrame.LockImageBuffer())
        {
            unsafe
            {
                byte *bodyIndexDataPointer = (byte *)bodyIndexData.UnderlyingBuffer;

                int colorMappedToDepthPointCount = this.colorMappedToDepthPoints.Length;

                fixed (DepthSpacePoint *colorMappedToDepthPointsPointer = this.colorMappedToDepthPoints)
                {
                    // Treat the color data as 4-byte pixels.
                    uint *bitmapPixelsPointer = (uint *)this.bitmap.BackBuffer;
                    // NOTE(review): declared but never used below — presumably left over
                    // from earlier experimentation.
                    uint *firstImagePixelsPointer = (uint *)this.firstImage.BackBuffer;

                    // Loop over each row and column of the color image.
                    // NOTE(review): the body of this loop is effectively debug-only —
                    // it inspects a single pixel (colorIndex == 600) and the original
                    // "zero out non-body pixels" write is commented out.
                    for (int colorIndex = 0; colorIndex < colorMappedToDepthPointCount; ++colorIndex)
                    {
                        float colorMappedToDepthX = colorMappedToDepthPointsPointer[colorIndex].X;
                        float colorMappedToDepthY = colorMappedToDepthPointsPointer[colorIndex].Y;

                        if (colorIndex == 600)
                        {
                            // The sentinel value is -inf, -inf, meaning that no depth
                            // pixel corresponds to this color pixel.
                            if (!float.IsNegativeInfinity(colorMappedToDepthX) &&
                                !float.IsNegativeInfinity(colorMappedToDepthY))
                            {
                                // Make sure the depth pixel maps to a valid point in color space.
                                int depthX = (int)(colorMappedToDepthX + 0.5f);
                                int depthY = (int)(colorMappedToDepthY + 0.5f);

                                // If the point is not valid, there is no body index there.
                                if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                                {
                                    int depthIndex = (depthY * depthWidth) + depthX;

                                    // If we are tracking a body for the current pixel,
                                    // do not zero out the pixel.
                                    if (bodyIndexDataPointer[depthIndex] != 0xff)
                                    {
                                        continue;
                                    }
                                }
                            }
                        }
                        //bitmapPixelsPointer[colorIndex] = 0;
                    }
                }

                this.bitmap.AddDirtyRect(new Int32Rect(0, 0, this.bitmap.PixelWidth, this.bitmap.PixelHeight));
            }
        }
    }
    finally
    {
        if (isBitmapLocked)
        {
            this.bitmap.Unlock();
        }

        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }
    }
}
/// <summary>
/// Handles depth/color/body-index data arriving from the sensor. Implements a
/// "green screen": the color image is copied into bitmapBody and every color pixel
/// that does not map to a tracked body is zeroed out, then the bitmap is shown.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
public void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    int depthWidth = 0;
    int depthHeight = 0;

    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    bool isBitmapLocked = false;

    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }

    // try/finally ensures we unlock the bitmap back buffer and dispose any
    // acquired Frame objects before leaving the method.
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();

        // If any frame has expired by the time we process this event, return.
        // The "finally" statement will Dispose any that are not null.
        if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
        {
            return;
        }

        // Process Depth
        FrameDescription depthFrameDescription = depthFrame.FrameDescription;

        depthWidth = depthFrameDescription.Width;
        depthHeight = depthFrameDescription.Height;

        // Access the depth frame data directly via LockImageBuffer to avoid making a copy.
        using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
        {
            this.coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                depthFrameData.UnderlyingBuffer,
                depthFrameData.Size,
                this.colorMappedToDepthPoints);
        }

        // We're done with the DepthFrame (null it so "finally" does not double-dispose).
        depthFrame.Dispose();
        depthFrame = null;

        // Process Color
        // Lock the bitmap for writing; "finally" unlocks it via isBitmapLocked.
        this.bitmapBody.Lock();
        isBitmapLocked = true;

        colorFrame.CopyConvertedFrameDataToIntPtr(this.bitmapBody.BackBuffer, this.bitmapBackBufferSize, ColorImageFormat.Bgra);

        // We're done with the ColorFrame.
        colorFrame.Dispose();
        colorFrame = null;

        // We'll access the body index data directly to avoid a copy.
        using (KinectBuffer bodyIndexData = bodyIndexFrame.LockImageBuffer())
        {
            unsafe
            {
                byte *bodyIndexDataPointer = (byte *)bodyIndexData.UnderlyingBuffer;

                int colorMappedToDepthPointCount = this.colorMappedToDepthPoints.Length;

                fixed (DepthSpacePoint *colorMappedToDepthPointsPointer = this.colorMappedToDepthPoints)
                {
                    // Treat the color data as 4-byte pixels.
                    uint *bitmapPixelsPointer = (uint *)this.bitmapBody.BackBuffer;

                    // Loop over each row and column of the color image.
                    // Zero out any pixels that don't correspond to a body index.
                    for (int colorIndex = 0; colorIndex < colorMappedToDepthPointCount; ++colorIndex)
                    {
                        float colorMappedToDepthX = colorMappedToDepthPointsPointer[colorIndex].X;
                        float colorMappedToDepthY = colorMappedToDepthPointsPointer[colorIndex].Y;

                        // The sentinel value is -inf, -inf, meaning that no depth pixel
                        // corresponds to this color pixel.
                        if (!float.IsNegativeInfinity(colorMappedToDepthX) &&
                            !float.IsNegativeInfinity(colorMappedToDepthY))
                        {
                            // Make sure the depth pixel maps to a valid point in color space.
                            int depthX = (int)(colorMappedToDepthX + 0.5f);
                            int depthY = (int)(colorMappedToDepthY + 0.5f);

                            // If the point is not valid, there is no body index there.
                            if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                            {
                                int depthIndex = (depthY * depthWidth) + depthX;

                                // If we are tracking a body for the current pixel,
                                // keep the color pixel (skip the zeroing below).
                                if (bodyIndexDataPointer[depthIndex] != 0xff)
                                {
                                    continue;
                                }
                            }
                        }

                        // Not a body pixel: blank it out.
                        bitmapPixelsPointer[colorIndex] = 0;
                    }
                }

                this.bitmapBody.AddDirtyRect(new Int32Rect(0, 0, this.bitmapBody.PixelWidth, this.bitmapBody.PixelHeight));
            }
        }
    }
    finally
    {
        if (isBitmapLocked)
        {
            this.bitmapBody.Unlock();
        }

        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }
    }

    kinectImage.Source = bitmapBody;
}
/// <summary>
/// Acquires the Kinect (and optionally PS3 Eye) frames bundled in a multi-source frame
/// event, updates acquisition counters/timers, and forwards the frames for processing.
/// </summary>
/// <param name="e">Multi-source frame event arguments.</param>
private void ProcessMultiFrame(MultiSourceFrameArrivedEventArgs e)
{
    long ticksAcqTotal = DateTime.Now.Ticks;

    // frames
    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    BodyFrame bodyFrame = null;
    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // ps3eye
    byte[] psBytes0 = null;
    byte[] psBytes1 = null;

    // if the frame has expired by the time we process this event, return (this actually never happens)
    if (multiSourceFrame == null)
    {
        return;
    }

    try
    {
        // get kinect frames
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();
        bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();

        // optionally get ps3eye frames
        if (isPs3EyeRunning)
        {
            psBytes0 = frames[0];
            psBytes1 = frames[1];
        }

        // if any frame has expired by the time we process this event, return (dispose others in finally)
        // psBytes0 and psBytes1 may be null in the beginning (when ps3eye takes longer to start than kinect)
        if (colorFrame == null || depthFrame == null || bodyIndexFrame == null || bodyFrame == null ||
            (isPs3EyeRunning && (psBytes0 == null || psBytes1 == null)))
        {
            Utils.UpdateCounter("Expired");
            Utils.IncrementTotalLost();
            return;
        }
        else
        {
            Utils.UpdateCounter("Expired", false);
        }

        // performance
        Utils.UpdateTimer("Acquire", ticksAcqTotal);
        Utils.UpdateCounter("Acquired");

        // process
        ProcessFrames(colorFrame, depthFrame, bodyIndexFrame, bodyFrame, psBytes0, psBytes1);
    }
    catch (Exception ex)
    {
        // FIX: the original "catch { }" silently swallowed every exception, hiding real
        // processing bugs. Keep the best-effort behavior (a bad frame must not kill the
        // capture pipeline) but at least record the failure for diagnostics.
        System.Diagnostics.Debug.WriteLine("ProcessMultiFrame failed: " + ex);
    }
    finally
    {
        if (depthFrame != null) { depthFrame.Dispose(); }
        if (colorFrame != null) { colorFrame.Dispose(); }
        if (bodyIndexFrame != null) { bodyIndexFrame.Dispose(); }
        if (bodyFrame != null) { bodyFrame.Dispose(); }
    }
}
/// <summary>
/// Per-frame handler: copies color/depth/body-index data, maps depth to color and
/// camera space, composites a player-masked (or full) color image into the display
/// bitmap, and — while surface calibration is active — converts queued 2D depth
/// points into camera-space calibration points.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    var reference = e.FrameReference;
    MultiSourceFrame multiSourceFrame = null;
    ColorFrame colorFrame = null;
    DepthFrame depthFrame = null;
    BodyFrame bodyFrame = null;
    BodyIndexFrame bodyIndexFrame = null;

    try
    {
        using (_frameCounter.Increment())
        {
            multiSourceFrame = reference.AcquireFrame();
            if (multiSourceFrame == null)
            {
                return;
            }

            colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
            depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
            bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();
            bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();

            // FIX: was bitwise `|`, which evaluated every comparison unconditionally;
            // short-circuiting `||` is the idiomatic (and intended) form.
            if (colorFrame == null || depthFrame == null || bodyFrame == null || bodyIndexFrame == null)
            {
                return;
            }

            var colorDesc = colorFrame.FrameDescription;
            int colorWidth = colorDesc.Width;
            int colorHeight = colorDesc.Height;

            // Lazily allocate the color buffers once; frame dimensions are fixed per sensor.
            if (_colorFrameData == null)
            {
                int size = colorDesc.Width * colorDesc.Height;
                _colorFrameData = new byte[size * bytesPerPixel];
                _displayFrame = new byte[size * bytesPerPixel];
            }

            var depthDesc = depthFrame.FrameDescription;
            uint depthSize = depthDesc.LengthInPixels;

            // FIX: these were re-allocated on every frame (~30 Hz GC churn);
            // allocate once and reuse, re-allocating only if the size ever changes.
            if (_depthFrameData == null || _depthFrameData.Length != depthSize)
            {
                _depthFrameData = new ushort[depthSize];
                _colorSpacePoints = new ColorSpacePoint[depthSize];
            }

            FrameDescription bodyIndexFrameDescription = bodyIndexFrame.FrameDescription;
            int bodyIndexWidth = bodyIndexFrameDescription.Width;
            int bodyIndexHeight = bodyIndexFrameDescription.Height;
            if ((bodyIndexWidth * bodyIndexHeight) == bodyIndexFrameData.Length)
            {
                bodyIndexFrame.CopyFrameDataToArray(bodyIndexFrameData);
            }

            Array.Clear(_displayFrame, 0, _displayFrame.Length);
            colorFrame.CopyConvertedFrameDataToArray(_colorFrameData, ColorImageFormat.Bgra);
            depthFrame.CopyFrameDataToArray(_depthFrameData);

            kinectSensor.CoordinateMapper.MapDepthFrameToColorSpace(_depthFrameData, _colorSpacePoints);
            kinectSensor.CoordinateMapper.MapDepthFrameToCameraSpace(_depthFrameData, _cameraPoints);

            for (int depthIndex = 0; depthIndex < _depthFrameData.Length; ++depthIndex)
            {
                byte player = bodyIndexFrameData[depthIndex];

                // "Only players" menu toggle: when checked, keep only pixels that
                // belong to a tracked body (index != 0xff).
                bool? c = OnlyPlayersMenuItem.IsChecked;
                bool val = c != null ? (bool)c : false;
                if (!val || player != 0xff)
                {
                    ColorSpacePoint point = _colorSpacePoints[depthIndex];
                    CameraSpacePoint p = this._cameraPoints[depthIndex];

                    int colorX = (int)Math.Floor(point.X + 0.5);
                    int colorY = (int)Math.Floor(point.Y + 0.5);

                    if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight))
                    {
                        // Index computed only once the bounds are known to be valid.
                        int colorImageIndex = ((colorWidth * colorY) + colorX) * bytesPerPixel;
                        if (p.Z > 0)
                        {
                            _displayFrame[colorImageIndex] = _colorFrameData[colorImageIndex];         // b
                            _displayFrame[colorImageIndex + 1] = _colorFrameData[colorImageIndex + 1]; // g
                            _displayFrame[colorImageIndex + 2] = _colorFrameData[colorImageIndex + 2]; // r
                            _displayFrame[colorImageIndex + 3] = _colorFrameData[colorImageIndex + 3]; // a
                        }
                    }
                }
            }

            colorBitmap.WritePixels(
                new Int32Rect(0, 0, colorDesc.Width, colorDesc.Height),
                _displayFrame,
                colorDesc.Width * bytesPerPixel,
                0);

            if (calibratingSurface)
            {
                if (_pointsToDepth.Count > 0)
                {
                    foreach (Point p in _pointsToDepth)
                    {
                        int depthIndex = Convert.ToInt32(p.Y) * depthDesc.Width + Convert.ToInt32(p.X);
                        try
                        {
                            CameraSpacePoint cameraPoint = _cameraPoints[depthIndex];

                            // FIX: the original condition was
                            //   !IsInf(X) && !IsInf(Y) && !(IsInf(Z) && Z > 0)
                            // whose last factor is almost always true, so invalid
                            // (Z <= 0 or infinite-Z) points slipped into the
                            // calibration set. Intended check: all three coordinates
                            // finite AND Z strictly positive.
                            if (!Double.IsInfinity(cameraPoint.X) &&
                                !Double.IsInfinity(cameraPoint.Y) &&
                                !Double.IsInfinity(cameraPoint.Z) &&
                                cameraPoint.Z > 0)
                            {
                                Console.WriteLine("" + p.X + " " + p.Y + " ---> " + cameraPoint.X + " " + cameraPoint.Y + " " + cameraPoint.Z);
                                _calibrationPoints.Add(cameraPoint);
                                drawEllipse(p.X, p.Y);
                            }
                        }
                        catch
                        {
                            // Best-effort: a click outside the depth frame yields an
                            // out-of-range depthIndex; skip that point silently.
                        }
                    }
                    _pointsToDepth = new List<Point>();
                }

                // NOTE: an `if (false && _calibrationPoints.Count == 3) { ... }` block
                // that reconstructed a 16:9 surface rectangle from three calibration
                // points was removed here — it was permanently disabled dead code.
            }
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
        Console.WriteLine(ex.StackTrace);
        Console.WriteLine(ex.Source);
    }
    finally
    {
        // frames must always be disposed, otherwise the sensor stops delivering new ones
        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }
        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }
        if (bodyFrame != null)
        {
            bodyFrame.Dispose();
        }
        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }
    }
}
/// <summary>
/// Tries to acquire the latest multi-source frame and, from it, the sub-frames
/// selected by sensorFlags (body and/or body-index). If any requested sub-frame
/// is missing, every already-acquired ms* frame is released so the set stays
/// all-or-nothing.
/// </summary>
/// <param name="sensorData">Sensor data holder (unused here; kept for the interface).</param>
/// <returns>true when a multi-source frame was acquired; false otherwise.</returns>
public bool GetMultiSourceFrame(KinectInterop.SensorData sensorData)
{
    if (multiSourceFrameReader == null)
    {
        return false;
    }

    multiSourceFrame = multiSourceFrameReader.AcquireLatestFrame();
    if (multiSourceFrame == null)
    {
        return false;
    }

    // Which sub-frames did the caller ask for?
    bool needBody = (sensorFlags & KinectInterop.FrameSource.TypeBody) != 0;
    bool needBodyIndex = (sensorFlags & KinectInterop.FrameSource.TypeBodyIndex) != 0;

    // Try to get all requested frames at once.
    msBodyFrame = needBody ? multiSourceFrame.BodyFrameReference.AcquireFrame() : null;
    msBodyIndexFrame = needBodyIndex ? multiSourceFrame.BodyIndexFrameReference.AcquireFrame() : null;

    bool gotAllRequested =
        (!needBody || msBodyFrame != null) &&
        (!needBodyIndex || msBodyIndexFrame != null);

    if (!gotAllRequested)
    {
        // Release every partially-acquired frame so nothing leaks.
        if (msBodyFrame != null)
        {
            msBodyFrame.Dispose();
            msBodyFrame = null;
        }

        if (msBodyIndexFrame != null)
        {
            msBodyIndexFrame.Dispose();
            msBodyIndexFrame = null;
        }

        if (msColorFrame != null)
        {
            msColorFrame.Dispose();
            msColorFrame = null;
        }

        if (msDepthFrame != null)
        {
            msDepthFrame.Dispose();
            msDepthFrame = null;
        }

        if (msInfraredFrame != null)
        {
            msInfraredFrame.Dispose();
            msInfraredFrame = null;
        }
    }

    // A multi-source frame was acquired, even if some sub-frames were released above.
    return true;
}
/// <summary>
/// Grabber handler: every framesToCapture-th event, copies color/depth/body-index
/// data, refreshes body tracking, maps depth pixels onto the color image (keeping
/// only pixels that belong to a tracked body), optionally runs face detection,
/// forwards everything to frameHandler, and — while recording — snapshots the raw
/// buffers into dataContainer.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    // All frame counter: sub-sample the stream to every N-th frame.
    this.frameCount++;
    if (this.frameCount % this.framesToCapture != 0)
    {
        return;
    }

    ColorFrame colorFrame = null;
    DepthFrame depthFrame = null;
    BodyFrame bodyFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    Body body = null;
    SkeletonOfBody skel_up = new SkeletonOfBody(Constants.SKEL_UP_TOTAL_JOINTS);

    try
    {
        var frameReference = e.FrameReference.AcquireFrame();

        // FIX: AcquireFrame() returns null when the multi-source frame has expired;
        // the original dereferenced it unconditionally, and with no catch block the
        // resulting NullReferenceException escaped the handler. Bail out instead.
        if (frameReference == null)
        {
            return;
        }

        colorFrame = frameReference.ColorFrameReference.AcquireFrame();
        depthFrame = frameReference.DepthFrameReference.AcquireFrame();
        bodyFrame = frameReference.BodyFrameReference.AcquireFrame();
        bodyIndexFrame = frameReference.BodyIndexFrameReference.AcquireFrame();

        if (colorFrame == null || depthFrame == null || bodyFrame == null || bodyIndexFrame == null)
        {
            return;
        }

        //--------------------------------------------
        // Get the color frame
        //--------------------------------------------
        using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
        {
            colorFrame.CopyConvertedFrameDataToArray(this.buffColor32, ColorImageFormat.Bgra);
        } //End ColorFrame

        //--------------------------------------------
        // Get the depth frame
        //--------------------------------------------
        using (KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
        {
            depthFrame.CopyFrameDataToArray(this.buffDepth16);
        } //End DepthFrame

        //--------------------------------------------
        // Get the body index frame
        //--------------------------------------------
        using (KinectBuffer bodyIndexBuffer = bodyIndexFrame.LockImageBuffer())
        {
            bodyIndexFrame.CopyFrameDataToArray(this.buffBodyIndex8);
        }

        //--------------------------------------------
        // Get the body frame
        //--------------------------------------------
        bodyFrame.GetAndRefreshBodyData(this.listBodies);

        //--------------------------------------------
        // Map the depth frame to it color frame
        //--------------------------------------------
        {
            Array.Clear(this.buffColorSpacePoints, 0, this.buffColorSpacePoints.Length);
            Array.Clear(this.buffMapDepthToColor32, 0, this.buffMapDepthToColor32.Length);

            // Coordinate mapping
            this.coordinateMapper.MapDepthFrameToColorSpace(this.buffDepth16, this.buffColorSpacePoints);

            unsafe
            {
                fixed (ColorSpacePoint* depthMappedToColorPointsPointer = buffColorSpacePoints)
                {
                    // Loop over each row and column of the color image
                    // Zero out any pixels that don't correspond to a body index
                    for (int idxDepth = 0; idxDepth < buffColorSpacePoints.Length; ++idxDepth)
                    {
                        float depthMappedToColorX = depthMappedToColorPointsPointer[idxDepth].X;
                        float depthMappedToColorY = depthMappedToColorPointsPointer[idxDepth].Y;

                        // The sentinel value is -inf, -inf, meaning that no depth pixel corresponds to this color pixel.
                        if (!float.IsNegativeInfinity(depthMappedToColorX) && !float.IsNegativeInfinity(depthMappedToColorY))
                        {
                            // Make sure the depth pixel maps to a valid point in color space
                            int colorX = (int)(depthMappedToColorX + 0.5f);
                            int colorY = (int)(depthMappedToColorY + 0.5f);

                            // If the point is not valid, there is no body index there.
                            if ((colorX >= 0) && (colorX < this.colorImageSize.Width) && (colorY >= 0) && (colorY < this.colorImageSize.Height))
                            {
                                int idxColor = (colorY * this.colorImageSize.Width) + colorX;

                                // If we are tracking a body for the current pixel, save the depth data
                                if (this.buffBodyIndex8[idxDepth] != 0xff)
                                {
                                    this.buffMapDepthToColor32[idxDepth * 4] = this.buffColor32[idxColor * 4];         // B
                                    this.buffMapDepthToColor32[idxDepth * 4 + 1] = this.buffColor32[idxColor * 4 + 1]; // G
                                    this.buffMapDepthToColor32[idxDepth * 4 + 2] = this.buffColor32[idxColor * 4 + 2]; // R
                                }
                            }
                        }
                    }
                }
            } //End Unsafe
        } //End Mapping

        //--------------------------------------------
        // Process the face of the default body
        //--------------------------------------------
        // Variable to save the detected face paramenters
        this.faceData = new FaceData(new BoxFace(0, 0, 0, 0), new BoxFace(0, 0, 0, 0));

#if FACE_DETECTION
        if (this.faceFrameSource.IsActive)
        {
            // In our experiment we get the closest body
            body = Util.GetClosestBody(this.listBodies);

            if (body != null && body.IsTracked)
            {
                // Get the first skeleton
                skel_up = Util.GetSkeletonUpperBody(this.Mapper, body);

                // Draw skeleton joints
                if (this.drawingDepthMarkups)
                {
                    Util.WriteSkeletonOverFrame(this, VisTypes.Depth, skel_up, 2, ref this.buffMapDepthToColor32);
                }

                // Assign a tracking ID to the face source
                this.faceFrameSource.TrackingId = body.TrackingId;

                if (this.faceFrameResults != null)
                {
                    var boxColor = this.faceFrameResults.FaceBoundingBoxInColorSpace;
                    var boxDepth = this.faceFrameResults.FaceBoundingBoxInInfraredSpace;

                    // If there are face results, then save data
                    // We save in a format of rectangle [x, y, width, height]
                    this.faceData.boxColor = new BoxFace(boxColor.Left, boxColor.Top, (boxColor.Right - boxColor.Left), (boxColor.Bottom - boxColor.Top));
                    this.faceData.boxDepth = new BoxFace(boxDepth.Left, boxDepth.Top, (boxDepth.Right - boxDepth.Left), (boxDepth.Bottom - boxDepth.Top));

                    // Draw the face
                    if (this.drawingDepthMarkups)
                    {
                        Util.WriteFaceOverFrame(this, VisTypes.Depth, faceData.boxDepth, 1, ref this.buffMapDepthToColor32);
                    } //End Drawing
                } //End FaceResult
            } //End Body
        }
#endif

        // Update the data handler
        this.frameHandler(
            this.buffColor32,
            this.buffDepth16,
            this.buffBodyIndex8,
            this.buffMapDepthToColor32,
            this.listBodies,
            this.faceData
        );

        // Recording state ture
        byte[] _colorData = null;
        ushort[] _depthData = null;
        byte[] _bodyIndexData = null;
        IList<Body> _bodies = null;

        //--------------------------------------------
        // Record the data
        //--------------------------------------------
        if (this.stateOfRecording)
        {
            // 25-09-15
            // Discard frames where the hand is not corrected tracked (i.e., the hand has a zero coordinate)
            // To discard failures in hand tracking
            if (skel_up.jointDepthSpace[(int)JointUpType.HandLeft].X == 0 || skel_up.jointDepthSpace[(int)JointUpType.HandLeft].Y == 0 ||
                skel_up.jointDepthSpace[(int)JointUpType.HandRight].X == 0 || skel_up.jointDepthSpace[(int)JointUpType.HandRight].Y == 0)
            {
                Console.WriteLine("Neglect frame {0}", this.recordedFrames);
                return;
            }

            // Storage data;
            _colorData = new byte[this.buffColor32.Length];
            _depthData = new ushort[this.buffDepth16.Length];
            _bodyIndexData = new byte[this.buffBodyIndex8.Length];
            _bodies = new Body[this.listBodies.Count];

            colorFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Bgra);
            depthFrame.CopyFrameDataToArray(_depthData);
            bodyIndexFrame.CopyFrameDataToArray(_bodyIndexData);
            bodyFrame.GetAndRefreshBodyData(_bodies);

            // Increase the counter
            this.recordedFrames++;

            this.dataContainer.AddColor = _colorData;
            this.dataContainer.AddDepth = _depthData;
            this.dataContainer.AddBodyIndex = _bodyIndexData;
            this.dataContainer.AddListOfBodies = _bodies;
            this.dataContainer.AddFaceData = this.faceData;

            // If the user only require to save a fixed number of frames
            if (this.fixedFrames == this.recordedFrames)
            {
                this.stateOfRecording = false;
            }
        }

        // Notice:
        // Array.Copy() --> how many elements to copy
        // Buffer.BlockCopy --> how many of bytes to copy

        // Update Frame Rate
        UpdateGrabberFrameRate();
    }
    finally
    {
        // Keep the sub-sampling counter from overflowing on long runs.
        if (this.frameCount > 100000000)
        {
            this.frameCount = 0;
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }
        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }
        if (bodyFrame != null)
        {
            bodyFrame.Dispose();
        }
        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }
    }
}
/// <summary>
/// Handles the depth/color/body index frame data arriving from the sensor.
/// Renders the color image with every pixel that does not belong to a tracked
/// body zeroed out (green-screen effect), writing directly into the WPF bitmap
/// back buffer.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    int depthWidth = 0;
    int depthHeight = 0;

    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    bool isBitmapLocked = false;

    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }

    // We use a try/finally to ensure that we clean up before we exit the function.
    // This includes calling Dispose on any Frame objects that we may have and unlocking the bitmap back buffer.
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();

        // If any frame has expired by the time we process this event, return.
        // The "finally" statement will Dispose any that are not null.
        if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
        {
            return;
        }

        // Process Depth
        FrameDescription depthFrameDescription = depthFrame.FrameDescription;

        depthWidth = depthFrameDescription.Width;
        depthHeight = depthFrameDescription.Height;

        // Access the depth frame data directly via LockImageBuffer to avoid making a copy
        using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
        {
            this.coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                depthFrameData.UnderlyingBuffer,
                depthFrameData.Size,
                this.colorMappedToDepthPoints);
        }

        // We're done with the DepthFrame; dispose early so the sensor can recycle it.
        depthFrame.Dispose();
        depthFrame = null;

        // Process Color

        // Lock the bitmap for writing
        this.bitmap.Lock();
        isBitmapLocked = true;

        colorFrame.CopyConvertedFrameDataToIntPtr(this.bitmap.BackBuffer, this.bitmapBackBufferSize, ColorImageFormat.Bgra);

        // We're done with the ColorFrame
        colorFrame.Dispose();
        colorFrame = null;

        // We'll access the body index data directly to avoid a copy
        using (KinectBuffer bodyIndexData = bodyIndexFrame.LockImageBuffer())
        {
            unsafe
            {
                byte* bodyIndexDataPointer = (byte*)bodyIndexData.UnderlyingBuffer;

                int colorMappedToDepthPointCount = this.colorMappedToDepthPoints.Length;

                fixed (DepthSpacePoint* colorMappedToDepthPointsPointer = this.colorMappedToDepthPoints)
                {
                    // Treat the color data as 4-byte pixels
                    uint* bitmapPixelsPointer = (uint*)this.bitmap.BackBuffer;

                    // Loop over each row and column of the color image
                    // Zero out any pixels that don't correspond to a body index
                    for (int colorIndex = 0; colorIndex < colorMappedToDepthPointCount; ++colorIndex)
                    {
                        float colorMappedToDepthX = colorMappedToDepthPointsPointer[colorIndex].X;
                        float colorMappedToDepthY = colorMappedToDepthPointsPointer[colorIndex].Y;

                        // The sentinel value is -inf, -inf, meaning that no depth pixel corresponds to this color pixel.
                        if (!float.IsNegativeInfinity(colorMappedToDepthX) &&
                            !float.IsNegativeInfinity(colorMappedToDepthY))
                        {
                            // Make sure the depth pixel maps to a valid point in color space
                            int depthX = (int)(colorMappedToDepthX + 0.5f);
                            int depthY = (int)(colorMappedToDepthY + 0.5f);

                            // If the point is not valid, there is no body index there.
                            if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                            {
                                int depthIndex = (depthY * depthWidth) + depthX;

                                // If we are tracking a body for the current pixel, do not zero out the pixel
                                if (bodyIndexDataPointer[depthIndex] != 0xff)
                                {
                                    continue;
                                }
                            }
                        }

                        // Not a body pixel: blank it (0 == transparent black in BGRA).
                        bitmapPixelsPointer[colorIndex] = 0;
                    }
                }

                this.bitmap.AddDirtyRect(new Int32Rect(0, 0, this.bitmap.PixelWidth, this.bitmap.PixelHeight));
            }
        }
    }
    finally
    {
        if (isBitmapLocked)
        {
            this.bitmap.Unlock();
        }

        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }
    }
}
/// <summary>
/// Copies each frame type enabled in _FrameSourceTypes into its pre-allocated
/// pixel buffer, and raises FrameArrived only when every enabled source produced
/// a frame for this event. Timing for the whole handler is reported via
/// RaiseKinectFrameComplete.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private unsafe void FrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    _Stopwatch.Restart();

    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    if (multiSourceFrame != null)
    {
        ColorFrame colorFrame = null;
        DepthFrame depthFrame = null;
        BodyFrame bodyFrame = null;
        BodyIndexFrame bodyIndexFrame = null;

        try
        {
            // Becomes false as soon as any enabled source is missing its frame;
            // later sources are then skipped entirely (short-circuit via && below).
            bool allRequiredDataReceived = true;

            if (_FrameSourceTypes.HasFlag(FrameSourceTypes.Color))
            {
                colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();

                if (colorFrame != null)
                {
                    // Pin the managed buffer and copy raw bytes directly into it.
                    fixed (byte* colorBytesPointer = ColorPixels)
                    {
                        IntPtr colorPtr = (IntPtr)colorBytesPointer;
                        uint size = (uint)(_ColorFrameDescription.Width * _ColorFrameDescription.Height * _ColorFrameDescription.BytesPerPixel);

                        // Avoid a format conversion when the sensor already delivers the desired format.
                        if (colorFrame.RawColorImageFormat == ImageFormat)
                        {
                            colorFrame.CopyRawFrameDataToIntPtr(colorPtr, size);
                        }
                        else
                        {
                            colorFrame.CopyConvertedFrameDataToIntPtr(colorPtr, size, ImageFormat);
                        }
                    }
                }
                else
                {
                    allRequiredDataReceived = false;
                }
            }

            if (_FrameSourceTypes.HasFlag(FrameSourceTypes.Depth) && allRequiredDataReceived)
            {
                depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();

                if (depthFrame != null)
                {
                    fixed (ushort* depthBytesPointer = DepthPixels)
                    {
                        IntPtr depthPtr = (IntPtr)depthBytesPointer;
                        depthFrame.CopyFrameDataToIntPtr(depthPtr, (uint)(_DepthFrameDescription.Width * _DepthFrameDescription.Height * _DepthFrameDescription.BytesPerPixel));
                    }
                }
                else
                {
                    allRequiredDataReceived = false;
                }
            }

            if (_FrameSourceTypes.HasFlag(FrameSourceTypes.Body) && allRequiredDataReceived)
            {
                bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();

                if (bodyFrame != null)
                {
                    bodyFrame.GetAndRefreshBodyData(Bodies);
                }
                else
                {
                    allRequiredDataReceived = false;
                }
            }

            if (_FrameSourceTypes.HasFlag(FrameSourceTypes.BodyIndex) && allRequiredDataReceived)
            {
                bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();

                if (bodyIndexFrame != null)
                {
                    fixed (byte* bodyIndexBytesPointer = BodyIndexPixels)
                    {
                        IntPtr bodyIndexPtr = (IntPtr)bodyIndexBytesPointer;
                        bodyIndexFrame.CopyFrameDataToIntPtr(bodyIndexPtr, (uint)(_BodyIndexFrameDescription.Width * _BodyIndexFrameDescription.Height * _BodyIndexFrameDescription.BytesPerPixel));
                    }
                }
                else
                {
                    allRequiredDataReceived = false;
                }
            }

            if (allRequiredDataReceived)
            {
                // Reuse a single event-args instance (avoids per-frame allocation);
                // subscribers must consume the buffers synchronously.
                _KinectFrameArrivedEventArgs.ColorPixels = ColorPixels;
                _KinectFrameArrivedEventArgs.DepthPixels = DepthPixels;
                _KinectFrameArrivedEventArgs.Bodies = Bodies;
                _KinectFrameArrivedEventArgs.BodyIndexPixels = BodyIndexPixels;
                _KinectFrameArrivedEventArgs.KinectSensor = multiSourceFrame.KinectSensor;
                _KinectFrameArrivedEventArgs.FrameNumber = _FrameNumber;

                // Snapshot the handler to stay safe against concurrent unsubscription.
                EventHandler<KinectFrameArrivedEventArgs> handler = FrameArrived;
                if (handler != null)
                {
                    handler(this, _KinectFrameArrivedEventArgs);
                }
            }
        }
        finally
        {
            if (colorFrame != null)
            {
                colorFrame.Dispose();
            }

            if (depthFrame != null)
            {
                depthFrame.Dispose();
            }

            if (bodyFrame != null)
            {
                bodyFrame.Dispose();
            }

            if (bodyIndexFrame != null)
            {
                bodyIndexFrame.Dispose();
            }
        }
    }

    _Stopwatch.Stop();
    RaiseKinectFrameComplete(_Stopwatch.Elapsed);

    _FrameNumber++;
}
/// <summary>
/// Acquires the depth/color/body-index frames for one multi-source event, maps the
/// color frame into depth space, then hands the depth and color frames to their
/// respective processing helpers. (Handler name "Mutil…" is a long-standing typo;
/// kept because callers subscribe to it by this name.)
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void MutilReaderSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;

    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }

    // We use a try/finally to ensure that we clean up before we exit the function.
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();

        // If any frame has expired by the time we process this event, return.
        // The "finally" statement will Dispose any that are not null.
        if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
        {
            return;
        }

        // Access the depth frame data directly via LockImageBuffer to avoid making a copy
        using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
        {
            this.coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                depthFrameData.UnderlyingBuffer,
                depthFrameData.Size,
                this.colorMappedToDepthPoints);
        }

        DepthFrameReader(depthFrame);
        ColorFrameReader(colorFrame);
    }
    finally
    {
        // FIX: removed `isBitmapLocked` and its dead `colorBitmap.Unlock()` branch —
        // the flag was never set to true, so the unlock could never run. Also removed
        // the unused depthWidth/depthHeight/FrameDescription locals.
        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }
    }
}
/// <summary>
/// Handles the depth/color/body index frame data arriving from the sensor.
/// WinRT variant: frame buffers are exposed as IBuffer COM objects, so every
/// buffer acquired here must later be released with Marshal.ReleaseComObject.
/// Renders the color image with non-body pixels zeroed out.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    int depthWidth = 0;
    int depthHeight = 0;

    DepthFrame depthFrame = null;
    IBuffer depthFrameData = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    IBuffer bodyIndexFrameData = null;
    IBufferByteAccess bodyIndexByteAccess = null;

    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }

    // We use a try/finally to ensure that we clean up before we exit the function.
    // This includes calling Dispose on any Frame objects that we may have and unlocking the bitmap back buffer.
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();

        // If any frame has expired by the time we process this event, return.
        // The "finally" statement will Dispose any that are not null.
        if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
        {
            return;
        }

        // Process Depth
        FrameDescription depthFrameDescription = depthFrame.FrameDescription;

        depthWidth = depthFrameDescription.Width;
        depthHeight = depthFrameDescription.Height;

        // Access the depth frame data directly via LockImageBuffer to avoid making a copy
        depthFrameData = depthFrame.LockImageBuffer();
        this.coordinateMapper.MapColorFrameToDepthSpaceUsingIBuffer(depthFrameData, this.colorMappedToDepthPoints);

        // We're done with the DepthFrame
        depthFrame.Dispose();
        depthFrame = null;

        // We must force a release of the IBuffer in order to ensure that we have dropped all references to it.
        System.Runtime.InteropServices.Marshal.ReleaseComObject(depthFrameData);
        depthFrameData = null;

        // Process Color
        colorFrame.CopyConvertedFrameDataToBuffer(this.bitmap.PixelBuffer, ColorImageFormat.Bgra);

        // We're done with the ColorFrame
        colorFrame.Dispose();
        colorFrame = null;

        // NOTE(review): bodyIndexFrameDescription is computed but never used below —
        // apparently leftover; harmless.
        FrameDescription bodyIndexFrameDescription = bodyIndexFrame.FrameDescription;

        // Access the body index frame data directly via LockImageBuffer to avoid making a copy
        bodyIndexFrameData = bodyIndexFrame.LockImageBuffer();

        int colorMappedToDepthPointCount = this.colorMappedToDepthPoints.Length;

        unsafe
        {
            // Get a raw byte pointer into the body index IBuffer via its COM interface.
            bodyIndexByteAccess = (IBufferByteAccess)bodyIndexFrameData;
            byte* bodyIndexBytes = null;
            bodyIndexByteAccess.Buffer(out bodyIndexBytes);

            fixed (DepthSpacePoint* colorMappedToDepthPointsPointer = this.colorMappedToDepthPoints)
            {
                IBufferByteAccess bitmapBackBufferByteAccess = (IBufferByteAccess)this.bitmap.PixelBuffer;

                byte* bitmapBackBufferBytes = null;
                bitmapBackBufferByteAccess.Buffer(out bitmapBackBufferBytes);

                // Treat the color data as 4-byte pixels
                uint* bitmapPixelsPointer = (uint*)bitmapBackBufferBytes;

                // Loop over each row and column of the color image
                // Zero out any pixels that don't correspond to a body index
                for (int colorIndex = 0; colorIndex < colorMappedToDepthPointCount; ++colorIndex)
                {
                    float colorMappedToDepthX = colorMappedToDepthPointsPointer[colorIndex].X;
                    float colorMappedToDepthY = colorMappedToDepthPointsPointer[colorIndex].Y;

                    // The sentinel value is -inf, -inf, meaning that no depth pixel corresponds to this color pixel.
                    if (!float.IsNegativeInfinity(colorMappedToDepthX) &&
                        !float.IsNegativeInfinity(colorMappedToDepthY))
                    {
                        // Make sure the depth pixel maps to a valid point in color space
                        int depthX = (int)(colorMappedToDepthX + 0.5f);
                        int depthY = (int)(colorMappedToDepthY + 0.5f);

                        // If the point is not valid, there is no body index there.
                        if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                        {
                            int depthIndex = (depthY * depthWidth) + depthX;

                            // If we are tracking a body for the current pixel, do not zero out the pixel
                            if (bodyIndexBytes[depthIndex] != 0xff)
                            {
                                continue;
                            }
                        }
                    }

                    bitmapPixelsPointer[colorIndex] = 0;
                }
            }

            this.bitmap.Invalidate();
        }
    }
    finally
    {
        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }

        if (depthFrameData != null)
        {
            // We must force a release of the IBuffer in order to ensure that we have dropped all references to it.
            System.Runtime.InteropServices.Marshal.ReleaseComObject(depthFrameData);
        }

        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }

        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }

        if (bodyIndexFrameData != null)
        {
            // We must force a release of the IBuffer in order to ensure that we have dropped all references to it.
            System.Runtime.InteropServices.Marshal.ReleaseComObject(bodyIndexFrameData);
        }

        if (bodyIndexByteAccess != null)
        {
            // We must force a release of the IBuffer in order to ensure that we have dropped all references to it.
            System.Runtime.InteropServices.Marshal.ReleaseComObject(bodyIndexByteAccess);
        }
    }
}
/// <summary>
/// Dispatches the incoming multi-source frame to the renderer matching the
/// currently selected display mode (infrared, color, depth, body mask, body
/// joints, or background-removed). Only the frames each mode needs are acquired;
/// the composite modes use try/finally to guarantee disposal/COM release.
/// </summary>
/// <param name="sender">the reader sending the event</param>
/// <param name="e">event arguments</param>
private void Reader_MultiSourceFrameArrived(MultiSourceFrameReader sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }

    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    InfraredFrame infraredFrame = null;
    BodyFrame bodyFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    IBuffer depthFrameDataBuffer = null;
    IBuffer bodyIndexFrameData = null;
    // Com interface for unsafe byte manipulation
    IBufferByteAccess bufferByteAccess = null;

    switch (CurrentDisplayFrameType)
    {
        case DisplayFrameType.Infrared:
            using (infraredFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame())
            {
                ShowInfraredFrame(infraredFrame);
            }
            break;

        case DisplayFrameType.Color:
            using (colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
            {
                ShowColorFrame(colorFrame);
            }
            break;

        case DisplayFrameType.Depth:
            using (depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
            {
                ShowDepthFrame(depthFrame);
            }
            break;

        case DisplayFrameType.BodyMask:
            // Put in a try catch to utilise finally() and clean up frames
            try
            {
                depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
                bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();
                colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
                if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
                {
                    return;
                }

                // Access the depth frame data directly via LockImageBuffer to avoid making a copy
                depthFrameDataBuffer = depthFrame.LockImageBuffer();
                this.coordinateMapper.MapColorFrameToDepthSpaceUsingIBuffer(depthFrameDataBuffer, this.colorMappedToDepthPoints);

                // Process Color
                colorFrame.CopyConvertedFrameDataToBuffer(this.bitmap.PixelBuffer, ColorImageFormat.Bgra);

                // Access the body index frame data directly via LockImageBuffer to avoid making a copy
                bodyIndexFrameData = bodyIndexFrame.LockImageBuffer();

                // NOTE(review): bufferByteAccess is still null here — presumably
                // ShowMappedBodyFrame obtains/assigns the COM interface itself
                // (e.g. via a ref/out parameter); confirm against its signature.
                ShowMappedBodyFrame(depthFrame.FrameDescription.Width, depthFrame.FrameDescription.Height, bodyIndexFrameData, bufferByteAccess);
            }
            finally
            {
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                }
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                }
                if (bodyIndexFrame != null)
                {
                    bodyIndexFrame.Dispose();
                }

                if (depthFrameDataBuffer != null)
                {
                    // We must force a release of the IBuffer in order to ensure that we have dropped all references to it.
                    System.Runtime.InteropServices.Marshal.ReleaseComObject(depthFrameDataBuffer);
                }

                if (bodyIndexFrameData != null)
                {
                    System.Runtime.InteropServices.Marshal.ReleaseComObject(bodyIndexFrameData);
                }

                if (bufferByteAccess != null)
                {
                    System.Runtime.InteropServices.Marshal.ReleaseComObject(bufferByteAccess);
                }
            }
            break;

        case DisplayFrameType.BodyJoints:
            using (bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
            {
                ShowBodyJoints(bodyFrame);
            }
            break;

        case DisplayFrameType.BackgroundRemoved:
            // Put in a try catch to utilise finally() and clean up frames
            try
            {
                depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
                colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
                if ((depthFrame == null) || (colorFrame == null))
                {
                    return;
                }
                depthFrame.CopyFrameDataToArray(depthFrameData);
                this.coordinateMapper.MapColorFrameToDepthSpace(depthFrameData, this.colorMappedToDepthPoints);

                // Process Color.
                colorFrame.CopyConvertedFrameDataToBuffer(this.bitmap.PixelBuffer, ColorImageFormat.Bgra);

                ShowMappedColorBackgroundRemoved(colorMappedToDepthPoints, depthFrameData, depthFrame.FrameDescription);
            }
            finally
            {
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                }
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                }
            }
            break;

        default:
            break;
    }
}
/// <summary>
/// Handles depth/color/body-index/body frames from the sensor: refreshes body
/// tracking state (starting the capture countdown when a body appears), maps
/// color pixels into depth space, copies the color frame into the live bitmap
/// back buffer, and then zeroes every color pixel that does not map to a
/// tracked-body depth pixel (green-screen effect).
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments carrying the multi-source frame reference</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    int depthWidth = 0;
    int depthHeight = 0;
    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    BodyFrame bodyFrame = null;
    bool isBitmapLocked = false;
    try
    {
        MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
        // FIX: the frame reference routinely expires before the event is processed;
        // without this null check the dereference below throws NullReferenceException
        // (the try block has no catch, so it would escape the handler).
        if (multiSourceFrame == null)
        {
            return;
        }
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();
        bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();
        // If any frame has expired, return; the finally block disposes the rest.
        if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null) || (bodyFrame == null))
        {
            return;
        }
        if (this.bodies == null)
        {
            this.bodies = new Body[bodyFrame.BodyCount];
        }
        bodyFrame.GetAndRefreshBodyData(this.bodies);
        // Dispose as soon as we are done with each frame so the sensor can recycle it.
        bodyFrame.Dispose();
        bodyFrame = null;
        bool isBodyTracked = false;
        foreach (Body body in this.bodies)
        {
            if (body.IsTracked)
            {
                // One tracked body is enough; no need to scan the rest.
                isBodyTracked = true;
                break;
            }
        }
        hasTrackedBodies = isBodyTracked;
        if (hasTrackedBodies && !isCapturing)
        {
            BeginCountdown();
        }
        FrameDescription depthFrameDescription = depthFrame.FrameDescription;
        depthWidth = depthFrameDescription.Width;
        depthHeight = depthFrameDescription.Height;
        // Access the depth frame data directly via LockImageBuffer to avoid making a copy.
        using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
        {
            this.coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                depthFrameData.UnderlyingBuffer,
                depthFrameData.Size,
                this.colorMappedToDepthPoints);
        }
        depthFrame.Dispose();
        depthFrame = null;
        // Lock the bitmap for writing; unlocked in finally via isBitmapLocked.
        this.liveBitmap.Lock();
        isBitmapLocked = true;
        colorFrame.CopyConvertedFrameDataToIntPtr(this.liveBitmap.BackBuffer, this.bitmapBackBufferSize, ColorImageFormat.Bgra);
        colorFrame.Dispose();
        colorFrame = null;
        // Access the body index data directly to avoid a copy.
        using (KinectBuffer bodyIndexData = bodyIndexFrame.LockImageBuffer())
        {
            unsafe
            {
                byte* bodyIndexDataPointer = (byte*)bodyIndexData.UnderlyingBuffer;
                int colorMappedToDepthPointCount = this.colorMappedToDepthPoints.Length;
                fixed (DepthSpacePoint* colorMappedToDepthPointsPointer = this.colorMappedToDepthPoints)
                {
                    // Treat the color back buffer as 4-byte BGRA pixels.
                    uint* bitmapPixelsPointer = (uint*)this.liveBitmap.BackBuffer;
                    for (int colorIndex = 0; colorIndex < colorMappedToDepthPointCount; ++colorIndex)
                    {
                        float colorMappedToDepthX = colorMappedToDepthPointsPointer[colorIndex].X;
                        float colorMappedToDepthY = colorMappedToDepthPointsPointer[colorIndex].Y;
                        // Sentinel is (-inf, -inf): no depth pixel corresponds to this color pixel.
                        if (!float.IsNegativeInfinity(colorMappedToDepthX) && !float.IsNegativeInfinity(colorMappedToDepthY))
                        {
                            // Round to the nearest depth pixel.
                            int depthX = (int)(colorMappedToDepthX + 0.5f);
                            int depthY = (int)(colorMappedToDepthY + 0.5f);
                            if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                            {
                                int depthIndex = (depthY * depthWidth) + depthX;
                                // 0xff means "no body" — keep pixels that belong to a body.
                                if (bodyIndexDataPointer[depthIndex] != 0xff)
                                {
                                    continue;
                                }
                            }
                        }
                        // Not a body pixel: blank it out.
                        bitmapPixelsPointer[colorIndex] = 0;
                    }
                }
                this.liveBitmap.AddDirtyRect(new Int32Rect(0, 0, this.liveBitmap.PixelWidth, this.liveBitmap.PixelHeight));
            }
        }
    }
    finally
    {
        if (isBitmapLocked)
        {
            this.liveBitmap.Unlock();
        }
        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }
        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }
        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }
        if (bodyFrame != null)
        {
            bodyFrame.Dispose();
        }
    }
}
/// <summary>
/// Green-screen pass: maps every depth pixel to color space, then scans the body
/// index image for horizontal runs of player pixels and "splats" the matching
/// color-frame rectangles into the cleared display buffer, finally uploading the
/// result to the display texture. All three frames are disposed (and the ref
/// parameters nulled) as soon as each is consumed.
/// </summary>
/// <param name="depthFrame">Depth frame; disposed and set to null by this method.</param>
/// <param name="colorFrame">Color frame; disposed and set to null by this method.</param>
/// <param name="bodyIndexFrame">Body index frame; disposed and set to null by this method.</param>
/// <param name="depthWidth">Depth frame width in pixels.</param>
/// <param name="depthHeight">Depth frame height in pixels.</param>
/// <param name="colorWidth">Color frame width in pixels.</param>
/// <param name="colorHeight">Color frame height in pixels.</param>
void GreenScreenMappingDepthToColorSplats(ref DepthFrame depthFrame, ref ColorFrame colorFrame, ref BodyIndexFrame bodyIndexFrame, int depthWidth, int depthHeight, int colorWidth, int colorHeight)
{
    m_stopwatch.Restart();
    using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
    {
        // Need to know the color space point for each depth space point, but this is much less data
        // and much faster to compute than mapping the other way
        m_coordinateMapper.MapDepthFrameToColorSpaceUsingIntPtr(
            depthFrameData.UnderlyingBuffer,
            depthFrameData.Size,
            m_depthToColorSpacePoints);
    }
    m_depthMapTimer.Update(m_stopwatch.ElapsedMilliseconds);
    m_stopwatch.Restart();
    // We're done with the DepthFrame
    depthFrame.Dispose();
    depthFrame = null;
    lock (m_displayPixels)
    {
        // [KinectThread] avoid racing display buffer refresh with render (can cause missing images)
        // have to clear the display pixels so we can copy only the BGRA image of the player(s)
        Array.Clear(m_displayPixels, 0, m_displayPixels.Length);
        unsafe
        {
            fixed (byte* colorFrameDataPtr = &m_colorFrameData[0])
            {
                colorFrame.CopyConvertedFrameDataToIntPtr(new IntPtr(colorFrameDataPtr), (uint)m_colorFrameData.Length, ColorImageFormat.Bgra);
            }
        }
        // done with the colorFrame
        colorFrame.Dispose();
        colorFrame = null;
        m_colorCopyTimer.Update(m_stopwatch.ElapsedMilliseconds);
        m_stopwatch.Restart();
        // We'll access the body index data directly to avoid a copy
        using (KinectBuffer bodyIndexData = bodyIndexFrame.LockImageBuffer())
        {
            unsafe
            {
                byte* bodyIndexDataPointer = (byte*)bodyIndexData.UnderlyingBuffer;
                // (removed two dead locals here: bodyIndexData.Size and
                // m_colorToDepthSpacePoints.Length were read but never used)
                fixed (ColorSpacePoint* depthMappedToColorPointsPointer = m_depthToColorSpacePoints)
                {
                    fixed (byte* bitmapPixelsBytePointer = &m_displayPixels[0])
                    {
                        fixed (byte* sourcePixelsBytePointer = &m_colorFrameData[0])
                        {
                            uint* bitmapPixelsPointer = (uint*)bitmapPixelsBytePointer;
                            uint* sourcePixelsPointer = (uint*)sourcePixelsBytePointer;
                            // We don't go all the way to the edge of the depth buffer, to eliminate a chance
                            // that a splat will go outside the edge of the color buffer when mapped to color
                            // space. In the x direction this will never happen anyway since the depth FOV
                            // is so much narrower than the color FOV.
                            const int Margin = 2;
                            for (int y = Margin; y < depthHeight - Margin; y++)
                            {
                                for (int x = 0; x < depthWidth; x++)
                                {
                                    // Scan forwards until we find a non-0xff value in the body index data.
                                    int depthIndex = y * depthWidth + x;
                                    if (bodyIndexDataPointer[depthIndex] != 0xff)
                                    {
                                        int depthIndex2 = depthIndex;
                                        // We found the beginning of a horizontal run of player pixels.
                                        // Scan to the end.
                                        int runWidth;
                                        for (runWidth = 1; runWidth + x < depthWidth; runWidth++)
                                        {
                                            depthIndex2++;
                                            if (bodyIndexDataPointer[depthIndex2] == 0xff)
                                            {
                                                break;
                                            }
                                        }
                                        // Now splat from (x, y) to (x + runWidth, y)
                                        // NOTE(review): when the run ends via break, depthIndex2 points at the
                                        // first non-player pixel, so depthIndex2 - 1 is the run's last pixel;
                                        // when the run reaches the right edge, this under-shoots by one pixel —
                                        // masked by the splat margins below, but worth confirming.
                                        float depthMappedToColorLeftX = depthMappedToColorPointsPointer[depthIndex].X;
                                        float depthMappedToColorLeftY = depthMappedToColorPointsPointer[depthIndex].Y;
                                        float depthMappedToColorRightX = depthMappedToColorPointsPointer[depthIndex2 - 1].X;
                                        float depthMappedToColorRightY = depthMappedToColorPointsPointer[depthIndex2 - 1].Y;
                                        // Now copy color pixels along that rectangle.
                                        const int splatHMargin = 2; // X margin of splat rectangle in color pixels
                                        const int splatVMargin = 3; // Y margin of splat rectangle in color pixels
                                        int minX = (int)Math.Min(depthMappedToColorLeftX, depthMappedToColorRightX) - splatHMargin;
                                        int minY = (int)Math.Min(depthMappedToColorLeftY, depthMappedToColorRightY) - splatVMargin;
                                        int maxX = (int)Math.Max(depthMappedToColorLeftX, depthMappedToColorRightX) + splatHMargin;
                                        int maxY = (int)Math.Max(depthMappedToColorLeftY, depthMappedToColorRightY) + splatVMargin;
                                        // Some edge of screen situations can result in color space points that are negative or otherwise
                                        // actually outside the color space coordinate range.
                                        Clamp(ref minX, colorWidth - 1);
                                        Clamp(ref minY, colorHeight - 1);
                                        Clamp(ref maxX, colorWidth - 1);
                                        Clamp(ref maxY, colorHeight - 1);
                                        for (int colorY = minY; colorY < maxY; colorY++)
                                        {
                                            int colorIndex = colorY * colorWidth + minX;
                                            for (int colorX = minX; colorX < maxX; colorX++)
                                            {
                                                bitmapPixelsPointer[colorIndex] = sourcePixelsPointer[colorIndex];
                                                colorIndex++;
                                            }
                                        }
                                        // Skip past the run we just handled.
                                        x += runWidth;
                                    }
                                }
                            }
                        }
                    }
                }
            }
            // Done with bodyIndexFrame
            bodyIndexFrame.Dispose();
            bodyIndexFrame = null;
        }
    }
    m_colorScanTimer.Update(m_stopwatch.ElapsedMilliseconds);
    m_stopwatch.Restart();
    m_displayTexture.SetData(m_displayPixels);
    m_textureSetDataTimer.Update(m_stopwatch.ElapsedMilliseconds);
    m_stopwatch.Restart();
    Spam.TopLine1 = string.Format("depth map: {0} msec; color copy: {1} msec; color scan: {2} msec; texture set: {3} msec",
        m_depthMapTimer.Average, m_colorCopyTimer.Average, m_colorScanTimer.Average, m_textureSetDataTimer.Average);
}
/// <summary>
/// Handles the depth/color/body index frame data arriving from the sensor:
/// throttles to MAX_FPS, maps between color and depth space, runs Canny edge
/// detection on the body index buffer, fits a plane through six sampled floor
/// points, overlays edges and per-body bounding rectangles on the color bitmap,
/// and reports body locations via locationUpdated.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments carrying the multi-source frame reference</param>
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    double fps = 0;
    TimeSpan elapsedSpan = new TimeSpan(DateTime.Now.Ticks - this.timestamp.Ticks);
    // FIX: use TotalMilliseconds (the full duration) rather than Milliseconds
    // (only the 0-999 ms component). With .Milliseconds, a gap of e.g. 1.002 s
    // reads as 2 ms — frames get skipped after any pause, and fps is garbage.
    if (elapsedSpan.TotalMilliseconds < (1000f / MAX_FPS))
    {
        // Too soon since the last processed frame: enforce the FPS cap.
        return;
    }
    fps = 1000f / elapsedSpan.TotalMilliseconds;
    this.timestamp = DateTime.Now;
    DepthFrame depthFrame = null;
    ColorFrame colorFrame = null;
    BodyIndexFrame bodyIndexFrame = null;
    bool isBitmapLocked = false;
    MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
    // If the Frame has expired by the time we process this event, return.
    if (multiSourceFrame == null)
    {
        return;
    }
    // We use a try/finally to ensure that we clean up before we exit the function.
    // This includes calling Dispose on any Frame objects that we may have and unlocking the bitmap back buffer.
    try
    {
        depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
        colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
        bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();
        // If any frame has expired by the time we process this event, return.
        // The "finally" statement will Dispose any that are not null.
        if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
        {
            return;
        }
        // (removed two unused FrameDescription locals; depthWidth/depthHeight
        // fields are used for sizing below)
        // Access the depth frame data directly via LockImageBuffer to avoid making a copy
        using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
        {
            this.coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                depthFrameData.UnderlyingBuffer,
                depthFrameData.Size,
                this.colorMappedToDepthPoints);
            this.coordinateMapper.MapDepthFrameToColorSpaceUsingIntPtr(
                depthFrameData.UnderlyingBuffer,
                depthFrameData.Size,
                this.depthMappedToColorPoints);
        }
        // Keep a copy of the raw depth values for plane fitting / location lookup.
        ushort[] depthData = new ushort[depthWidth * depthHeight];
        depthFrame.CopyFrameDataToArray(depthData);
        // We're done with the DepthFrame
        depthFrame.Dispose();
        depthFrame = null;
        // Process Color: lock the bitmap for writing (unlocked in finally).
        this.bitmap.Lock();
        isBitmapLocked = true;
        colorFrame.CopyConvertedFrameDataToIntPtr(this.bitmap.BackBuffer, this.bitmapBackBufferSize, ColorImageFormat.Bgra);
        // We're done with the ColorFrame
        colorFrame.Dispose();
        colorFrame = null;
        // We'll access the body index data directly to avoid a copy
        using (KinectBuffer bodyIndexData = bodyIndexFrame.LockImageBuffer())
        {
            unsafe
            {
                byte* bodyIndexDataPointer = (byte*)bodyIndexData.UnderlyingBuffer;
                // Per-body bounding boxes, initialised to an "empty" sentinel rect.
                BodyRect[] bodys = new BodyRect[BODY_MAX_NUMBER];
                for (byte i = 0; i < BODY_MAX_NUMBER; ++i)
                {
                    bodys[i] = new BodyRect(int.MaxValue, 0, 0, int.MaxValue);
                }
                // use canny algorithm to detect edges in the body index image
                Stopwatch watch = Stopwatch.StartNew();
                byte[] result = EdgeDetection.Canny(bodyIndexData.UnderlyingBuffer, depthWidth, depthHeight);
                watch.Stop();
                // Six fixed depth-frame sample points used to fit the floor plane.
                List<Location> points = new List<Location>();
                points.Add(transformToLocation(440, 200, depthData));
                points.Add(transformToLocation(420, 230, depthData));
                points.Add(transformToLocation(470, 220, depthData));
                points.Add(transformToLocation(460, 300, depthData));
                points.Add(transformToLocation(400, 280, depthData));
                points.Add(transformToLocation(370, 280, depthData));
                Plane plane = PlaneDetection.calc(points);
                double angle = Math.Abs(Math.Atan(-1 / plane.k));
                double distance = plane.b * Math.Sin(angle);
                string output = string.Format("fps: {0:F}, It takes {1:F} ms for Canny. k: {2:F}, b: {3:F}, angel: {4:F}, distance: {5:F}",
                    fps, watch.ElapsedMilliseconds, plane.k, plane.b, angle * 180 / Math.PI, distance);
                processStatusUpdated(output);
                fixed (byte* cannyResult = &result[0])
                fixed (DepthSpacePoint* colorMappedToDepthPointsPointer = this.colorMappedToDepthPoints)
                {
                    // Treat the color data as 4-byte pixels
                    uint* bitmapPixelsPointer = (uint*)this.bitmap.BackBuffer;
                    // Loop over each row and column of the color image
                    for (int y = 0; y < colorHeight; ++y)
                    {
                        for (int x = 0; x < colorWidth; ++x)
                        {
                            DepthSpacePoint depthPoint = getDepthPoint(x, y);
                            float colorMappedToDepthX = depthPoint.X;
                            float colorMappedToDepthY = depthPoint.Y;
                            // The sentinel value is -inf, -inf, meaning that no depth pixel corresponds to this color pixel.
                            if (!float.IsNegativeInfinity(colorMappedToDepthX) && !float.IsNegativeInfinity(colorMappedToDepthY))
                            {
                                // Make sure the depth pixel maps to a valid point in color space
                                int depthX = (int)(colorMappedToDepthX + 0.5f);
                                int depthY = (int)(colorMappedToDepthY + 0.5f);
                                // If the point is not valid, there is no body index there.
                                if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                                {
                                    int depthIndex = (depthY * depthWidth) + depthX;
                                    if (cannyResult[depthIndex] > 0)
                                    {
                                        // Paint detected edges red.
                                        bitmapPixelsPointer[y * colorWidth + x] = 0xffff0000;
                                    }
                                    // If we are tracking a body for the current pixel, ...
                                    int bodyIndex = bodyIndexDataPointer[depthIndex];
                                    if (bodyIndex != 0xff)
                                    {
                                        // Justify whether this point is a true body pixel by checking
                                        // that three of its neighbours carry the same body index
                                        // (rejects isolated/noisy pixels).
                                        uint count = 0;
                                        // FIX: only sample neighbours for interior pixels. The original
                                        // read (depthY - 1) / (depthY + 1) / (depthX ± 1) unguarded,
                                        // which indexes outside the body index buffer (negative or
                                        // past-the-end) for border pixels — undefined behavior through
                                        // an unsafe pointer. Border pixels now simply never qualify.
                                        if ((depthX >= 1) && (depthX < depthWidth - 1) && (depthY >= 1) && (depthY < depthHeight - 1))
                                        {
                                            if (bodyIndex == bodyIndexDataPointer[(depthY - 1) * depthWidth + (depthX - 1)])
                                            {
                                                count++;
                                            }
                                            if (bodyIndex == bodyIndexDataPointer[(depthY) * depthWidth + (depthX + 1)])
                                            {
                                                count++;
                                            }
                                            if (bodyIndex == bodyIndexDataPointer[(depthY + 1) * depthWidth + (depthX)])
                                            {
                                                count++;
                                            }
                                        }
                                        if (count == 3)
                                        {
                                            // Grow this body's bounding box to include the color pixel.
                                            bodys[bodyIndex].top = Math.Min(bodys[bodyIndex].top, y);
                                            bodys[bodyIndex].bottom = Math.Max(bodys[bodyIndex].bottom, y);
                                            bodys[bodyIndex].left = Math.Min(bodys[bodyIndex].left, x);
                                            bodys[bodyIndex].right = Math.Max(bodys[bodyIndex].right, x);
                                        }
                                        continue;
                                    }
                                }
                            }
                        }
                    }
                    // Draw the plane-fit sample points that map to valid color coordinates.
                    for (int i = 0; i < points.Count; ++i)
                    {
                        ColorSpacePoint point = getColorPoint(points[i].depthFrameX, points[i].depthFrameY);
                        if (point.X != Double.NegativeInfinity)
                        {
                            DrawPoint(this.bitmap, point);
                        }
                    }
                    // Report a Location for every body whose bounding box was touched.
                    List<Location> locations = new List<Location>();
                    BodyRect initialBody = new BodyRect(int.MaxValue, 0, 0, int.MaxValue);
                    for (uint i = 0; i < BODY_MAX_NUMBER; i++)
                    {
                        BodyRect body = bodys[i];
                        if (!body.Equals(initialBody))
                        {
                            DrawRect(this.bitmap, body.top, body.right, body.bottom, body.left);
                            // calculate the actual location of this body from the box centre
                            DepthSpacePoint point = getDepthPoint((body.right + body.left) / 2, (body.bottom + body.top) / 2);
                            Location loc = transformToLocation(point, depthData, true);
                            if (loc.depth > 0)
                            {
                                locations.Add(loc);
                            }
                        }
                    }
                    locationUpdated(locations.ToArray());
                    this.bitmap.AddDirtyRect(new Int32Rect(0, 0, this.bitmap.PixelWidth, this.bitmap.PixelHeight));
                }
            }
        }
    }
    finally
    {
        if (isBitmapLocked)
        {
            this.bitmap.Unlock();
        }
        if (depthFrame != null)
        {
            depthFrame.Dispose();
        }
        if (colorFrame != null)
        {
            colorFrame.Dispose();
        }
        if (bodyIndexFrame != null)
        {
            bodyIndexFrame.Dispose();
        }
    }
}