// Tracks elapsed time by frame number/timestamp: once per second, shifts the
// per-direction obstacle history one slot and resets the current-second data.
void SetFrameTime(DepthImageFrame depthFrame)
{
    long lFrame = (long)depthFrame.FrameNumber;
    long lTime = (long)depthFrame.Timestamp;
    // Timestamp is in milliseconds; run the rollover once per second.
    if (lTime - m_saveTime > 1000)
    {
        m_saveFrame = lFrame;
        m_saveTime = lTime;
        // Every second, shift each direction cell's obstacle history:
        // slot 0 (current second) moves into slot 1, 1 into 2, ... up to
        // slot 4, then slot 0 is cleared for the new second.
        for (int i = 0; i < directionPartWidth; i++)
        {
            for (int j = 0; j < directionPartHeight; j++)
            {
                bool temp = m_objectByDirection[i, j, 0];
                for (int k = 1; k < 5; k++)
                {
                    bool temp2 = m_objectByDirection[i, j, k];
                    m_objectByDirection[i, j, k] = temp;
                    temp = temp2;
                }
                m_objectByDirection[i, j, 0] = false;
                // Reset the object rectangle recorded for this cell
                // (cells are indexed row-major).
                int whichRec = i + j * directionPartWidth;
                m_objectRect[whichRec].Width = 0;
                m_objectRect[whichRec].Height = 0;
            }
        }
        // Every second, clear the gaze-direction grid as well.
        for (int i = 0; i < directionPartWidth; i++)
        {
            for (int j = 0; j < directionPartHeight; j++)
            {
                m_sightDirection[i, j] = false;
            }
        }
    }
}
/// <summary>
/// Renders the depth pixels whose distance lies strictly between
/// <paramref name="min"/> and <paramref name="max"/> (in millimeters) as a
/// grayscale Bgr32 bitmap; everything else stays black.
/// </summary>
public static BitmapSource SliceDepthImage(this DepthImageFrame image, int min = 20, int max = 1000)
{
    int width = image.Width;
    int height = image.Height;

    // Reliable depth range reported for the current frame.
    int minDepth = image.MinDepth;
    int maxDepth = image.MaxDepth;

    short[] rawDepthData = new short[image.PixelDataLength];
    image.CopyPixelDataTo(rawDepthData);

    var pixels = new byte[height * width * 4];

    int outIndex = 0;
    foreach (short raw in rawDepthData)
    {
        if (outIndex >= pixels.Length)
        {
            break;
        }

        // Strip the player-index bits to get the distance in millimeters.
        int depth = raw >> DepthImageFrame.PlayerIndexBitmaskWidth;

        // NOTE(review): depth is truncated to a byte, so intensities wrap for
        // distances above 255mm — confirm this rendering is intended.
        byte intensity = (byte)(depth >= minDepth && depth <= maxDepth ? depth : 0);

        if (depth > min && depth < max)
        {
            pixels[outIndex + 0] = intensity; // blue
            pixels[outIndex + 1] = intensity; // green
            pixels[outIndex + 2] = intensity; // red
        }

        outIndex += 4;
    }

    return BitmapSource.Create(width, height, 96, 96, PixelFormats.Bgr32, null, pixels, width * 4);
}
/// <summary>
/// Converts raw depth data into a Bgr32 pixel buffer, color-banding by
/// distance: blue up to 900mm, green for 901–1999mm, red for 2000mm and up.
/// </summary>
/// <param name="depthFrame">Depth frame whose pixels are colorized.</param>
/// <returns>Bgr32 byte buffer sized Height * Width * 4.</returns>
private byte[] GenerateColoredBytes(DepthImageFrame depthFrame)
{
    short[] rawDepthData = new short[depthFrame.PixelDataLength];
    depthFrame.CopyPixelDataTo(rawDepthData);

    byte[] pixels = new byte[depthFrame.Height * depthFrame.Width * 4];

    const int BlueIndex = 0;
    const int GreenIndex = 1;
    const int RedIndex = 2;

    for (int depthIndex = 0, colorIndex = 0;
         depthIndex < rawDepthData.Length && colorIndex < pixels.Length;
         depthIndex++, colorIndex += 4)
    {
        // Strip the player-index bits to get the distance in millimeters.
        // (The unused "player" local was removed.)
        int depth = rawDepthData[depthIndex] >> DepthImageFrame.PlayerIndexBitmaskWidth;

        if (depth <= 900)
        {
            pixels[colorIndex + BlueIndex] = 255;
            pixels[colorIndex + GreenIndex] = 0;
            pixels[colorIndex + RedIndex] = 0;
        }
        else if (depth < 2000)
        {
            pixels[colorIndex + BlueIndex] = 0;
            pixels[colorIndex + GreenIndex] = 255;
            pixels[colorIndex + RedIndex] = 0;
        }
        else
        {
            // Fix: was "else if (depth > 2000)", which left depth == 2000
            // unpainted (black); the final band now covers it.
            pixels[colorIndex + BlueIndex] = 0;
            pixels[colorIndex + GreenIndex] = 0;
            pixels[colorIndex + RedIndex] = 255;
        }
    }

    return pixels;
}
/// <summary>
/// AllFramesReady handler: tracks each skeleton's head joint, maps it into
/// depth-image then Image1 coordinates, and moves the on-screen ellipse there.
/// </summary>
void nui_AllFramesReady(object sender, AllFramesReadyEventArgs e)
{
    // Fix: the SkeletonFrame was never disposed; wrap it in a using block.
    using (SkeletonFrame sf = e.OpenSkeletonFrame())
    {
        if (sf == null)
        {
            return;
        }

        Skeleton[] skeletonData = new Skeleton[sf.SkeletonArrayLength];
        sf.CopySkeletonDataTo(skeletonData);

        using (DepthImageFrame depthImageFrame = e.OpenDepthImageFrame())
        {
            if (depthImageFrame != null)
            {
                foreach (Skeleton sd in skeletonData)
                {
                    if (sd.TrackingState == SkeletonTrackingState.Tracked)
                    {
                        Joint joint = sd.Joints[JointType.Head];

                        // Skeleton space -> depth image -> Image1 coordinates.
                        DepthImagePoint depthPoint;
                        depthPoint = depthImageFrame.MapFromSkeletonPoint(joint.Position);
                        Point point = new Point((int)(Image1.Width * depthPoint.X / depthImageFrame.Width), (int)(Image1.Height * depthPoint.Y / depthImageFrame.Height));

                        textBlock1.Text = string.Format("X:{0:0.00} Y:{1:0.00}", point.X, point.Y);

                        Canvas.SetLeft(ellipse1, point.X);
                        Canvas.SetTop(ellipse1, point.Y);
                    }
                }
            }
        }
    }
}
/// <summary>
/// AllFramesReady handler: shows the color (or IR) stream in image1 and a
/// color-banded depth visualization in image3.
/// </summary>
private void _sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
{
    using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
    {
        if (colorFrame == null)
        {
            return;
        }

        byte[] colorPixels = new byte[colorFrame.PixelDataLength];
        colorFrame.CopyPixelDataTo(colorPixels);

        if (!irCam)
        {
            // RGB stream: 4 bytes per pixel, Bgr32.
            image1.Source = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null, colorPixels, colorFrame.Width * 4);
        }
        else
        {
            // IR stream: 2 bytes per pixel, 16-bit grayscale.
            image1.Source = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Gray16, null, colorPixels, colorFrame.Width * 2);
        }
    }

    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame == null)
        {
            return;
        }

        // Depth stream rendered with distance-based color bands.
        byte[] depthPixels = GenerateColoredBytes(depthFrame);
        image3.Source = BitmapSource.Create(depthFrame.Width, depthFrame.Height, 96, 96, PixelFormats.Bgr32, null, depthPixels, depthFrame.Width * 4);
    }
}
// Snapshots the previous frame's joint positions into the "last" fields, then
// maps the skeleton's joints onto the depth image to refresh the "curr"
// fields. The last-before-curr ordering is essential: each pair must hold two
// consecutive frames.
public void setJoints(AllFramesReadyEventArgs e, Skeleton skeleton)
{
    using (DepthImageFrame depth = e.OpenDepthImageFrame())
    {
        if (depth != null)
        {
            // Save the current positions as the previous-frame positions.
            lastHead = currHead;
            lastNeck = currNeck;
            lastSpine = currSpine;
            lastHipCenter = currHipCenter;
            lastLeftHand = currLeftHand;
            lastLeftWrist = currLeftWrist;
            lastLeftElbow = currLeftElbow;
            lastLeftShoulder = currLeftShoulder;
            lastRightHand = currRightHand;
            lastRightWrist = currRightWrist;
            lastRightElbow = currRightElbow;
            lastRightShoulder = currRightShoulder;
            // Map each joint location to a point on the depth map.
            // (ShoulderCenter stands in for the neck.)
            currHead = depth.MapFromSkeletonPoint(skeleton.Joints[JointType.Head].Position);
            currNeck = depth.MapFromSkeletonPoint(skeleton.Joints[JointType.ShoulderCenter].Position);
            currSpine = depth.MapFromSkeletonPoint(skeleton.Joints[JointType.Spine].Position);
            currHipCenter = depth.MapFromSkeletonPoint(skeleton.Joints[JointType.HipCenter].Position);
            currLeftHand = depth.MapFromSkeletonPoint(skeleton.Joints[JointType.HandLeft].Position);
            currLeftWrist = depth.MapFromSkeletonPoint(skeleton.Joints[JointType.WristLeft].Position);
            currLeftElbow = depth.MapFromSkeletonPoint(skeleton.Joints[JointType.ElbowLeft].Position);
            currLeftShoulder = depth.MapFromSkeletonPoint(skeleton.Joints[JointType.ShoulderLeft].Position);
            currRightHand = depth.MapFromSkeletonPoint(skeleton.Joints[JointType.HandRight].Position);
            currRightWrist = depth.MapFromSkeletonPoint(skeleton.Joints[JointType.WristRight].Position);
            currRightElbow = depth.MapFromSkeletonPoint(skeleton.Joints[JointType.ElbowRight].Position);
            currRightShoulder = depth.MapFromSkeletonPoint(skeleton.Joints[JointType.ShoulderRight].Position);
        }
    }
}
/// <summary>
/// AllFramesReady handler: copies depth and color pixels, maps depth pixels
/// to color coordinates, range-filters, and writes both bitmaps.
/// </summary>
void mykinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
{
    #region Basic frame acquisition
    depthframe = e.OpenDepthImageFrame();
    colorframe = e.OpenColorImageFrame();
    if (depthframe == null || colorframe == null)
    {
        // Fix: the original returned here without disposing whichever frame
        // HAD been opened, leaking it every time only one stream delivered.
        if (depthframe != null)
        {
            depthframe.Dispose();
        }
        if (colorframe != null)
        {
            colorframe.Dispose();
        }
        return;
    }

    try
    {
        depthpixelData = new short[depthframe.PixelDataLength];
        depthframe.CopyPixelDataTo(depthpixelData);
        depthPixel = new DepthImagePixel[depthframe.PixelDataLength];
        depthframe.CopyDepthImagePixelDataTo(depthPixel);
        _DepthImageBitmap.WritePixels(_DepthImageBitmapRect, depthpixelData, _DepthImageStride, 0);
        colorpixelData = new byte[colorframe.PixelDataLength];
        colorframe.CopyPixelDataTo(colorpixelData);
        #endregion

        #region Coordinate mapping and filtering
        colorpoints = new ColorImagePoint[depthpixelData.Length];
        kinect.CoordinateMapper.MapDepthFrameToColorFrame(
            depthframe.Format, depthPixel,
            colorframe.Format, colorpoints);
        // depthpixelData was just assigned above, so the original
        // "if (depthpixelData != null)" guard was always true.
        RangeFilter();
        #endregion

        _ColorImageBitmap.WritePixels(_ColorImageBitmapRect, colorpixelData, _ColorImageStride, 0);
    }
    finally
    {
        // Fix: dispose both frames even when pixel processing throws.
        depthframe.Dispose();
        colorframe.Dispose();
    }
}
/// <summary>
/// Fires when a depth frame is captured by the Kinect's depth sensor.
/// For every pixel whose depth matches the configured value, generates a
/// unique object identifier and notifies the receivers.
/// </summary>
/// <param name="sender">Event source.</param>
/// <param name="e">Depth frame event arguments.</param>
void depthSensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame frame = e.OpenDepthImageFrame())//Opens the depth frame sent along with the event argument
    {
        // Fix: OpenDepthImageFrame can return null (frame already consumed or
        // sensor stopping); the original dereferenced it unconditionally.
        if (frame == null)
        {
            return;
        }

        //Reads out all of the captured depth image pixels to an array
        DepthImagePixel[] depthImagePixels = frame.GetRawPixelData();

        for (int i = 0; i < depthImagePixels.Length; i++)//Loops the captured depth image pixel array
        {
            //Depth is the distance, in millimeters, from the image pixel to the sensor
            if (depthImagePixels[i].Depth == DefaultDepth)
            {
                //Generates a unique object ID for the current object
                DataIdentifier dataIdentifier = new DataIdentifier() { DataUniqueID = Guid.NewGuid().ToString() };

                //Notifies the receivers
                base.Notify(dataIdentifier);
                //break;
            }
        }
    }
}
/// <summary>
/// AllFramesReady handler: applies any pending background/color changes, then
/// parses the current depth frame.
/// </summary>
public void SensorAllFramesReady(object sender, AllFramesReadyEventArgs e)
{
    // Apply a pending background change, if any.
    if (!drawController.backgroundAlreadySet)
    {
        drawController.ChangeBackground(drawController.background);
    }

    // Apply a pending color change (-1 means "no change requested").
    if (drawController.shouldChangeColor != -1)
    {
        drawController.ChangeColor((Colors)drawController.shouldChangeColor);
    }

    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame == null)
        {
            return;
        }

        this.ParseDepthFrame(depthFrame);
    }
}
/// <summary>
/// Depth-frame handler: copies raw depth and rebuilds the per-pixel player
/// color image under the buffer lock.
/// </summary>
private void DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    // Fix: use a using block so the frame is disposed even if copying throws
    // (the original only called Dispose on the success path).
    using (DepthImageFrame frame = e.OpenDepthImageFrame())
    {
        if (frame == null)
        {
            return;
        }

        this.FInvalidate = true;
        this.frameindex = frame.FrameNumber;
        lock (m_lock)
        {
            frame.CopyPixelDataTo(this.rawdepth);
            // NOTE(review): 512*424 is hard-coded here; confirm it matches
            // rawdepth's actual length for the configured stream format.
            for (int i16 = 0; i16 < 512 * 424; i16++)
            {
                // Low bits carry the player index; wrap into the palette.
                int player = rawdepth[i16] & DepthImageFrame.PlayerIndexBitmask;
                player = player % this.colors.Length;
                this.playerimage[i16] = this.colors[player];
            }
        }
    }
}
/// <summary>
/// Maps the tracked skeleton's head and hands into color-image coordinates
/// and positions the head marker on screen.
/// </summary>
void GetCameraPoint(Skeleton first, AllFramesReadyEventArgs e)
{
    using (DepthImageFrame depth = e.OpenDepthImageFrame())
    {
        if (depth == null || kinectSensorChooser1.Kinect == null)
        {
            return;
        }

        // Project the skeleton joints onto the depth image.
        DepthImagePoint headDepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.Head].Position);
        DepthImagePoint leftDepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.HandLeft].Position);
        DepthImagePoint rightDepthPoint = depth.MapFromSkeletonPoint(first.Joints[JointType.HandRight].Position);

        // Convert each depth point to 640x480 color-image coordinates.
        ColorImagePoint headColorPoint = depth.MapToColorImagePoint(headDepthPoint.X, headDepthPoint.Y, ColorImageFormat.RgbResolution640x480Fps30);
        ColorImagePoint leftColorPoint = depth.MapToColorImagePoint(leftDepthPoint.X, leftDepthPoint.Y, ColorImageFormat.RgbResolution640x480Fps30);
        ColorImagePoint rightColorPoint = depth.MapToColorImagePoint(rightDepthPoint.X, rightDepthPoint.Y, ColorImageFormat.RgbResolution640x480Fps30);

        // Only the head marker is positioned; the hand markers are disabled.
        CameraPosition(headImage, headColorPoint);
        //CameraPosition(leftEllipse, leftColorPoint);
        //CameraPosition(rightEllipse, rightColorPoint);
    }
}
/// <summary>
/// Depth-frame handler: on a new frame number, rebuilds buffers, copies depth
/// pixels, and maps them to color-image coordinates under the buffer lock.
/// </summary>
private void AllFrameReady(object sender, AllFramesReadyEventArgs e)
{
    // Fix: using guarantees disposal; the original leaked the frame whenever
    // FrameNumber matched the cached index (Dispose sat inside the if).
    using (DepthImageFrame frame = e.OpenDepthImageFrame())
    {
        if (frame == null || frame.FrameNumber == this.frameindex)
        {
            return;
        }

        this.FInvalidate = true;
        this.RebuildBuffer(frame.Format, false);
        this.frameindex = frame.FrameNumber;
        frame.CopyDepthImagePixelDataTo(this.depthpixels);
        lock (m_lock)
        {
            // Map every depth pixel to its corresponding color-image position.
            this.runtime.Runtime.CoordinateMapper.MapDepthFrameToColorFrame(frame.Format, this.depthpixels, ColorImageFormat.RgbResolution640x480Fps30, this.colpoints);
        }
    }
}
/// <summary>
/// AllFramesReady handler: requires all three frames, renders the color
/// bitmap, finds the tracked user, and crops the hand region of interest.
/// </summary>
void _sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
{
    using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
    {
        // All three frames are required for this pass.
        if (colorFrame == null || depthFrame == null || skeletonFrame == null)
        {
            return;
        }

        // Color frame -> bitmap.
        ColorFrameToBitmap(colorFrame);
        //img_color.Source = colorBitmap;

        // Skeleton frame -> tracked user; bail out when nobody is tracked.
        Skeleton user_skeleton = getSkeleton(skeletonFrame);
        if (user_skeleton == null)
        {
            return;
        }

        // Extract the hand region of interest from the aligned frames.
        GetROI(user_skeleton, depthFrame, colorFrame);

        //img_color.Source = aligned_colorBitmap;
        img_color.Source = colorBitmap;
        img_cropped.Source = cb;
    }
}
/// <summary>
/// Depth-frame handler: copies the frame's pixels and depth range, then kicks
/// off background processing — skipping frames while the worker is busy.
/// </summary>
private void DepthImageReady(object sender, DepthImageFrameReadyEventArgs e)
{
    // Drop this frame entirely while the previous one is still processing.
    if (worker.IsBusy)
    {
        return;
    }

    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        if (depthFrame == null)
        {
            KinectDataContext.Instance.StatusMessage = "No depth frame received";
            return;
        }

        minDepth = depthFrame.MinDepth;
        maxDepth = depthFrame.MaxDepth;
        depthFrame.CopyDepthImagePixelDataTo(depthPixels);
        // Process the copied pixels off the UI thread.
        worker.RunWorkerAsync();
    }
}
// Scans the color frame for a barcode/QR code. On success, stores the code's
// pixel size in code_size, records its center translated into depth-frame
// coordinates in code_points, and returns the decoded code number; returns -1
// when nothing was decoded.
// NOTE(review): assumes a 640-pixel-wide color frame (hard-coded below).
int find_code(ColorImageFrame colorFrame, DepthImageFrame depthFrame)
{
    ZXing.Kinect.BarcodeReader reader = new ZXing.Kinect.BarcodeReader();
    if (colorFrame != null)
    {
        //Decode the colorFrame
        var result = reader.Decode(colorFrame);
        if (result != null)
        {
            // The decoded text is expected to be an integer code number.
            string val = result.Text;
            int code_num = Convert.ToInt32(val);

            // Center of the code: midpoint of the diagonal between result
            // points 0 and 2.
            double center_x = result.ResultPoints[0].X + 0.5 * (result.ResultPoints[2].X - result.ResultPoints[0].X);
            double center_y = result.ResultPoints[0].Y + 0.5 * (result.ResultPoints[2].Y - result.ResultPoints[0].Y);
            code_size = new Point((result.ResultPoints[2].X - result.ResultPoints[0].X), (result.ResultPoints[2].Y - result.ResultPoints[0].Y));

            // Must mirror the coordinate here -- the depth frame comes in mirrored.
            center_x = 640 - center_x;

            // Map the color frame onto the depth frame
            DepthImagePixel[] depthPixel = new DepthImagePixel[depthFrame.PixelDataLength];
            depthFrame.CopyDepthImagePixelDataTo(depthPixel);
            DepthImagePoint[] depthImagePoints = new DepthImagePoint[sensor.DepthStream.FramePixelDataLength];
            sensor.CoordinateMapper.MapColorFrameToDepthFrame(sensor.ColorStream.Format, sensor.DepthStream.Format, depthPixel, depthImagePoints);

            // Get the point in the depth frame at the center of the barcode
            // (row-major index into the 640-wide color frame).
            int center_point_color_index = (int)center_y * 640 + (int)center_x;
            DepthImagePoint converted_depth_point = depthImagePoints[center_point_color_index];
            Point p = new Point(converted_depth_point.X, converted_depth_point.Y);
            code_points[code_num] = p;
            Console.WriteLine("Found code " + code_num + " at (" + center_x + ", " + center_y + ") in color coordinates.");
            Console.WriteLine("Translated to (" + p.X + ", " + p.Y + ") in depth coordinates.");
            return (code_num);
        }
    }
    return (-1);
}
/// <summary>
/// Depth-frame handler: keeps the latest frame in the depthFrame field
/// (disposing the previous one), writes the raw pixels to the bitmap, and
/// renders the player depth image.
/// </summary>
void kinectSensor_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    // Release the frame kept from the previous event before taking a new one.
    if (depthFrame != null)
    {
        depthFrame.Dispose();
        depthFrame = null;
    }

    depthFrame = e.OpenDepthImageFrame();
    if (depthFrame == null)
    {
        return;
    }

    depthPixelData = new short[depthFrame.PixelDataLength];
    depthFrame.CopyPixelDataTo(depthPixelData);
    this.depthImageBitMap.WritePixels(this.depthImageBitmapRect, depthPixelData, this.depthImageStride, 0);
    CreatePlayerDepthImage(this.depthFrame, depthPixelData);
}
/// <summary>
/// Routes a depth frame to the active mode: interaction-stream processing
/// when drawing is toggled on, grayscale depth rendering otherwise.
/// Disposes the frame when done.
/// </summary>
private void FuncoesProfundidade(DepthImageFrame quadro, byte[] bytesImagem, int distanciaMaxima)
{
    if (quadro == null || bytesImagem == null)
    {
        return;
    }

    // The frame is owned here: dispose it even if processing throws.
    using (quadro)
    {
        var imagemProfundidade = new DepthImagePixel[quadro.PixelDataLength];
        quadro.CopyDepthImagePixelDataTo(imagemProfundidade);

        if (btnDesenhar.IsChecked)
        {
            // Drawing mode: feed the pixels to the interaction stream.
            fluxoInteracao.ProcessDepth(imagemProfundidade, quadro.Timestamp);
        }
        else if (btnEscalaCinza.IsChecked)
        {
            // Grayscale mode: render depth as intensity up to the max distance.
            ReconhecerProfundidade(bytesImagem, distanciaMaxima, imagemProfundidade);
        }
    }
}
/// <summary>
/// Per-render-tick handler: discovers the Kinect if needed, then pulls and
/// processes the next color/depth frame pair.
/// </summary>
private void CompositionTarget_Rendering(object sender, EventArgs e)
{
    DiscoverKinect();

    if (this.KinectDevice == null)
    {
        return;
    }

    try
    {
        // Pull the next color and depth frames, waiting up to 1s for each.
        using (ColorImageFrame colorFrame = this.KinectDevice.ColorStream.OpenNextFrame(1000))
        using (DepthImageFrame depthFrame = this.KinectDevice.DepthStream.OpenNextFrame(1000))
        {
            ColorImageProcessing(this.KinectDevice, colorFrame, depthFrame);
        }
    }
    catch (Exception)
    {
        // Deliberately swallowed: the likely cause is the Kinect being unplugged.
    }
}
/// <summary>
/// Maps a 3-dimensional skeleton position to its 2D location in color image
/// space.
/// </summary>
/// <param name="depthFrame">
/// DepthImageFrame used to perform the coordinate space mapping.
/// </param>
/// <param name="skeletonPoint">
/// Skeleton position to be mapped into color image space.
/// </param>
/// <param name="colorFormat">
/// Color image format indicating size of destination coordinate space.
/// </param>
/// <returns>
/// 2D point in color image space, or (0, 0) if the stream stopped abruptly.
/// </returns>
private static Point Get2DPosition(DepthImageFrame depthFrame, SkeletonPoint skeletonPoint, ColorImageFormat colorFormat)
{
    try
    {
#pragma warning disable 0618
        // Two-step mapping: skeleton space -> depth space -> color space.
        DepthImagePoint depthPoint = depthFrame.MapFromSkeletonPoint(skeletonPoint);
        ColorImagePoint colorPoint = depthFrame.MapToColorImagePoint(depthPoint.X, depthPoint.Y, colorFormat);
#pragma warning restore 0618

        // Map back to skeleton.Width & skeleton.Height.
        return new Point(colorPoint.X, colorPoint.Y);
    }
    catch (InvalidOperationException)
    {
        // The stream must have stopped abruptly; degrade gracefully.
        return new Point(0, 0);
    }
}
//DEPTH_END
/// <summary>
/// Maps the skeleton's head and hands through depth space into 640x480
/// color-image coordinates via a CoordinateMapper.
/// </summary>
void GetCameraPoint(Skeleton first, AllFramesReadyEventArgs e)
{
    using (DepthImageFrame depth = e.OpenDepthImageFrame())
    {
        if (depth == null || kinectSensorChooser1.Kinect == null)
        {
            return;
        }

        // NOTE(review): a new CoordinateMapper is allocated on every frame;
        // it could be cached per sensor.
        CoordinateMapper cm = new CoordinateMapper(kinectSensorChooser1.Kinect);

        const DepthImageFormat depthFormat = DepthImageFormat.Resolution640x480Fps30;
        const ColorImageFormat colorFormat = ColorImageFormat.RgbResolution640x480Fps30;

        // Skeleton space -> depth space.
        DepthImagePoint headDepthPoint = cm.MapSkeletonPointToDepthPoint(first.Joints[JointType.Head].Position, depthFormat);
        DepthImagePoint leftDepthPoint = cm.MapSkeletonPointToDepthPoint(first.Joints[JointType.HandLeft].Position, depthFormat);
        DepthImagePoint rightDepthPoint = cm.MapSkeletonPointToDepthPoint(first.Joints[JointType.HandRight].Position, depthFormat);

        // Depth space -> color space.
        ColorImagePoint headColorPoint = cm.MapDepthPointToColorPoint(depthFormat, headDepthPoint, colorFormat);
        ColorImagePoint leftColorPoint = cm.MapDepthPointToColorPoint(depthFormat, leftDepthPoint, colorFormat);
        ColorImagePoint rightColorPoint = cm.MapDepthPointToColorPoint(depthFormat, rightDepthPoint, colorFormat);

        // Marker placement is currently disabled.
        //=> CameraPosition(headImage, headColorPoint);
        //=> CameraPosition(leftEllipse, leftColorPoint);
        //=> CameraPosition(rightEllipse, rightColorPoint);
    }
}
/// <summary>
/// Depth-frame handler: converts raw depth to a 32-bit image and pushes it
/// into an InteropBitmap, reallocating buffers whenever the stream format
/// changes.
/// </summary>
private void DepthImageReady(object sender, DepthImageFrameReadyEventArgs e)
{
    using (DepthImageFrame imageFrame = e.OpenDepthImageFrame())
    {
        if (imageFrame != null)
        {
            // We need to initialize if colorDataNeedsInitialization is true or if the format has changed.
            depthDataNeedsInitialization |= (lastImageFormat != imageFrame.Format);

            if (depthDataNeedsInitialization)
            {
                pixelData = new short[imageFrame.PixelDataLength];
                depthFrame32 = new byte[imageFrame.Width * imageFrame.Height * 4];
            }

            imageFrame.CopyPixelDataTo(pixelData);

            // TooFarDepth marks pixels beyond the sensor's reliable range.
            byte[] convertedDepthBits = convertDepthFrame(pixelData, ((KinectSensor)sender).DepthStream.TooFarDepth);

            //An interopBitmap is a WPF construct that enables resetting the Bits of the image.
            //This is more efficient than doing a BitmapSource.Create call every frame.
            if (depthDataNeedsInitialization)
            {
                imageHelper = new InteropBitmapHelper(imageFrame.Width, imageFrame.Height, convertedDepthBits);
                kinectDepthImage.Source = imageHelper.InteropBitmap;
            }
            else
            {
                imageHelper.UpdateBits(convertedDepthBits);
            }

            // If we succeeded, we can mark the depth data as successfully initialized
            depthDataNeedsInitialization = false;
            lastImageFormat = imageFrame.Format;
            UpdateFrameRate();
        }
    }
}
/// <summary>
/// Event handler for Kinect Sensor's DepthFrameReady event. Copies the frame
/// into a local DepthImagePixel buffer, records its timestamp and dimensions,
/// prepares Kinect Fusion frame resources, and then raises DepthFrameReady.
/// </summary>
/// <param name="sender">object sending the event</param>
/// <param name="e">event arguments</param>
private void OnDepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    bool savedFrame = false;

    // Open depth frame
    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
    {
        if (null != depthFrame)
        {
            this.StatusMessage = string.Empty;

            // Save frame timestamp
            this.FrameTimestamp = depthFrame.Timestamp;

            // Create local depth pixels buffer (only when missing or resized)
            if (null == this.DepthImagePixels || this.DepthImagePixels.Length != depthFrame.PixelDataLength)
            {
                this.depthImagePixels = new DepthImagePixel[depthFrame.PixelDataLength];
            }

            // Copy depth pixels to local buffer
            depthFrame.CopyDepthImagePixelDataTo(this.DepthImagePixels);

            this.depthWidth = depthFrame.Width;
            this.depthHeight = depthFrame.Height;

            // Ensure frame resources are ready for Kinect Fusion
            this.AllocateFrames();
            savedFrame = true;
        }
    }

    // Signal that the depth is ready for processing (outside the using block,
    // after the frame has been released)
    if (savedFrame && null != this.DepthFrameReady)
    {
        this.DepthFrameReady(this, null);
    }
}
/// <summary>
/// Depth-frame handler: on a new frame number, rebuilds buffers, copies depth
/// pixels, and maps them to skeleton-space points under the buffer lock.
/// </summary>
private void DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    // Fix: using guarantees disposal; the original leaked the frame whenever
    // FrameNumber matched the cached index (Dispose sat inside the if).
    using (DepthImageFrame frame = e.OpenDepthImageFrame())
    {
        if (frame == null || frame.FrameNumber == this.frameindex)
        {
            return;
        }

        this.FInvalidate = true;
        this.RebuildBuffer(frame.Format, false);
        this.frameindex = frame.FrameNumber;
        frame.CopyDepthImagePixelDataTo(this.depthpixels);
        lock (m_lock)
        {
            // Map every depth pixel to its skeleton-space position.
            this.runtime.Runtime.CoordinateMapper.MapDepthFrameToSkeletonFrame(frame.Format, this.depthpixels, this.skelpoints);
        }
    }
}
/// <summary>
/// Update player with data from the Kinect sensor: rebuilds the mapping from
/// each skeleton joint to its 2D color-image position.
/// </summary>
/// <param name="skeleton">
/// Skeleton data corresponding to player.
/// </param>
/// <param name="eventArgs">
/// Event arguments corresponding to specified skeleton.
/// </param>
public void Update(Skeleton skeleton, AllFramesReadyEventArgs eventArgs)
{
    Skeleton = skeleton;

    if ((null == skeleton) || (null == eventArgs))
    {
        return;
    }

    using (DepthImageFrame depthImageFrame = eventArgs.OpenDepthImageFrame())
    using (ColorImageFrame colorImageFrame = eventArgs.OpenColorImageFrame())
    {
        // Both frames are needed for the skeleton -> color mapping.
        if ((null == depthImageFrame) || (null == colorImageFrame))
        {
            return;
        }

        JointMapping.Clear();

        try
        {
            // Transform the skeleton coordinates into the color image space.
            foreach (Joint joint in skeleton.Joints)
            {
                JointMapping[joint.JointType] = Get2DPosition(depthImageFrame, joint.Position, colorImageFrame.Format);
            }
        }
        catch (UnauthorizedAccessException)
        {
            // Kinect is no longer available; keep whatever was mapped so far.
        }
    }
}
/// <summary>
/// Frame-update event for the RGB camera, depth camera, and skeleton streams.
/// Displays the RGB image and measures the user's height from the depth and
/// skeleton frames.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
{
    try
    {
        // Get the Kinect instance that raised the event.
        KinectSensor kinect = sender as KinectSensor;
        if (kinect == null)
        {
            return;
        }

        // Get the RGB camera frame data.
        using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
        {
            if (colorFrame != null)
            {
                // Display the RGB camera image.
                byte[] colorPixel = new byte[colorFrame.PixelDataLength];
                colorFrame.CopyPixelDataTo(colorPixel);

                imageRgb.Source = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96, PixelFormats.Bgr32, null, colorPixel, colorFrame.Width * colorFrame.BytesPerPixel);
            }
        }

        // Get the depth camera frame data.
        using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
        {
            // Get the skeleton frame.
            using (SkeletonFrame skeletonFrame = e.OpenSkeletonFrame())
            {
                if ((depthFrame != null) && (skeletonFrame != null))
                {
                    // Measure and display the user's height.
                    HeightMeasure(kinect, depthFrame, skeletonFrame);
                }
            }
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
/// <summary>
/// Builds a binary mask of the depth frame: 255 where a pixel's depth lies
/// between <paramref name="min"/> and <paramref name="max"/> (both in meters),
/// 0 otherwise. (The original doc said 1/0, but the code has always written
/// 255/0.)
/// </summary>
/// <param name="min">Minimum depth, in meters.</param>
/// <param name="max">Maximum depth, in meters.</param>
/// <param name="f">Depth frame to mask.</param>
/// <returns>
/// The rolling-average mask once the frame queue is full; otherwise the
/// current frame's mask.
/// </returns>
public byte[] GetEverythingBetween(double min, double max, DepthImageFrame f)
{
    // Get the raw data from the Kinect with the depth for every pixel.
    short[] rawDepthData = new short[f.PixelDataLength];
    f.CopyPixelDataTo(rawDepthData);

    byte[] pixels = new byte[f.Height * f.Width];

    // Depth values are millimeters, parameters are meters; the conversion
    // was previously recomputed for every pixel — hoisted out of the loop.
    double minMm = min * 1000;
    double maxMm = max * 1000;

    for (int depthIndex = 0; depthIndex < rawDepthData.Length; depthIndex++)
    {
        // Strip the player-index bits to get the depth value.
        int depth = rawDepthData[depthIndex] >> DepthImageFrame.PlayerIndexBitmaskWidth;

        pixels[depthIndex] = (depth < minMm || depth > maxMm) ? (byte)0 : (byte)255;
    }

    byte[] averagedDepthArray = CreateAverageDepthArray(pixels);

    // Only return the averaged mask once the queue holds a full window.
    if (depthQueue.Count == averageFrameCount)
    {
        return averagedDepthArray;
    }
    else
    {
        return pixels;
    }
}
/// <summary>
/// Processes a depth frame: copies the raw pixels, flood-fills to crop the
/// closest object, adapts the sensor's depth range to that object, raises
/// FrameReady with the cropped frame, and dispatches to classification or
/// training depending on the current program mode.
/// </summary>
private void CreateBitMapFromDepthFrame(DepthImageFrame frame)
{
    if (frame != null)
    {
        // get image from frame (a second copy is kept alongside the working
        // buffer; presumably floodFill.Process mutates its input — verify)
        short[] depthPixels = new short[frame.PixelDataLength];
        short[] secondDepthPixels = new short[frame.PixelDataLength];
        frame.CopyPixelDataTo(depthPixels);
        Array.Copy(depthPixels, secondDepthPixels, frame.PixelDataLength);

        // find closest point in depth frame (in millimeters)
        this.rawDepthFrame = new DepthFrame() { Pixels = depthPixels, Height = frame.Height, Width = frame.Width };
        this.croppedFrame = this.floodFill.Process(depthPixels, frame.Height, frame.Width);

        // Switch to near mode when the closest object is under one meter.
        this.sensor.DepthStream.Range = this.floodFill.ClosestDistance < 1000 ? DepthRange.Near : DepthRange.Default;

        // Publish the cropped frame to any subscribers.
        if (this.FrameReady != null)
        {
            this.FrameReady(this, new DepthFrameEventArgs() { Frame = this.croppedFrame });
        }

        // Dispatch on the current program mode.
        switch (this.mode)
        {
            case ProgramMode.Classifying:
                this.Classify();
                break;

            case ProgramMode.Learning:
                this.Train();
                break;
        }
    }
}
/// <summary>
/// Scans a depth frame and accumulates per-player depth statistics for up to
/// six tracked players, then publishes the result.
/// </summary>
/// <param name="depthFrame">Frame supplying the image dimensions.</param>
/// <param name="pixelData">Raw depth pixels (player index in the low bits).</param>
private void CalculatePlayerSize(DepthImageFrame depthFrame, short[] pixelData)
{
    int depth;
    int playerIndex;
    int pixelIndex;
    // Fix: removed the unused local "bytesPerPixel" (assigned, never read).
    PlayerDepthData[] players = new PlayerDepthData[6];

    //First pass - Calculate stats from the pixel data
    for (int row = 0; row < depthFrame.Height; row++)
    {
        for (int col = 0; col < depthFrame.Width; col++)
        {
            pixelIndex = col + (row * depthFrame.Width);
            // High bits carry the distance in millimeters.
            depth = pixelData[pixelIndex] >> DepthImageFrame.PlayerIndexBitmaskWidth;

            if (depth != 0)
            {
                // Low bits carry the 1-based player index (0 = no player).
                playerIndex = (pixelData[pixelIndex] & DepthImageFrame.PlayerIndexBitmask) - 1;

                if (playerIndex > -1)
                {
                    if (players[playerIndex] == null)
                    {
                        players[playerIndex] = new PlayerDepthData(playerIndex + 1, depthFrame.Width, depthFrame.Height);
                    }

                    players[playerIndex].UpdateData(col, row, depth);
                }
            }
        }
    }

    // NOTE(review): PlayerDepthData here appears to be a UI element sharing
    // the type's name — confirm it is not a static member of the type.
    PlayerDepthData.ItemsSource = players;
}
// Pulls the next available depth frame (non-blocking), feeds it to the
// interaction stream, and stages the raw depth data for rendering.
void ProcessDepthFrame()
{
    using (DepthImageFrame dif = this.Chooser.Sensor.DepthStream.OpenNextFrame(0))
    {
        if (dif != null && Chooser != null && Chooser.intStream != null)
        {
            // Feed the interaction stream with the raw depth pixels.
            this.Chooser.intStream.ProcessDepth(dif.GetRawPixelData(), dif.Timestamp);
        }
        if (Chooser != null && dif != null)
        {
            // Lazily allocate the depth buffer, texture, and render target on
            // the first frame, sized to the frame's dimensions.
            if (null == this.depthComponent.depthData)
            {
                this.depthComponent.depthData = new short[dif.PixelDataLength];
                this.depthComponent.depthTexture = new Texture2D(
                    this.GraphicsDevice,
                    dif.Width,
                    dif.Height,
                    false,
                    SurfaceFormat.Bgra4444);
                this.depthComponent.backBuffer = new RenderTarget2D(
                    this.GraphicsDevice,
                    dif.Width,
                    dif.Height,
                    false,
                    SurfaceFormat.Color,
                    DepthFormat.None,
                    this.GraphicsDevice.PresentationParameters.MultiSampleCount,
                    RenderTargetUsage.PreserveContents);
            }
            dif.CopyPixelDataTo(this.depthComponent.depthData);
            // Flag the draw pass to re-render the back buffer from this data.
            this.depthComponent.needToRedrawBackBuffer = true;
        }
    }
}
/// <summary>
/// Maps a skeleton-space point to a 2D position scaled to this control's
/// render size, in either color-image or depth-image coordinates depending on
/// the current ImageType.
/// (Removed the large commented-out run_cmd() dead-code block.)
/// </summary>
/// <param name="depthFrame">Frame used for the coordinate mapping.</param>
/// <param name="skeletonPoint">Skeleton-space point to project.</param>
/// <returns>The scaled 2D point in RenderSize coordinates.</returns>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when ImageType is not Color or Depth.
/// </exception>
private Point GetPosition2DLocation(DepthImageFrame depthFrame, SkeletonPoint skeletonPoint)
{
    DepthImagePoint depthPoint = depthFrame.MapFromSkeletonPoint(skeletonPoint);

    switch (ImageType)
    {
        case ImageType.Color:
            // Depth -> color coordinates, then scale to the render size.
            ColorImagePoint colorPoint = depthFrame.MapToColorImagePoint(depthPoint.X, depthPoint.Y, this.Kinect.ColorStream.Format);

            // map back to skeleton.Width & skeleton.Height
            return (new Point(
                        (int)(this.RenderSize.Width * colorPoint.X / this.Kinect.ColorStream.FrameWidth),
                        (int)(this.RenderSize.Height * colorPoint.Y / this.Kinect.ColorStream.FrameHeight)));

        case ImageType.Depth:
            return (new Point(
                        (int)(this.RenderSize.Width * depthPoint.X / depthFrame.Width),
                        (int)(this.RenderSize.Height * depthPoint.Y / depthFrame.Height)));

        default:
            throw new ArgumentOutOfRangeException("ImageType was a not expected value: " + ImageType.ToString());
    }
}
/// <summary>
/// Process a frame and write it to the bitmap: copies the depth pixels into a
/// reusable buffer, colorizes them, and blits the result into the
/// WriteableBitmap.
/// </summary>
public void WriteToBitmap(DepthImageFrame frame)
{
    // (Re)allocate the buffers when the frame size changes.
    if ((null == this.depthBuffer) || (this.depthBuffer.Length != frame.PixelDataLength))
    {
        this.depthBuffer = new DepthImagePixel[frame.PixelDataLength];
        this.colorizedDepthBuffer = new byte[frame.PixelDataLength * 4];
    }

    if (null == WriteableBitmap || WriteableBitmap.Format != PixelFormats.Bgra32)
    {
        this.CreateWriteableBitmap(frame);
    }

    // Fix: copy into the pre-allocated buffer. The original assigned
    // frame.GetRawPixelData(), which allocates a fresh array every frame and
    // made the buffer allocated above useless.
    frame.CopyDepthImagePixelDataTo(this.depthBuffer);

    this.colorizerStrategy.ColorizeDepthPixels(this, this.depthBuffer, this.colorizedDepthBuffer, frame.Width, frame.Height, (int)(frame.Width / WriteableBitmap.Width));
    this.WriteableBitmap.WritePixels(
        new Int32Rect(0, 0, WriteableBitmap.PixelWidth, WriteableBitmap.PixelHeight),
        this.colorizedDepthBuffer,
        (int)(WriteableBitmap.Width * 4),
        0);

    this.SendDepthImageReady(this.WriteableBitmap);
}
/// <summary>
/// Computes the smallest power-of-two downscale of the frame's dimensions
/// that still covers the target size, stopping early if halving would break
/// even divisibility. Frames already smaller than the target are returned
/// unchanged.
/// </summary>
private static void FindNextLargestFrameDimensions(DepthImageFrame frame, int targetWidth, int targetHeight, out int width, out int height)
{
    width = frame.Width;
    height = frame.Height;

    // Frame already smaller than the target in both dimensions: keep as-is.
    if (frame.Width < targetWidth && frame.Height < targetHeight)
    {
        return;
    }

    // Halve both dimensions while the result still meets the target and both
    // remain exactly divisible by two.
    while (width >= targetWidth * 2 && height >= targetHeight * 2 && width % 2 == 0 && height % 2 == 0)
    {
        width >>= 1;
        height >>= 1;
    }
}
/// <summary>
/// Creates the Bgra32 WriteableBitmap: fits the target dimensions to the
/// frame's aspect ratio, then picks the closest frame-derived size that
/// covers them.
/// </summary>
private void CreateWriteableBitmap(DepthImageFrame frame)
{
    int fittedWidth = this.TargetWidth;
    int fittedHeight = this.TargetHeight;

    // Adjust the requested size so it matches the frame's aspect ratio.
    EnforceAspectRatio((double)frame.Width / frame.Height, ref fittedWidth, ref fittedHeight);

    int finalWidth;
    int finalHeight;
    FindNextLargestFrameDimensions(frame, fittedWidth, fittedHeight, out finalWidth, out finalHeight);

    WriteableBitmap = new WriteableBitmap(finalWidth, finalHeight, 96, 96, PixelFormats.Bgra32, null);
}