/// <summary>
/// Converts depth data into a colour-coded BGR32 image, registered to the
/// RGB camera's coordinate space. Player pixels are white; out-of-range
/// depths are red/green/blue; valid depths are yellow.
/// </summary>
/// <param name="kinect">active Kinect sensor (colour + depth streams)</param>
/// <param name="depthFrame">depth frame to convert</param>
/// <returns>BGR32 pixel buffer, one entry per depth pixel</returns>
private byte[] ConvertDepthColor( KinectSensor kinect, DepthImageFrame depthFrame )
{
    ColorImageStream colorStream = kinect.ColorStream;
    DepthImageStream depthStream = kinect.DepthStream;

    // Read the per-pixel packed depth data (distance + player index).
    short[] depthPixel = new short[depthFrame.PixelDataLength];
    depthFrame.CopyPixelDataTo( depthPixel );

    // Map each depth pixel to the matching RGB-camera coordinate (registration).
    ColorImagePoint[] colorPoint = new ColorImagePoint[depthFrame.PixelDataLength];
    kinect.MapDepthFrameToColorFrame( depthStream.Format, depthPixel,
        colorStream.Format, colorPoint );

    byte[] depthColor = new byte[depthFrame.PixelDataLength * Bgr32BytesPerPixel];
    for ( int index = 0; index < depthPixel.Length; index++ ) {
        // Unpack player index and distance from the raw depth value.
        int player = depthPixel[index] & DepthImageFrame.PlayerIndexBitmask;
        int distance = depthPixel[index] >> DepthImageFrame.PlayerIndexBitmaskWidth;

        // The mapped coordinate can exceed the frame size, so clamp to bounds.
        int x = Math.Min( colorPoint[index].X, colorStream.FrameWidth - 1 );
        int y = Math.Min( colorPoint[index].Y, colorStream.FrameHeight - 1 );
        // NOTE(review): the row stride uses depthFrame.Width while x/y are clamped
        // to the colour stream's bounds — assumes both streams share one
        // resolution; confirm.
        int colorIndex = ((y * depthFrame.Width) + x) * Bgr32BytesPerPixel;

        if ( player != 0 ) {
            // Player pixels: white.
            depthColor[colorIndex] = 255;
            depthColor[colorIndex + 1] = 255;
            depthColor[colorIndex + 2] = 255;
        }
        else {
            // Unsupported range 0-40cm: red (BGR order).
            if ( distance == depthStream.UnknownDepth ) {
                depthColor[colorIndex] = 0;
                depthColor[colorIndex + 1] = 0;
                depthColor[colorIndex + 2] = 255;
            }
            // Too near, 40cm-80cm (default mode): green.
            else if ( distance == depthStream.TooNearDepth ) {
                depthColor[colorIndex] = 0;
                depthColor[colorIndex + 1] = 255;
                depthColor[colorIndex + 2] = 0;
            }
            // Too far, 3m(Near)/4m(Default)-8m: blue.
            else if ( distance == depthStream.TooFarDepth ) {
                depthColor[colorIndex] = 255;
                depthColor[colorIndex + 1] = 0;
                depthColor[colorIndex + 2] = 0;
            }
            // Valid distance: yellow.
            else {
                depthColor[colorIndex] = 0;
                depthColor[colorIndex + 1] = 255;
                depthColor[colorIndex + 2] = 255;
            }
        }
    }
    return depthColor;
}
/// <summary>
/// Renders a depth frame as an inverted grayscale BGR image: nearer pixels
/// come out brighter (the scaled distance byte is bitwise-complemented).
/// </summary>
/// <param name="depthFrame">depth frame to convert</param>
/// <returns>pixel buffer with BytesPerPixel bytes per depth pixel</returns>
private static byte[] ConvertDepthToColor( DepthImageFrame depthFrame )
{
    // Raw packed values (distance + player index) for every pixel.
    short[] packedDepth = depthFrame.ToPixelData();

    byte[] output = new byte[depthFrame.Width * depthFrame.Height * BytesPerPixel];

    for ( int pixel = 0; pixel < packedDepth.Length; pixel++ ) {
        // Unpack distance and player index (the player index is not used here).
        int distance = packedDepth[pixel] >> DepthImageFrame.PlayerIndexBitmaskWidth;
        int player = packedDepth[pixel] & DepthImageFrame.PlayerIndexBitmask;

        int offset = pixel * BytesPerPixel;

        // Scale the 12-bit distance down to a byte, then invert it so that
        // near objects are bright.
        byte shade = (byte)~(byte)KinectUtility.ScaleTo( distance, 0x0FFF, 0xFF );
        output[offset + 0] = shade;
        output[offset + 1] = shade;
        output[offset + 2] = shade;
    }

    return output;
}
/// <summary>
/// Grays out colour-image pixels whose mapped depth is closer than the given
/// maximum distance, using each pixel's brightest channel as the gray level.
/// </summary>
/// <param name="quadro">depth frame (disposed by this method)</param>
/// <param name="bytesImagem">BGR32 colour buffer, modified in place</param>
/// <param name="maxDistancia">distance cutoff in millimetres</param>
private void ReconhecerDistancia(DepthImageFrame quadro, byte[] bytesImagem, int maxDistancia)
{
    // Nothing to do without both a frame and an image buffer.
    if (quadro == null || bytesImagem == null)
        return;

    using (quadro)
    {
        var depthPixels = new DepthImagePixel[quadro.PixelDataLength];
        quadro.CopyDepthImagePixelDataTo(depthPixels);

        // Map every colour pixel (640x480) onto the depth frame.
        var depthPoints = new DepthImagePoint[640 * 480];
        Kinect.CoordinateMapper.MapColorFrameToDepthFrame(
            Kinect.ColorStream.Format, Kinect.DepthStream.Format,
            depthPixels, depthPoints);

        for (int i = 0; i < depthPoints.Length; i++)
        {
            var ponto = depthPoints[i];
            if (ponto.Depth < maxDistancia && KinectSensor.IsKnownPoint(ponto))
            {
                int baseIndex = i * 4;
                // Desaturate the pixel to its brightest channel.
                byte brightest = Math.Max(bytesImagem[baseIndex],
                    Math.Max(bytesImagem[baseIndex + 1], bytesImagem[baseIndex + 2]));
                bytesImagem[baseIndex] = brightest;
                bytesImagem[baseIndex + 1] = brightest;
                bytesImagem[baseIndex + 2] = brightest;
            }
        }
    }
}
/// <summary>
/// Per-frame face-tracking pass: builds a status string of face ids/velocities,
/// saves a cropped snapshot for faces flagged TakePicture, and queues them for
/// Betaface processing when enabled.
/// </summary>
/// <param name="colorImageFrame">current colour frame (used for cropping)</param>
/// <param name="depthImageFrame">current depth frame (unused here)</param>
/// <param name="skeletonFrame">current skeleton frame (unused here)</param>
public void OnAllFramesReady(ColorImageFrame colorImageFrame, DepthImageFrame depthImageFrame, SkeletonFrame skeletonFrame)
{
    string stat = "";

    // Face tracking / timing stuff
    if (FaceList.Count > 0)
    {
        foreach (Face f in FaceList.toList())
        {
            stat += " {" + f.Id + ", " + String.Format("{0:0.00}", f.Velocity) + "}";

            if (f.TakePicture)
            {
                // Grow the face rectangle, clamped to the colour frame bounds.
                int[] coordsExpanded = expandBy(f.Coords, 250, colorImageFrame.Width - 1, colorImageFrame.Height - 1);
                if (coordsExpanded[2] == 0 || coordsExpanded[3] == 0) // width or height can't be 0
                    continue;

                string time = System.DateTime.Now.ToString("hh'-'mm'-'ss", CultureInfo.CurrentUICulture.DateTimeFormat);
                string path = "..\\..\\Images\\CroppedPics\\pic" + f.Id + "--" + time + ".jpg";
                f.Path = path;

                bool success = SaveImage(path, cropImage(colorImageFrame, coordsExpanded), ImageFormat.Jpeg);
                if (success && USE_BETAFACE)
                {
                    bfw.enqueue(f);
                    f.ProcessingBetaface = true;
                }
            }
        }
    }

    // BUG FIX: the status was previously set to `stat` and then immediately
    // overwritten with "", so the per-face info was never visible. Assign once.
    FaceTracking.Status = stat;
}
// Basic depth processing: converts raw packed depth samples into a BGRA
// grayscale image in `result`. Pixels outside [loThreashold, hiThreshold]
// are painted a flat dark gray so the background renders as one region.
// (depthFrame is accepted for interface compatibility but not read.)
public byte[] BasicDepthProcessing(byte[] result, short[] original, DepthImageFrame depthFrame, Int32 loThreashold, Int32 hiThreshold)
{
    for (int src = 0, dst = 0; src < original.Length; src++, dst += bytePerPixel)
    {
        // Strip the player-index bits to obtain the distance.
        Int32 depth = original[src] >> DepthImageFrame.PlayerIndexBitmaskWidth;

        Int32 gray;
        if (depth < loThreashold || depth > hiThreshold)
        {
            // Out of range: flat dark gray. This value must stay larger than
            // minColorByte so the background outline can still be drawn.
            gray = 40;
        }
        else
        {
            // Bucket the hundreds digit into coarse bands of shading.
            int band = (depth / 100) % 10 / 3;
            gray = 255 - (band * 50);
        }

        result[dst] = (byte)gray;
        result[dst + 1] = (byte)gray;
        result[dst + 2] = (byte)gray;
        result[dst + 3] = (byte)255; // fully opaque in both branches
    }

    return result;
}
/// <summary>
/// Serialises one depth frame to the writer: frame-type header, milliseconds
/// since the previous frame, frame metadata, then the length-prefixed raw
/// pixel payload. Updates the reference time as a side effect.
/// </summary>
/// <param name="frame">depth frame to serialise</param>
public void Record(DepthImageFrame frame)
{
    // Frame-type header.
    writer.Write((int)KinectRecordOptions.Depth);

    // Elapsed time since the previously recorded frame.
    var now = DateTime.Now;
    TimeSpan elapsed = now.Subtract(referenceTime);
    referenceTime = now;
    writer.Write((long)elapsed.TotalMilliseconds);

    // Frame metadata.
    writer.Write(frame.BytesPerPixel);
    writer.Write((int)frame.Format);
    writer.Write(frame.Width);
    writer.Write(frame.Height);
    writer.Write(frame.FrameNumber);

    // Raw pixel payload, preceded by its element count.
    short[] pixels = new short[frame.PixelDataLength];
    frame.CopyPixelDataTo(pixels);
    writer.Write(pixels.Length);
    foreach (short sample in pixels)
    {
        writer.Write(sample);
    }
}
/// <summary>
/// Renders the depth frame as grayscale: depths outside the configured
/// [LoDepthThreshold, HiDepthThreshold] window are whited out; in-range
/// depths are scaled linearly from the 12-bit range into 0-255.
/// Writes the result directly into the backing depth image.
/// </summary>
private void CreateBetterShadesOfGray(DepthImageFrame depthFrame, short[] pixelData)
{
    const int bytesPerPixel = 4; // BGR32 layout
    byte[] enhanced = new byte[depthFrame.Width * depthFrame.Height * bytesPerPixel];

    for (int src = 0, dst = 0; src < pixelData.Length; src++, dst += bytesPerPixel)
    {
        // Drop the player-index bits to get the distance.
        int depth = pixelData[src] >> DepthImageFrame.PlayerIndexBitmaskWidth;

        // Out-of-window pixels become white; otherwise scale 0..0xFFF to 0..255.
        int shade = (depth < LoDepthThreshold || depth > HiDepthThreshold)
            ? 0xFF
            : (255 * depth / 0xFFF);

        enhanced[dst] = (byte)shade;
        enhanced[dst + 1] = (byte)shade;
        enhanced[dst + 2] = (byte)shade;
    }

    this._DepthImage.WritePixels(this._DepthImageRect, enhanced, this._DepthImageStride, 0);
}
/// <summary>
/// Estimates the player's height, in image rows, as the vertical extent of
/// pixels carrying a player index.
/// NOTE: assumes a 640-pixel-wide depth format (row = index / 640).
/// </summary>
/// <param name="depthFrame">depth frame to scan</param>
/// <returns>height in rows; 0 when no player pixel is present</returns>
public int GetHeight(DepthImageFrame depthFrame)
{
    short[] rawDepthData = new short[depthFrame.PixelDataLength];
    depthFrame.CopyPixelDataTo(rawDepthData);

    int maxRow = int.MinValue;
    int minRow = int.MaxValue;

    for (int depthIndex = 0; depthIndex < rawDepthData.Length; depthIndex++)
    {
        // Pixels with a non-zero player index belong to a tracked player.
        int player = rawDepthData[depthIndex] & DepthImageFrame.PlayerIndexBitmask;
        if (player > 0)
        {
            int row = depthIndex / 640;
            if (row > maxRow)
            {
                maxRow = row;
            }
            if (row < minRow)
            {
                minRow = row;
            }
        }
    }

    // BUG FIX: the original initialised Max=0 / Min=480 and returned
    // Max - Min = -480 when no player was detected; report 0 instead.
    return maxRow == int.MinValue ? 0 : maxRow - minRow;
}
/// <summary>
/// Colour-codes a depth frame into a BGRA buffer: blue for the nearest band
/// (&lt;= 900mm), green for the middle band (&lt; 2000mm), red beyond, and
/// yellow wherever a player index is present. Alpha is always opaque.
/// </summary>
private byte[] GenerateColoredBytes(DepthImageFrame depthFrame)
{
    short[] raw = new short[depthFrame.PixelDataLength];
    depthFrame.CopyPixelDataTo(raw);

    Byte[] pixels = new byte[depthFrame.Height * depthFrame.Width * 4];

    for (int depthIndex = 0, colorIndex = 0;
         depthIndex < raw.Length && colorIndex < pixels.Length;
         depthIndex++, colorIndex += 4)
    {
        int player = raw[depthIndex] & DepthImageFrame.PlayerIndexBitmask;
        int depth = raw[depthIndex] >> DepthImageFrame.PlayerIndexBitmaskWidth;

        // Pick BGR channels for this pixel (player colour wins over depth band,
        // matching the original's final-overwrite behaviour).
        byte blue, green, red;
        if (player > 0)
        {
            blue = 0; green = 255; red = 255;   // player: yellow
        }
        else if (depth <= 900)
        {
            blue = 255; green = 0; red = 0;     // closest objects: blue
        }
        else if (depth < 2000)
        {
            blue = 0; green = 255; red = 0;     // medium depth: green
        }
        else
        {
            blue = 0; green = 0; red = 255;     // farthest: red
        }

        pixels[colorIndex] = blue;
        pixels[colorIndex + 1] = green;
        pixels[colorIndex + 2] = red;
        pixels[colorIndex + 3] = 255;           // opaque
    }

    return pixels;
}
/// <summary>
/// Records a depth frame through the underlying depth recorder and flushes
/// the output stream.
/// </summary>
/// <param name="frame">depth frame to record</param>
/// <exception cref="InvalidOperationException">
/// Thrown when the recorder is stopped or depth recording was not activated.
/// </exception>
public void Record(DepthImageFrame frame)
{
    // Use the specific invalid-state exception type instead of bare Exception;
    // callers catching Exception still work (backward compatible).
    if (writer == null)
        throw new InvalidOperationException("This recorder is stopped");
    if (depthRecorder == null)
        throw new InvalidOperationException("Depth recording is not activated on this KinectRecorder");

    depthRecorder.Record(frame);
    Flush();
}
/// <summary>
/// Wraps a live DepthImageFrame, snapshotting its descriptive metadata
/// for later replay while keeping a reference to the backing frame.
/// </summary>
/// <param name="frame">source depth frame</param>
public ReplayDepthImageFrame(DepthImageFrame frame)
{
    internalFrame = frame;

    // Copy the frame's metadata up front.
    Width = frame.Width;
    Height = frame.Height;
    Format = frame.Format;
    BytesPerPixel = frame.BytesPerPixel;
    PixelDataLength = frame.PixelDataLength;
    FrameNumber = frame.FrameNumber;
    TimeStamp = frame.Timestamp;
}
/// <summary>
/// Colour-codes a depth frame into a BGR32 buffer by distance band:
/// red up to 900mm, green up to 2000mm, blue beyond.
/// </summary>
/// <param name="depthFrame">depth frame to convert</param>
/// <returns>BGR32 pixel buffer</returns>
private byte[] GenerateColoredBytes(DepthImageFrame depthFrame)
{
    //get raw data from kinect
    short[] rawDepthData = new short[depthFrame.PixelDataLength];
    depthFrame.CopyPixelDataTo(rawDepthData);

    Byte[] pixels = new byte[depthFrame.Width * depthFrame.Height * 4];

    //constants
    const int BlueIndex = 0;
    const int GreenIndex = 1;
    const int RedIndex = 2;

    //loop through distances
    for (int depthIndex = 0, colorIndex = 0;
         depthIndex < rawDepthData.Length && colorIndex < pixels.Length;
         depthIndex++, colorIndex += 4)
    {
        //get depth value (player bits shifted off)
        int depth = rawDepthData[depthIndex] >> DepthImageFrame.PlayerIndexBitmaskWidth;

        //.9M or 2.95'
        if (depth <= 900)
        {
            pixels[colorIndex + BlueIndex] = 0;
            pixels[colorIndex + GreenIndex] = 0;
            pixels[colorIndex + RedIndex] = 255;
        }
        //.9M - 2M or 2.95' - 6.26'
        else if (depth < 2000)
        {
            pixels[colorIndex + BlueIndex] = 0;
            pixels[colorIndex + GreenIndex] = 255;
            pixels[colorIndex + RedIndex] = 0;
        }
        //2M+ or 6.26'+
        else
        {
            // BUG FIX: the original tested `depth > 2000`, so pixels at exactly
            // 2000mm matched no branch and stayed black; `else` closes the gap.
            pixels[colorIndex + BlueIndex] = 255;
            pixels[colorIndex + GreenIndex] = 0;
            pixels[colorIndex + RedIndex] = 0;
        }
    }

    return pixels;
}
// Renders a distance-filtered grayscale view of the depth frame: pixels nearer
// than `dist` (given in inches) become a depth-based intensity; everything
// else gets a fixed dark tint. Returns a Bgra32 BitmapSource.
public BitmapSource ApplyDistanceFilter(DepthImageFrame frame, int dist)
{
    // Convert the cutoff from inches to millimetres (25.4 mm per inch).
    double depthDist = dist * 25.4;

    short[] depthArray = new short[frame.PixelDataLength];
    frame.CopyPixelDataTo(depthArray);

    // Noise-filter the depth data (the moving-average pass is disabled).
    depthArray = applyPixelFilter(depthArray, frame.Width, frame.Height);
    // depthArray = applyWeightedAverageFilter(depthArray, frame.Width, frame.Height);

    byte[] colorFrame = new byte[frame.Width * frame.Height * 4];

    // Rows are independent, so colour them in parallel.
    Parallel.For(0, frame.Height, i =>
    {
        // Process each pixel in the row
        for (int j = 0; j < frame.Width; j++)
        {
            var distanceIndex = j + (i * frame.Width);
            var index = distanceIndex * 4;

            // Shift off the player-index bits to get the distance.
            double depth = depthArray[distanceIndex]>> DepthImageFrame.PlayerIndexBitmaskWidth;

            // Map the distance to an intesity that can be represented in RGB
            if (depth < depthDist && depth > 0)
            {
                // NOTE(review): intensity is computed from the raw packed sample,
                // not the shifted `depth` above — confirm
                // CalculateIntensityFromDistance expects the unshifted value.
                var intensity = CalculateIntensityFromDistance(depthArray[distanceIndex]);
                colorFrame[index + BlueIndex] = intensity;
                colorFrame[index + GreenIndex] = intensity;
                colorFrame[index + RedIndex] = intensity;
                colorFrame[index + AlphaIndex] = 250;
            }
            else
            {
                // Out-of-range pixels: fixed dark blue-gray, mostly opaque.
                colorFrame[index + BlueIndex] = 112;
                colorFrame[index + GreenIndex] = 25;
                colorFrame[index + RedIndex] = 25;
                colorFrame[index + AlphaIndex] = 200;
            }
        }
    });

    //rgba
    // Stride uses Bgr32's BitsPerPixel, which equals Bgra32's (both 32 bits),
    // so the computed stride is correct for the Bgra32 bitmap below.
    BitmapSource bmps = BitmapSource.Create(frame.Width, frame.Height, 96, 96, PixelFormats.Bgra32, null, colorFrame, frame.Width * PixelFormats.Bgr32.BitsPerPixel / 8);
    //rgb
    //BitmapSource bmps = BitmapSource.Create(frame.Width, frame.Height, 96, 96, PixelFormats.Bgr32, null, colorFrame, frame.Width * PixelFormats.Bgr32.BitsPerPixel / 8);
    return bmps;
}
// Synchronises one set of colour/depth/skeleton frames into the reusable
// member buffers and publishes the first tracked skeleton's key joints.
// Side effects: updates IsPauseMode, _colorByte, _depthPixels, _depthShort,
// _depthByte, totalSkeleton, IsSkeletonDetected, _isCreation, UserSkeleton.
public void synchronize( DepthImageFrame depthFrame, ColorImageFrame colorFrame, SkeletonFrame skletonFrame, Boolean isPauseMode )
{
    IsPauseMode = isPauseMode;

    // Snapshot the colour pixels into the reusable buffer.
    colorFrame.CopyPixelDataTo(_colorByte);

    //Console.WriteLine("max depth: "+depthFrame.MaxDepth);
    depthFrame.CopyDepthImagePixelDataTo(_depthPixels);

    // Register the colour frame against the depth frame
    // (fixed 640x480 @ 30fps formats for both streams).
    _sensor.CoordinateMapper.MapColorFrameToDepthFrame(
        ColorImageFormat.RgbResolution640x480Fps30,
        DepthImageFormat.Resolution640x480Fps30,
        _depthPixels,
        _depthPoint );

    for (int i = 0; i < _pixelDepthDataLength; i++)
    {
        _depthShort[i] = (short)_depthPoint[i].Depth;
        // NOTE(review): 0.064 scale with -1 offset squeezes the depth into a
        // byte — presumably tuned for a ~4000mm range (4000*0.064 ≈ 256);
        // confirm the intended range and overflow behaviour.
        _depthByte[i] = (byte)(_depthPoint[i].Depth*0.064-1);
    }

    skletonFrame.CopySkeletonDataTo(totalSkeleton);

    // First tracked skeleton, if any.
    Skeleton firstSkeleton = (from trackskeleton in totalSkeleton
                              where trackskeleton.TrackingState == SkeletonTrackingState.Tracked
                              select trackskeleton).FirstOrDefault();

    _isCreation = true;
    if (firstSkeleton != null)
    {
        // Only publish joints when the spine itself is reliably tracked.
        if (firstSkeleton.Joints[JointType.Spine].TrackingState == JointTrackingState.Tracked)
        {
            IsSkeletonDetected = true;
            UserSkeleton[SkeletonDataType.RIGHT_HAND] =
                ScalePosition(firstSkeleton.Joints[JointType.HandRight].Position);
            UserSkeleton[SkeletonDataType.LEFT_HAND] =
                ScalePosition(firstSkeleton.Joints[JointType.HandLeft].Position);
            UserSkeleton[SkeletonDataType.SPINE] =
                ScalePosition(firstSkeleton.Joints[JointType.Spine].Position);
            return;
        }
    }

    // No usable skeleton this frame.
    IsSkeletonDetected = false;
    _isCreation = false;
}
/// <summary>
/// Highlights in green the pixels whose depth lies both inside the sensor's
/// usable range (850-4000mm) and inside the caller's (minRange, maxRange]
/// window; all other pixels stay black.
/// </summary>
/// <param name="depthFrame">depth frame to convert</param>
/// <param name="minRange">exclusive lower bound of the highlight window (mm)</param>
/// <param name="maxRange">inclusive upper bound of the highlight window (mm)</param>
/// <returns>BGR32 pixel buffer (4 bytes per pixel, last byte unused)</returns>
private static Byte[] GenerateColoredBytes(DepthImageFrame depthFrame, int minRange, int maxRange)
{
    // Raw packed depth values for every pixel.
    short[] rawDepthData = new short[depthFrame.PixelDataLength];
    depthFrame.CopyPixelDataTo(rawDepthData);

    // New buffers default to zero, i.e. black — only green needs writing.
    Byte[] pixels = new Byte[depthFrame.Height * depthFrame.Width * 4];

    const int GreenIndex = 1;

    for (int depthIndex = 0, colorIndex = 0;
         depthIndex < rawDepthData.Length && colorIndex < pixels.Length;
         depthIndex++, colorIndex += 4)
    {
        int depth = rawDepthData[depthIndex] >> DepthImageFrame.PlayerIndexBitmaskWidth;

        // Equivalent to the original's negated out-of-range checks:
        // !(d < 850 || d > 4000) && !(d <= minRange || d > maxRange).
        bool inSensorRange = depth >= 850 && depth <= 4000;
        bool inUserWindow = depth > minRange && depth <= maxRange;
        if (inSensorRange && inUserWindow)
        {
            pixels[colorIndex + GreenIndex] = 255;
        }
    }

    return pixels;
}
/// <summary>
/// Colour-codes a depth frame: blue up to 900mm, green up to 2000mm,
/// red beyond; player pixels are overridden with gold.
/// </summary>
/// <param name="frame">depth frame to convert</param>
/// <returns>BGR32 pixel buffer (4 bytes per pixel, last byte unused)</returns>
byte[] GetColoredBytes(DepthImageFrame frame)
{
    var depthData = new short[frame.PixelDataLength];
    frame.CopyPixelDataTo(depthData);

    //4 (Red, Green, Blue, empty byte)
    var pixels = new byte[frame.Height * frame.Width * 4];

    const int blueIndex = 0;
    const int greenIndex = 1;
    const int redIndex = 2;

    //pick a RGB color based on distance
    for (int depthIndex = 0, colorIndex = 0;
         depthIndex < depthData.Length && colorIndex < pixels.Length;
         depthIndex++, colorIndex += 4)
    {
        var player = depthData[depthIndex] & DepthImageFrame.PlayerIndexBitmask;
        var depth = depthData[depthIndex] >> DepthImageFrame.PlayerIndexBitmaskWidth;

        if (depth <= 900)
        {
            pixels[colorIndex + blueIndex] = 255;   // near band: blue
        }
        else if (depth < 2000)
        {
            pixels[colorIndex + greenIndex] = 255;  // middle band: green
        }
        else
        {
            // BUG FIX: the original tested `depth > 2000`, so pixels at exactly
            // 2000mm matched no branch and stayed black; `else` closes the gap.
            pixels[colorIndex + redIndex] = 255;    // far band: red
        }

        // Player pixels override the depth colour.
        if (player > 0)
        {
            pixels[colorIndex + blueIndex] = Colors.Gold.B;
            pixels[colorIndex + greenIndex] = Colors.Gold.G;
            pixels[colorIndex + redIndex] = Colors.Gold.R;
        }
    }

    return pixels;
}
/// <summary>
/// Converts depth data into a colour-coded BGR32 image:
/// red = unknown depth, green = too near, blue = too far,
/// otherwise a yellow shade proportional to the distance.
/// </summary>
/// <param name="kinect">active Kinect sensor</param>
/// <param name="depthFrame">depth frame to convert</param>
/// <returns>BGR32 pixel buffer, one entry per depth pixel</returns>
private byte[] ConvertDepthToColorImage(KinectSensor kinect, DepthImageFrame depthFrame)
{
    DepthImageStream depthStream = kinect.DepthStream;

    // Per-pixel packed depth values.
    short[] packed = new short[depthFrame.PixelDataLength];
    depthFrame.CopyPixelDataTo(packed);

    // Buffer defaults to zero, so only the non-zero channels need writing.
    byte[] image = new byte[depthFrame.PixelDataLength * Bgr32BytesPerPixel];
    for (int pixel = 0; pixel < packed.Length; pixel++)
    {
        int distance = packed[pixel] >> DepthImageFrame.PlayerIndexBitmaskWidth;
        int offset = pixel * Bgr32BytesPerPixel;

        if (distance == depthStream.UnknownDepth)
        {
            // Red: unsupported range (0-40cm, beyond 8m).
            image[offset + 2] = 255;
        }
        else if (distance == depthStream.TooNearDepth)
        {
            // Green: too near, 40-80cm in default mode.
            image[offset + 1] = 255;
        }
        else if (distance == depthStream.TooFarDepth)
        {
            // Blue: too far, 3m (near mode) / 4m (default) - 8m.
            image[offset] = 255;
        }
        else
        {
            // Valid distance: yellow shade scaled by distance.
            byte shade = (byte)(255 * distance / depthStream.TooFarDepth);
            image[offset + 1] = shade;
            image[offset + 2] = shade;
        }
    }

    return image;
}
/// <summary>
/// Runs face tracking against the current frames and positions the overlay
/// rectangle over the detected face; the rectangle is hidden when tracking
/// fails.
/// </summary>
/// <param name="colorFrame">current colour frame</param>
/// <param name="depthFrame">current depth frame</param>
/// <param name="skeleton">skeleton guiding the tracker</param>
private void FaceTracking( ColorImageFrame colorFrame, DepthImageFrame depthFrame, Skeleton skeleton )
{
    var faceFrame = faceTracker.Track( colorFrame.Format, colorFrame.ToPixelData(),
                                       depthFrame.Format, depthFrame.ToPixelData(),
                                       skeleton );

    // Hide the overlay and bail out when no face was tracked.
    if ( !faceFrame.TrackSuccessful ) {
        rectFace.Visibility = System.Windows.Visibility.Hidden;
        return;
    }

    // Move the rectangle over the detected face and show it.
    rectFace.Margin = new Thickness( faceFrame.FaceRect.Left, faceFrame.FaceRect.Top, 0, 0 );
    rectFace.Width = faceFrame.FaceRect.Width;
    rectFace.Height = faceFrame.FaceRect.Height;
    rectFace.Visibility = System.Windows.Visibility.Visible;
}
// Handles a synchronized colour/depth/skeleton update: lazily allocates the
// member buffers on the first callback, snapshots each frame's data into
// them, then runs face tracking on the fresh data.
void Context_AllFramesUpdated(KinectSensor sensor, ColorImageFrame cf, DepthImageFrame df, SkeletonFrame sf)
{
    this.sensor = sensor;

    // First callback: size the buffers from the incoming frames.
    if (colorImage == null)
    {
        colorImage = new byte[cf.PixelDataLength];
        depthImage = new short[df.PixelDataLength];
        skeletonData = new Skeleton[sf.SkeletonArrayLength];
    }

    // Snapshot this frame set into the reusable buffers.
    cf.CopyPixelDataTo(colorImage);
    df.CopyPixelDataTo(depthImage);
    sf.CopySkeletonDataTo(skeletonData);

    TrackFace();
}
/// <summary>
/// Builds a replayable snapshot of a live depth frame: copies out the pixel
/// data and metadata so the replay frame outlives the source frame.
/// </summary>
/// <param name="frame">source depth frame</param>
/// <returns>detached replay frame carrying its own pixel data</returns>
public static ReplayDepthImageFrame FromDepthImageFrame(DepthImageFrame frame)
{
    // Snapshot the pixels before the source frame is disposed.
    var pixelData = new short[frame.PixelDataLength];
    frame.CopyPixelDataTo(pixelData);

    var replay = new ReplayDepthImageFrame();
    replay.FrameNumber = frame.FrameNumber;
    replay.BytesPerPixel = frame.BytesPerPixel;
    replay.PixelDataLength = frame.PixelDataLength;
    replay.Width = frame.Width;
    replay.Height = frame.Height;
    replay.Timestamp = frame.Timestamp;
    replay.PixelData = pixelData;
    return replay;
}
// Broadcasts each depth frame to connected socket clients: serialises the
// frame (bitmask header, format, raw pixel bytes) into a memory stream, then
// sends it length-prefixed to every client, throttled to the requested fps.
void Kinect_DepthFrameReady(object sender, DepthImageFrameReadyEventArgs e)
{
    // Only do the work when at least one client is connected.
    if(ClientList.Count > 0)
    {
        using(DepthImageFrame frame = e.OpenDepthImageFrame())
        {
            this.DepthImageFrame = frame;
            if(frame != null)
            {
                // Rebuild the outgoing packet from scratch.
                _memoryStream.SetLength(0);

                // Header: bitmask constants so clients can unpack player/depth bits.
                binaryWriter.Write(DepthImageFrame.PlayerIndexBitmask);
                binaryWriter.Write(DepthImageFrame.PlayerIndexBitmaskWidth);
                // NOTE(review): stock BinaryWriter has no overload taking a
                // DepthImageFrame — presumably an overload/extension elsewhere
                // serialises frame metadata here; confirm what it writes.
                binaryWriter.Write(frame);
                binaryWriter.Write((int)frame.Format);

                // (Re)allocate the depth buffer when the frame size changes.
                if(_depth == null || _depth.Length != frame.PixelDataLength)
                    _depth = new short[frame.PixelDataLength];
                frame.CopyPixelDataTo(_depth);

                // Re-pack the shorts as raw bytes (2 bytes per sample).
                if(_depthBytes == null || _depthBytes.Length != _depth.Length * 2)
                    _depthBytes = new byte[_depth.Length * 2];
                Buffer.BlockCopy(_depth, 0, _depthBytes, 0, _depthBytes.Length);
                binaryWriter.Write(_depthBytes);

                _frameCount++;
                // Throttle to the requested fps by sending every Nth frame.
                // NOTE(review): GetFps(frame.Format) / _fps is integer division —
                // if _fps exceeds the stream fps this yields 0 and `% 0` throws;
                // confirm _fps is validated upstream.
                if(_fps == -1 || (_frameCount > 0 && (_frameCount % (GetFps(frame.Format) / _fps)) == 0))
                {
                    Parallel.For(0, ClientList.Count, index =>
                    {
                        SocketClient sc = ClientList[index];
                        // Length-prefixed payload per client.
                        byte[] data = _memoryStream.ToArray();
                        sc.Send(BitConverter.GetBytes(data.Length));
                        sc.Send(data);
                    });
                }

                RemoveClients();
            }
        }
    }
}
/// <summary>
/// Colour-codes a depth frame into four distance bands:
/// blue (&lt;= 500mm), green (500-1300mm), red (1300-2000mm),
/// cyan (&gt;= 2000mm). Output is BGR32 (4 bytes per pixel).
/// </summary>
private byte[] Generatecoloredbytes(DepthImageFrame depthframe)
{
    short[] packed = new short[depthframe.PixelDataLength];
    depthframe.CopyPixelDataTo(packed);

    Byte[] pixels = new byte[depthframe.Height * depthframe.Width * 4];

    for (int depthindex = 0, colorindex = 0;
         depthindex < packed.Length && colorindex < pixels.Length;
         depthindex++, colorindex += 4)
    {
        // Player index is unpacked but not used for colouring here.
        int player = packed[depthindex] & DepthImageFrame.PlayerIndexBitmask;
        int depth = packed[depthindex] >> DepthImageFrame.PlayerIndexBitmaskWidth;

        // Choose BGR channels per band (conditions kept in the original order,
        // so exactly 500mm falls into the blue band).
        byte blue = 0, green = 0, red = 0;
        if (depth <= 500)
        {
            blue = 255;                 // nearest band
        }
        else if (depth >= 500 && depth <= 1300)
        {
            green = 255;                // near-middle band
        }
        else if (depth > 1300 && depth < 2000)
        {
            red = 255;                  // far-middle band
        }
        else if (depth >= 2000)
        {
            blue = 255;                 // farthest band (cyan)
            green = 255;
        }

        pixels[colorindex] = blue;
        pixels[colorindex + 1] = green;
        pixels[colorindex + 2] = red;
    }

    return pixels;
}
/// <summary>
/// Shows only the player: pixels where a player index is detected take the
/// RGB camera's colour; everything else stays white (background masking).
/// </summary>
/// <param name="kinect">active Kinect sensor</param>
/// <param name="colorFrame">RGB camera frame</param>
/// <param name="depthFrame">depth camera frame</param>
/// <returns>BGR32 pixel buffer sized like the colour frame</returns>
private byte[] BackgroundMask( KinectSensor kinect, ColorImageFrame colorFrame, DepthImageFrame depthFrame )
{
    ColorImageStream colorStream = kinect.ColorStream;
    DepthImageStream depthStream = kinect.DepthStream;

    // Per-pixel data of the RGB camera.
    byte[] colorPixel = new byte[colorFrame.PixelDataLength];
    colorFrame.CopyPixelDataTo( colorPixel );

    // Per-pixel data of the depth camera.
    short[] depthPixel = new short[depthFrame.PixelDataLength];
    depthFrame.CopyPixelDataTo( depthPixel );

    // Map each depth pixel to the matching RGB-camera coordinate (registration).
    ColorImagePoint[] colorPoint = new ColorImagePoint[depthFrame.PixelDataLength];
    kinect.MapDepthFrameToColorFrame( depthStream.Format, depthPixel,
        colorStream.Format, colorPoint );

    // Output buffer, initialised to white (255,255,255).
    byte[] outputColor = new byte[colorPixel.Length];
    for ( int i = 0; i < outputColor.Length; i += Bgr32BytesPerPixel ) {
        outputColor[i] = 255;
        outputColor[i + 1] = 255;
        outputColor[i + 2] = 255;
    }

    for ( int index = 0; index < depthPixel.Length; index++ ) {
        // Extract the player index from the packed depth value.
        int player = depthPixel[index] & DepthImageFrame.PlayerIndexBitmask;

        // The mapped coordinate can exceed the frame size, so clamp to bounds.
        int x = Math.Min( colorPoint[index].X, colorStream.FrameWidth - 1 );
        int y = Math.Min( colorPoint[index].Y, colorStream.FrameHeight - 1 );
        // NOTE(review): row stride uses depthFrame.Width — assumes the depth and
        // colour streams share one resolution; confirm.
        int colorIndex = ((y * depthFrame.Width) + x) * Bgr32BytesPerPixel;

        // Copy the RGB camera pixel only where a player was detected.
        if ( player != 0 ) {
            outputColor[colorIndex] = colorPixel[colorIndex];
            outputColor[colorIndex + 1] = colorPixel[colorIndex + 1];
            outputColor[colorIndex + 2] = colorPixel[colorIndex + 2];
        }
    }

    return outputColor;
}
/// <summary>
/// Crops a depth frame: keeps rows 0-319 and columns 81-560 of an assumed
/// 640-wide frame, yielding a 480x320 (= 153600 sample) buffer.
/// </summary>
/// <param name="frame">source depth frame (assumed 640 wide)</param>
/// <returns>cropped depth samples in row-major order</returns>
private short[] trimImage(DepthImageFrame frame)
{
    const int srcWidth = 640;   // assumed source width
    const int rowsKept = 320;   // rows 0..319 are retained
    const int colLow = 80;      // exclusive lower column bound
    const int colHigh = 560;    // inclusive upper column bound

    short[] source = new short[frame.PixelDataLength];
    frame.CopyPixelDataTo(source);

    // (colHigh - colLow) * rowsKept = 480 * 320 = 153600 samples.
    short[] trimmed = new short[153600];
    int write = 0;
    for (int read = 0; read < srcWidth * rowsKept; read++)
    {
        int column = read % srcWidth;
        if (column > colLow && column <= colHigh)
        {
            trimmed[write] = source[read];
            write++;
        }
    }

    return trimmed;
}
/// <summary>
/// Renders a depth frame as a grayscale intensity image (BGR32, 4 bytes per
/// pixel): each pixel's three colour channels are set to an intensity derived
/// from its depth.
/// </summary>
/// <param name="depthFrame">incoming depth frame</param>
/// <returns>BGR32 pixel buffer</returns>
private byte[] GenerateColoredBytes(DepthImageFrame depthFrame)
{
    short[] rawDepthData = new short[depthFrame.PixelDataLength];
    depthFrame.CopyPixelDataTo(rawDepthData);

    byte[] pixels = new byte[depthFrame.Width * depthFrame.Height * 4];

    const int BlueIndex = 0;
    const int GreenIndex = 1;
    const int RedIndex = 2;

    for (int depthIndex = 0, colorIndex = 0;
         depthIndex < rawDepthData.Length && colorIndex < pixels.Length;
         ++depthIndex, colorIndex += 4)
    {
        int depth = rawDepthData[depthIndex] >> DepthImageFrame.PlayerIndexBitmaskWidth;

        // BUG FIX: the original first painted each pixel red/green/blue by depth
        // band and then unconditionally overwrote all three channels with the
        // intensity value, making the banding branches dead code. Only the
        // grayscale intensity ever reached the output, so compute just that.
        byte intensity = CalculateIntensityFromeDepth(depth);
        pixels[colorIndex + BlueIndex] = intensity;
        pixels[colorIndex + GreenIndex] = intensity;
        pixels[colorIndex + RedIndex] = intensity;
    }

    return pixels;
}
/// <summary>
/// Converts a depth frame to a 4-bytes-per-pixel colour buffer: unknown
/// depths (-1) render dark brown, known depths render as grayscale intensity,
/// and player pixels are tinted via the skeleton overlay.
/// </summary>
/// <param name="depthFrame">depth frame, may be null</param>
/// <returns>colour buffer, or null when the frame is null</returns>
public static byte[] ConvertDepthFrameToBitmap(DepthImageFrame depthFrame)
{
    if (depthFrame == null)
    {
        return null;
    }

    short[] depthData = new short[depthFrame.PixelDataLength];
    depthFrame.CopyPixelDataTo(depthData);

    // Four colour bytes for each depth pixel.
    Byte[] depthColors = new Byte[depthData.Length * 4];

    for (int colorIndex = 0, depthIndex = 0;
         colorIndex < depthColors.Length;
         colorIndex += 4, depthIndex++)
    {
        int depth = GetDepth(depthData[depthIndex]);

        if (depth == -1)
        {
            // Unknown depth renders as dark brown.
            depthColors[colorIndex + RedIndex] = 66;
            depthColors[colorIndex + GreenIndex] = 66;
            depthColors[colorIndex + BlueIndex] = 33;
        }
        else
        {
            // Map the depth to a 0-255 grayscale intensity.
            var intensity = ImageFrameCommonExtensions.CalculateIntensityFromDepth(depth);
            depthColors[colorIndex + RedIndex] = intensity;
            depthColors[colorIndex + GreenIndex] = intensity;
            depthColors[colorIndex + BlueIndex] = intensity;
        }

        // Tint the pixel when it belongs to a player.
        int player = GetPlayerIndex(depthData[depthIndex]);
        SkeletonOverlay(
            ref depthColors[colorIndex + RedIndex],
            ref depthColors[colorIndex + GreenIndex],
            ref depthColors[colorIndex + BlueIndex],
            player);
    }

    return depthColors;
}
/// <summary>
/// Builds a depth-stream paquet from a frame: snapshots the pixel data,
/// writes the big-endian body size into the paquet header, then assembles it.
/// </summary>
/// <param name="_depthFrame">source depth frame</param>
/// <param name="_idSensor">identifier of the originating sensor</param>
public KServerDepthStreamPaquet(DepthImageFrame _depthFrame, int _idSensor)
{
    depthFrame = _depthFrame;
    idSensor = _idSensor;

    // Copy the raw depth samples out of the frame.
    pxlData = new short[depthFrame.PixelDataLength];
    depthFrame.CopyPixelDataTo(pxlData);

    // Body = pixel payload (2 bytes per sample) + 13 bytes of metadata.
    uint bodySize = (uint)(depthFrame.PixelDataLength * sizeof(short) + 13);
    setBodySize(bodySize);

    // Store the size big-endian at the start of the paquet data.
    byte[] size = BitConverter.GetBytes(bodySize);
    Array.Reverse(size);
    Buffer.BlockCopy(size, 0, data, 0, size.Length);

    /* Builds the paquet */
    build();
}
/// <summary>
/// Composites a BGR32 image by picking, per pixel, from one of three video
/// buffers according to the depth band: globalvideo2 (&lt;= 900mm),
/// globalvideo (&lt; 2000mm), globalvideo3 (otherwise).
/// </summary>
private byte[] GenerateColoredBytes(DepthImageFrame depthFrame)
{
    short[] rawDepthData = new short[depthFrame.PixelDataLength];
    depthFrame.CopyPixelDataTo(rawDepthData);

    Byte[] pixels = new byte[depthFrame.Height * depthFrame.Width * 4];

    const int BlueIndex = 0;
    const int GreenIndex = 1;
    const int RedIndex = 2;

    for (int depthIndex = 0, colorIndex = 0;
         depthIndex < rawDepthData.Length && colorIndex < pixels.Length;
         depthIndex++, colorIndex += 4)
    {
        int depth = rawDepthData[depthIndex] >> DepthImageFrame.PlayerIndexBitmaskWidth;

        // Select the source video buffer for this pixel's depth band,
        // then copy its BGR channels straight through.
        byte[] source;
        if (depth <= 900)
        {
            source = globalvideo2;      // near band
        }
        else if (depth < 2000)
        {
            source = globalvideo;       // middle band
        }
        else
        {
            source = globalvideo3;      // far band
        }

        pixels[colorIndex + BlueIndex] = source[colorIndex + BlueIndex];
        pixels[colorIndex + GreenIndex] = source[colorIndex + GreenIndex];
        pixels[colorIndex + RedIndex] = source[colorIndex + RedIndex];
    }

    return pixels;
}
// Applies noise removal to a depth frame — (1) pixel filtering and
// (2) a weighted moving average for flicker reduction (currently disabled) —
// then renders the result as a Bgr32 BitmapSource.
public BitmapSource applyBothFilters(DepthImageFrame image)
{
    int width = image.Width;
    int height = image.Height;

    // Flatten the frame into one depth value per pixel so the filters can
    // work on a simple array.
    short[] depthArray = new short[image.PixelDataLength];
    image.CopyPixelDataTo(depthArray);

    depthArray = applyPixelFilter(depthArray, width, height);
    // depthArray = applyWeightedAverageFilter(depthArray, width, height);

    // Convert the processed depth data into colour channels for rendering.
    byte[] colorBytes = CreateColorBytesFromDepthArray(depthArray, width, height);

    return BitmapSource.Create(width, height, 96, 96, PixelFormats.Bgr32, null,
        colorBytes, width * PixelFormats.Bgr32.BitsPerPixel / 8);
}
/// <summary>
/// Colour-codes a depth frame into a BGR32 buffer by distance band:
/// blue up to 900mm, green up to 2000mm, red beyond.
/// </summary>
/// <param name="depthFrame">depth frame to convert</param>
/// <returns>BGR32 pixel buffer (4 bytes per pixel, last byte unused)</returns>
private byte[] GenerateColoredBytes(DepthImageFrame depthFrame)
{
    // Get raw depth data
    short[] raw = new short[depthFrame.PixelDataLength];
    depthFrame.CopyPixelDataTo(raw);

    // Create byte array with size h*w*4 (for bgr_)
    Byte[] pixels = new byte[depthFrame.Height * depthFrame.Width * 4];

    const int BlueIndex = 0;
    const int GreenIndex = 1;
    const int RedIndex = 2;

    // Loop through all distances
    for (int depthIndex = 0, colorIndex = 0;
         depthIndex < raw.Length && colorIndex < pixels.Length;
         depthIndex++, colorIndex += 4)
    {
        // Get depth (player bits shifted off)
        int depth = raw[depthIndex] >> DepthImageFrame.PlayerIndexBitmaskWidth;

        // Set colors based on depth
        if (depth <= 900)
        {
            pixels[colorIndex + BlueIndex] = 255;
            pixels[colorIndex + GreenIndex] = 0;
            pixels[colorIndex + RedIndex] = 0;
        }
        else if (depth < 2000)
        {
            pixels[colorIndex + BlueIndex] = 0;
            pixels[colorIndex + GreenIndex] = 255;
            pixels[colorIndex + RedIndex] = 0;
        }
        else
        {
            // BUG FIX: the original tested `depth > 2000`, so pixels at exactly
            // 2000mm matched no branch and stayed black; `else` closes the gap.
            pixels[colorIndex + BlueIndex] = 0;
            pixels[colorIndex + GreenIndex] = 0;
            pixels[colorIndex + RedIndex] = 255;
        }
    }

    return pixels;
}
/// <summary>
/// Extension forwarding to <see cref="ImageFrameCommonExtensions.GetDistance"/>:
/// returns the distance at pixel (x, y) of the depth frame.
/// </summary>
public static int GetDistance(this DepthImageFrame image, int x, int y)
    => ImageFrameCommonExtensions.GetDistance(image, x, y);
/// <summary>
/// Extension forwarding to <see cref="ImageFrameCommonExtensions.ToDepthArray"/>:
/// flattens the depth frame into a per-pixel short array.
/// </summary>
public static short[] ToDepthArray(this DepthImageFrame image)
    => ImageFrameCommonExtensions.ToDepthArray(image);