// Listing 2-11
/// <summary>
/// Finds the first connected Kinect sensor, enables its color stream, starts
/// streaming, and prepares the WriteableBitmap used to display color frames.
/// </summary>
private void DiscoverKinectSensor()
{
    // A previously discovered sensor that lost its connection is discarded.
    if (this._Kinect != null && this._Kinect.Status != KinectStatus.Connected)
    {
        this._Kinect = null;
    }

    if (this._Kinect != null)
    {
        return;
    }

    // Pick the first sensor currently reporting Connected.
    this._Kinect = KinectSensor.KinectSensors.FirstOrDefault(x => x.Status == KinectStatus.Connected);
    if (this._Kinect == null)
    {
        return;
    }

    this._Kinect.ColorStream.Enable();
    this._Kinect.Start();

    ColorImageStream stream = this._Kinect.ColorStream;
    this._ColorImageBitmap = new WriteableBitmap(stream.FrameWidth, stream.FrameHeight, 96, 96, PixelFormats.Bgr32, null);
    this._ColorImageBitmapRect = new Int32Rect(0, 0, stream.FrameWidth, stream.FrameHeight);
    this._ColorImageStride = stream.FrameWidth * stream.FrameBytesPerPixel;
    this.ColorImageElement.Source = this._ColorImageBitmap;
    this._ColorImagePixelData = new byte[stream.FramePixelDataLength];
}
/// <summary>
/// Window-loaded handler: enables the color and depth streams with the
/// configured formats, allocates all frame buffers, hooks the frame handler
/// and starts the sensor.
/// </summary>
private void WindowLoaded(object sender, RoutedEventArgs e)
{
    KinectSensor kinect = KinectSensor.KinectSensors[0];

    ColorImageStream clrStream = kinect.ColorStream;
    clrStream.Enable(rgbFormat);

    DepthImageStream depStream = kinect.DepthStream;
    depStream.Enable(depthFormat);

    // BUG FIX: the original called kinect.ColorStream.Enable(
    // ColorImageFormat.RgbResolution640x480Fps30) here a second time,
    // silently overriding the rgbFormat chosen above. Removed.

    pixelBuffer = new byte[clrStream.FramePixelDataLength];
    depthBuffer = new DepthImagePixel[depStream.FramePixelDataLength];
    clrPntBuffer = new ColorImagePoint[depStream.FramePixelDataLength];
    depthMaskBuffer = new byte[clrStream.FramePixelDataLength];

    bmpBuffer = new RenderTargetBitmap(clrStream.FrameWidth, clrStream.FrameHeight, 96, 96, PixelFormats.Default);
    rgbImage.Source = bmpBuffer;

    kinect.AllFramesReady += AllFramesReady;
    textBox1.Clear();
    kinect.Start();
    kinect.ElevationAngle = 0;
}
/// <summary>
/// Window-loaded handler: enables the color, depth and skeleton streams,
/// builds the display bitmaps for color and depth, and starts the sensor.
/// </summary>
void ColorWindow_Loaded(object sender, RoutedEventArgs e)
{
    if (kinect == null)
    {
        return;
    }

    #region Color and depth image initialization
    ColorImageStream colorStream = kinect.ColorStream;
    kinect.ColorStream.Enable();
    _ColorImageBitmap = new WriteableBitmap(colorStream.FrameWidth, colorStream.FrameHeight, 96, 96, PixelFormats.Bgr32, null);
    _ColorImageBitmapRect = new Int32Rect(0, 0, colorStream.FrameWidth, colorStream.FrameHeight);
    _ColorImageStride = colorStream.FrameWidth * colorStream.FrameBytesPerPixel;
    ColorData.Source = _ColorImageBitmap;

    DepthImageStream depthStream = kinect.DepthStream;
    // Default resolution; a lower one (e.g. Resolution80x60Fps30) can be
    // passed to Enable instead.
    kinect.DepthStream.Enable();
    _DepthImageBitmap = new WriteableBitmap(depthStream.FrameWidth, depthStream.FrameHeight, 96, 96, PixelFormats.Gray16, null);
    _DepthImageBitmapRect = new Int32Rect(0, 0, depthStream.FrameWidth, depthStream.FrameHeight);
    _DepthImageStride = depthStream.FrameWidth * depthStream.FrameBytesPerPixel;
    DepthData.Source = _DepthImageBitmap;
    #endregion

    kinect.SkeletonStream.Enable();
    kinect.AllFramesReady += mykinect_AllFramesReady;
    kinect.Start();
}
/// <summary>
/// Aligns the RGB camera image to the depth camera's pixel positions.
/// </summary>
/// <param name="points">Depth-to-color coordinate mapping (one entry per depth pixel).</param>
/// <param name="colorPixels">Raw Bgr32 byte array from the RGB camera.</param>
/// <returns>A Bgr32 byte array re-sampled so each depth pixel carries its matching color.</returns>
private byte[] CoordinateColorImage(ColorImagePoint[] points, byte[] colorPixels)
{
    ColorImageStream colorStream = kinect.ColorStream;

    // Output buffer, initialized with the RGB camera image.
    byte[] outputColor = new byte[colorPixels.Length];
    for (int i = 0; i < outputColor.Length; i += Bgr32BytesPerPixel)
    {
        outputColor[i] = colorPixels[i];
        outputColor[i + 1] = colorPixels[i + 1];
        outputColor[i + 2] = colorPixels[i + 2];
    }

    for (int index = 0; index < depthPixel.Length; index++)
    {
        // The mapped coordinate can exceed the frame bounds, so clamp it.
        int x = Math.Min(points[index].X, colorStream.FrameWidth - 1);
        int y = Math.Min(points[index].Y, colorStream.FrameHeight - 1);

        // BUG FIX: (x, y) are COLOR-space coordinates, so the row stride for
        // indexing colorPixels must be the color frame width. The original
        // used kinect.DepthStream.FrameWidth, which reads the wrong pixel
        // whenever the color and depth streams run at different resolutions.
        int colorIndex = ((y * colorStream.FrameWidth) + x) * Bgr32BytesPerPixel;
        int outputIndex = index * Bgr32BytesPerPixel;

        // NOTE(review): assumes depthPixel.Length * 4 <= outputColor.Length,
        // i.e. the depth frame is no larger than the color frame — confirm.
        outputColor[outputIndex] = colorPixels[colorIndex];
        outputColor[outputIndex + 1] = colorPixels[colorIndex + 1];
        outputColor[outputIndex + 2] = colorPixels[colorIndex + 2];
    }

    return (outputColor);
}
/// <summary>
/// Enables the color and skeleton streams on a newly detected sensor, wires
/// the frame handlers and starts it; on failure, tears the sensor down and
/// shows a (Spanish-language) error message.
/// </summary>
/// <param name="sensor">Detected KinectSensor; ignored when null.</param>
private void InitializeKinectSensor(KinectSensor sensor)
{
    if (sensor == null)
    {
        return;
    }

    ColorImageStream stream = sensor.ColorStream;
    stream.Enable();

    KinectCameraImage = new WriteableBitmap(stream.FrameWidth, stream.FrameHeight, 96, 96, PixelFormats.Bgr32, null);
    _cameraSourceBounds = new Int32Rect(0, 0, stream.FrameWidth, stream.FrameHeight);
    _colorStride = stream.FrameWidth * stream.FrameBytesPerPixel;
    sensor.ColorFrameReady += KinectSensor_ColorFrameReady;

    // Skeleton tracking: let the runtime pick which skeletons to track.
    sensor.SkeletonStream.AppChoosesSkeletons = false;
    sensor.SkeletonStream.Enable();
    _skeletons = new Skeleton[sensor.SkeletonStream.FrameSkeletonArrayLength];
    sensor.SkeletonFrameReady += KinectSensor_SkeletonFrameReady;

    try
    {
        sensor.Start();
    }
    catch (Exception)
    {
        // Start fails when another process owns the device.
        UninitializeKinectSensor(sensor);
        Kinect = null;
        ErrorGridVisibility = Visibility.Visible;
        ErrorGridMessage = "Kinect está siendo utilizado por otro proceso. " + Environment.NewLine
            + "Trata de desconectar y volver a conectar el dispositivo al ordenador." + Environment.NewLine
            + "Asegúrese de que todos los programas que utilizan Kinect se han desactivado.";
    }
}
/// <summary>
/// Window-loaded handler: starts color streaming, creates the speech
/// recognizer and, when available, starts a 4-second "ready" timer.
/// </summary>
void ColorWindow_Loaded(object sender, RoutedEventArgs e)
{
    if (kinect == null)
    {
        return;
    }

    // Color image setup.
    ColorImageStream stream = kinect.ColorStream;
    stream.Enable();
    kinect.ColorFrameReady += myKinect_ColorFrameReady;
    _ColorImageBitmap = new WriteableBitmap(stream.FrameWidth, stream.FrameHeight, 96, 96, PixelFormats.Bgr32, null);
    _ColorImageBitmapRect = new Int32Rect(0, 0, stream.FrameWidth, stream.FrameHeight);
    _ColorImageStride = stream.FrameWidth * stream.FrameBytesPerPixel;
    ColorData.Source = _ColorImageBitmap;

    kinect.Start();

    speechRecognizer = CreateSpeechRecognizer();
    if (speechRecognizer == null)
    {
        return;
    }

    // Fire ReadyTimerTick once the 4-second countdown elapses.
    ready4sTimer = new DispatcherTimer();
    ready4sTimer.Tick += ReadyTimerTick;
    ready4sTimer.Interval = new TimeSpan(0, 0, 4);
    ready4sTimer.Start();
}
/// <summary>
/// Enables ColorStream from newly detected KinectSensor and sets output image.
/// Also enables skeleton tracking; on start failure the sensor is torn down
/// and a (Polish-language) error message is shown.
/// </summary>
/// <param name="sensor">Detected KinectSensor; ignored when null.</param>
private void InitializeKinectSensor(KinectSensor sensor)
{
    if (sensor == null)
    {
        return;
    }

    ColorImageStream colorSource = sensor.ColorStream;
    colorSource.Enable();

    KinectCameraImage = new WriteableBitmap(colorSource.FrameWidth, colorSource.FrameHeight, 96, 96, PixelFormats.Bgr32, null);
    _cameraSourceBounds = new Int32Rect(0, 0, colorSource.FrameWidth, colorSource.FrameHeight);
    _colorStride = colorSource.FrameWidth * colorSource.FrameBytesPerPixel;
    sensor.ColorFrameReady += KinectSensor_ColorFrameReady;

    // Let the runtime choose which skeletons to track.
    sensor.SkeletonStream.AppChoosesSkeletons = false;
    sensor.SkeletonStream.Enable();
    _skeletons = new Skeleton[sensor.SkeletonStream.FrameSkeletonArrayLength];
    sensor.SkeletonFrameReady += KinectSensor_SkeletonFrameReady;

    try
    {
        sensor.Start();
    }
    catch (Exception)
    {
        // Start fails when another process owns the device.
        UninitializeKinectSensor(sensor);
        Kinect = null;
        ErrorGridVisibility = Visibility.Visible;
        ErrorGridMessage = "Kinect jest używany przez inny proces." + Environment.NewLine
            + "Spróbuj odłączyć i ponownie podłączyć urządzenie do komputera." + Environment.NewLine
            + "Upewnij się, że wszystkie programy używajace Kinecta zostały wyłączone.";
    }
}
} //fin CompositionTarget_Rendering()

/// <summary>
/// Polls one color frame (100 ms timeout) from the selected Kinect and paints
/// it into the reusable WriteableBitmap; optionally saves the image when
/// recording is active.
/// </summary>
/// <param name="numKinect">Index of the sensor in the Sensor array.</param>
/// <returns>The bitmap holding the most recent color frame.</returns>
private WriteableBitmap PollColor(int numKinect)
{
    Kinect = Sensor[numKinect];
    if (this.Kinect != null)
    {
        ColorImageStream ColorStream = this.Kinect.ColorStream;

        // PERF FIX: the original allocated a new WriteableBitmap, Int32Rect
        // and pixel buffer on EVERY poll (per-frame GC churn). Allocate once
        // and reuse. NOTE(review): this assumes all polled sensors stream at
        // the same resolution and that callers do not retain the returned
        // bitmap across polls — confirm against the call sites.
        if (this.ColorImagenBitmap == null)
        {
            this.ColorImagenBitmap = new WriteableBitmap(ColorStream.FrameWidth, ColorStream.FrameHeight, 96, 96, PixelFormats.Bgr32, null);
            this.ColorImagenRect = new Int32Rect(0, 0, ColorStream.FrameWidth, ColorStream.FrameHeight);
            this.ColorImagenStride = ColorStream.FrameWidth * ColorStream.FrameBytesPerPixel;
            this.ColorImagenPixeles = new byte[ColorStream.FramePixelDataLength];
        }

        try
        {
            using (ColorImageFrame frame = this.Kinect.ColorStream.OpenNextFrame(100))
            {
                if (frame != null)
                {
                    frame.CopyPixelDataTo(this.ColorImagenPixeles);
                    this.ColorImagenBitmap.WritePixels(this.ColorImagenRect, this.ColorImagenPixeles, this.ColorImagenStride, 0);
                }
            }
        }
        catch
        {
            MessageBox.Show("No se pueden leer los datos del sensor", "Error");
        }
    }

    // Persist the frame when recording is enabled.
    if (grabacion == true)
    {
        GuardaImagenes(numKinect, ColorImagenBitmap);
    }

    return (ColorImagenBitmap);
}//fin PollColor()
/// <summary>
/// Window-loaded handler: enables the color, depth and skeleton streams,
/// builds the display bitmaps for color and depth, and starts the sensor.
/// </summary>
void ColorWindow_Loaded(object sender, RoutedEventArgs e)
{
    if (kinect != null)
    {
        ColorImageStream colorStream = kinect.ColorStream;
        kinect.ColorStream.Enable();
        // BUG FIX: the original used PixelFormats.Bgra32 here, but the
        // default-enabled Kinect color stream delivers Bgr32 data (the 4th
        // byte is not a valid alpha channel), so Bgra32 renders with garbage
        // transparency. Use Bgr32, matching the 4-byte stride computed below.
        _ColorImageBitmap = new WriteableBitmap(colorStream.FrameWidth, colorStream.FrameHeight, 96, 96, PixelFormats.Bgr32, null);
        _ColorImageBitmapRect = new Int32Rect(0, 0, colorStream.FrameWidth, colorStream.FrameHeight);
        _ColorImageStride = colorStream.FrameWidth * colorStream.FrameBytesPerPixel;
        ColorData.Source = _ColorImageBitmap;

        DepthImageStream depthStream = kinect.DepthStream;
        kinect.DepthStream.Enable();
        _DepthImageBitmap = new WriteableBitmap(depthStream.FrameWidth, depthStream.FrameHeight, 96, 96, PixelFormats.Gray16, null);
        _DepthImageBitmapRect = new Int32Rect(0, 0, depthStream.FrameWidth, depthStream.FrameHeight);
        _DepthImageStride = depthStream.FrameWidth * depthStream.FrameBytesPerPixel;
        DepthData.Source = _DepthImageBitmap;

        kinect.SkeletonStream.Enable();
        kinect.AllFramesReady += mykinect_AllFramesReady;
        kinect.Start();
    }
}
/// <summary>
/// Finds the first connected Kinect, starts color streaming and (on the UI
/// thread) prepares the bitmap used to display polled color frames.
/// </summary>
private void DiscoverKinectSensor()
{
    // If this sensor is no longer connected, we need to discover a new one.
    if (this.kinect != null && this.kinect.Status != KinectStatus.Connected)
    {
        this.kinect = null;
    }

    if (this.kinect != null)
    {
        return;
    }

    // Find the first connected sensor.
    this.kinect = KinectSensor.KinectSensors.FirstOrDefault(x => x.Status == KinectStatus.Connected);
    if (this.kinect == null)
    {
        return;
    }

    // Initialize the sensor.
    this.kinect.ColorStream.Enable();
    this.kinect.Start();

    ColorImageStream stream = this.kinect.ColorStream;
    // Bitmap and Source assignment run on the dispatcher thread that owns
    // the image element.
    this.ColorImageElementPolling.Dispatcher.BeginInvoke(new Action(() =>
    {
        this.colorImageBitmap = new WriteableBitmap(stream.FrameWidth, stream.FrameHeight, 96, 96, PixelFormats.Bgr32, null);
        this.colorImageBitmapRect = new Int32Rect(0, 0, stream.FrameWidth, stream.FrameHeight);
        this.colorImageStride = stream.FrameWidth * stream.FrameBytesPerPixel;
        this.ColorImageElementPolling.Source = this.colorImageBitmap;
        this.colorImagePixelData = new byte[stream.FramePixelDataLength];
    }));
}
/// <summary>
/// Enables the color stream and a smoothed skeleton stream on the sensor,
/// wires the frame handlers, starts streaming and allocates the skeleton
/// buffer.
/// </summary>
/// <param name="kinectSensor">Sensor to initialize; ignored when null.</param>
private void InitializeKinectSensor(KinectSensor kinectSensor)
{
    if (kinectSensor != null)
    {
        ColorImageStream colorStream = kinectSensor.ColorStream;
        colorStream.Enable();

        this._ColorImageBitmap = new WriteableBitmap(colorStream.FrameWidth, colorStream.FrameHeight, 96, 96, PixelFormats.Bgr32, null);
        this._ColorImageBitmapRect = new Int32Rect(0, 0, colorStream.FrameWidth, colorStream.FrameHeight);
        this._ColorImageStride = colorStream.FrameWidth * colorStream.FrameBytesPerPixel;
        videoStream.Source = this._ColorImageBitmap;

        // Smoothing parameters reduce skeleton jitter.
        kinectSensor.SkeletonStream.Enable(new TransformSmoothParameters()
        {
            Correction = 0.5f,
            JitterRadius = 0.05f,
            MaxDeviationRadius = 0.04f,
            Smoothing = 0.5f
        });
        kinectSensor.SkeletonFrameReady += Kinect_SkeletonFrameReady;
        kinectSensor.ColorFrameReady += Kinect_ColorFrameReady;
        kinectSensor.Start();

        // BUG FIX: the original sized this buffer from this.Kinect, which may
        // be null (NullReferenceException) or a different sensor while this
        // one is being initialized; size it from the sensor being initialized.
        this.FrameSkeletons = new Skeleton[kinectSensor.SkeletonStream.FrameSkeletonArrayLength];
    }
}
/// <summary>
/// Window-loaded handler: grabs the first Kinect, enables color, depth and
/// skeleton streams at 640x480, allocates the color buffer, creates the
/// coordinate mapper and starts the sensor.
/// </summary>
void ColorWindow_Loaded(object sender, RoutedEventArgs e)
{
    kinect = KinectSensor.KinectSensors[0];
    if (kinect != null)
    {
        kinect.ColorStream.Enable(ColorImageFormat.RgbResolution640x480Fps30);
        kinect.DepthStream.Enable(DepthImageFormat.Resolution640x480Fps30);
        kinect.SkeletonStream.Enable();

        // Size the buffer from the enabled stream instead of the magic
        // constant 640 * 480 * 4 the original hard-coded (also removed an
        // unused ColorImageStream local and dead commented-out audio code).
        Color_pixelData = new byte[kinect.ColorStream.FramePixelDataLength];

        coordinateMapper = new CoordinateMapper(kinect);
        kinect.AllFramesReady += this.All_mykinect_SkeletonFrameReady;
        kinect.Start();
    }
}
/// <summary>
/// Converts depth data into a color-coded Bgr32 image: writes (B,G,R) =
/// (0,0,255) for unknown depth, (0,255,0) for too-near, (255,0,0) for
/// too-far and (0,255,255) for valid distances.
/// </summary>
/// <param name="kinect">Kinect sensor.</param>
/// <param name="depthFrame">Depth frame data.</param>
/// <returns>Bgr32 pixel data visualizing the depth frame.</returns>
private byte[] _ConvertDepthColor(KinectSensor kinect, DepthImageFrame depthFrame)
{
    ColorImageStream colorStream = kinect.ColorStream;
    DepthImageStream depthStream = kinect.DepthStream;

    // Copy the depth camera's per-pixel data out of the frame.
    short[] depthPixel = new short[depthFrame.PixelDataLength];
    depthFrame.CopyPixelDataTo(depthPixel);

    // Map each depth pixel to its RGB-camera coordinate (registration).
    ColorImagePoint[] colorPoint = new ColorImagePoint[depthFrame.PixelDataLength];
    kinect.MapDepthFrameToColorFrame(depthStream.Format, depthPixel, colorStream.Format, colorPoint);

    byte[] depthColor = new byte[depthFrame.PixelDataLength * Bgr32BytesPerPixel];
    int pxLen = depthPixel.Length;
    for (int i = 0; i < pxLen; i++)
    {
        // Strip the player-index bits to obtain the distance portion.
        int distance = depthPixel[i] >> DepthImageFrame.PlayerIndexBitmaskWidth;

        // The mapped coordinate can exceed the frame size, so use the smaller value.
        // NOTE(review): the clamp uses the COLOR frame bounds while the index
        // below uses the DEPTH frame width — this is only safe when both
        // streams run at the same resolution. Confirm if they can differ.
        int x = Math.Min(colorPoint[i].X, colorStream.FrameWidth - 1);
        int y = Math.Min(colorPoint[i].Y, colorStream.FrameHeight - 1);

        int colorIndex = ((y * depthFrame.Width) + x) * Bgr32BytesPerPixel;

        // Unsupported range 0-40cm
        if (distance == depthStream.UnknownDepth)
        {
            depthColor[colorIndex] = 0;
            depthColor[colorIndex + 1] = 0;
            depthColor[colorIndex + 2] = 255;
        }
        // Too near 40cm-80cm (Default)
        else if (distance == depthStream.TooNearDepth)
        {
            depthColor[colorIndex] = 0;
            depthColor[colorIndex + 1] = 255;
            depthColor[colorIndex + 2] = 0;
        }
        // Too far 3m (Near), 4m (Default) - 8m
        else if (distance == depthStream.TooFarDepth)
        {
            depthColor[colorIndex] = 255;
            depthColor[colorIndex + 1] = 0;
            depthColor[colorIndex + 2] = 0;
        }
        // Valid distance data
        else
        {
            depthColor[colorIndex] = 0;
            depthColor[colorIndex + 1] = 255;
            depthColor[colorIndex + 2] = 255;
        }
    }

    return (depthColor);
}
/// <summary>
/// Enables the color stream on the given sensor, hooks the frame-ready
/// handler and starts the sensor.
/// </summary>
/// <param name="kinectSensor">Sensor to initialize; ignored when null.</param>
private void InitializeKinectSensor(KinectSensor kinectSensor)
{
    if (kinectSensor != null)
    {
        // BUG FIX: the original read this.kinect.ColorStream into an unused
        // local, dereferencing the FIELD (possibly null or a different
        // sensor) instead of the parameter being initialized. Operate on the
        // parameter only; the unused local is removed.
        kinectSensor.ColorStream.Enable();
        kinectSensor.ColorFrameReady += new EventHandler<ColorImageFrameReadyEventArgs>(kinectSensor_ColorFrameReady);
        kinectSensor.Start();
    }
}
/// <summary>
/// Creates a WriteableBitmap matching the given color stream, choosing
/// Gray16 for the infrared format and Bgr32 for every other format.
/// </summary>
public static WriteableBitmap CreateWriteableBitmap(ColorImageStream colorStream)
{
    bool isInfrared = colorStream.Format == ColorImageFormat.InfraredResolution640x480Fps30;
    PixelFormat pixelFormat = isInfrared ? PixelFormats.Gray16 : PixelFormats.Bgr32;
    return CreateWriteableBitmap(colorStream, pixelFormat, null);
}
/// <summary>
/// DoReadColorFrame Frame Read Behavior: blocks for the next color frame and
/// copies its metadata and pixel data into the raw-frame container.
/// </summary>
/// <param name="rawFrames">Raw frames</param>
/// <param name="kinectStream">Kinect stream</param>
private void DoReadColorFrame(RawKinectFrames rawFrames, ColorImageStream kinectStream)
{
    // BUG FIX: ColorImageFrame is IDisposable; the original never disposed
    // it, leaking a frame on every read.
    using (ColorImageFrame kinectFrame = kinectStream.OpenNextFrame(AVeryLargeNumberOfMilliseconds))
    {
        if (kinectFrame != null)
        {
            rawFrames.RawColorFrameInfo = new KinectFrameInfo(kinectFrame);
            rawFrames.RawColorFrameData = new byte[kinectFrame.PixelDataLength];
            kinectFrame.CopyPixelDataTo(rawFrames.RawColorFrameData);
        }
    }
}
/// <summary>
/// Creates a WriteableBitmap matching the given color stream: Gray16 when the
/// stream is in infrared mode, otherwise Bgr32.
/// </summary>
public static WriteableBitmap CreateWriteableBitmap(ColorImageStream colorStream)
{
    if (colorStream.Format != ColorImageFormat.InfraredResolution640x480Fps30)
    {
        return CreateWriteableBitmap(colorStream, PixelFormats.Bgr32, null);
    }
    return CreateWriteableBitmap(colorStream, PixelFormats.Gray16, null);
}
/// <summary>
/// Enables or disables the color image stream to mirror the check box state,
/// using the format currently selected in the combo box.
/// </summary>
private void EnableColorImageStreamBasedOnIsChecked(
    CheckBox checkBox, ColorImageStream imageStream, ComboBox colorFormatsValue)
{
    // IsChecked is bool?; "== true" is false for both false and null,
    // exactly matching HasValue && Value.
    if (checkBox.IsChecked == true)
    {
        imageStream.Enable((ColorImageFormat)colorFormatsValue.SelectedItem);
    }
    else
    {
        imageStream.Disable();
    }
}
/// <summary>
/// Enables and starts color streaming on KinectDevice1 and allocates the
/// bitmap, bounds rectangle, stride and pixel buffer used for its frames.
/// </summary>
private void Enable_Kinect1_ColorImage()
{
    KinectDevice1.ColorStream.Enable();
    KinectDevice1.Start();

    ColorImageStream stream = KinectDevice1.ColorStream;
    _ColorImageBitmap1 = new WriteableBitmap(stream.FrameWidth, stream.FrameHeight, 96, 96, PixelFormats.Bgr32, null);
    _ColorImageBitmapRect1 = new Int32Rect(0, 0, stream.FrameWidth, stream.FrameHeight);
    _ColorImageStride1 = stream.FrameWidth * stream.FrameBytesPerPixel;
    _ColorImagePixelData1 = new byte[stream.FramePixelDataLength];
}
/// <summary>
/// Optical camouflage: replaces pixels where a player is detected with the
/// stored background image, hiding the player from the output.
/// </summary>
/// <param name="kinect">Kinect sensor.</param>
/// <param name="colorFrame">Current RGB frame.</param>
/// <param name="depthFrame">Current depth frame.</param>
/// <returns>Bgr32 pixel data with player pixels replaced by background.</returns>
private byte[] OpticalCamouflage(KinectSensor kinect, ColorImageFrame colorFrame, DepthImageFrame depthFrame)
{
    ColorImageStream colorStream = kinect.ColorStream;
    DepthImageStream depthStream = kinect.DepthStream;

    // Copy the per-pixel data from the RGB camera frame.
    byte[] colorPixel = new byte[colorFrame.PixelDataLength];
    colorFrame.CopyPixelDataTo(colorPixel);

    // No background captured yet: save the current frame as the background.
    if (backPixel == null)
    {
        backPixel = new byte[colorFrame.PixelDataLength];
        Array.Copy(colorPixel, backPixel, backPixel.Length);
    }

    // Copy the per-pixel data from the depth camera frame.
    short[] depthPixel = new short[depthFrame.PixelDataLength];
    depthFrame.CopyPixelDataTo(depthPixel);

    // Map each depth pixel to its RGB-camera coordinate (registration).
    ColorImagePoint[] colorPoint = new ColorImagePoint[depthFrame.PixelDataLength];
    kinect.MapDepthFrameToColorFrame(depthStream.Format, depthPixel, colorStream.Format, colorPoint);

    // Output buffer (initialized with the RGB camera image).
    byte[] outputColor = new byte[colorPixel.Length];
    Array.Copy(colorPixel, outputColor, outputColor.Length);

    for (int index = 0; index < depthPixel.Length; index++)
    {
        // Extract the player index bits from the depth value.
        int player = depthPixel[index] & DepthImageFrame.PlayerIndexBitmask;

        // The mapped coordinate can exceed the frame size, so use the smaller value.
        int x = Math.Min(colorPoint[index].X, colorStream.FrameWidth - 1);
        int y = Math.Min(colorPoint[index].Y, colorStream.FrameHeight - 1);
        int colorIndex = ((y * depthFrame.Width) + x) * Bgr32BytesPerPixel;

        // Where a player was detected, use the background image instead.
        if (player != 0)
        {
            outputColor[colorIndex] = backPixel[colorIndex];
            outputColor[colorIndex + 1] = backPixel[colorIndex + 1];
            outputColor[colorIndex + 2] = backPixel[colorIndex + 2];
        }
    }

    return (outputColor);
}
/// <summary>
/// Converts data captured by the Kinect into point-cloud arrays: fills the
/// rgb array from one color frame and the depth array (color-space x/y plus
/// distance) from one depth frame.
/// </summary>
/// <returns>true when both frames were read; false on a timeout/missing frame.</returns>
bool GetPoint()
{
    KinectSensor kinect = KinectSensor.KinectSensors[0];
    ColorImageStream colorStream = kinect.ColorStream;
    DepthImageStream depthStream = kinect.DepthStream;

    // Grab one frame from each of the RGB and depth cameras (100 ms timeout).
    using (ColorImageFrame colorFrame = kinect.ColorStream.OpenNextFrame(100))
    {
        using (DepthImageFrame depthFrame = kinect.DepthStream.OpenNextFrame(100))
        {
            if (colorFrame == null || depthFrame == null)
            {
                return (false);
            }

            // Build the RGB camera data.
            byte[] colorPixel = new byte[colorFrame.PixelDataLength];
            colorFrame.CopyPixelDataTo(colorPixel);

            rgb = new RGB[colorFrame.Width * colorFrame.Height];
            for (int i = 0; i < rgb.Length; i++)
            {
                // 4 bytes per pixel; bytes at +2, +1, +0 are passed as the
                // first, second and third constructor arguments respectively.
                int colorIndex = i * 4;
                rgb[i] = new RGB(colorPixel[colorIndex + 2], colorPixel[colorIndex + 1], colorPixel[colorIndex]);
            }

            // Copy the depth camera's per-pixel data.
            short[] depthPixel = new short[depthFrame.PixelDataLength];
            depthFrame.CopyPixelDataTo(depthPixel);

            // Map each depth pixel to its RGB-camera coordinate (registration).
            ColorImagePoint[] colorPoint = new ColorImagePoint[depthFrame.PixelDataLength];
            kinect.MapDepthFrameToColorFrame(depthStream.Format, depthPixel, colorStream.Format, colorPoint);

            // Build the depth data.
            depth = new Depth[depthFrame.Width * depthFrame.Height];
            for (int i = 0; i < depth.Length; i++)
            {
                // Clamp mapped coordinates that fall outside the color frame.
                int x = Math.Min(colorPoint[i].X, colorStream.FrameWidth - 1);
                int y = Math.Min(colorPoint[i].Y, colorStream.FrameHeight - 1);
                // Strip the player-index bits to obtain the distance portion.
                int distance = depthPixel[i] >> DepthImageFrame.PlayerIndexBitmaskWidth;

                depth[i] = new Depth(x, y, distance);
            }
        }
    }

    return (true);
}
// Kinect sensor initialization: enables the color and skeleton streams,
// allocates frame buffers, wires event handlers, and starts speech
// recognition on the sensor's audio stream.
private void InitKinectSensor(KinectSensor kinect)
{
    // Enable the color stream.
    ColorImageStream clrStream = kinect.ColorStream;
    clrStream.Enable(rgbFormat);

    // Enable the depth stream.
    DepthImageStream depStream = kinect.DepthStream;
    // depStream.Enable(depthFormat);
    // /*TEST_DEPTHFRAME*/

    // Enable the skeleton stream.
    SkeletonStream skelStream = kinect.SkeletonStream;
    skelStream.Enable();

    // Buffer initialization.
    pixelBuffer = new byte[clrStream.FramePixelDataLength];
    if (TEST_DEPTHFLAG)
    {
        depthBuffer = new short[depStream.FramePixelDataLength];
        clrPntBuffer = new ColorImagePoint[depStream.FramePixelDataLength];
        depMaskBuffer = new byte[clrStream.FramePixelDataLength];
    }
    skeletonBuffer = new Skeleton[skelStream.FrameSkeletonArrayLength];
    bmpBuffer = new RenderTargetBitmap(clrStream.FrameWidth, clrStream.FrameHeight, 96, 96, PixelFormats.Default);
    rgbimage.Source = bmpBuffer;

    // Initialize the speech recognition engine.
    speechEngine = InitSpeechEngine();

    // Register event handlers.
    kinect.AllFramesReady += AllFramesReady;
    kinect.AudioSource.SoundSourceAngleChanged += SoundSourceChanged;
    speechEngine.SpeechRecognized += SpeechRecognized;

    // Switch to near mode.
    // NOTE(review): the depth stream Enable above is commented out; setting
    // Range on a never-enabled stream looks suspicious — confirm intended.
    kinect.DepthStream.Range = DepthRange.Near;

    // Switch to seated mode.
    kinect.SkeletonStream.TrackingMode = SkeletonTrackingMode.Seated;

    // Start capturing the audio stream from the Kinect sensor and feed it to
    // the recognizer as 16 kHz / 16-bit / mono PCM, recognizing continuously.
    System.IO.Stream stream = kinect.AudioSource.Start();
    var speechAudioFormat = new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null);
    speechEngine.SetInputToAudioStream(stream, speechAudioFormat);
    speechEngine.RecognizeAsync(RecognizeMode.Multiple);
}
/// <summary>
/// Shows only the player: produces a white image in which only the pixels
/// where a player was detected carry the RGB camera's colors.
/// </summary>
/// <param name="kinect">Kinect sensor.</param>
/// <param name="colorFrame">Current RGB frame.</param>
/// <param name="depthFrame">Current depth frame.</param>
/// <returns>Bgr32 pixel data: white background, player pixels from the camera.</returns>
private byte[] BackgroundMask(KinectSensor kinect, ColorImageFrame colorFrame, DepthImageFrame depthFrame)
{
    ColorImageStream colorStream = kinect.ColorStream;
    DepthImageStream depthStream = kinect.DepthStream;

    // Copy the per-pixel data from the RGB camera frame.
    byte[] colorPixel = new byte[colorFrame.PixelDataLength];
    colorFrame.CopyPixelDataTo(colorPixel);

    // Copy the per-pixel data from the depth camera frame.
    short[] depthPixel = new short[depthFrame.PixelDataLength];
    depthFrame.CopyPixelDataTo(depthPixel);

    // Map each depth pixel to its RGB-camera coordinate (registration).
    ColorImagePoint[] colorPoint = new ColorImagePoint[depthFrame.PixelDataLength];
    kinect.MapDepthFrameToColorFrame(depthStream.Format, depthPixel, colorStream.Format, colorPoint);

    // Output buffer (initialized to white: 255, 255, 255).
    byte[] outputColor = new byte[colorPixel.Length];
    for (int i = 0; i < outputColor.Length; i += Bgr32BytesPerPixel)
    {
        outputColor[i] = 255;
        outputColor[i + 1] = 255;
        outputColor[i + 2] = 255;
    }

    for (int index = 0; index < depthPixel.Length; index++)
    {
        // Extract the player index bits from the depth value.
        int player = depthPixel[index] & DepthImageFrame.PlayerIndexBitmask;

        // The mapped coordinate can exceed the frame size, so use the smaller value.
        int x = Math.Min(colorPoint[index].X, colorStream.FrameWidth - 1);
        int y = Math.Min(colorPoint[index].Y, colorStream.FrameHeight - 1);
        int colorIndex = ((y * depthFrame.Width) + x) * Bgr32BytesPerPixel;

        // Only where a player was detected, use the RGB camera's pixels.
        if (player != 0)
        {
            outputColor[colorIndex] = colorPixel[colorIndex];
            outputColor[colorIndex + 1] = colorPixel[colorIndex + 1];
            outputColor[colorIndex + 2] = colorPixel[colorIndex + 2];
        }
    }

    return (outputColor);
}
/// <summary>
/// Enables the color stream at 1280x960 on the given sensor, builds the
/// backing bitmap for display, hooks the frame handler and starts streaming.
/// </summary>
/// <param name="kinectSensor">Sensor to initialize; ignored when null.</param>
private void InitializeKinectSensor(KinectSensor kinectSensor)
{
    if (kinectSensor == null)
    {
        return;
    }

    ColorImageStream stream = kinectSensor.ColorStream;
    kinectSensor.ColorStream.Enable(ColorImageFormat.RgbResolution1280x960Fps12);

    this.colorImageBitmap = new WriteableBitmap(stream.FrameWidth, stream.FrameHeight, 96, 96, PixelFormats.Bgr32, null);
    this.colorImageBitmapRect = new Int32Rect(0, 0, stream.FrameWidth, stream.FrameHeight);
    this.colorImageStride = stream.FrameWidth * stream.FrameBytesPerPixel;
    ColorImageElement.Source = this.colorImageBitmap;

    kinectSensor.ColorFrameReady += kinectSensor_ColorFrameReady;
    kinectSensor.Start();
}
/// <summary>
/// Window-loaded handler: when at least one Kinect is present, enables its
/// color stream, hooks the frame handler and prepares the display bitmap.
/// </summary>
void MainWindow_Loaded(object sender, RoutedEventArgs e)
{
    if (kinects.Count == 0)
    {
        return;
    }

    ColorImageStream stream = kinects[0].ColorStream;
    stream.Enable();
    kinects[0].ColorFrameReady += Kinect_ColorFrameReady;

    _ColorImageBitmap = new WriteableBitmap(stream.FrameWidth, stream.FrameHeight, 96, 96, PixelFormats.Bgr32, null);
    _ColorImageBitmapRect = new Int32Rect(0, 0, stream.FrameWidth, stream.FrameHeight);
    _ColorImageStride = stream.FrameWidth * stream.FrameBytesPerPixel;
    ColorData.Source = _ColorImageBitmap;
}
/// <summary>
/// Finds the first connected Kinect, enables its color and depth streams,
/// and prepares the display bitmaps on their owning dispatcher threads —
/// unless lowResource is set, in which case only raw pixel buffers are
/// allocated.
/// </summary>
private void DiscoverKinectSensor()
{
    // Discard a cached sensor that is no longer connected.
    if (this._Kinect != null && this._Kinect.Status != KinectStatus.Connected)
    {
        this._Kinect = null;
    }

    if (this._Kinect == null)
    {
        // NOTE(review): this assigns the Kinect PROPERTY while every other
        // access in this method uses the _Kinect field — this only works if
        // the property setter writes _Kinect. Confirm against the setter.
        this.Kinect = KinectSensor.KinectSensors.FirstOrDefault(x => x.Status == KinectStatus.Connected);
        if (this._Kinect != null)
        {
            this._Kinect.ColorStream.Enable();
            this._Kinect.Start();
            ColorImageStream colorStream = this._Kinect.ColorStream;
            DepthImageStream depthStream = this._Kinect.DepthStream;
            this._Kinect.DepthStream.Enable();
            if (!lowResource)
            {
                // Bitmap creation is marshaled to the UI thread owning the element.
                this.ColorImageElement.Dispatcher.BeginInvoke(new Action(() =>
                {
                    this._ColorImageBitmap = new WriteableBitmap(colorStream.FrameWidth, colorStream.FrameHeight, 96, 96, PixelFormats.Bgr32, null);
                    this._ColorImageBitmapRect = new Int32Rect(0, 0, colorStream.FrameWidth, colorStream.FrameHeight);
                    this._ColorImageStride = colorStream.FrameWidth * colorStream.FrameBytesPerPixel;
                    ColorImageElement.Source = this._ColorImageBitmap;
                    this._ColorImagePixelData = new byte[colorStream.FramePixelDataLength];
                }));
                this.DepthImageModified.Dispatcher.BeginInvoke(new Action(() =>
                {
                    this._DepthImageBitmap = new WriteableBitmap(depthStream.FrameWidth, depthStream.FrameHeight, 96, 96, PixelFormats.Gray16, null);
                    this._DepthImageBitmapRect = new Int32Rect(0, 0, depthStream.FrameWidth, depthStream.FrameHeight);
                    this._DepthImageStride = depthStream.FrameWidth * depthStream.FrameBytesPerPixel;
                    this._DepthImagePixelData = new short[depthStream.FramePixelDataLength];
                }));
            }
            else
            {
                // Low-resource mode: no display bitmaps, only raw buffers.
                this._ColorImagePixelData = new byte[colorStream.FramePixelDataLength];
                this._DepthImagePixelData = new short[depthStream.FramePixelDataLength];
            }
        }
    }
}
/// <summary>
/// Window-loaded handler: enables the color stream, prepares the display
/// bitmap, hooks the frame handler and starts the sensor.
/// </summary>
void ColorWindow_Loaded(object sender, RoutedEventArgs e)
{
    if (kinect == null)
    {
        return;
    }

    ColorImageStream stream = kinect.ColorStream;
    stream.Enable();
    kinect.ColorFrameReady += myKinect_ColorFrameReady;

    _ColorImageBitmap = new WriteableBitmap(stream.FrameWidth, stream.FrameHeight, 96, 96, PixelFormats.Bgr32, null);
    _ColorImageBitmapRect = new Int32Rect(0, 0, stream.FrameWidth, stream.FrameHeight);
    _ColorImageStride = stream.FrameWidth * stream.FrameBytesPerPixel;
    ColorData.Source = _ColorImageBitmap;

    kinect.Start();
}
/// <summary>
/// Enables the sensor's color stream, allocates the display bitmap and pixel
/// buffer, hooks the frame handler and starts the sensor.
/// </summary>
/// <param name="sensor">Sensor to initialize; ignored when null.</param>
private void InitializeKinectSensor(KinectSensor sensor)
{
    if (sensor == null)
    {
        return;
    }

    ColorImageStream stream = sensor.ColorStream;
    stream.Enable();

    /* Added in Listing 2-5 */
    this._ColorImageBitmap = new WriteableBitmap(stream.FrameWidth, stream.FrameHeight, 96, 96, PixelFormats.Bgr32, null);
    this._ColorImageBitmapRect = new Int32Rect(0, 0, stream.FrameWidth, stream.FrameHeight);
    this._ColorImageStride = stream.FrameWidth * stream.FrameBytesPerPixel;
    this.ColorImageElement.Source = this._ColorImageBitmap;
    this._ColorImagePixelData = new byte[stream.FramePixelDataLength];
    /* Added in Listing 2-5 */

    sensor.ColorFrameReady += Kinect_ColorFrameReady;
    sensor.Start();
}
/// <summary>
/// Converts depth data into a color image: pixels belonging to enabled
/// players are painted with that player's color; everything else stays at
/// the buffer's default (zeroed) value.
/// </summary>
/// <param name="kinect">Kinect sensor.</param>
/// <param name="depthFrame">Depth frame to convert.</param>
/// <returns>Bgr32 pixel data highlighting enabled players.</returns>
private byte[] ConvertDepthColor(KinectSensor kinect, DepthImageFrame depthFrame)
{
    ColorImageStream colorStream = kinect.ColorStream;
    DepthImageStream depthStream = kinect.DepthStream;

    // Copy the depth camera's per-pixel data.
    short[] depthPixel = new short[depthFrame.PixelDataLength];
    depthFrame.CopyPixelDataTo(depthPixel);

    // Map each depth pixel to its RGB-camera coordinate (registration).
    ColorImagePoint[] colorPoint = new ColorImagePoint[depthFrame.PixelDataLength];
    kinect.MapDepthFrameToColorFrame(depthStream.Format, depthPixel, colorStream.Format, colorPoint);

    byte[] depthColor = new byte[depthFrame.PixelDataLength * Bgr32BytesPerPixel];
    for (int index = 0; index < depthPixel.Length; index++)
    {
        // Split the raw depth value into player index and distance.
        // NOTE(review): distance is computed but never used in this method.
        int player = depthPixel[index] & DepthImageFrame.PlayerIndexBitmask;
        int distance = depthPixel[index] >> DepthImageFrame.PlayerIndexBitmaskWidth;

        // The mapped coordinate can exceed the frame size, so use the smaller value.
        int x = Math.Min(colorPoint[index].X, colorStream.FrameWidth - 1);
        int y = Math.Min(colorPoint[index].Y, colorStream.FrameHeight - 1);
        int colorIndex = ((y * depthFrame.Width) + x) * Bgr32BytesPerPixel;

        // Pixel belongs to a player.
        if (player != 0)
        {
            // Color only players that are enabled.
            if (enablePlayer[player])
            {
                depthColor[colorIndex] = playerColor[player].B;
                depthColor[colorIndex + 1] = playerColor[player].G;
                depthColor[colorIndex + 2] = playerColor[player].R;
            }
        }
    }

    return (depthColor);
}
/// <summary>
/// Enables ColorStream from newly detected KinectSensor and sets output
/// image; also enables skeleton tracking and starts the sensor.
/// </summary>
/// <param name="sensor">Detected KinectSensor; ignored when null.</param>
private void InitializeKinectSensor(KinectSensor sensor)
{
    if (sensor == null)
    {
        return;
    }

    ColorImageStream stream = sensor.ColorStream;
    stream.Enable();

    _cameraSourceBitmap = new WriteableBitmap(stream.FrameWidth, stream.FrameHeight, 96, 96, PixelFormats.Bgr32, null);
    _cameraSourceBounds = new Int32Rect(0, 0, stream.FrameWidth, stream.FrameHeight);
    _colorStride = stream.FrameWidth * stream.FrameBytesPerPixel;
    KinectCameraImage.Source = _cameraSourceBitmap;
    sensor.ColorFrameReady += KinectSensor_ColorFrameReady;

    sensor.SkeletonStream.Enable();
    _skeletons = new Skeleton[sensor.SkeletonStream.FrameSkeletonArrayLength];
    sensor.SkeletonFrameReady += KinectSensor_SkeletonFrameReady;

    sensor.Start();
}
/// <summary>
/// Window-loaded handler: prepares the color display bitmap, enables the
/// color and skeleton streams with their handlers, and starts the sensor.
/// </summary>
void ColorWindow_Loaded(object sender, RoutedEventArgs e)
{
    if (kinect == null)
    {
        return;
    }

    // Note: the bitmap is created before Enable(), so it uses the stream's
    // pre-enable frame dimensions, exactly as in the original ordering.
    ColorImageStream stream = kinect.ColorStream;
    _ColorImageBitmap = new WriteableBitmap(stream.FrameWidth, stream.FrameHeight, 96, 96, PixelFormats.Bgr32, null);
    _ColorImageBitmapRect = new Int32Rect(0, 0, stream.FrameWidth, stream.FrameHeight);
    _ColorImageStride = stream.FrameWidth * stream.FrameBytesPerPixel;
    ColorData.Source = _ColorImageBitmap;

    kinect.ColorStream.Enable();
    kinect.ColorFrameReady += myKinect_ColorFrameReady;
    kinect.SkeletonStream.Enable();
    kinect.SkeletonFrameReady += mykinect_SkeletonFrameReady;
    kinect.Start();
}
/// <summary>
/// Grabs the next color frame (50 ms timeout) and streams its pixel data to
/// the destination endpoint, preceded by the matching color command word.
/// Does nothing when no frame arrives or the command has no color mapping.
/// </summary>
static void SendColor(UdpClient client, ColorImageStream stream, Commands command, IPEndPoint destination)
{
    using (var frame = stream.OpenNextFrame(50))
    {
        if (frame == null)
        {
            return;
        }

        var cc = GetColorCommand(command);
        if (!cc.HasValue)
        {
            return;
        }

        // Announce which color payload follows.
        client.Send(ToBytes(cc.Value), sizeof(int), destination);

        var pixelData = new byte[frame.PixelDataLength];
        frame.CopyPixelDataTo(pixelData);
        Send(client, pixelData, destination);
    }
}
/// <summary>
/// Creates a WriteableBitmap sized to the given color stream with the
/// specified pixel format and no palette.
/// </summary>
public static WriteableBitmap CreateWriteableBitmap(ColorImageStream colorStream, PixelFormat pixelFormat)
{
    return CreateWriteableBitmap(colorStream, pixelFormat, null);
}
/// <summary>
/// Creates a WriteableBitmap whose dimensions match the color stream's frame
/// size, using the class-level DpiX/DpiY values and the given format/palette.
/// </summary>
public static WriteableBitmap CreateWriteableBitmap(ColorImageStream colorStream, PixelFormat pixelFormat, BitmapPalette palette)
{
    return new WriteableBitmap(colorStream.FrameWidth, colorStream.FrameHeight, DpiX, DpiY, pixelFormat, palette);
}
/// <summary>
/// Stores the given ColorImageStream for later use by the adapter.
/// </summary>
/// <param name="colorImageStream">Stream to wrap.</param>
public ColorImageStreamAdapter(ColorImageStream colorImageStream)
{
    _colorImageStream = colorImageStream;
}
/// <summary>
/// Displays the person's height: draws a head line, a foot line, a vertical
/// line connecting them, and the height text on canvasMeasure.
/// </summary>
/// <param name="kinect">Kinect sensor used for coordinate mapping.</param>
/// <param name="colorStream">Color stream whose format/frame size drives the mapping.</param>
/// <param name="head">Head joint.</param>
/// <param name="foot">Foot joint.</param>
private void DrawMeasure( KinectSensor kinect, ColorImageStream colorStream, Joint head, Joint foot )
{
    // Treat the Y difference between head and foot as the height
    // (converted from meters to centimeters).
    int height = (int)(Math.Abs( head.Position.Y - foot.Position.Y ) * 100);

    // Convert the head and foot skeleton coordinates to RGB camera coordinates.
    ColorImagePoint headColor = kinect.MapSkeletonPointToColor( head.Position, colorStream.Format );
    ColorImagePoint footColor = kinect.MapSkeletonPointToColor( foot.Position, colorStream.Format );

    // Convert the RGB camera coordinates to display (canvas) coordinates.
    Point headScalePoint = new Point( ScaleTo( headColor.X, colorStream.FrameWidth, canvasMeasure.Width ),
        ScaleTo( headColor.Y, colorStream.FrameHeight, canvasMeasure.Height ) );
    Point footScalePoint = new Point( ScaleTo( footColor.X, colorStream.FrameWidth, canvasMeasure.Width ),
        ScaleTo( footColor.Y, colorStream.FrameHeight, canvasMeasure.Height ) );

    const int lineLength = 50;
    const int thickness = 10;

    canvasMeasure.Children.Clear();

    // Head position (horizontal marker).
    canvasMeasure.Children.Add( new Line()
    {
        Stroke = new SolidColorBrush( Colors.Red ),
        X1 = headScalePoint.X,
        Y1 = headScalePoint.Y,
        X2 = headScalePoint.X + lineLength,
        Y2 = headScalePoint.Y,
        StrokeThickness = thickness,
    } );

    // Foot position (horizontal marker, ending at the vertical line's X).
    canvasMeasure.Children.Add( new Line()
    {
        Stroke = new SolidColorBrush( Colors.Red ),
        X1 = footScalePoint.X,
        Y1 = footScalePoint.Y,
        X2 = headScalePoint.X + lineLength,
        Y2 = footScalePoint.Y,
        StrokeThickness = thickness,
    } );

    // Vertical line connecting head and foot markers.
    canvasMeasure.Children.Add( new Line()
    {
        Stroke = new SolidColorBrush( Colors.Red ),
        X1 = headScalePoint.X + lineLength,
        Y1 = headScalePoint.Y,
        X2 = headScalePoint.X + lineLength,
        Y2 = footScalePoint.Y,
        StrokeThickness = thickness,
    } );

    // Y position for the height label (midway between head and foot).
    double Y = Math.Abs( headScalePoint.Y + footScalePoint.Y ) / 2;
    canvasMeasure.Children.Add( new TextBlock()
    {
        Margin = new Thickness( headScalePoint.X + lineLength, Y, 0, 0 ),
        Text = height.ToString(),
        Height = 36,
        Width = 60,
        FontSize = 24,
        FontWeight = FontWeights.Bold,
        Background = new SolidColorBrush( Colors.White ),
    } );
}