// ==== Example #1 ====
        /// <summary>
        /// Maps each pixel of a depth frame onto the matching position in the
        /// requested ColorImageFormat coordinate space.
        /// </summary>
        /// <param name="depthImageFormat">Format of the source depth data.</param>
        /// <param name="depthPixelData">Raw depth pixels (from DepthImageFrame.CopyPixelDataTo);
        ///             length must equal Width*Height of <paramref name="depthImageFormat"/>.</param>
        /// <param name="colorImageFormat">Target color image format.</param>
        /// <param name="colorCoordinates">Receives one ColorImagePoint per depth pixel;
        ///             length must equal that of <paramref name="depthPixelData"/>.</param>
        public void MapDepthFrameToColorFrame(DepthImageFormat depthImageFormat, short[] depthPixelData,
                                              ColorImageFormat colorImageFormat, ColorImagePoint[] colorCoordinates)
        {
            // The sensor-level mapping API is flagged obsolete; suppress the
            // warning while delegating to it.
#pragma warning disable 618
            _kinectSensor.MapDepthFrameToColorFrame(
                depthImageFormat: depthImageFormat,
                depthPixelData: depthPixelData,
                colorImageFormat: colorImageFormat,
                colorCoordinates: colorCoordinates);
#pragma warning restore 618
        }
        /// <summary>
        /// Renders a depth frame as a BGR32 image, color-coding each pixel by the
        /// validity of its distance reading.
        /// </summary>
        /// <param name="kinect">Kinect sensor supplying the streams.</param>
        /// <param name="depthFrame">Depth frame to visualize.</param>
        /// <returns>BGR32 pixel buffer, one entry per depth pixel.</returns>
        private byte[] _ConvertDepthColor(KinectSensor kinect, DepthImageFrame depthFrame)
        {
            ColorImageStream colorStream = kinect.ColorStream;
            DepthImageStream depthStream = kinect.DepthStream;

            // Raw depth values, one short per pixel.
            short[] rawDepth = new short[depthFrame.PixelDataLength];
            depthFrame.CopyPixelDataTo(rawDepth);

            // Map each depth pixel to its RGB-camera coordinate (registration).
            ColorImagePoint[] mapped = new ColorImagePoint[depthFrame.PixelDataLength];
            kinect.MapDepthFrameToColorFrame(depthStream.Format, rawDepth, colorStream.Format, mapped);

            byte[] output = new byte[depthFrame.PixelDataLength * Bgr32BytesPerPixel];

            for (int i = 0; i < rawDepth.Length; i++)
            {
                // Strip the player-index bits to obtain the distance value.
                int distance = rawDepth[i] >> DepthImageFrame.PlayerIndexBitmaskWidth;

                // Mapped coordinates can exceed the frame bounds; clamp to the edge.
                int x   = Math.Min(mapped[i].X, colorStream.FrameWidth - 1);
                int y   = Math.Min(mapped[i].Y, colorStream.FrameHeight - 1);
                int dst = ((y * depthFrame.Width) + x) * Bgr32BytesPerPixel;

                byte b, g, r;
                if (distance == depthStream.UnknownDepth)
                {
                    // Unsupported range (0-40 cm): red.
                    b = 0; g = 0; r = 255;
                }
                else if (distance == depthStream.TooNearDepth)
                {
                    // Too near (40-80 cm in default mode): green.
                    b = 0; g = 255; r = 0;
                }
                else if (distance == depthStream.TooFarDepth)
                {
                    // Too far (3 m near mode / 4 m default, up to 8 m): blue.
                    b = 255; g = 0; r = 0;
                }
                else
                {
                    // Valid distance reading: yellow.
                    b = 0; g = 255; r = 255;
                }

                output[dst]     = b;
                output[dst + 1] = g;
                output[dst + 2] = r;
            }

            return output;
        }
// ==== Example #3 ====
        /// <summary>
        /// Remote-command wrapper: parses the argument vector, runs the sensor's
        /// depth-to-color mapping, and serializes the resulting points into rData.
        /// Expected layout: args[0]=sensor index, args[1]=size, args[2]=depth format,
        /// args[3 .. 3+tabSize-1]=depth pixel values, args[3+tabSize]=color format.
        /// </summary>
        /// <param name="args">Command arguments as described above.</param>
        /// <returns>KSuccess.QueryOk on success, otherwise the exception number.</returns>
        public byte MapDepthFrameToColorFrame(string[] args)
        {
            try
            {
                int size = int.Parse(args[1]);
                verifArgs(size + 1, args);

                getKinectSensor(int.Parse(args[0]));

                int tabSize = size - 3;

                DepthImageFormat depthImageFormat = (DepthImageFormat)int.Parse(args[2]);
                short[]          depthPixelData   = new short[tabSize];
                for (int i = 0; i < tabSize; i++)
                {
                    // BUG FIX: pixel data starts at args[3]; the previous args[i + 2]
                    // re-read the depth format as the first datum and dropped the
                    // last pixel value.
                    depthPixelData[i] = short.Parse(args[i + 3]);
                }
                ColorImageFormat  colorImageFormat = (ColorImageFormat)int.Parse(args[3 + tabSize]);
                ColorImagePoint[] colorCoordinates = new ColorImagePoint[tabSize];

                Console.WriteLine(colorImageFormat);

                sensor.MapDepthFrameToColorFrame(
                    depthImageFormat,
                    depthPixelData,
                    colorImageFormat,
                    colorCoordinates);

                // Serialize the mapped points as X||Y pairs joined by "||".
                StringBuilder sb = new StringBuilder();

                for (int i = 0; i < tabSize; i++)
                {
                    if (i > 0)
                    {
                        sb.Append("||");
                    }
                    ColorImagePoint p = colorCoordinates[i];
                    sb.Append(p.X);
                    sb.Append("||");
                    sb.Append(p.Y);
                }

                rData = sb.ToString();

                return(KSuccess.QueryOk);
            }
            catch (KActionException e)
            {
                rData = e.Message;
                return(e.exceptionNumber);
            }
        }
// ==== Example #4 ====
        /// <summary>
        /// Maps the current depth frame onto color coordinates, then classifies
        /// every depth pixel against the depth stream's min/max range.
        /// </summary>
        void RangeFilter()
        {
            int minDepth = kinect.DepthStream.MinDepth;
            int maxDepth = kinect.DepthStream.MaxDepth;

            // Register the depth frame against the color frame.
            colorpoints = new ColorImagePoint[depthpixelData.Length];
            kinect.MapDepthFrameToColorFrame(depthframe.Format, depthpixelData, colorframe.Format, colorpoints);

            int pixelCount = depthpixelData.Length;
            for (int index = 0; index < pixelCount; index++)
            {
                PixelInRange(index, minDepth, maxDepth);
            }
        }
// ==== Example #5 ====
        /// <summary>
        /// "Optical camouflage": makes tracked players invisible by painting the
        /// stored background image over every pixel where a player is detected.
        /// The first frame seen is captured as the background.
        /// </summary>
        /// <param name="kinect">Kinect sensor supplying the streams.</param>
        /// <param name="colorFrame">Current RGB frame.</param>
        /// <param name="depthFrame">Current depth frame (carries player indices).</param>
        /// <returns>BGR32 pixel buffer with players replaced by the background.</returns>
        private byte[] OpticalCamouflage(KinectSensor kinect,
                                         ColorImageFrame colorFrame, DepthImageFrame depthFrame)
        {
            ColorImageStream colorStream = kinect.ColorStream;
            DepthImageStream depthStream = kinect.DepthStream;

            // Current RGB camera pixels.
            byte[] rgbPixels = new byte[colorFrame.PixelDataLength];
            colorFrame.CopyPixelDataTo(rgbPixels);

            // No background yet: snapshot this frame as the background.
            if (backPixel == null)
            {
                backPixel = new byte[colorFrame.PixelDataLength];
                Array.Copy(rgbPixels, backPixel, backPixel.Length);
            }

            // Raw depth pixels (player index lives in the low bits).
            short[] rawDepth = new short[depthFrame.PixelDataLength];
            depthFrame.CopyPixelDataTo(rawDepth);

            // Registration: depth coordinate -> RGB coordinate.
            ColorImagePoint[] mapped = new ColorImagePoint[depthFrame.PixelDataLength];
            kinect.MapDepthFrameToColorFrame(depthStream.Format, rawDepth,
                                             colorStream.Format, mapped);

            // Output starts as the live camera image.
            byte[] result = new byte[rgbPixels.Length];
            Array.Copy(rgbPixels, result, result.Length);

            for (int i = 0; i < rawDepth.Length; i++)
            {
                // Zero player index means no person at this pixel: keep the image.
                int player = rawDepth[i] & DepthImageFrame.PlayerIndexBitmask;
                if (player == 0)
                {
                    continue;
                }

                // Mapped coordinates can exceed the frame bounds; clamp to the edge.
                int x   = Math.Min(mapped[i].X, colorStream.FrameWidth - 1);
                int y   = Math.Min(mapped[i].Y, colorStream.FrameHeight - 1);
                int dst = ((y * depthFrame.Width) + x) * Bgr32BytesPerPixel;

                // Overwrite the player with the remembered background.
                result[dst]     = backPixel[dst];
                result[dst + 1] = backPixel[dst + 1];
                result[dst + 2] = backPixel[dst + 2];
            }

            return result;
        }
        /// <summary>
        /// Grabs one color + depth frame pair from the first Kinect and converts
        /// them into the rgb / depth point arrays.
        /// </summary>
        /// <returns>true when both frames were acquired; false otherwise.</returns>
        bool GetPoint()
        {
            KinectSensor     kinect      = KinectSensor.KinectSensors[0];
            ColorImageStream colorStream = kinect.ColorStream;
            DepthImageStream depthStream = kinect.DepthStream;

            // Pull the next RGB and depth frames (100 ms timeout each).
            using (ColorImageFrame colorFrame = kinect.ColorStream.OpenNextFrame(100))
            using (DepthImageFrame depthFrame = kinect.DepthStream.OpenNextFrame(100))
            {
                if (colorFrame == null || depthFrame == null)
                {
                    return false;
                }

                // Unpack the BGR32 camera pixels into RGB structs.
                byte[] rgbRaw = new byte[colorFrame.PixelDataLength];
                colorFrame.CopyPixelDataTo(rgbRaw);

                rgb = new RGB[colorFrame.Width * colorFrame.Height];
                for (int i = 0; i < rgb.Length; i++)
                {
                    int src = i * 4;  // 4 bytes per BGR32 pixel (B, G, R, unused)
                    rgb[i] = new RGB(rgbRaw[src + 2], rgbRaw[src + 1], rgbRaw[src]);
                }

                // Raw depth values, one short per pixel.
                short[] rawDepth = new short[depthFrame.PixelDataLength];
                depthFrame.CopyPixelDataTo(rawDepth);

                // Registration: depth coordinate -> RGB coordinate.
                ColorImagePoint[] mapped = new ColorImagePoint[depthFrame.PixelDataLength];
                kinect.MapDepthFrameToColorFrame(depthStream.Format, rawDepth,
                                                 colorStream.Format, mapped);

                // Build the depth point array (coordinates clamped to frame bounds).
                depth = new Depth[depthFrame.Width * depthFrame.Height];
                for (int i = 0; i < depth.Length; i++)
                {
                    int x        = Math.Min(mapped[i].X, colorStream.FrameWidth - 1);
                    int y        = Math.Min(mapped[i].Y, colorStream.FrameHeight - 1);
                    int distance = rawDepth[i] >> DepthImageFrame.PlayerIndexBitmaskWidth;

                    depth[i] = new Depth(x, y, distance);
                }
            }

            return true;
        }
// ==== Example #7 ====
        /// <summary>
        /// Shows only the tracked players: the output starts all white, and the
        /// RGB camera pixels are copied in wherever a player index is present.
        /// </summary>
        /// <param name="kinect">Kinect sensor supplying the streams.</param>
        /// <param name="colorFrame">Current RGB frame.</param>
        /// <param name="depthFrame">Current depth frame (carries player indices).</param>
        /// <returns>BGR32 pixel buffer: players in color on a white background.</returns>
        private byte[] BackgroundMask(KinectSensor kinect,
                                      ColorImageFrame colorFrame, DepthImageFrame depthFrame)
        {
            ColorImageStream colorStream = kinect.ColorStream;
            DepthImageStream depthStream = kinect.DepthStream;

            // Current RGB camera pixels.
            byte[] rgbPixels = new byte[colorFrame.PixelDataLength];
            colorFrame.CopyPixelDataTo(rgbPixels);

            // Raw depth pixels (player index lives in the low bits).
            short[] rawDepth = new short[depthFrame.PixelDataLength];
            depthFrame.CopyPixelDataTo(rawDepth);

            // Registration: depth coordinate -> RGB coordinate.
            ColorImagePoint[] mapped = new ColorImagePoint[depthFrame.PixelDataLength];
            kinect.MapDepthFrameToColorFrame(depthStream.Format, rawDepth,
                                             colorStream.Format, mapped);

            // Output starts as solid white (255, 255, 255).
            byte[] result = new byte[rgbPixels.Length];
            for (int i = 0; i < result.Length; i += Bgr32BytesPerPixel)
            {
                result[i]     = 255;
                result[i + 1] = 255;
                result[i + 2] = 255;
            }

            for (int i = 0; i < rawDepth.Length; i++)
            {
                // Zero player index means no person at this pixel: stay white.
                int player = rawDepth[i] & DepthImageFrame.PlayerIndexBitmask;
                if (player == 0)
                {
                    continue;
                }

                // Mapped coordinates can exceed the frame bounds; clamp to the edge.
                int x   = Math.Min(mapped[i].X, colorStream.FrameWidth - 1);
                int y   = Math.Min(mapped[i].Y, colorStream.FrameHeight - 1);
                int dst = ((y * depthFrame.Width) + x) * Bgr32BytesPerPixel;

                // Copy the live camera pixel for player areas only.
                result[dst]     = rgbPixels[dst];
                result[dst + 1] = rgbPixels[dst + 1];
                result[dst + 2] = rgbPixels[dst + 2];
            }

            return result;
        }
// ==== Example #8 ====
        //////////////////////////////////////////////////////////////////////////////////////////////////////
        /// <summary>
        /// Maps the current depth frame onto color coordinates, then classifies
        /// every depth pixel against the depth stream's min/max range.
        /// </summary>
        private void RangeFilter()
        {
            int minDepth = sensor.DepthStream.MinDepth;
            int maxDepth = sensor.DepthStream.MaxDepth;

            colorPoints = new ColorImagePoint[depthPixels.Length];
            //CoordinateMapper map = new CoordinateMapper(sensor); // newer API style
            //map.MapDepthFrameToColorFrame(depthFrame.Format, depthPixels, colorFrame.Format, colorPoints);

            sensor.MapDepthFrameToColorFrame(depthFrame.Format, depthPixels, colorFrame.Format, colorPoints);

            int pixelCount = depthPixels.Length;
            for (int index = 0; index < pixelCount; index++)
            {
                PixelInRange(index, minDepth, maxDepth);
            }
        }
        /// <summary>
        /// Renders the depth frame as a BGR32 image in which enabled players are
        /// filled with their assigned color; all other pixels stay black.
        /// </summary>
        /// <param name="kinect">Kinect sensor supplying the streams.</param>
        /// <param name="depthFrame">Depth frame to visualize.</param>
        /// <returns>BGR32 pixel buffer, one entry per depth pixel.</returns>
        private byte[] ConvertDepthColor(KinectSensor kinect, DepthImageFrame depthFrame)
        {
            ColorImageStream colorStream = kinect.ColorStream;
            DepthImageStream depthStream = kinect.DepthStream;

            // Raw depth pixels (player index in the low bits).
            short[] rawDepth = new short[depthFrame.PixelDataLength];
            depthFrame.CopyPixelDataTo(rawDepth);

            // Registration: depth coordinate -> RGB coordinate.
            ColorImagePoint[] mapped = new ColorImagePoint[depthFrame.PixelDataLength];
            kinect.MapDepthFrameToColorFrame(depthStream.Format, rawDepth,
                                             colorStream.Format, mapped);

            byte[] output = new byte[depthFrame.PixelDataLength * Bgr32BytesPerPixel];
            for (int i = 0; i < rawDepth.Length; i++)
            {
                // Extract the player index from the packed depth value.
                int player = rawDepth[i] & DepthImageFrame.PlayerIndexBitmask;

                // Non-player or disabled-player pixels stay zeroed (black).
                if (player == 0 || !enablePlayer[player])
                {
                    continue;
                }

                // Mapped coordinates can exceed the frame bounds; clamp to the edge.
                int x   = Math.Min(mapped[i].X, colorStream.FrameWidth - 1);
                int y   = Math.Min(mapped[i].Y, colorStream.FrameHeight - 1);
                int dst = ((y * depthFrame.Width) + x) * Bgr32BytesPerPixel;

                // Paint the player's assigned color.
                output[dst]     = playerColor[player].B;
                output[dst + 1] = playerColor[player].G;
                output[dst + 2] = playerColor[player].R;
            }

            return output;
        }
// ==== Example #10 ====
        /// <summary>
        /// Frame-update event handler for the RGB and depth cameras. Registers the
        /// depth frame against the color frame, redraws the camera image (on every
        /// third frame only), then runs the processing for the current selection mode.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Provides access to the color and depth frames.</param>
        void kinect_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            try {
                using (ColorImageFrame colorFrame = e.OpenColorImageFrame()) {
                    using (DepthImageFrame depthFrame = e.OpenDepthImageFrame()) {
                        if (depthFrame != null && colorFrame != null && kinect.IsRunning)
                        {
                            // Lazily allocate the reusable buffers on the first frame.
                            if (depthPixel == null)
                            {
                                depthPixel           = new short[depthFrame.PixelDataLength];
                                colorImagePixelPoint = new ColorImagePoint[depthFrame.PixelDataLength];
                            }

                            // Only draw every third frame (reduces processing load).
                            if (depthFrame.FrameNumber % 3 != 0)
                            {
                                return;
                            }

                            depthFrame.CopyPixelDataTo(depthPixel);

                            // Convert depth-frame coordinates into RGB-image coordinates.
                            kinect.MapDepthFrameToColorFrame(kinect.DepthStream.Format, depthPixel,
                                                             kinect.ColorStream.Format, colorImagePixelPoint);

                            // Grab the camera image for drawing.
                            byte[] colorPixel = new byte[colorFrame.PixelDataLength];
                            colorFrame.CopyPixelDataTo(colorPixel);

                            // Re-align the RGB pixels to the depth-image positions.
                            colorPixel = CoordinateColorImage(colorImagePixelPoint, colorPixel);

                            CameraImage.Source = BitmapSource.Create(colorFrame.Width, colorFrame.Height, 96, 96,
                                                                     PixelFormats.Bgr32, null, colorPixel,
                                                                     colorFrame.Width * colorFrame.BytesPerPixel);
                        }
                    }
                }
            }
            catch (Exception ex) {
                MessageBox.Show(ex.Message);
            }

            // Mode-specific processing (runs even on skipped draw frames that did
            // not hit the early return above).
            switch (currentMode)
            {
            case SelectMode.SELECTING:
                // Still choosing a region: keep its on-screen rectangle updated.
                UpdateRectPosition();

                break;

            case SelectMode.SELECTED:
                // Region chosen: check whether it is being touched and update UI.
                Point point = CheckThePointTouchingTheRegion();
                UpdateTouchingPointEllipse(point);
                UpdatePaintCanvas(point);

                break;
            }
        }
// ==== Example #11 ====
        //***** rotateMotorThread() *****//
        // This is where the motor thread starts (we are doing this because we want to pass parameters to the thread)
        //static Thread rotateMotorThread(int angleRange, int sleepingDuration, int firstKinect)
        //{
        //    //Console.WriteLine("entered thread");
        //    var t = new Thread(() => rotateMotor(angleRange, sleepingDuration, firstKinect));
        //    t.Start();
        //    return t;
        //}


        //***** sensor_AllFramesReady() *****//
        // Called when depth and color frames are synchronized
        // This is where the data gets written
        /// <summary>
        /// Called when depth and color frames arrive synchronized from the sensor.
        /// Writes the raw color frame, the raw depth frame (converted to chars) and
        /// one metadata line per frame (ordering number, timestamp, frame number,
        /// elevation angle) to their respective writers. Skipped entirely once the
        /// motor thread has been flagged to abort.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Provides access to the color and depth frames.</param>
        static void _sensor_AllFramesReady(object sender, AllFramesReadyEventArgs e)
        {
            if (!abortMotorThread)
            {
                DateTime startWritingDepth = DateTime.Now;

                //-------------------COLOR---------------------------------------//
                using (ColorImageFrame colorFrame = e.OpenColorImageFrame())
                {
                    if (colorFrame == null)
                    {
                        return;
                    }
                    colorFrame.CopyPixelDataTo(colorPixels);

                    // ----------------------------------------> WRITE FRAME
                    colorWriter.Write(colorPixels);
                    colorWriter.Flush();
                    // ----------------------------------------> WRITE INFO FILE
                    colorInfoWriter.Write(frameOrderingNo);
                    colorInfoWriter.Write(" ");
                    colorInfoWriter.Write(colorFrame.Timestamp);
                    colorInfoWriter.Write(" ");
                    colorInfoWriter.Write(colorFrame.FrameNumber);
                    colorInfoWriter.Write(" ");
                    colorInfoWriter.Write(_sensor.ElevationAngle);
                    colorInfoWriter.Write(Environment.NewLine);
                    colorInfoWriter.Flush();
                }
                //----------------------------------------------------------------//

                //-------------------DEPTH---------------------------------------//
                using (DepthImageFrame depthFrame = e.OpenDepthImageFrame())
                {
                    // NOTE(review): if the depth frame is missing here, the color
                    // frame above has already been written but frameOrderingNo is
                    // not incremented — the two output streams can drift out of
                    // sync. Confirm whether downstream tooling tolerates this.
                    if (depthFrame == null)
                    {
                        return;
                    }

                    depthFrame.CopyPixelDataTo(depthPixels);

                    // Register depth pixels against the color image; the result
                    // lands in colorCoordinates (not consumed in this handler).
                    _sensor.MapDepthFrameToColorFrame(depthFrame.Format, depthPixels, _sensor.ColorStream.Format, colorCoordinates); //mapdepthtocolor

                    // Convert each 16-bit depth value via shortToChar (defined
                    // elsewhere) so the frame can be written as chars.
                    depthChar = Array.ConvertAll(depthPixels, new Converter <short, char>(shortToChar));

                    // ----------------------------------------> WRITE FRAME
                    depthWriter.Write(depthChar);
                    depthWriter.Flush();
                    // ----------------------------------------> WRITE INFO FILE
                    depthInfoWriter.Write(frameOrderingNo);
                    depthInfoWriter.Write(" ");
                    depthInfoWriter.Write(depthFrame.Timestamp);
                    depthInfoWriter.Write(" ");
                    depthInfoWriter.Write(depthFrame.FrameNumber);
                    depthInfoWriter.Write(" ");
                    depthInfoWriter.Write(_sensor.ElevationAngle);
                    depthInfoWriter.Write(Environment.NewLine);
                    depthInfoWriter.Flush();
                }
                //----------------------------------------------------------------//
                // Both frames written successfully: advance the shared counter.
                frameOrderingNo++;
                calculateFps();

                //// Calculate time to write frame
                //DateTime stopWritingDepth = DateTime.Now;
                //TimeSpan diffDepth = stopWritingDepth.Subtract(startWritingDepth);
                //Console.WriteLine("Time to write one frame: " + diffDepth.TotalSeconds);
            }
        }
        /// <summary>
        /// Measures the height of the first tracked player: locates the top of the
        /// head by scanning the depth frame upward from the head joint, converts it
        /// back to skeleton space, and draws the head-to-foot measurement.
        /// </summary>
        /// <param name="kinect">Kinect sensor supplying the streams.</param>
        /// <param name="depthFrame">Current depth frame (carries player indices).</param>
        /// <param name="skeletonFrame">Current skeleton frame.</param>
        private void HeightMeasure(KinectSensor kinect, DepthImageFrame depthFrame, SkeletonFrame skeletonFrame)
        {
            ColorImageStream colorStream = kinect.ColorStream;
            DepthImageStream depthStream = kinect.DepthStream;

            // Find the first tracked skeleton. Its array index corresponds to the
            // player ID, so remember it.
            Skeleton[] skeletons = new Skeleton[skeletonFrame.SkeletonArrayLength];
            skeletonFrame.CopySkeletonDataTo(skeletons);

            int playerIndex = 0;

            for (playerIndex = 0; playerIndex < skeletons.Length; playerIndex++)
            {
                if (skeletons[playerIndex].TrackingState == SkeletonTrackingState.Tracked)
                {
                    break;
                }
            }
            // No tracked skeleton: nothing to measure.
            if (playerIndex == skeletons.Length)
            {
                return;
            }

            // The first tracked skeleton.
            Skeleton skeleton = skeletons[playerIndex];

            // The actual player ID is the skeleton index + 1.
            playerIndex++;

            // If the head or either foot is not tracked, bail out and leave the
            // RGB camera output unchanged.
            Joint head      = skeleton.Joints[JointType.Head];
            Joint leftFoot  = skeleton.Joints[JointType.FootLeft];
            Joint rightFoot = skeleton.Joints[JointType.FootRight];

            if ((head.TrackingState != JointTrackingState.Tracked) ||
                (leftFoot.TrackingState != JointTrackingState.Tracked) ||
                (rightFoot.TrackingState != JointTrackingState.Tracked))
            {
                return;
            }

            // Per-pixel depth data.
            short[] depthPixel = new short[depthFrame.PixelDataLength];
            depthFrame.CopyPixelDataTo(depthPixel);

            // RGB-camera coordinates for each depth pixel (registration).
            ColorImagePoint[] colorPoint = new ColorImagePoint[depthFrame.PixelDataLength];
            kinect.MapDepthFrameToColorFrame(depthStream.Format, depthPixel,
                                             colorStream.Format, colorPoint);

            // Look for the top of the head by walking straight up the column that
            // contains the head joint.
            DepthImagePoint headDepth = depthFrame.MapFromSkeletonPoint(head.Position);
            int             top       = 0;

            for (int i = 0; (headDepth.Y - i) > 0; i++)
            {
                // Check the pixel i rows above the head joint; `top` ends up as the
                // highest offset whose pixel still belongs to this player.
                // NOTE(review): the loop never breaks once the player's pixels end,
                // so it always scans to the top of the frame — a stray matching
                // pixel higher up would win.
                int index  = ((headDepth.Y - i) * depthFrame.Width) + headDepth.X;
                int player = depthPixel[index] & DepthImageFrame.PlayerIndexBitmask;
                if (player == playerIndex)
                {
                    top = i;
                }
            }

            // Convert the head top back to skeleton space and take the lower of the
            // two feet; the difference gives the height.
            head.Position = depthFrame.MapToSkeletonPoint(headDepth.X, headDepth.Y - top);
            Joint foot = (leftFoot.Position.Y < rightFoot.Position.Y) ? leftFoot : rightFoot;

            // Draw the measured height on screen.
            DrawMeasure(kinect, colorStream, head, foot);
        }
        /// <summary>
        /// Renders the depth frame as a BGR32 image: player pixels are white, and
        /// non-player pixels are color-coded by the validity of their distance.
        /// </summary>
        /// <param name="kinect">Kinect sensor supplying the streams.</param>
        /// <param name="depthFrame">Depth frame to visualize.</param>
        /// <returns>BGR32 pixel buffer, one entry per depth pixel.</returns>
        private byte[] ConvertDepthColor(KinectSensor kinect, DepthImageFrame depthFrame)
        {
            ColorImageStream colorStream = kinect.ColorStream;
            DepthImageStream depthStream = kinect.DepthStream;

            // Raw depth pixels (player index in the low bits).
            short[] rawDepth = new short[depthFrame.PixelDataLength];
            depthFrame.CopyPixelDataTo(rawDepth);

            // Registration: depth coordinate -> RGB coordinate.
            ColorImagePoint[] mapped = new ColorImagePoint[depthFrame.PixelDataLength];
            kinect.MapDepthFrameToColorFrame(depthStream.Format, rawDepth,
                                             colorStream.Format, mapped);

            byte[] output = new byte[depthFrame.PixelDataLength * Bgr32BytesPerPixel];
            for (int i = 0; i < rawDepth.Length; i++)
            {
                // Split the packed value into player index and distance.
                int player   = rawDepth[i] & DepthImageFrame.PlayerIndexBitmask;
                int distance = rawDepth[i] >> DepthImageFrame.PlayerIndexBitmaskWidth;

                // Mapped coordinates can exceed the frame bounds; clamp to the edge.
                int x   = Math.Min(mapped[i].X, colorStream.FrameWidth - 1);
                int y   = Math.Min(mapped[i].Y, colorStream.FrameHeight - 1);
                int dst = ((y * depthFrame.Width) + x) * Bgr32BytesPerPixel;

                byte b, g, r;
                if (player != 0)
                {
                    // Player pixels: white.
                    b = 255; g = 255; r = 255;
                }
                else if (distance == depthStream.UnknownDepth)
                {
                    // Unsupported range (0-40 cm): red.
                    b = 0; g = 0; r = 255;
                }
                else if (distance == depthStream.TooNearDepth)
                {
                    // Too near (40-80 cm in default mode): green.
                    b = 0; g = 255; r = 0;
                }
                else if (distance == depthStream.TooFarDepth)
                {
                    // Too far (3 m near mode / 4 m default, up to 8 m): blue.
                    b = 255; g = 0; r = 0;
                }
                else
                {
                    // Valid distance reading: yellow.
                    b = 0; g = 255; r = 255;
                }

                output[dst]     = b;
                output[dst + 1] = g;
                output[dst + 2] = r;
            }

            return output;
        }
// ==== Example #14 ====
 /// <summary>
 /// Maps each pixel of a depth frame onto the matching position in the
 /// requested ColorImageFormat coordinate space.
 /// </summary>
 /// <param name="depthImageFormat">Format of the source depth data.</param>
 /// <param name="depthPixelData">Raw depth pixels (from DepthImageFrame.CopyPixelDataTo);
 ///             length must equal Width*Height of the given depth format.</param>
 /// <param name="colorImageFormat">Target color image format.</param>
 /// <param name="colorCoordinates">Receives one ColorImagePoint per depth pixel;
 ///             length must equal that of depthPixelData.</param>
 public void MapDepthFrameToColorFrame(DepthImageFormat depthImageFormat, short[] depthPixelData,
                                       ColorImageFormat colorImageFormat, ColorImagePoint[] colorCoordinates)
 {
     // Pure passthrough to the wrapped sensor's mapping API.
     _kinectSensor.MapDepthFrameToColorFrame(
         depthImageFormat: depthImageFormat,
         depthPixelData: depthPixelData,
         colorImageFormat: colorImageFormat,
         colorCoordinates: colorCoordinates);
 }
// ==== Example #15 ====
        /// <summary>
        /// AllFramesReady handler implementing a "green screen" style effect:
        /// copies the latest color frame to the display bitmap, maps depth pixels
        /// to color coordinates, extracts only the pixels belonging to a player
        /// into a new buffer, stores that buffer in a capped linked list under a
        /// lock, and lazily constructs the smoothing filters and background worker
        /// the first time a player is seen.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="args">Provides access to the color and depth frames.</param>
        private void AllFramesReadyEventHandler(object sender, AllFramesReadyEventArgs args)
        {
            using (ColorImageFrame ciFrame = args.OpenColorImageFrame())
            {
                if (null != ciFrame)
                {
                    ciFrame.CopyPixelDataTo(this.ColorPixels);

                    // Push the new color frame straight to the on-screen bitmap.
                    ColorBitmap.WritePixels(new Int32Rect(0, 0, ColorWidth, ColorHeight),
                                            ColorPixels, ColorWidth * sizeof(int), 0);
                }
            }

            using (DepthImageFrame diFrame = args.OpenDepthImageFrame())
            {
                if (null != diFrame)
                {
                    diFrame.CopyPixelDataTo(this.DepthDatas);
                }
                else
                {
                    // Without depth data the rest of the pipeline cannot run.
                    return;
                }
            }

            // Clear
            //Array.Clear(PlayerPixels, 0, PlayerPixels.Length);
            //System.Threading.Tasks.Parallel.For(0, PlayerPixels.Length, index =>
            //    {
            //        PlayerPixels[index] = 200;
            //    });

            // Reset last frame's depth-to-color mapping before remapping.
            Array.Clear(CIP, 0, CIP.Length);

            gSensor.MapDepthFrameToColorFrame(DIF, DepthDatas, CIF, CIP);

            // One 4-byte pixel per depth pixel; starts fully zeroed (black).
            byte[] pixels = new byte[gSensor.DepthStream.FramePixelDataLength * sizeof(int)];

            // Fill the Player Image
            for (int hIndex = 0; hIndex < DepthHeight; ++hIndex)
            {
                for (int wIndex = 0; wIndex < DepthWidth; ++wIndex)
                {
                    int index = wIndex + hIndex * DepthWidth;
                    //int player = DepthDatas[index] & DepthImageFrame.PlayerIndexBitmask;

                    if (0 < (DepthDatas[index] & DepthImageFrame.PlayerIndexBitmask)) // Just for Player
                    {
                        ColorImagePoint cip = CIP[index];

                        // scale color coordinates to depth resolution
                        int colorInDepthX = (int)(cip.X / this.Divisor);
                        int colorInDepthY = (int)(cip.Y / this.Divisor);

                        // NOTE(review): X is checked with > 0 but Y with >= 0 —
                        // this matches the SDK green-screen sample, but confirm
                        // whether column 0 should really be excluded.
                        if (colorInDepthX > 0 && colorInDepthX < this.DepthWidth &&
                            colorInDepthY >= 0 && colorInDepthY < this.DepthHeight)
                        {
                            // calculate index into the green screen pixel array
                            int playerIndex = (colorInDepthX + (colorInDepthY * this.DepthWidth)) << 2;
                            int colorIndex  = (cip.X + cip.Y * ColorWidth) << 2;

                            pixels[playerIndex]     = ColorPixels[colorIndex]; //BitConverter.ToInt32(ColorPixels, colorIndex);
                            pixels[playerIndex + 1] = ColorPixels[colorIndex + 1];
                            pixels[playerIndex + 2] = ColorPixels[colorIndex + 2];
                            pixels[playerIndex + 3] = ColorPixels[colorIndex + 3];

                            // Shift both indices back by ONE BYTE (not one pixel)
                            // and copy again — presumably to paper over mapping
                            // gaps; confirm this byte-offset overlap is intended.
                            --playerIndex;
                            --colorIndex;

                            pixels[playerIndex]     = ColorPixels[colorIndex]; //BitConverter.ToInt32(ColorPixels, colorIndex);
                            pixels[playerIndex + 1] = ColorPixels[colorIndex + 1];
                            pixels[playerIndex + 2] = ColorPixels[colorIndex + 2];
                            pixels[playerIndex + 3] = ColorPixels[colorIndex + 3];
                        }

                        HadPlayer = true;
                    }
                    //else
                    //{
                    //    HadPlayer = false;
                    //}
                }
            }

            lock (gLock)
            {
                // Enqueue
                //PixelsQueue.Enqueue(pixels);
                //Average.ResetQueue(PixelsQueue, 3);

                // Keep only the most recent frames (cap of 3) in the shared list;
                // Average.ResetLinkedList is defined elsewhere.
                PixelsLinkedList.AddLast(pixels);
                Average.ResetLinkedList(PixelsLinkedList, 3);
            }

            // Smoothen
            // Lazily construct the smoothing filters once a player has been seen.
            if (null == smooth && HadPlayer)
            {
                Color bg = new Color();
                bg.B = bg.G = bg.R = 0;

                // Gaussian
                //smooth = new GaussianFilter(DepthWidth, DepthHeight, PixelFormats.Bgr32, bg);

                // Bilateral
                smooth = new BilateralFilter(DepthWidth, DepthHeight, PixelFormats.Bgr32);

                // Median
                smooth2 = new GenericMedian(DepthWidth, DepthHeight, PixelFormats.Bgr32, bg, 3);

                median = new AForge.Imaging.Filters.Median(5);

                if (null == globalBWorker)
                {
                    // Start the single background worker that consumes the frames.
                    globalBWorker         = new BackgroundWorker();
                    globalBWorker.DoWork += DoWorking;

                    globalBWorker.RunWorkerAsync();
                }
            }

            ////PlayerBitmap.WritePixels(new Int32Rect(0, 0, DepthWidth, DepthHeight),
            ////    PlayerPixels, DepthWidth * ((PlayerBitmap.Format.BitsPerPixel + 7) / 8), 0);
        }