Example #1
 public async void Update(DepthFrame frame)
 {
     if (frame != null)
     {
         frame.CopyFrameDataToArray(_data);
         await UpdateAsync(_data, frame.DepthMinReliableDistance, frame.DepthMaxReliableDistance);
     }
 }
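For context, a minimal wiring sketch for the method above, assuming it lives in a class that owns the _data buffer and an UpdateAsync implementation as shown; the Start method and the other field names are illustrative, not part of the original example.

 // A minimal wiring sketch (not part of the original example). Assumed to live
 // in the same class as Update above, with "using Microsoft.Kinect;" in scope.
 private KinectSensor _sensor;
 private DepthFrameReader _reader;

 public void Start()
 {
     _sensor = KinectSensor.GetDefault();

     // _data is the buffer Update copies into; size it once from the sensor.
     _data = new ushort[_sensor.DepthFrameSource.FrameDescription.LengthInPixels];

     _reader = _sensor.DepthFrameSource.OpenReader();
     _reader.FrameArrived += (s, e) =>
     {
         // AcquireFrame can return null if the frame has already expired.
         using (DepthFrame frame = e.FrameReference.AcquireFrame())
         {
             // Update reads the frame before its first await, so disposing here is safe.
             Update(frame);
         }
     };

     _sensor.Open();
 }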
Example #2
        public RecordDepthFrame(DepthFrame frame)
        {
            this.Codec = Codecs.RawColor;

            this.FrameType = FrameTypes.Depth;
            this.RelativeTime = frame.RelativeTime;

            this.DepthMinReliableDistance = frame.DepthMinReliableDistance;
            this.DepthMaxReliableDistance = frame.DepthMaxReliableDistance;

            this.Width = frame.FrameDescription.Width;
            this.Height = frame.FrameDescription.Height;
            this.BytesPerPixel = frame.FrameDescription.BytesPerPixel;

            _frameData = new ushort[this.Width * this.Height];

            frame.CopyFrameDataToArray(_frameData);
        }
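A hedged sketch of how this constructor might be fed from a depth reader; the DepthRecorder wrapper, its list, and the Start method are assumptions added for illustration and are not part of the original class.

        // Illustrative only: collect one RecordDepthFrame snapshot per arriving frame.
        // How the snapshots are persisted (file, network, etc.) is application-specific.
        using Microsoft.Kinect;
        using System.Collections.Generic;

        public class DepthRecorder
        {
            private readonly List<RecordDepthFrame> _frames = new List<RecordDepthFrame>();
            private DepthFrameReader _reader;

            public void Start(KinectSensor sensor)
            {
                _reader = sensor.DepthFrameSource.OpenReader();
                _reader.FrameArrived += (s, e) =>
                {
                    using (DepthFrame frame = e.FrameReference.AcquireFrame())
                    {
                        if (frame != null)
                        {
                            // The constructor copies the depth data, so the frame can be
                            // disposed as soon as it returns.
                            _frames.Add(new RecordDepthFrame(frame));
                        }
                    }
                };
            }
        }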
Example #3
 static ROS_CS.sensor_msgs.Image GetDepthImageFromRaw(DepthFrame new_depth_frame)
 {
     ROS_CS.sensor_msgs.Image depth_image = new ROS_CS.sensor_msgs.Image();
     depth_image.header.frame_id = "kinect2_depth_optical_frame";
     depth_image.header.stamp = KinectTimestampsToROS(new_depth_frame.RelativeTime);
     depth_image.is_bigendian = 0;
     depth_image.height = (uint)new_depth_frame.FrameDescription.Height;
     depth_image.width = (uint)new_depth_frame.FrameDescription.Width;
     depth_image.step = (uint)new_depth_frame.FrameDescription.Width * 2;
     depth_image.encoding = "mono16";
     ushort[] depth_data = new ushort[new_depth_frame.FrameDescription.Height * new_depth_frame.FrameDescription.Width];
     new_depth_frame.CopyFrameDataToArray(depth_data);
     foreach (ushort depth in depth_data)
     {
         ushort cleaned_depth = (ushort)(depth >> 3);
         byte high_byte = (byte)((cleaned_depth & 0xFF00) >> 8);
         byte low_byte = (byte)(cleaned_depth & 0x00FF);
         depth_image.data.Add(high_byte);
         depth_image.data.Add(low_byte);
     }
     return depth_image;
 }
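A sketch of how the converter above could be driven, assuming it sits in a class alongside standard Kinect v2 reader boilerplate; publishing the resulting ROS_CS message is deliberately left out because the transport API is not shown in the example.

 // Assumes "using Microsoft.Kinect;" and that these members live in the same
 // class as GetDepthImageFromRaw.
 static KinectSensor sensor;
 static DepthFrameReader depthReader;

 static void RunDepthBridge()
 {
     sensor = KinectSensor.GetDefault();
     depthReader = sensor.DepthFrameSource.OpenReader();

     depthReader.FrameArrived += (sender, e) =>
     {
         using (DepthFrame frame = e.FrameReference.AcquireFrame())
         {
             if (frame == null)
             {
                 return; // the frame was dropped or has already expired
             }

             ROS_CS.sensor_msgs.Image image = GetDepthImageFromRaw(frame);
             // Hand "image" to whatever ROS_CS publisher the project uses (not shown).
         }
     };

     sensor.Open();
 }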
Example #4
        private ImageSource ConverDepthToImage( DepthFrame depthFrame )
        {
            int BytePerPixel = 4;

            var desc = depthFrame.FrameDescription;
            var depth = new ushort[desc.Width * desc.Height];

            depthFrame.CopyFrameDataToArray( depth );

            var pixel = new byte[desc.Width * desc.Height * BytePerPixel];
            for ( int i = 0; i < depth.Length; i++ ) {
                int index = i * BytePerPixel;

                // Clamp to 255 so depths beyond 4500 mm do not wrap when cast to byte.
                var gray = Math.Min( (depth[i] * 255) / 4500, 255 );

                pixel[index + 0] = (byte)gray;
                pixel[index + 1] = (byte)gray;
                pixel[index + 2] = (byte)gray;
            }

            return BitmapSource.Create( desc.Width, desc.Height, 96, 96,
                PixelFormats.Bgr32, null, pixel, desc.Width * BytePerPixel );
        }
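A short usage sketch, assuming the converter above lives in a WPF window with a DepthFrameReader already opened; the depthImage control name is hypothetical.

        // Hypothetical handler in the same class as ConverDepthToImage.
        // "depthImage" stands in for an <Image x:Name="depthImage"/> element.
        private void DepthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            using (DepthFrame depthFrame = e.FrameReference.AcquireFrame())
            {
                if (depthFrame != null)
                {
                    depthImage.Source = ConverDepthToImage(depthFrame);
                }
            }
        }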
Example #5
        /// <summary>
        /// Handles the depth frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private async void Reader_FrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            addTotalTime();

            var  reference = e.FrameReference.AcquireFrame();
            bool gotDepthFrame = false, gotRGBFrame = false;
            bool shouldSave = shouldProcFrame();


            using (DepthFrame depthFrame = reference.DepthFrameReference.AcquireFrame())
            {
                if (depthFrame != null && shouldSave)
                {
                    gotDepthFrame = true;
                    depthFrame.CopyFrameDataToArray(this.depthValues);
                    ProcessDepthFrameData(depthFrame.FrameDescription.LengthInPixels, depthFrame.DepthMinReliableDistance, depthFrame.DepthMaxReliableDistance);
                }
            }

            using (ColorFrame frame = reference.ColorFrameReference.AcquireFrame())
            {
                if (frame != null && shouldSave)
                {
                    gotRGBFrame = true;
                    if (rgbCheckBox.IsChecked.Value == true)
                    {
                        makeColorBitmap(frame);
                    }
                    ////Draw color frame
                    createKinectColor(frame);
                }
            }

            //Draw depth frame
            if (gotDepthFrame)
            {
                this.RenderDepthPixels();
            }

            //Write frame to binary file at specified fps
            if (shouldSave && gotDepthFrame && gotRGBFrame)
            {
                addFrames(1);
                if (IsRecording)
                {
                    WriteBinFrame();
                }
            }
            else
            {
                addFrames(0);
            }

            if (gotDepthFrame && gotRGBFrame)
            {
                depthCheckBox.Content = (bool)depthCheckBox.IsChecked ? string.Format("Depth: {0:0.0} fps", getFPS()) : "Depth";
                rgbCheckBox.Content   = (bool)rgbCheckBox.IsChecked ? string.Format("RGB: {0:0.0} fps", getFPS()) : "RGB";

                simpleFrameCounter++;
                //Calculate level
                levelAvg += CalculateLevel();
                //Average frame at fps rate and display every second
                if (simpleFrameCounter % 30 == 0)
                {
                    levelAvg   /= 30;
                    Degree.Text = levelAvg.ToString("F3");


                    levelAvg = 0;
                }
            }

            addProcTime();
        }
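For completeness, a hedged sketch of the setup that would make the handler above fire; only depthValues appears in the handler itself, the other member names are assumptions.

        // Illustrative initialization: open a multi-source reader for the depth and
        // color streams and size the buffer the handler copies into.
        private KinectSensor kinectSensor;
        private MultiSourceFrameReader multiSourceReader;
        private ushort[] depthValues;

        private void InitializeKinect()
        {
            kinectSensor = KinectSensor.GetDefault();

            FrameDescription depthDescription = kinectSensor.DepthFrameSource.FrameDescription;
            depthValues = new ushort[depthDescription.LengthInPixels];

            multiSourceReader = kinectSensor.OpenMultiSourceFrameReader(
                FrameSourceTypes.Depth | FrameSourceTypes.Color);
            multiSourceReader.MultiSourceFrameArrived += Reader_FrameArrived;

            kinectSensor.Open();
        }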
Example #6
        void Update()
        {
            if (reader != null)
            {
                MultiSourceFrame frame = reader.AcquireLatestFrame();
                if (frame != null)
                {
                    using (ColorFrame colorFrame = frame.ColorFrameReference.AcquireFrame())
                    {
                        if (colorFrame != null)
                        {
                            colorFrame.CopyConvertedFrameDataToArray(colorData, ColorImageFormat.Rgba);
                        }
                    }
                    using (DepthFrame depthFrame = frame.DepthFrameReference.AcquireFrame())
                    {
                        if (depthFrame != null)
                        {
                            //Debug.Log ("bodyIndexFrame not null");
                            depthFrame.CopyFrameDataToArray(depthData);
                        }
                    }
                    using (BodyIndexFrame bodyIndexFrame = frame.BodyIndexFrameReference.AcquireFrame())
                    {
                        if (bodyIndexFrame != null)
                        {
                            //Debug.Log ("bodyIndexFrame not null");
                            bodyIndexFrame.CopyFrameDataToArray(bodyIndexData);
                        }
                    }

                    frame = null;
                }
            }
            else
            {
                return;
            }

            MatUtils.copyToMat(colorData, outputMat);
            MatUtils.copyToMat(colorData, rgbaMat);


            // update mask image from bodyIndexData.
            coordinateMapper.MapColorFrameToDepthSpace(depthData, depthSpacePoints);

            for (int colorY = 0; colorY < colorFrameHeight; colorY++)
            {
                for (int colorX = 0; colorX < colorFrameWidth; colorX++)
                {
                    int colorIndex = colorY * colorFrameWidth + colorX;
                    int depthX     = (int)(depthSpacePoints[colorIndex].X);
                    int depthY     = (int)(depthSpacePoints[colorIndex].Y);
                    if ((0 <= depthX) && (depthX < depthFrameWidth) && (0 <= depthY) && (depthY < depthFrameHeight))
                    {
                        int depthIndex = depthY * depthFrameWidth + depthX;

                        if (bodyIndexData[depthIndex] == 255)
                        {
                            maskData[colorIndex] = 0;
                        }
                        else
                        {
                            maskData[colorIndex] = 255;
                        }
                    }
                }
            }
            MatUtils.copyToMat(maskData, maskMat);


            if (filterType == FilterTypePreset.NONE)
            {
                rgbaMat.copyTo(outputMat, maskMat);

                Imgproc.putText(outputMat, "Filter Type: NONE " + texture.width + "x" + texture.height, new Point(5, texture.height - 5), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 0, 0, 255), 3);
            }
            else if (filterType == FilterTypePreset.SEPIA)
            {
                Core.transform(rgbaMat, rgbaMat, sepiaKernel);
                rgbaMat.copyTo(outputMat, maskMat);

                Imgproc.putText(outputMat, "Filter Type: SEPIA " + texture.width + "x" + texture.height, new Point(5, texture.height - 5), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 0, 0, 255), 3);
            }
            else if (filterType == FilterTypePreset.PIXELIZE)
            {
                Imgproc.resize(rgbaMat, pixelizeIntermediateMat, pixelizeSize0, 0.1, 0.1, Imgproc.INTER_NEAREST);
                Imgproc.resize(pixelizeIntermediateMat, rgbaMat, rgbaMat.size(), 0.0, 0.0, Imgproc.INTER_NEAREST);

                rgbaMat.copyTo(outputMat, maskMat);

                Imgproc.putText(outputMat, "Filter Type: PIXELIZE " + texture.width + "x" + texture.height, new Point(5, texture.height - 5), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 0, 0, 255), 3);
            }
            else if (filterType == FilterTypePreset.COMIC)
            {
                comicFilter.Process(rgbaMat, rgbaMat);
                rgbaMat.copyTo(outputMat, maskMat);

                Imgproc.putText(outputMat, "Filter Type: COMIC " + texture.width + "x" + texture.height, new Point(5, texture.height - 5), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 0, 0, 255), 3);
            }

            Utils.matToTexture2D(outputMat, texture);
        }
Example #7
        private void OnDepthFrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            int    af = 0, depth22 = 0;
            double distance           = 0;
            DepthFrameReference refer = e.FrameReference;

            if (refer == null)
            {
                return;
            }
            DepthFrame frame = refer.AcquireFrame();

            if (frame == null)
            {
                return;
            }

            using (frame)
            {
                FrameDescription frameDesc = frame.FrameDescription;
                if (((frameDesc.Width * frameDesc.Height) == _depthData.Length) && (frameDesc.Width == _depthBitmap.PixelWidth) && (frameDesc.Height == _depthBitmap.PixelHeight))
                {
                    uint size = frame.FrameDescription.LengthInPixels;

                    frame.CopyFrameDataToArray(_depthData);

                    ushort minDepth = frame.DepthMinReliableDistance;

                    int colorPixelIndex = 0;
                    for (int i = 0; i < _depthData.Length; i++)
                    {
                        ushort depth = _depthData[i];
                        if (depth < minDepth)
                        {
                            _depthPixels[colorPixelIndex++] = 0;
                            _depthPixels[colorPixelIndex++] = 0;
                            _depthPixels[colorPixelIndex++] = 0;
                        }
                        else if (depth > maxDepth)
                        {
                            _depthPixels[colorPixelIndex++] = 255;
                            _depthPixels[colorPixelIndex++] = 255;
                            _depthPixels[colorPixelIndex++] = 255;
                        }
                        else
                        {
                            double gray = (Math.Floor((double)depth / 250) * 12.75);
                            _depthPixels[colorPixelIndex++] = (byte)gray;
                            _depthPixels[colorPixelIndex++] = (byte)gray;
                            _depthPixels[colorPixelIndex++] = (byte)gray;
                        }
                        ++colorPixelIndex;
                    }
                    _depthBitmap.WritePixels(new Int32Rect(0, 0, frameDesc.Width, frameDesc.Height), _depthPixels, frameDesc.Width * _bytePerPixel, 0);

                    // Sample the depth value at pixel (x = 260, y = 200): index = x + y * width.
                    distance             = 260 + (200 * _kinect.DepthFrameSource.FrameDescription.Width);
                    af                   = (int)distance;
                    depth22              = _depthData[af];
                    distanceTextBox.Text = depth22.ToString();
                }
            }
        }
Example #8
        /// <summary>
        /// Gets a BitmapSource that visualizes the depth data.
        /// </summary>
        /// <param name="depthFrame">
        /// The depth frame.
        /// </param>
        /// <param name="bodyIndexFrame">
        /// The BodyIndex frame.
        /// </param>
        /// <param name="depthFrameDescription">
        /// The frame description of the depth frame.
        /// </param>
        /// <param name="bodyIndexFrameDescription">
        /// The frame description of the BodyIndex frame.
        /// </param>
        /// <returns>
        /// A BitmapSource that visualizes the depth data and marks the pixels belonging to players.
        /// </returns>
        BitmapSource GetBitmapSource(DepthFrame depthFrame,
                                     BodyIndexFrame bodyIndexFrame,
                                     FrameDescription depthFrameDescription,
                                     FrameDescription bodyIndexFrameDescription)
        {
            ushort[] depths = new ushort[depthFrameDescription.Width
                                         * depthFrameDescription.Height];
            depthFrame.CopyFrameDataToArray(depths);

            byte[] bodyIndexes = new byte[bodyIndexFrameDescription.Width
                                          * bodyIndexFrameDescription.Height];
            bodyIndexFrame.CopyFrameDataToArray(bodyIndexes);

            byte[] depthColors = new byte[depthFrameDescription.Width
                                          * depthFrameDescription.Height
                                          * 4];

            for (int i = 0; i < depths.Length; i += 1)
            {
                ushort depth = depths[i];

                //This is 0-5 when a person appears at this pixel,
                //and 255 otherwise.
                byte bodyIndex = bodyIndexes[i];

                int depthColorsIndex = i * 4;

                if (bodyIndex == 255)
                {
                    byte grayColor = (byte)(depth % 255);

                    depthColors[depthColorsIndex]     = grayColor; //B
                    depthColors[depthColorsIndex + 1] = grayColor; //G
                    depthColors[depthColorsIndex + 2] = grayColor; //R
                    depthColors[depthColorsIndex + 3] = 255;       //A
                }
                else
                {
                    depthColors[depthColorsIndex]     = 255; //B
                    depthColors[depthColorsIndex + 1] = 0;   //G
                    depthColors[depthColorsIndex + 2] = 0;   //R
                    depthColors[depthColorsIndex + 3] = 255; //A
                }
                //else
                //{
                //    //BodyIndex = 0-5, or 255
                //    //Pattern
                //    // 255, 0, 0    : 1
                //    // 0, 255, 0    : 2
                //    // 0, 0, 255    : 3
                //    // 255, 255, 0  : 4
                //    // 255, 0, 255, : 5
                //    // 0, 255, 255  : 6

                //    byte bValue = 0;
                //    byte gValue = 0;
                //    byte rValue = 0;

                //    if (bodyIndex != 255)
                //    {
                //        bodyIndex += 1;
                //        alpha = 255;

                //        //1, 4, 5 (!= 2, 3, 6)
                //        if (bodyIndex % 4 <= 1)
                //        {
                //            bValue = 255;
                //        }

                //        //2, 4, 6
                //        if (bodyIndex % 2 == 0)
                //        {
                //            gValue = 255;
                //        }

                //        //3, 5, 6 (!= 1, 2, 4)
                //        if (4 % bodyIndex != 0)
                //        {
                //            rValue = 255;
                //        }
                //    }

                //    //BGRA or RGBA
                //    depthColors[depthColorsIndex] = bValue;
                //    depthColors[depthColorsIndex + 1] = gValue;
                //    depthColors[depthColorsIndex + 2] = rValue;
                //    depthColors[depthColorsIndex + 3] = 255;
                //}
            }

            BitmapSource bitmapSource
                = BitmapSource.Create(depthFrameDescription.Width,
                                      depthFrameDescription.Height,
                                      96,
                                      96,
                                      PixelFormats.Bgra32,
                                      null,
                                      depthColors,
                                      depthFrameDescription.Width * 4);

            return(bitmapSource);
        }
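A possible caller for GetBitmapSource, assuming a MultiSourceFrameReader opened for the Depth and BodyIndex streams; the depthImage control name is hypothetical.

        // Illustrative caller; assumes "using Microsoft.Kinect;" and a WPF Image
        // element named depthImage (hypothetical).
        void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
            if (multiSourceFrame == null)
            {
                return;
            }

            using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
            using (BodyIndexFrame bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame())
            {
                if (depthFrame == null || bodyIndexFrame == null)
                {
                    return;
                }

                depthImage.Source = GetBitmapSource(depthFrame,
                                                    bodyIndexFrame,
                                                    depthFrame.FrameDescription,
                                                    bodyIndexFrame.FrameDescription);
            }
        }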
Example #9
    /// <summary>
    /// Called every frame (display) by the Unity engine.
    ///
    /// Checks if there is Kinect data available, stores it in the initialised arrays, and stores results by TimeSpan in the DataManager object.
    /// </summary>
    void Update()
    {
        //Clears flags for previous data.
        bool colorFrameProcessed     = false;
        bool depthFrameProcessed     = false;
        bool bodyIndexFrameProcessed = false;
        bool bodyFrameProcessed      = false;
        bool meshDataProcessed       = false;

        //Conditional access for Kinect data streams, nested to match interdependencies of streams.
        if (multiSourceReader != null)
        {
            var multiSourceFrame = multiSourceReader.AcquireLatestFrame();
            if (multiSourceFrame != null)
            {
                using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
                {
                    using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
                    {
                        using (BodyIndexFrame bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame())
                        {
                            using (BodyFrame bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
                            {
                                //Obtains the depth data stream from the Kinect, if available.
                                //Depth is a set of values showing the relative distance of each point (when mapped on to other sources) in the image from the camera.
                                if (depthFrame != null)
                                {
                                    FrameDescription depthFrameDescription = depthFrame.FrameDescription;
                                    depthWidth  = depthFrameDescription.Width;
                                    depthHeight = depthFrameDescription.Height;

                                    if ((depthWidth * depthHeight) == this.depthFrameData.Length)
                                    {
                                        depthFrame.CopyFrameDataToArray(this.depthFrameData);
                                        depthFrameProcessed = true;
                                    }
                                }

                                //Obtains the color data stream from the Kinect, if available.
                                //Color is an array of the RGB values for an HD (1920x1080p) image.
                                if (colorFrame != null)
                                {
                                    FrameDescription colorFrameDescription = colorFrame.FrameDescription;
                                    colorWidth  = colorFrameDescription.Width;
                                    colorHeight = colorFrameDescription.Height;

                                    if ((colorWidth * colorHeight * bytesPerPixel) == this.colorData.Length)
                                    {
                                        colorTexture = new Texture2D(colorFrameDescription.Width, colorFrameDescription.Height, TextureFormat.RGBA32, false);
                                        colorFrame.CopyConvertedFrameDataToArray(colorData, ColorImageFormat.Rgba);
                                        colorTexture.LoadRawTextureData(colorData);
                                        colorTexture.Apply();
                                        colorFrameProcessed = true;
                                    }
                                }

                                //Obtains the body index data stream from the Kinect, if available.
                                //Body index shows which points (when mapped onto other sources) contain a person as recognised by the Kinect.
                                if (bodyIndexFrame != null)
                                {
                                    FrameDescription bodyIndexFrameDescription = bodyIndexFrame.FrameDescription;
                                    bodyIndexWidth  = bodyIndexFrameDescription.Width;
                                    bodyIndexHeight = bodyIndexFrameDescription.Height;

                                    if ((bodyIndexWidth * bodyIndexHeight) == this.bodyIndexFrameData.Length)
                                    {
                                        bodyIndexFrame.CopyFrameDataToArray(this.bodyIndexFrameData);

                                        for (int i = 0; i < bodyIndexFrameData.Length; i++)
                                        {
                                            bodyIndexFrameArray[i] = (bodyIndexFrameData[i] != 0xff) ? 1 : 0;
                                        }
                                        bodyIndexFrameProcessed = true;
                                    }
                                }

                                //Obtains the body data stream from the Kinect, if available.
                                //Body data stream is the locations and rotations of a set of joints in a representation of a human body, as recognised by the Kinect,
                                //as well as other representational data (hand gestures, confidence, etc) that is not used here.
                                if (bodyFrame != null)
                                {
                                    if (this.bodies == null)
                                    {
                                        this.bodies = new Body[bodyFrame.BodyCount];
                                    }

                                    bodyTimeStamp = bodyFrame.RelativeTime;
                                    bodyFrame.GetAndRefreshBodyData(this.bodies);

                                    bodyFrameProcessed = true;
                                }

                                //When the three data streams necessary to create a mesh are present, the data is processed to be stored as MeshData.
                                if (depthFrameProcessed && colorFrameProcessed && bodyIndexFrameProcessed)
                                {
                                    vertices = new Vector3[depthWidth, depthHeight];
                                    uvs      = new Vector2[depthWidth, depthHeight];
                                    dbIndex  = new int[depthWidth, depthHeight];
                                    cbIndex  = new int[colorWidth, colorHeight];
                                    cIndex   = new Color[colorWidth * colorHeight];

                                    //Populate coordinate mapping arrays between depth/color and depth/camera spaces.
                                    //(Camera space being coordinate system with camera at the origin.)
                                    this.coordinateMapper.MapDepthFrameToColorSpace(this.depthFrameData, this.colorPoints);
                                    this.coordinateMapper.MapDepthFrameToCameraSpace(this.depthFrameData, this.cameraPoints);

                                    //Mapping is done between data streams, and non-body depth data is discarded.
                                    //Depth points are stored as vertices.
                                    //Color mappings to depth points are stored as uvs.
                                    //BodyIndex is kept for use in further processing.
                                    for (int x = 0; x < depthWidth; x++)
                                    {
                                        for (int y = 0; y < depthHeight; y++)
                                        {
                                            int  depthIndex = x + y * depthWidth;
                                            byte bodyPixel  = this.bodyIndexFrameData[depthIndex];

                                            CameraSpacePoint cameraPoint = this.cameraPoints[depthIndex];
                                            ColorSpacePoint  colorPoint  = this.colorPoints[depthIndex];

                                            Vector3 vertex       = new Vector3(cameraPoint.X, cameraPoint.Y, cameraPoint.Z);
                                            Vector2 uv           = new Vector2(colorPoint.X / colorWidth, colorPoint.Y / colorHeight);
                                            int     bodyPixelVal = 0;

                                            if (bodyPixel != 0xff)
                                            {
                                                bodyPixelVal = 1;
                                            }
                                            else
                                            {
                                                vertex = Vector3.zero;
                                            }

                                            vertices[x, y] = vertex;
                                            uvs[x, y]      = uv;
                                            dbIndex[x, y]  = bodyPixelVal;
                                        }
                                    }

                                    //Populate coordinate mapping arrays between depth and color spaces.
                                    this.coordinateMapper.MapColorFrameToDepthSpace(this.depthFrameData, depthPoints);

                                    //Maps the body index stream into a masking map for the color stream.
                                    for (int x = 0; x < colorWidth; x++)
                                    {
                                        for (int y = 0; y < colorHeight; y++)
                                        {
                                            int colorIndex = x + y * colorWidth;
                                            int depthX     = (int)depthPoints[colorIndex].X;
                                            int depthY     = (int)depthPoints[colorIndex].Y;
                                            int depthIndex = depthX + depthY * depthWidth;

                                            int colorIndexVal = 0;

                                            if ((depthIndex > 0) && (depthIndex < depthWidth * depthHeight))
                                            {
                                                byte bodyPixel = this.bodyIndexFrameData[depthIndex];
                                                if (bodyPixel != 0xff)
                                                {
                                                    colorIndexVal = 1;
                                                }
                                            }

                                            cbIndex[x, y] = colorIndexVal;
                                        }
                                    }

                                    meshDataProcessed = true;
                                }

                                // If the processed mesh data and the body frame (required to create the skeleton model) are both available,
                                // MeshData and SkeletonData objects are created and passed to the data manager to be stored under the corresponding TimeSpan.
                                // Flags are then reset.
                                if (meshDataProcessed && bodyFrameProcessed)
                                {
                                    MeshData     meshData     = new MeshData(bodyTimeStamp, vertices, uvs, dbIndex, cbIndex, colorTexture);
                                    SkeletonData skeletonData = new SkeletonData(bodyTimeStamp, bodies);
                                    dataManager.AddToList(bodyTimeStamp, meshData, skeletonData);
                                    meshDataProcessed       = false;
                                    depthFrameProcessed     = false;
                                    colorFrameProcessed     = false;
                                    bodyIndexFrameProcessed = false;
                                    bodyFrameProcessed      = false;
                                }
                                else
                                {
                                    string s = "Proccessing Error Occurred. The following failed : ";
                                    if (!depthFrameProcessed)
                                    {
                                        s = s + "Depth, ";
                                    }
                                    if (!colorFrameProcessed)
                                    {
                                        s = s + "Color, ";
                                    }
                                    if (!bodyIndexFrameProcessed)
                                    {
                                        s = s + "Body Index, ";
                                    }
                                    if (!bodyFrameProcessed)
                                    {
                                        s = s + "Body (Skeleton), ";
                                    }
                                    if (!meshDataProcessed)
                                    {
                                        s = s + "Mesh Data";
                                    }
                                    //Debug.Log(s);
                                }
                            }
                        }
                    }
                }
                multiSourceFrame = null;
            }
        }
    }
Example #10
        /// <summary>
        /// Updates the bitmap with new frame data.
        /// </summary>
        /// <param name="depthFrame">The specified depth frame.</param>
        /// <param name="colorFrame">The specified color frame.</param>
        /// <param name="bodyIndexFrame">The specified body index frame.</param>
        override public void Update(ColorFrame colorFrame, DepthFrame depthFrame, BodyIndexFrame bodyIndexFrame)
        {
            int colorWidth = colorFrame.FrameDescription.Width;
            int colorHeight = colorFrame.FrameDescription.Height;

            int depthWidth = depthFrame.FrameDescription.Width;
            int depthHeight = depthFrame.FrameDescription.Height;

            int bodyIndexWidth = bodyIndexFrame.FrameDescription.Width;
            int bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;

            if (Bitmap == null)
            {
                InitBuffers(colorFrame.FrameDescription, depthFrame.FrameDescription, bodyIndexFrame.FrameDescription);
            }

            if (((depthWidth * depthHeight) == _depthData.Length) && ((colorWidth * colorHeight * Constants.BYTES_PER_PIXEL) == Pixels.Length) && ((bodyIndexWidth * bodyIndexHeight) == _bodyData.Length))
            {
                depthFrame.CopyFrameDataToArray(_depthData);

                if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                {
                    colorFrame.CopyRawFrameDataToArray(Pixels);
                }
                else
                {
                    colorFrame.CopyConvertedFrameDataToArray(Pixels, ColorImageFormat.Bgra);
                }

                bodyIndexFrame.CopyFrameDataToArray(_bodyData);

                CoordinateMapper.MapColorFrameToDepthSpace(_depthData, _depthPoints);

                // Loop over each row and column of the color image
                // Zero out any pixels that don't correspond to a body index
                for (int i = 0, ci = 0; i < _depthPoints.Length; ++i, ci += Constants.BYTES_PER_PIXEL)
                {
                    float colorToDepthX = _depthPoints[i].X;
                    float colorToDepthY = _depthPoints[i].Y;

                    // The sentinel value is -inf, -inf, meaning that no depth pixel corresponds to this color pixel.
                    if (!float.IsNegativeInfinity(colorToDepthX) &&
                        !float.IsNegativeInfinity(colorToDepthY))
                    {
                        // Make sure the depth pixel maps to a valid point in color space
                        int depthX = (int)(colorToDepthX + 0.5f);
                        int depthY = (int)(colorToDepthY + 0.5f);

                        // If the point is not valid, there is no body index there.
                        if ((depthX >= 0) && (depthX < depthWidth) && (depthY >= 0) && (depthY < depthHeight))
                        {
                            int depthIndex = (depthY * depthWidth) + depthX;

                            // If we are tracking a body for the current pixel, do not zero out the pixel
                            if (_bodyData[depthIndex] != 0xff)
                            {
                                continue;
                            }
                        }
                    }

                    for (int b = 0; b < Constants.BYTES_PER_PIXEL; ++b)
                    {
                        Pixels[ci + b] = 0;
                    }
                }

                UpdateBitmap();
            }
        }
Example #11
        /// <summary>
        /// Combines the color, depth, and body index frames into a System.Drawing.Bitmap with the background removed (green-screen effect).
        /// </summary>
        /// <param name="depthFrame">The specified depth frame.</param>
        /// <param name="colorFrame">The specified color frame.</param>
        /// <param name="bodyIndexFrame">The specified body index frame.</param>
        /// <returns>The corresponding System.Drawing.Bitmap representation of the image.</returns>
        public Bitmap GreenScreen(ColorFrame colorFrame, DepthFrame depthFrame, BodyIndexFrame bodyIndexFrame)
        {
            int colorWidth = colorFrame.FrameDescription.Width;
            int colorHeight = colorFrame.FrameDescription.Height;

            int depthWidth = depthFrame.FrameDescription.Width;
            int depthHeight = depthFrame.FrameDescription.Height;

            int bodyIndexWidth = bodyIndexFrame.FrameDescription.Width;
            int bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;

            if (_displayPixels == null)
            {
                _depthData = new ushort[depthWidth * depthHeight];
                _bodyData = new byte[depthWidth * depthHeight];
                _colorData = new byte[colorWidth * colorHeight * Constants.BYTES_PER_PIXEL];
                _displayPixels = new byte[depthWidth * depthHeight * Constants.BYTES_PER_PIXEL];
                _colorPoints = new ColorSpacePoint[depthWidth * depthHeight];
                _bitmap = new Bitmap(depthWidth, depthHeight, Constants.FORMAT);
            }

            if (((depthWidth * depthHeight) == _depthData.Length) && ((colorWidth * colorHeight * Constants.BYTES_PER_PIXEL) == _colorData.Length) && ((bodyIndexWidth * bodyIndexHeight) == _bodyData.Length))
            {
                depthFrame.CopyFrameDataToArray(_depthData);

                if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                {
                    colorFrame.CopyRawFrameDataToArray(_colorData);
                }
                else
                {
                    colorFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Bgra);
                }

                bodyIndexFrame.CopyFrameDataToArray(_bodyData);

                CoordinateMapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);

                Array.Clear(_displayPixels, 0, _displayPixels.Length);

                for (int y = 0; y < depthHeight; ++y)
                {
                    for (int x = 0; x < depthWidth; ++x)
                    {
                        int depthIndex = (y * depthWidth) + x;

                        byte player = _bodyData[depthIndex];

                        if (player != 0xff)
                        {
                            ColorSpacePoint colorPoint = _colorPoints[depthIndex];

                            int colorX = (int)Math.Floor(colorPoint.X + 0.5);
                            int colorY = (int)Math.Floor(colorPoint.Y + 0.5);

                            if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight))
                            {
                                int colorIndex = ((colorY * colorWidth) + colorX) * Constants.BYTES_PER_PIXEL;
                                int displayIndex = depthIndex * Constants.BYTES_PER_PIXEL;

                                _displayPixels[displayIndex + 0] = _colorData[colorIndex];
                                _displayPixels[displayIndex + 1] = _colorData[colorIndex + 1];
                                _displayPixels[displayIndex + 2] = _colorData[colorIndex + 2];
                                _displayPixels[displayIndex + 3] = 0xff;
                            }
                        }
                    }
                }

                BitmapData bitmapData = _bitmap.LockBits(new Rectangle(0, 0, depthWidth, depthHeight), ImageLockMode.ReadWrite, _bitmap.PixelFormat);
                Marshal.Copy(_displayPixels, 0, bitmapData.Scan0, _displayPixels.Length);

                _bitmap.UnlockBits(bitmapData);
            }

            return _bitmap;
        }
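One plausible way to drive GreenScreen above from a multi-source handler; saving to disk is just an example of consuming the returned System.Drawing.Bitmap and is not part of the original class.

        // Illustrative caller; assumes a MultiSourceFrameReader opened for
        // Color | Depth | BodyIndex and "using System.Drawing;" in scope.
        private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrame reference = e.FrameReference.AcquireFrame();
            if (reference == null)
            {
                return;
            }

            using (ColorFrame colorFrame = reference.ColorFrameReference.AcquireFrame())
            using (DepthFrame depthFrame = reference.DepthFrameReference.AcquireFrame())
            using (BodyIndexFrame bodyIndexFrame = reference.BodyIndexFrameReference.AcquireFrame())
            {
                if (colorFrame != null && depthFrame != null && bodyIndexFrame != null)
                {
                    Bitmap result = GreenScreen(colorFrame, depthFrame, bodyIndexFrame);

                    // Example sink only: persist the composited frame to disk.
                    result.Save("greenscreen.png", System.Drawing.Imaging.ImageFormat.Png);
                }
            }
        }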
Example #12
        private void msfr_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            ///READ INPUT STATE FROM CLIENT
            inputState = Client.inputState;

            ///DECLARE FRAMES
            BodyFrame  bodyFrame  = null;
            DepthFrame depthFrame = null;

            ///ACQUIRE AND VALIDATE FRAME
            MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

            if (multiSourceFrame == null)
            {
                return;
            }

            try
            {
                depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();

                if (inputState == "d")
                {
                    var      depthDesc = depthFrame.FrameDescription;
                    ushort[] depthData = new ushort[depthDesc.LengthInPixels];
                    depthFrame.CopyFrameDataToArray(depthData);
                }


                bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();

                if ((bodyFrame == null))
                {
                    return;
                }

                ///PROCESS BODY DATA

                this.bodies = new Body[bodyFrame.BodyCount];

                ///REFRESH BODY DATA
                bodyFrame.GetAndRefreshBodyData(this.bodies);

                foreach (Body body in this.bodies)
                {
                    if (body != null)
                    {
                        if (body.IsTracked)
                        {
                            Dictionary <JointType, Point3D> tdPoints = new Dictionary <JointType, Point3D>();
                            List <ColorSpacePoint>          csPoints = new List <ColorSpacePoint>();

                            foreach (JointType type in body.Joints.Keys)
                            {
                                Joint           joint = body.Joints[type];
                                Point3D         point = new Point3D(joint.Position.X, joint.Position.Y, joint.Position.Z);
                                ColorSpacePoint csp   = this.kinect.CoordinateMapper.MapCameraPointToColorSpace(joint.Position);

                                ///GET LIST OF JOINT POSITIONS
                                tdPoints.Add(type, point);

                                ///CANNOT BE SURE THERE WILL BE DATA IF "TRACKED" IS USED
                                if (joint.TrackingState == TrackingState.Tracked)
                                {
                                    ///CALCULATE POSITION TO DRAW POINT
                                    csPoints.Add(csp);
                                }
                            }

                            DrawPoints(csPoints);

                            ///TRANSFER DATA TO SERVER
                            if (inputState == "t")
                            {
                                this.clientHelper.AddBodyData(BiometricID(tdPoints), tdPoints, csPoints);
                            }
                        }
                    }
                }
            }
            finally
            {
                if (inputState == "t")
                {
                    this.clientHelper.SendBodyData();
                }
                Client.inputState = "z";

                ///DISPOSE
                if (bodyFrame != null)
                {
                    bodyFrame.Dispose();
                }

                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                }
            }
        }
Example #13
        /// <summary>
        /// Combines the color, depth, and body index frames into a WriteableBitmap with the background removed (green-screen effect).
        /// </summary>
        /// <param name="depthFrame">The specified depth frame.</param>
        /// <param name="colorFrame">The specified color frame.</param>
        /// <param name="bodyIndexFrame">The specified body index frame.</param>
        /// <returns>The corresponding WriteableBitmap representation of the image.</returns>
        public WriteableBitmap GreenScreen(ColorFrame colorFrame, DepthFrame depthFrame, BodyIndexFrame bodyIndexFrame)
        {
            int colorWidth = colorFrame.FrameDescription.Width;
            int colorHeight = colorFrame.FrameDescription.Height;

            int depthWidth = depthFrame.FrameDescription.Width;
            int depthHeight = depthFrame.FrameDescription.Height;

            int bodyIndexWidth = bodyIndexFrame.FrameDescription.Width;
            int bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;

            if (_displayPixels == null)
            {
                _depthData = new ushort[depthWidth * depthHeight];
                _bodyData = new byte[depthWidth * depthHeight];
                _colorData = new byte[colorWidth * colorHeight * Constants.BYTES_PER_PIXEL];
                _displayPixels = new byte[depthWidth * depthHeight * Constants.BYTES_PER_PIXEL];
                _colorPoints = new ColorSpacePoint[depthWidth * depthHeight];
                _bitmap = new WriteableBitmap(depthWidth, depthHeight);
                _stream = _bitmap.PixelBuffer.AsStream();
            }

            if (((depthWidth * depthHeight) == _depthData.Length) && ((colorWidth * colorHeight * Constants.BYTES_PER_PIXEL) == _colorData.Length) && ((bodyIndexWidth * bodyIndexHeight) == _bodyData.Length))
            {
                depthFrame.CopyFrameDataToArray(_depthData);

                if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                {
                    colorFrame.CopyRawFrameDataToArray(_colorData);
                }
                else
                {
                    colorFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Bgra);
                }

                bodyIndexFrame.CopyFrameDataToArray(_bodyData);

                CoordinateMapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);

                Array.Clear(_displayPixels, 0, _displayPixels.Length);

                for (int y = 0; y < depthHeight; ++y)
                {
                    for (int x = 0; x < depthWidth; ++x)
                    {
                        int depthIndex = (y * depthWidth) + x;

                        byte player = _bodyData[depthIndex];

                        if (player != 0xff)
                        {
                            ColorSpacePoint colorPoint = _colorPoints[depthIndex];

                            int colorX = (int)Math.Floor(colorPoint.X + 0.5);
                            int colorY = (int)Math.Floor(colorPoint.Y + 0.5);

                            if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight))
                            {
                                int colorIndex = ((colorY * colorWidth) + colorX) * Constants.BYTES_PER_PIXEL;
                                int displayIndex = depthIndex * Constants.BYTES_PER_PIXEL;

                                _displayPixels[displayIndex + 0] = _colorData[colorIndex];
                                _displayPixels[displayIndex + 1] = _colorData[colorIndex + 1];
                                _displayPixels[displayIndex + 2] = _colorData[colorIndex + 2];
                                _displayPixels[displayIndex + 3] = 0xff;
                            }
                        }
                    }
                }

                _stream.Seek(0, SeekOrigin.Begin);
                _stream.Write(_displayPixels, 0, _displayPixels.Length);

                _bitmap.Invalidate();
            }

            return _bitmap;
        }
Example #14
        /// <summary>
        /// Updates the bitmap with new frame data.
        /// </summary>
        /// <param name="depthFrame">The specified depth frame.</param>
        /// <param name="colorFrame">The specified color frame.</param>
        /// <param name="bodyIndexFrame">The specified body index frame.</param>
        override public void Update(ColorFrame colorFrame, DepthFrame depthFrame, BodyIndexFrame bodyIndexFrame)
        {
            int colorWidth = colorFrame.FrameDescription.Width;
            int colorHeight = colorFrame.FrameDescription.Height;

            int depthWidth = depthFrame.FrameDescription.Width;
            int depthHeight = depthFrame.FrameDescription.Height;

            int bodyIndexWidth = bodyIndexFrame.FrameDescription.Width;
            int bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;

            if (Bitmap == null)
            {
                InitBuffers(colorFrame.FrameDescription, depthFrame.FrameDescription, bodyIndexFrame.FrameDescription);
            }

            if (((depthWidth * depthHeight) == _depthData.Length) && ((colorWidth * colorHeight * Constants.BYTES_PER_PIXEL) == _colorData.Length) && ((bodyIndexWidth * bodyIndexHeight) == _bodyData.Length))
            {
                depthFrame.CopyFrameDataToArray(_depthData);

                if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                {
                    colorFrame.CopyRawFrameDataToArray(_colorData);
                }
                else
                {
                    colorFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Bgra);
                }

                bodyIndexFrame.CopyFrameDataToArray(_bodyData);

                CoordinateMapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);

                Array.Clear(Pixels, 0, Pixels.Length);

                for (int y = 0; y < depthHeight; ++y)
                {
                    for (int x = 0; x < depthWidth; ++x)
                    {
                        int depthIndex = (y * depthWidth) + x;

                        if (_bodyData[depthIndex] != 0xff)
                        {
                            ColorSpacePoint colorPoint = _colorPoints[depthIndex];

                            int colorX = (int)(colorPoint.X + 0.5);
                            int colorY = (int)(colorPoint.Y + 0.5);

                            if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight))
                            {
                                int colorIndex = ((colorY * colorWidth) + colorX) * Constants.BYTES_PER_PIXEL;
                                int displayIndex = depthIndex * Constants.BYTES_PER_PIXEL;

                                for (int b = 0; b < Constants.BYTES_PER_PIXEL; ++b)
                                {
                                    Pixels[displayIndex + b] = _colorData[colorIndex + b];
                                }
                            }
                        }
                    }
                }

                UpdateBitmap();
            }
        }
Example #15
        /// <summary>
        /// Generates the point cloud
        /// </summary>
        /// <param name="df">The Depth Frame from the Kinect sensor</param>
        /// <param name="bif">The Body Index Frame from the Kinect sensor</param>
        /// <returns>The PointCloud generated from the frames</returns>
        public PointCloud generate(DepthFrame df, BodyIndexFrame bif)
        {
            Log.Write("Creating point Cloud");

            /*
             * used to calculate centroid, as well as lowest x value for later on ->
             * saves us looping later, though PointCloud methods do allow you to do that
             */
            double xAccumulator = 0.0, yAccumulator = 0.0, zAccumulator = 0.0, xMean = 0.0, yMean = 0.0, zMean = 0.0, xMinimum = (1 / 0.0);

            int depthFrameWidth  = df.FrameDescription.Width;
            int depthFrameHeight = df.FrameDescription.Height;

            this.depthFrameData     = new ushort[depthFrameWidth * depthFrameHeight];
            this.bodyIndexFrameData = new byte[depthFrameWidth * depthFrameHeight];
            this.cameraPoints       = new CameraSpacePoint[depthFrameWidth * depthFrameHeight];


            df.CopyFrameDataToArray(depthFrameData);
            bif.CopyFrameDataToArray(bodyIndexFrameData);



            coordinateMapper.MapDepthFrameToCameraSpace(depthFrameData, cameraPoints);

            // Create new point cloud for storing points and operating on later
            PointCloud pointCloud     = new PointCloud();
            int        numberOfPoints = 0;

            // loop over each row and column of the depth
            for (int y = 0; y < depthFrameHeight; y++)
            {
                for (int x = 0; x < depthFrameWidth; x++)
                {
                    // calculate index into depth array
                    int depthIndex = (y * depthFrameWidth) + x;

                    byte humanPoint = bodyIndexFrameData[depthIndex];



                    if (humanPoint == 0xff) // Check if human point empty
                    {
                        continue;
                    }



                    CameraSpacePoint p = this.cameraPoints[depthIndex];

                    if (!(Double.IsInfinity(p.X)) && !(Double.IsInfinity(p.Y)) && !(Double.IsInfinity(p.Z)))
                    {
                        if (p.X < depthLimit && p.Y < depthLimit && p.Z < depthLimit)
                        {
                            Point3D scaledPoint = new Point3D(p.X * unitScale, p.Y * unitScale, p.Z * unitScale);

                            pointCloud.addPoint(scaledPoint);

                            xAccumulator += scaledPoint.X;
                            yAccumulator += scaledPoint.Y;
                            zAccumulator += scaledPoint.Z;
                            numberOfPoints++;

                            if (scaledPoint.X < xMinimum)
                            {
                                xMinimum = scaledPoint.X;
                            }
                        }
                    }
                } // end of for(int x..) loop over points
            }     // end of for(int y..) loop over points

            xMean = xAccumulator / numberOfPoints;
            yMean = yAccumulator / numberOfPoints;
            zMean = zAccumulator / numberOfPoints;

            // centroid calculated on the fly so we don't have to loop again unnecessarily
            Point3D centroid = new Point3D(xMean, yMean, zMean);

            pointCloud.subtractFromPointAxis(xMinimum, 0);
            pointCloud.rotateOnSpot(180, PointCloud.Axis.Y, centroid);

            Log.Write("Finished calculating point cloud");

            return(pointCloud);
        }
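A hedged sketch of calling generate from a frame handler; the latestCloud field and the reader setup are assumptions for illustration.

        // Illustrative caller; assumes a MultiSourceFrameReader opened for
        // Depth | BodyIndex and "using Microsoft.Kinect;" in scope.
        private PointCloud latestCloud;

        private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
            if (multiSourceFrame == null)
            {
                return;
            }

            using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
            using (BodyIndexFrame bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame())
            {
                if (depthFrame != null && bodyIndexFrame != null)
                {
                    latestCloud = generate(depthFrame, bodyIndexFrame);
                }
            }
        }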
Example #16
    public Bitmap GreenScreenImpl(ColorFrame colorFrame, DepthFrame depthFrame, BodyIndexFrame bodyIndexFrame)
    {
        int colorWidth  = colorFrame.FrameDescription.Width;
        int colorHeight = colorFrame.FrameDescription.Height;

        int depthWidth  = depthFrame.FrameDescription.Width;
        int depthHeight = depthFrame.FrameDescription.Height;

        int bodyIndexWidth  = bodyIndexFrame.FrameDescription.Width;
        int bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;

        if (_displayPixels == null)
        {
            _depthData     = new ushort[depthWidth * depthHeight];
            _bodyData      = new byte[depthWidth * depthHeight];
            _colorData     = new byte[colorWidth * colorHeight * BYTES_PER_PIXEL];
            _displayPixels = new byte[depthWidth * depthHeight * BYTES_PER_PIXEL];
            _colorPoints   = new ColorSpacePoint[depthWidth * depthHeight];
            _bitmap        = new Bitmap(depthWidth, depthHeight, FORMAT);
        }
        if (((depthWidth * depthHeight) == _depthData.Length) && ((colorWidth * colorHeight * BYTES_PER_PIXEL) == _colorData.Length) && ((bodyIndexWidth * bodyIndexHeight) == _bodyData.Length))
        {
            depthFrame.CopyFrameDataToArray(_depthData);

            if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
            {
                colorFrame.CopyRawFrameDataToArray(_colorData);
            }
            else
            {
                colorFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Bgra);
            }

            bodyIndexFrame.CopyFrameDataToArray(_bodyData);
            _coordinateMapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);
            Array.Clear(_displayPixels, 0, _displayPixels.Length);

            for (int y = 0; y < depthHeight; ++y)
            {
                for (int x = 0; x < depthWidth; ++x)
                {
                    int  depthIndex = (y * depthWidth) + x;
                    byte player     = _bodyData[depthIndex];

                    if (player != 0xff)
                    {
                        ColorSpacePoint colorPoint = _colorPoints[depthIndex];

                        int colorX = (int)Math.Floor(colorPoint.X + 0.5);
                        int colorY = (int)Math.Floor(colorPoint.Y + 0.5);
                        if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight))
                        {
                            int colorIndex   = ((colorY * colorWidth) + colorX) * BYTES_PER_PIXEL;
                            int displayIndex = depthIndex * BYTES_PER_PIXEL;

                            _displayPixels[displayIndex + 0] = _colorData[colorIndex];
                            _displayPixels[displayIndex + 1] = _colorData[colorIndex + 1];
                            _displayPixels[displayIndex + 2] = _colorData[colorIndex + 2];
                            _displayPixels[displayIndex + 3] = 0xff;
                        }
                    }
                }
            }

            BitmapData bmapdata = _bitmap.LockBits(new System.Drawing.Rectangle(0, 0, depthWidth, depthHeight), ImageLockMode.ReadWrite, _bitmap.PixelFormat);
            IntPtr     ptr      = bmapdata.Scan0;
            Marshal.Copy(_displayPixels, 0, ptr, _displayPixels.Length);
            _bitmap.UnlockBits(bmapdata);
        }
        return(_bitmap);
    }
Example #17
        public void GreenScreen(ColorFrame colorFrame, DepthFrame depthFrame, BodyIndexFrame bodyIndexFrame) //from https://github.com/Vangos/kinect-2-background-removal
        {
            var colorWidth = colorFrame.FrameDescription.Width;
            var colorHeight = colorFrame.FrameDescription.Height;

            var depthWidth = depthFrame.FrameDescription.Width;
            var depthHeight = depthFrame.FrameDescription.Height;

            var bodyIndexWidth = bodyIndexFrame.FrameDescription.Width;
            var bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;

            if (_displayPixels == null)
            {
                _depthData = new ushort[depthWidth*depthHeight];
                _bodyData = new byte[depthWidth*depthHeight];
                _colorData = new byte[colorWidth*colorHeight*_bytesPerPixel];
                _displayPixels = new byte[depthWidth*depthHeight*_bytesPerPixel];
                _colorPoints = new ColorSpacePoint[depthWidth*depthHeight];
            }

            if (((depthWidth*depthHeight) != _depthData.Length) ||
                ((colorWidth*colorHeight*_bytesPerPixel) != _colorData.Length) ||
                ((bodyIndexWidth*bodyIndexHeight) != _bodyData.Length)) return;

            depthFrame.CopyFrameDataToArray(_depthData);

            if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
            {
                colorFrame.CopyRawFrameDataToArray(_colorData);
            }
            else
            {
                colorFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Bgra);
            }

            bodyIndexFrame.CopyFrameDataToArray(_bodyData);

            _coordinateMapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);

            Array.Clear(_displayPixels, 0, _displayPixels.Length);

            for (var y = 0; y < depthHeight; ++y)
            {
                for (var x = 0; x < depthWidth; ++x)
                {
                    var depthIndex = (y*depthWidth) + x;

                    var player = _bodyData[depthIndex];

                    if (player == 0xff) continue;
                    var colorPoint = _colorPoints[depthIndex];

                    var colorX = (int) Math.Floor(colorPoint.X + 0.5);
                    var colorY = (int) Math.Floor(colorPoint.Y + 0.5);

                    if ((colorX < 0) || (colorX >= colorWidth) || (colorY < 0) || (colorY >= colorHeight)) continue;
                    var colorIndex = ((colorY*colorWidth) + colorX)*_bytesPerPixel;
                    var displayIndex = depthIndex*_bytesPerPixel;

                    _displayPixels[displayIndex + 0] = _colorData[colorIndex];
                    _displayPixels[displayIndex + 1] = _colorData[colorIndex + 1];
                    _displayPixels[displayIndex + 2] = _colorData[colorIndex + 2];
                    _displayPixels[displayIndex + 3] = 0xff;
                }
            }

            Array.Clear(BufBytes, 0, BufBytes.Length); // zero-fill the output buffer
            for (var d = 0; d < _displayPixels.Length; d++)
            {
                if (_displayPixels[d] != 0)
                {
                    BufBytes[d] = MainWindow.InfraPixels[d];
                }
            }
        }
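GreenScreen above expects color, depth and body-index frames that were acquired together. A minimal sketch of a handler that could drive it, assuming a MultiSourceFrameReader opened on those three sources (the handler name and wiring are assumptions):

// Hypothetical caller: acquire the three frames from one MultiSourceFrame and
// dispose them as soon as GreenScreen has copied their data.
private void OnMultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrame reference = e.FrameReference.AcquireFrame();
    if (reference == null)
    {
        return; // the frame expired before we could process it
    }

    using (ColorFrame colorFrame = reference.ColorFrameReference.AcquireFrame())
    using (DepthFrame depthFrame = reference.DepthFrameReference.AcquireFrame())
    using (BodyIndexFrame bodyIndexFrame = reference.BodyIndexFrameReference.AcquireFrame())
    {
        if (colorFrame == null || depthFrame == null || bodyIndexFrame == null)
        {
            return;
        }

        GreenScreen(colorFrame, depthFrame, bodyIndexFrame);
    }
}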
        private void MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            if (m_frameBool)
            {
                //pull multisource frame reference out
                MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();
                //Return on null
                if (multiSourceFrame is null)
                {
                    return;
                }
                //Calibration and full get sampled number of frames
                if ((m_currentFrameType.Equals(FrameType.Calibration) || m_currentFrameType.Equals(FrameType.Full)))
                {
                    using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
                    {
                        //Frame may have expired; try again on the next event
                        if (depthFrame == null)
                        {
                            return;
                        }
                        //Store one frame
                        m_tempDepthData[m_sampleIndex] = new ushort[m_depthFrameDescription.Width * m_depthFrameDescription.Height];
                        depthFrame.CopyFrameDataToArray(m_tempDepthData[m_sampleIndex]);
                        m_minDepth = depthFrame.DepthMinReliableDistance;
                        m_maxDepth = depthFrame.DepthMaxReliableDistance;
                    }
                    //...until all samples are acquired
                    if (m_sampleIndex == m_samplingRate - 1)
                    {
                        //Then clean the points
                        CleanDepth();
                    }
                    else
                    {
                        //Not done, get next sample
                        m_sampleIndex++;
                        return;
                    }
                }
                //Instantiate images
                m_depthImage    = new WriteableBitmap(m_depthFrameDescription.Width, m_depthFrameDescription.Height, 96, 96, PixelFormats.Bgr32, null);
                m_colorImage    = new WriteableBitmap(m_colorFrameDescription.Width, m_colorFrameDescription.Height, 96, 96, PixelFormats.Bgr32, null);
                m_infraredImage = new WriteableBitmap(m_infraredFrameDescription.Width, m_infraredFrameDescription.Height, 96, 96, PixelFormats.Bgr32, null);
                switch (m_currentFrameType)
                {
                case FrameType.Alignment:
                    using (DepthFrame depthframe = multiSourceFrame.DepthFrameReference.AcquireFrame())
                    {
                        //Frame may have expired; try again on the next event
                        if (depthframe == null)
                        {
                            return;
                        }
                        depthframe.CopyFrameDataToArray(m_depthData);
                        m_maxDepth = depthframe.DepthMaxReliableDistance;
                        m_minDepth = depthframe.DepthMinReliableDistance;
                        ProcessDepth();
                        KinectFrameArgs args = new KinectFrameArgs(FrameType.Alignment, m_depthImage);
                        args.pointCloudV3 = m_cameraSpacePoints.Where(x => x.X != float.NegativeInfinity).Select(x => new Vector3(x.X, x.Y, x.Z)).ToArray();
                        FrameArrived.Invoke(HelperContext, args);
                    }
                    break;

                case FrameType.Calibration:
                    ProcessDepth();
                    FrameArrived.Invoke(HelperContext, new KinectFrameArgs(FrameType.Calibration, m_depthImage));
                    break;

                case FrameType.Full:
                    using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
                        using (InfraredFrame infraredFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame())
                        {
                            //Frames may have expired; try again on the next event
                            if (colorFrame == null || infraredFrame == null)
                            {
                                return;
                            }
                            ProcessDepth();
                            ProcessColor(colorFrame);
                            ProcessInfrared(infraredFrame);
                            KinectFrameArgs args = new KinectFrameArgs(FrameType.Full, m_depthImage, m_colorImage, m_infraredImage);
                            args.pointCloudCSP = m_cameraSpacePoints;
                            args.pointCloudV3  = m_cameraSpacePoints.Where(x => x.X != float.NegativeInfinity).Select(x => new Vector3(x.X, x.Y, x.Z)).ToArray();
                            FrameArrived.Invoke(HelperContext, args);
                        }
                    break;
                }
                m_frameBool = false;
            }
        }
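CleanDepth is not shown in this example. One plausible implementation averages the m_samplingRate buffered depth frames per pixel, skipping zero readings (which the sensor reports for pixels it could not measure); a sketch under that assumption:

// Hypothetical CleanDepth: per-pixel average of the buffered samples, ignoring
// zero (invalid) readings, written back into m_depthData for ProcessDepth().
private void CleanDepth()
{
    int pixelCount = m_depthFrameDescription.Width * m_depthFrameDescription.Height;
    for (int i = 0; i < pixelCount; i++)
    {
        int sum = 0;
        int valid = 0;
        for (int s = 0; s < m_samplingRate; s++)
        {
            ushort d = m_tempDepthData[s][i];
            if (d != 0)
            {
                sum += d;
                valid++;
            }
        }
        m_depthData[i] = valid > 0 ? (ushort)(sum / valid) : (ushort)0;
    }
    m_sampleIndex = 0; // start a new sampling round on the next frame
}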
        void Update()
        {
            // Local variables
            int depthWidth  = 0;
            int depthHeight = 0;

            int colorWidth  = 0;
            int colorHeight = 0;

            int bodyIndexWidth  = 0;
            int bodyIndexHeight = 0;

            bool multiSourceFrameProcessed = false;
            bool colorFrameProcessed       = false;
            bool depthFrameProcessed       = false;
            bool bodyIndexFrameProcessed   = false;
            bool infraredFrameProcessed    = false;

            #region _multiSourceReader Begin Work
            if (_multiSourceReader != null)
            {
                MultiSourceFrame multiSourceFrame = _multiSourceReader.AcquireLatestFrame();

                if (multiSourceFrame != null)
                {
                    using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
                    {
                        using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
                        {
                            using (BodyIndexFrame bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame())
                            {
                                using (InfraredFrame infraredFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame())
                                {
                                    if (depthFrame != null)
                                    {
                                        FrameDescription depthFrameDescription = depthFrame.FrameDescription;
                                        depthWidth  = depthFrameDescription.Width;
                                        depthHeight = depthFrameDescription.Height;

                                        if ((depthWidth * depthHeight) == this.depthFrameData.Length)
                                        {
                                            depthFrame.CopyFrameDataToArray(this.depthFrameData);

                                            depthFrameProcessed = true;
                                        }
                                    }

                                    if (colorFrame != null)
                                    {
                                        FrameDescription colorFrameDescription = colorFrame.FrameDescription;
                                        colorWidth  = colorFrameDescription.Width;
                                        colorHeight = colorFrameDescription.Height;

                                        if ((colorWidth * colorHeight * this.bytesPerPixel) == this.colorFrameData.Length)
                                        {
                                            if (colorFrame.RawColorImageFormat == ColorImageFormat.Rgba)
                                            {
                                                colorFrame.CopyRawFrameDataToArray(this.colorFrameData);
                                            }
                                            else
                                            {
                                                colorFrame.CopyConvertedFrameDataToArray(this.colorFrameData, ColorImageFormat.Rgba);
                                            }

                                            // Upload the RGBA data to the texture regardless of the source format
                                            _kinectColorTexture.LoadRawTextureData(colorFrameData);
                                            _kinectColorTexture.Apply();

                                            colorFrameProcessed = true;
                                        }
                                    }

                                    if (bodyIndexFrame != null)
                                    {
                                        FrameDescription bodyIndexFrameDescription = bodyIndexFrame.FrameDescription;
                                        bodyIndexWidth  = bodyIndexFrameDescription.Width;
                                        bodyIndexHeight = bodyIndexFrameDescription.Height;

                                        if ((bodyIndexWidth * bodyIndexHeight) == this.bodyIndexFrameData.Length)
                                        {
                                            bodyIndexFrame.CopyFrameDataToArray(this.bodyIndexFrameData);

                                            bodyIndexFrameProcessed = true;
                                        }
                                    }

                                    if (infraredFrame != null)
                                    {
                                        // Convert the 16-bit infrared samples into a grayscale RGBA texture
                                        infraredFrame.CopyFrameDataToArray(_kinectInfraredData);
                                        int index = 0;
                                        foreach (var ir in _kinectInfraredData)
                                        {
                                            byte intensity = (byte)(ir >> 8);
                                            _kinectInfraredRawData[index++] = intensity;
                                            _kinectInfraredRawData[index++] = intensity;
                                            _kinectInfraredRawData[index++] = intensity;
                                            _kinectInfraredRawData[index++] = 255; // Alpha
                                        }

                                        _kinectInfraredTexture.LoadRawTextureData(_kinectInfraredRawData);
                                        _kinectInfraredTexture.Apply();
                                        infraredFrameProcessed = true;
                                    }
                                    multiSourceFrameProcessed = true;
                                }
                            }
                        }
                    }
                }

                // we got all frames
                if (multiSourceFrameProcessed && depthFrameProcessed && colorFrameProcessed && bodyIndexFrameProcessed && infraredFrameProcessed)
                {
                    _coordinateMapper.MapDepthFrameToColorSpace(this.depthFrameData, this.colorPoints);

                    Array.Clear(this.displayPixels, 0, this.displayPixels.Length);

                    // loop over each row and column of the depth
                    for (int y = 0; y < depthHeight; ++y)
                    {
                        for (int x = 0; x < depthWidth; ++x)
                        {
                            // calculate index into depth array
                            int depthIndex = (y * depthWidth) + x;

                            byte player = this.bodyIndexFrameData[depthIndex];

                            // if we're tracking a player for the current pixel, set its color and alpha to full
                            if (player != 0xff)
                            {
                                // retrieve the depth to color mapping for the current depth pixel
                                ColorSpacePoint colorPoint = this.colorPoints[depthIndex];

                                // make sure the depth pixel maps to a valid point in color space
                                int colorX = (int)Math.Floor(colorPoint.X + 0.5);
                                int colorY = (int)Math.Floor(colorPoint.Y + 0.5);
                                if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight))
                                {
                                    // calculate index into color array
                                    int colorIndex = ((colorY * colorWidth) + colorX) * this.bytesPerPixel;

                                    // set source for copy to the color pixel
                                    int displayIndex = depthIndex * this.bytesPerPixel;

                                    // write out blue byte
                                    this.displayPixels[displayIndex++] = this.colorFrameData[colorIndex++];

                                    // write out green byte
                                    this.displayPixels[displayIndex++] = this.colorFrameData[colorIndex++];

                                    // write out red byte
                                    this.displayPixels[displayIndex++] = this.colorFrameData[colorIndex];

                                    // write out alpha byte
                                    this.displayPixels[displayIndex] = 0xff;
                                }
                            }
                        }
                    }
                }
            }
            #endregion

            DisPlayerTexture();
        }
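The Unity Update() above assumes the sensor, the multi-source reader, the buffers and the textures were created elsewhere. A minimal Start() sketch under that assumption (field names follow the example; sizes come from the sensor's frame descriptions):

// Hypothetical setup for the Update() above (Kinect v2 Unity plugin).
void Start()
{
    _sensor = KinectSensor.GetDefault();
    _coordinateMapper = _sensor.CoordinateMapper;
    _multiSourceReader = _sensor.OpenMultiSourceFrameReader(
        FrameSourceTypes.Color | FrameSourceTypes.Depth |
        FrameSourceTypes.BodyIndex | FrameSourceTypes.Infrared);

    FrameDescription colorDesc = _sensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Rgba);
    colorFrameData = new byte[colorDesc.Width * colorDesc.Height * bytesPerPixel]; // bytesPerPixel is 4 for RGBA
    _kinectColorTexture = new Texture2D(colorDesc.Width, colorDesc.Height, TextureFormat.RGBA32, false);

    FrameDescription depthDesc = _sensor.DepthFrameSource.FrameDescription;
    depthFrameData = new ushort[depthDesc.Width * depthDesc.Height];
    bodyIndexFrameData = new byte[depthDesc.Width * depthDesc.Height];
    colorPoints = new ColorSpacePoint[depthDesc.Width * depthDesc.Height];
    displayPixels = new byte[depthDesc.Width * depthDesc.Height * bytesPerPixel];

    FrameDescription irDesc = _sensor.InfraredFrameSource.FrameDescription;
    _kinectInfraredData = new ushort[irDesc.Width * irDesc.Height];
    _kinectInfraredRawData = new byte[irDesc.Width * irDesc.Height * 4];
    _kinectInfraredTexture = new Texture2D(irDesc.Width, irDesc.Height, TextureFormat.RGBA32, false);

    if (!_sensor.IsOpen)
    {
        _sensor.Open();
    }
}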
Exemple #20
0
        /// <summary>
        /// Update to get a new frame.
        /// This code is similar to the code in the Kinect SDK samples.
        /// </summary>
        private static void Update()
        {
            if (!isConnected)
            {
                return;
            }

            dataAvailable.WaitOne();

            MultiSourceFrame multiSourceFrame = null;
            DepthFrame       depthFrame       = null;
            InfraredFrame    irFrame          = null;
            BodyFrame        bodyFrame        = null;

            lock (updateLock)
            {
                try
                {
                    if (frameReference != null)
                    {
                        multiSourceFrame = frameReference.AcquireFrame();

                        if (multiSourceFrame != null)
                        {
                            DepthFrameReference    depthFrameReference = multiSourceFrame.DepthFrameReference;
                            InfraredFrameReference irFrameReference    = multiSourceFrame.InfraredFrameReference;
                            BodyFrameReference     bodyFrameReference  = multiSourceFrame.BodyFrameReference;

                            depthFrame = depthFrameReference.AcquireFrame();
                            irFrame    = irFrameReference.AcquireFrame();

                            if ((depthFrame != null) && (irFrame != null))
                            {
                                FrameDescription depthFrameDescription = depthFrame.FrameDescription;
                                FrameDescription irFrameDescription    = irFrame.FrameDescription;

                                int depthWidth  = depthFrameDescription.Width;
                                int depthHeight = depthFrameDescription.Height;
                                int irWidth     = irFrameDescription.Width;
                                int irHeight    = irFrameDescription.Height;

                                // verify data and write the new registered frame data to the display bitmap
                                if (((depthWidth * depthHeight) == depthFrameData.Length) &&
                                    ((irWidth * irHeight) == irFrameData.Length))
                                {
                                    depthFrame.CopyFrameDataToArray(depthFrameData);
                                    irFrame.CopyFrameDataToArray(irFrameData);
                                }

                                if (bodyFrameReference != null)
                                {
                                    bodyFrame = bodyFrameReference.AcquireFrame();

                                    if (bodyFrame != null)
                                    {
                                        if (bodies == null || bodies.Length < bodyFrame.BodyCount)
                                        {
                                            bodies = new Body[bodyFrame.BodyCount];
                                        }
                                        using (bodyFrame)
                                        {
                                            bodyFrame.GetAndRefreshBodyData(bodies);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                catch (Exception)
                {
                    // ignore if the frame is no longer available
                }
                finally
                {
                    if (depthFrame != null)
                    {
                        depthFrame.Dispose();
                        depthFrame = null;
                    }

                    if (irFrame != null)
                    {
                        irFrame.Dispose();
                        irFrame = null;
                    }
                    if (bodyFrame != null)
                    {
                        bodyFrame.Dispose();
                        bodyFrame = null;
                    }
                    if (multiSourceFrame != null)
                    {
                        // MultiSourceFrame is not IDisposable; just release the reference
                        multiSourceFrame = null;
                    }
                }
            }
        }
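The static Update above blocks on dataAvailable until a frame reference has been published by the reader's event handler, which is not shown. A sketch of what that producer side might look like (the handler itself is an assumption; dataAvailable is assumed to be an AutoResetEvent):

// Hypothetical producer: store the latest frame reference and wake Update().
private static void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    lock (updateLock)
    {
        frameReference = e.FrameReference;
    }

    dataAvailable.Set();
}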
        /// <summary>
        /// Maps the color pixels of tracked players onto the depth resolution and removes the background (green-screen effect).
        /// </summary>
        /// <param name="colorFrame">The specified color frame.</param>
        /// <param name="depthFrame">The specified depth frame.</param>
        /// <param name="bodyIndexFrame">The specified body index frame.</param>
        /// <returns>The corresponding WriteableBitmap, with non-player pixels left transparent.</returns>
        public WriteableBitmap GreenScreen(ColorFrame colorFrame, DepthFrame depthFrame, BodyIndexFrame bodyIndexFrame)
        {
            int colorWidth  = colorFrame.FrameDescription.Width;
            int colorHeight = colorFrame.FrameDescription.Height;

            int depthWidth  = depthFrame.FrameDescription.Width;
            int depthHeight = depthFrame.FrameDescription.Height;

            int bodyIndexWidth  = bodyIndexFrame.FrameDescription.Width;
            int bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;

            if (_displayPixels == null)
            {
                _depthData     = new ushort[depthWidth * depthHeight];
                _bodyData      = new byte[depthWidth * depthHeight];
                _colorData     = new byte[colorWidth * colorHeight * Constants.BYTES_PER_PIXEL];
                _displayPixels = new byte[depthWidth * depthHeight * Constants.BYTES_PER_PIXEL];
                _colorPoints   = new ColorSpacePoint[depthWidth * depthHeight];
                _bitmap        = new WriteableBitmap(depthWidth, depthHeight);
                _stream        = _bitmap.PixelBuffer.AsStream();
            }

            if (((depthWidth * depthHeight) == _depthData.Length) && ((colorWidth * colorHeight * Constants.BYTES_PER_PIXEL) == _colorData.Length) && ((bodyIndexWidth * bodyIndexHeight) == _bodyData.Length))
            {
                depthFrame.CopyFrameDataToArray(_depthData);

                if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                {
                    colorFrame.CopyRawFrameDataToArray(_colorData);
                }
                else
                {
                    colorFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Bgra);
                }

                bodyIndexFrame.CopyFrameDataToArray(_bodyData);

                CoordinateMapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);

                Array.Clear(_displayPixels, 0, _displayPixels.Length);

                for (int y = 0; y < depthHeight; ++y)
                {
                    for (int x = 0; x < depthWidth; ++x)
                    {
                        int depthIndex = (y * depthWidth) + x;

                        byte player = _bodyData[depthIndex];

                        if (player != 0xff)
                        {
                            ColorSpacePoint colorPoint = _colorPoints[depthIndex];

                            int colorX = (int)Math.Floor(colorPoint.X + 0.5);
                            int colorY = (int)Math.Floor(colorPoint.Y + 0.5);

                            if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight))
                            {
                                int colorIndex   = ((colorY * colorWidth) + colorX) * Constants.BYTES_PER_PIXEL;
                                int displayIndex = depthIndex * Constants.BYTES_PER_PIXEL;

                                _displayPixels[displayIndex + 0] = _colorData[colorIndex];
                                _displayPixels[displayIndex + 1] = _colorData[colorIndex + 1];
                                _displayPixels[displayIndex + 2] = _colorData[colorIndex + 2];
                                _displayPixels[displayIndex + 3] = 0xff;
                            }
                        }
                    }
                }

                _stream.Seek(0, SeekOrigin.Begin);
                _stream.Write(_displayPixels, 0, _displayPixels.Length);

                _bitmap.Invalidate();
            }

            return _bitmap;
        }
        private void ProcessFrames(ColorFrame colorFrame, DepthFrame depthFrame, BodyIndexFrame bodyIndexFrame, BodyFrame bodyFrame, byte [] psBytes0, byte [] psBytes1)
        {            
            // create multiframe to process
            long ticksCopyData = DateTime.Now.Ticks;

            MultiFrame multiFrame = new MultiFrame();
            multiFrame.FrameNb = Interlocked.Increment(ref frameNb);

            // color
            long ticksCreateColorData = DateTime.Now.Ticks;
            byte[] colorData = new byte[colorByteSize];
            Utils.UpdateTimer("CreateColorData", ticksCreateColorData);

            long ticksCopyColorData = DateTime.Now.Ticks;
            colorFrame.CopyConvertedFrameDataToArray(colorData, ColorImageFormat.Bgra);
            Utils.UpdateTimer("CopyColorData", ticksCopyColorData);

            // depth
            long ticksCreateDepthData = DateTime.Now.Ticks;
            ushort[] depthData = new ushort[depthPixelSize];
            depthFrame.CopyFrameDataToArray(depthData);            
            Utils.UpdateTimer("CreateDepthData", ticksCreateDepthData);

            // body index
            long ticksCreateBodyIndexData = DateTime.Now.Ticks;
            byte[] bodyIndexData = new byte[depthPixelSize];
            bodyIndexFrame.CopyFrameDataToArray(bodyIndexData);
            Utils.UpdateTimer("CreateBodyIndexData", ticksCreateBodyIndexData);

            // bodies
            long ticksCreateBodiesData = DateTime.Now.Ticks;
            Body[] bodies = new Body[bodyFrame.BodyCount];
            bodyFrame.GetAndRefreshBodyData(bodies);
            Utils.UpdateTimer("CreateBodiesData", ticksCreateBodiesData);

            // ps3eye
            byte[] psBytes = null;
            if (psBytes0 != null && psBytes1 != null)
            {
                long ticksCreatePS3EyeData = DateTime.Now.Ticks;
                psBytes = new byte[psByteSize * 2];
                Utils.UpdateTimer("CreatePS3EyeData", ticksCreatePS3EyeData);

                long ticksCopyPS3EyeData = DateTime.Now.Ticks;
                CopyPS3EyeDataMirror(psBytes, psBytes0, psBytes1);
                Utils.UpdateTimer("CopyPS3EyeData", ticksCopyPS3EyeData);
            }

            // multiFrame
            long ticksMultiFrame = DateTime.Now.Ticks;
            multiFrame.DepthData = depthData;
            multiFrame.ColorData = colorData;
            multiFrame.BodyIndexData = bodyIndexData;
            multiFrame.Bodies = bodies;
            multiFrame.PS3EyeData = psBytes;
            multiFrame.HasKinectData = true;
            multiFrame.HasPS3EyeData = psBytes != null;
            Utils.UpdateTimer("MultiFrame", ticksMultiFrame);

            long ticksEnqueue = DateTime.Now.Ticks;
            ProcessingManager.Instance.EnqueueMultiFrame(multiFrame);
            Utils.UpdateTimer("Enqueue", ticksEnqueue);

            Utils.UpdateTimer("CopyFramesData", ticksCopyData);

            // display timers & queues
            Context.GUI.DisplayPerformance();
        }
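Utils.UpdateTimer is not shown; the pattern in ProcessFrames is simply to accumulate the elapsed time per named stage from a DateTime.Now.Ticks start value. A minimal sketch of such a helper under that assumption:

using System;
using System.Collections.Generic;

// Hypothetical timing helper matching the calls above: accumulates elapsed
// milliseconds per stage name since the given start ticks.
public static class Utils
{
    private static readonly Dictionary<string, double> timers = new Dictionary<string, double>();

    public static void UpdateTimer(string name, long startTicks)
    {
        double elapsedMs = (DateTime.Now.Ticks - startTicks) / (double)TimeSpan.TicksPerMillisecond;
        lock (timers)
        {
            timers.TryGetValue(name, out double total);
            timers[name] = total + elapsedMs;
        }
    }
}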
    void UpdateKinect()
    {
        if (_Reader != null)
        {
            MultiSourceFrame frame = _Reader.AcquireLatestFrame();
            if (frame != null)
            {
                //color processing with depth
                ColorFrame colorFrame = frame.ColorFrameReference.AcquireFrame();
                if (colorFrame != null)
                {
                    DepthFrame depthFrame = frame.DepthFrameReference.AcquireFrame();
                    if (depthFrame != null)
                    {
                        colorFrame.CopyConvertedFrameDataToArray(_ColorRawData, ColorImageFormat.Rgba);
                        _ColorTexture.LoadRawTextureData(_ColorRawData);
                        _ColorTexture.Apply();

                        depthFrame.CopyFrameDataToArray(_DepthData);

                        depthFrame.Dispose();
                        depthFrame = null;
                    }

                    colorFrame.Dispose();
                    colorFrame = null;
                }

                //ir processing
                InfraredFrame irFrame = frame.InfraredFrameReference.AcquireFrame();
                if (irFrame != null)
                {
                    irFrame.CopyFrameDataToArray(_IRData);

                    int index = 0;
                    foreach (ushort ir in _IRData)
                    {
                        byte intensity = (byte)(ir >> 8);
                        _IRRawData[index++] = intensity;
                        _IRRawData[index++] = intensity;
                        _IRRawData[index++] = intensity;
                        _IRRawData[index++] = 255; // Alpha
                    }

                    //load raw data
                    _IRTexture.LoadRawTextureData(_IRRawData);
                    _IRTexture.Apply();

                    irFrame.Dispose();
                    irFrame = null;
                }


                //body processing
                BodyFrame bodyFrame = frame.BodyFrameReference.AcquireFrame();
                if (bodyFrame != null)
                {
                    if (_BodyData == null)
                    {
                        _BodyData = new Body[_Sensor.BodyFrameSource.BodyCount];
                    }
                    bodyFrame.GetAndRefreshBodyData(_BodyData);

                    bodyFrame.Dispose();
                    bodyFrame = null;
                }
                frame = null;
            }
        }
    }
        /// <summary>
        /// Initializes a new instance of the <see cref="ReplayDepthFrame"/> class
        /// based on the specified <c>DepthFrame</c>.
        /// </summary>
        /// <param name="frame">The frame.</param>
        internal ReplayDepthFrame(DepthFrame frame)
        {
            this.FrameType = FrameTypes.Depth;
            this.RelativeTime = frame.RelativeTime;

            this.DepthMinReliableDistance = frame.DepthMinReliableDistance;
            this.DepthMaxReliableDistance = frame.DepthMaxReliableDistance;

            this.Width = frame.FrameDescription.Width;
            this.Height = frame.FrameDescription.Height;
            this.BytesPerPixel = frame.FrameDescription.BytesPerPixel;

            _frameData = new ushort[this.Width * this.Height];

            frame.CopyFrameDataToArray(_frameData);
        }
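Once the depth samples have been copied out of the frame, a recorder typically writes them to a stream. The serialization code is not part of this example; a minimal sketch of how a ReplayDepthFrame might be written (the method and field order are assumptions; requires System.IO):

// Hypothetical serializer: header fields first, then the raw 16-bit depth samples.
internal void WriteTo(BinaryWriter writer)
{
    writer.Write(this.RelativeTime.Ticks);
    writer.Write(this.DepthMinReliableDistance);
    writer.Write(this.DepthMaxReliableDistance);
    writer.Write(this.Width);
    writer.Write(this.Height);

    foreach (ushort depth in _frameData)
    {
        writer.Write(depth);
    }
}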
        /// <summary>
        /// Build depth bitmap.
        /// </summary>
        /// <param name="frame"></param>
        /// <param name="bitmap"></param>
        /// <param name="withLock"></param>
        /// <param name="segmentation"></param>
        public void BuildDepthBitmap(DepthFrame depthFrame, SmallFrameBitmap bitmap, bool withLock)
        {
            depthFrame.CopyFrameDataToArray(_depthData);

            _displayFilter.Init(
                DisplayFilterMode.Rainbow,
                Frame.DEPTH_INFRARED_WIDTH,
                Frame.DEPTH_INFRARED_HEIGHT,
                0,
                depthFrame.DepthMaxReliableDistance,
                depthFrame.DepthMinReliableDistance
            );

            Array.Clear(_depthPixels, 0, _depthPixels.Length);
            _displayFilter.Apply(_depthData, _depthPixels, null);

            WriteableBitmap outBitmap = bitmap.Bitmap;
            ValidateBitmap(outBitmap, Frame.DEPTH_INFRARED_WIDTH, Frame.DEPTH_INFRARED_HEIGHT);
            CopyToDisplay(outBitmap, _depthPixels, withLock);
        }
Exemple #26
0
        private BitmapSource ToCombinedData(ColorFrame colorFrame, DepthFrame depthFrame, String formated_date)
        {
            int colorwidth  = colorFrame.FrameDescription.Width;
            int colorheight = colorFrame.FrameDescription.Height;

            var depthWidth  = depthFrame.FrameDescription.Width;
            var depthHeight = depthFrame.FrameDescription.Height;

            PixelFormat format        = PixelFormats.Bgr32;
            int         bytesPerPixel = ((format.BitsPerPixel + 7) / 8);

            byte[] _colorFrameData = new byte[colorwidth * colorheight * bytesPerPixel];
            byte[] pixels          = new byte[depthWidth * depthHeight * bytesPerPixel];


            ushort[]           _depthData         = new ushort[depthWidth * depthHeight];
            ColorSpacePoint[]  _colorSpacePoints  = new ColorSpacePoint[depthWidth * depthHeight];
            CameraSpacePoint[] _cameraSpacePoints = new CameraSpacePoint[depthHeight * depthWidth];

            colorFrame.CopyConvertedFrameDataToArray(_colorFrameData, ColorImageFormat.Bgra);

            depthFrame.CopyFrameDataToArray(_depthData);

            // Each depth sample is 2 bytes, so the raw byte buffer needs twice the pixel count
            byte[] full_pixelData = new byte[depthHeight * depthWidth * 2];

            Buffer.BlockCopy(_depthData, 0, full_pixelData, 0, depthWidth * depthHeight * 2);
            SaveDepthMatrixTimestamp.AddLast(formated_date);
            SaveDepthMatrix.AddLast(_depthData);

            _sensor.CoordinateMapper.MapDepthFrameToColorSpace(_depthData, _colorSpacePoints);

            _sensor.CoordinateMapper.MapDepthFrameToCameraSpace(_depthData, _cameraSpacePoints);
            SaveXYZTimestamps.AddLast(formated_date);
            SaveCameraSpacePoints.AddLast(_cameraSpacePoints);

            for (int depthY = 0; depthY < depthHeight; depthY++)
            {
                for (int depthX = 0; depthX < depthWidth; depthX++)
                {
                    int depthIndex = depthY * depthWidth + depthX;

                    int colorX = (int)(_colorSpacePoints[depthIndex].X + 0.5);
                    int colorY = (int)(_colorSpacePoints[depthIndex].Y + 0.5);

                    if ((0 <= colorX) && (colorX < colorwidth) && (0 <= colorY) && (colorY < colorheight))
                    {
                        int colorIndex = colorY * colorwidth + colorX;

                        pixels[depthIndex * bytesPerPixel + 0] = _colorFrameData[colorIndex * bytesPerPixel + 0];
                        pixels[depthIndex * bytesPerPixel + 1] = _colorFrameData[colorIndex * bytesPerPixel + 1];
                        pixels[depthIndex * bytesPerPixel + 2] = _colorFrameData[colorIndex * bytesPerPixel + 2];
                        pixels[depthIndex * bytesPerPixel + 3] = _colorFrameData[colorIndex * bytesPerPixel + 3];
                    }
                }
            }



            int stride = depthWidth * bytesPerPixel;

            return BitmapSource.Create(depthWidth, depthHeight, 96, 96, format, null, pixels, stride);
        }
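The camera-space points accumulated above can also be exported as a plain XYZ point cloud. A minimal sketch, assuming unmapped pixels come back as negative infinity, as in the other examples (the helper name is an assumption; requires System.IO):

// Hypothetical export helper: write valid CameraSpacePoints (in meters) as an
// ASCII XYZ file, skipping pixels without a valid depth reading.
private static void SavePointCloud(CameraSpacePoint[] points, string path)
{
    using (StreamWriter writer = new StreamWriter(path))
    {
        foreach (CameraSpacePoint p in points)
        {
            if (float.IsInfinity(p.X) || float.IsInfinity(p.Y) || float.IsInfinity(p.Z))
            {
                continue;
            }

            writer.WriteLine("{0} {1} {2}", p.X, p.Y, p.Z);
        }
    }
}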
Exemple #27
0
        void depth_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            using (DepthFrame frame = depth.AcquireLatestFrame())
            {
                if (frame == null)
                {
                    return;
                }

                int minDist     = (int)NearSlider.Value;
                int maxDist     = (int)FarSlider.Value;
                int modelHeight = (int)ModelHeightSlider.Value;
                int modelWidth  = (int)ModelWidthSlider.Value;

                ushort max = depth.DepthFrameSource.DepthMaxReliableDistance;
                ushort min = depth.DepthFrameSource.DepthMinReliableDistance;

                double maxScale = max - maxDist;
                double minScale = minDist - min;
                double scale    = maxDist - minDist;
                int    intScale = maxDist - minDist;

                int width  = frame.FrameDescription.Width;
                int height = frame.FrameDescription.Height;

                if (depthData == null)
                {
                    depthData = new ushort[frame.FrameDescription.LengthInPixels];
                }

                if (gridHeights == null)
                {
                    gridHeights = new ushort[frame.FrameDescription.LengthInPixels];
                }

                frame.CopyFrameDataToArray(depthData);

                if (depthColorImage == null)
                {
                    depthColorImage = new byte[frame.FrameDescription.LengthInPixels * 4];
                }

                int depthColorImagePos = 0;

                average = imageAverage(depthData);

                for (int i = 0; i < average.Length; i++)
                {
                    int depthValue = average[i];
                    // Check for the invalid values

                    if (depthValue > maxDist)
                    {
                        double fraction  = ((double)depthValue - maxDist) / maxScale;
                        byte   depthByte = (byte)(255 - (255.0 * fraction));
                        depthColorImage[depthColorImagePos++] = depthByte; // Blue
                        depthColorImage[depthColorImagePos++] = 0;         // Green
                        depthColorImage[depthColorImagePos++] = 0;         // Red
                        gridHeights[i] = (ushort)0;
                    }
                    else if (depthValue < minDist)
                    {
                        double fraction  = ((double)depthValue - min) / minScale;
                        byte   depthByte = (byte)(255 - (255.0 * fraction));
                        depthColorImage[depthColorImagePos++] = 0;         // Blue
                        depthColorImage[depthColorImagePos++] = 0;         // Green
                        depthColorImage[depthColorImagePos++] = depthByte; // Red
                        gridHeights[i] = (ushort)0;
                    }
                    else
                    {
                        int    absoluteDepth = depthValue - minDist;
                        double fraction      = ((double)absoluteDepth) / scale;
                        byte   depthByte     = (byte)(255 - (255.0 * fraction));
                        depthColorImage[depthColorImagePos++] = depthByte; // Blue
                        depthColorImage[depthColorImagePos++] = depthByte; // Green
                        depthColorImage[depthColorImagePos++] = depthByte; // Red
                        gridHeights[i] = (ushort)(intScale - absoluteDepth);
                    }
                    // transparency
                    depthColorImagePos++;
                }

                if (depthImageBitmap == null)
                {
                    this.depthImageBitmap = new WriteableBitmap(
                        frame.FrameDescription.Width,
                        frame.FrameDescription.Height,
                        96,  // DpiX
                        96,  // DpiY
                        PixelFormats.Bgr32,
                        null);

                    kinectDepthImage.Width  = frame.FrameDescription.Width;
                    kinectDepthImage.Height = frame.FrameDescription.Height;
                    kinectDepthImage.Source = depthImageBitmap;
                }

                checkSelfie();

                if (takeSnapShot)
                {
                    takeSnapShot = false;

                    string filename = FileNameTextBox.Text.Trim();

                    if (filename.Length == 0)
                    {
                        MessageBox.Show("Please enter a filename", "File save failed");
                        return;
                    }

                    string fullFilename = filepath + "\\" + filename + ".stl";

                    try
                    {
                        StoreSTLMesh(gridHeights, (int)kinectDepthImage.Height, (int)kinectDepthImage.Width, modelWidth, modelHeight, fullFilename);
                        // Set the start of the screen flash
                        lastSnapshotTime = DateTime.Now;
                    }
                    catch (Exception ex)
                    {
                        MessageBox.Show("File name: " + fullFilename + " not written.\n" + ex.Message, "File save failed");
                    }
                }

                DateTime now = DateTime.Now;

                // Invert the screen for 200 milliseconds after taking a picture to show it has done something

                if ((now - lastSnapshotTime).TotalMilliseconds < 200)
                {
                    for (int i = 0; i < depthColorImage.Length; i++)
                    {
                        depthColorImage[i] ^= 255;
                    }
                }

                this.depthImageBitmap.WritePixels(
                    new Int32Rect(0, 0, frame.FrameDescription.Width, frame.FrameDescription.Height),
                    depthColorImage,                  // video data
                    frame.FrameDescription.Width * 4, // stride,
                    0                                 // offset into the array - start at 0
                    );
            }
        }
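StoreSTLMesh is not shown in this example. A rough sketch of an ASCII STL writer over the height grid, assuming gridHeights is row-major (rows = image height, columns = image width) and that two triangles are emitted per grid cell; normals are left at zero, which most STL readers recompute, and an invariant culture should be used for the number formatting in production:

// Hypothetical StoreSTLMesh: write the height field as an ASCII STL surface.
private static void StoreSTLMesh(ushort[] heights, int rows, int cols, int modelWidth, int modelHeight, string path)
{
    double sx = (double)modelWidth / cols;   // horizontal scale
    double sy = (double)modelHeight / rows;  // vertical scale

    using (StreamWriter w = new StreamWriter(path))
    {
        w.WriteLine("solid kinectSurface");
        for (int y = 0; y < rows - 1; y++)
        {
            for (int x = 0; x < cols - 1; x++)
            {
                double z00 = heights[y * cols + x];
                double z10 = heights[y * cols + x + 1];
                double z01 = heights[(y + 1) * cols + x];
                double z11 = heights[(y + 1) * cols + x + 1];

                WriteFacet(w, x * sx, y * sy, z00, (x + 1) * sx, y * sy, z10, x * sx, (y + 1) * sy, z01);
                WriteFacet(w, (x + 1) * sx, y * sy, z10, (x + 1) * sx, (y + 1) * sy, z11, x * sx, (y + 1) * sy, z01);
            }
        }
        w.WriteLine("endsolid kinectSurface");
    }
}

private static void WriteFacet(StreamWriter w,
    double x1, double y1, double z1,
    double x2, double y2, double z2,
    double x3, double y3, double z3)
{
    w.WriteLine("  facet normal 0 0 0");
    w.WriteLine("    outer loop");
    w.WriteLine("      vertex {0} {1} {2}", x1, y1, z1);
    w.WriteLine("      vertex {0} {1} {2}", x2, y2, z2);
    w.WriteLine("      vertex {0} {1} {2}", x3, y3, z3);
    w.WriteLine("    endloop");
    w.WriteLine("  endfacet");
}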
        /// <summary>
        /// Handles the depth/color frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            DepthFrame depthFrame       = null;
            ColorFrame colorFrame       = null;
            var        multiSourceFrame = e.FrameReference.AcquireFrame();

            // If the Frame has expired by the time we process this event, return.
            if (multiSourceFrame == null)
            {
                return;
            }

            // We use a try/finally to ensure that we clean up before we exit the function.
            // This includes calling Dispose on any Frame objects that we may have and unlocking the bitmap back buffer.
            try
            {
                depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
                colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
                var mappedByteImage = new byte[1920 * 1080];
                // If any frame has expired by the time we process this event, return.
                // The "finally" statement will Dispose any that are not null.
                if ((depthFrame == null) || (colorFrame == null))
                {
                    return;
                }

                // Process Depth
                _depthBitmap.Lock();
                FrameDescription depthFrameDescription = depthFrame.FrameDescription;

                var depthWidth  = depthFrameDescription.Width;
                var depthHeight = depthFrameDescription.Height;

                var frameData = new ushort[512 * 424];

                // Access the depth frame data directly via LockImageBuffer to avoid making a copy
                using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
                {
                    this._coordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                        depthFrameData.UnderlyingBuffer,
                        depthFrameData.Size,
                        this._colorMappedToDepthPoints);
                }

                depthFrame.CopyFrameDataToArray(frameData);
                ushort minDepth = depthFrame.DepthMinReliableDistance;

                ProcessColorImage(colorFrame);
                RenderHeaderLeftCorner();
                ProcessDepthImage(depthWidth, depthHeight, frameData, mappedByteImage, minDepth);
            }
            finally
            {
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                }
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                }
            }
        }
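ProcessDepthImage and ProcessColorImage are not shown here. The mapping produced by MapColorFrameToDepthSpaceUsingIntPtr is typically used to mask out color pixels that have no valid depth, as in the SDK's coordinate-mapping sample; a minimal sketch of that masking step (the method name and parameters are assumptions):

// Hypothetical use of _colorMappedToDepthPoints: keep only color pixels whose
// mapped depth coordinate is valid (unmapped pixels come back as -infinity).
private void MaskColorByDepth(byte[] colorPixelsBgra, int colorWidth, int colorHeight, int depthWidth, int depthHeight)
{
    for (int colorIndex = 0; colorIndex < colorWidth * colorHeight; colorIndex++)
    {
        DepthSpacePoint p = _colorMappedToDepthPoints[colorIndex];

        int depthX = (int)(p.X + 0.5f);
        int depthY = (int)(p.Y + 0.5f);

        bool valid = !float.IsNegativeInfinity(p.X) && !float.IsNegativeInfinity(p.Y) &&
                     depthX >= 0 && depthX < depthWidth && depthY >= 0 && depthY < depthHeight;

        if (!valid)
        {
            colorPixelsBgra[colorIndex * 4 + 3] = 0; // make the pixel transparent
        }
    }
}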
Exemple #29
0
    void Update()
    {
        if (reader != null)
        {
            MultiSourceFrame frame = reader.AcquireLatestFrame();
            if (frame != null)
            {
                using (ColorFrame colorFrame = frame.ColorFrameReference.AcquireFrame()) {
                    if (colorFrame != null)
                    {
                        colorFrame.CopyConvertedFrameDataToArray(colorData, ColorImageFormat.Rgba);
                    }
                }
                using (DepthFrame depthFrame = frame.DepthFrameReference.AcquireFrame()) {
                    if (depthFrame != null)
                    {
                        //Debug.Log ("depthFrame not null");
                        depthFrame.CopyFrameDataToArray(depthData);
                    }
                }
                using (BodyIndexFrame bodyIndexFrame = frame.BodyIndexFrameReference.AcquireFrame()) {
                    if (bodyIndexFrame != null)
                    {
                        //Debug.Log ("bodyIndexFrame not null");
                        bodyIndexFrame.CopyFrameDataToArray(bodyIndexData);
                    }
                }

                frame = null;
            }
        }
        else
        {
            return;
        }

        Utils.copyToMat(colorData, outputMat);

        Utils.copyToMat(colorData, rgbaMat);

        coordinateMapper.MapColorFrameToDepthSpace(depthData, depthSpacePoints);
        int width      = rgbaMat.width();
        int height     = rgbaMat.height();
        int depthWidth = 512;

        for (int y = 0; y < height; y++)
        {
            for (int x = 0; x < width; x++)
            {
                int index = x + y * width;

                int mappedIndex = (int)depthSpacePoints [index].X + (int)depthSpacePoints [index].Y * depthWidth;
                int tmp = mappedIndex < 0 ? 0 : mappedIndex;

                if (bodyIndexData [tmp] == 255)
                {
                    maskData [index] = 0;
                }
                else
                {
                    maskData [index] = 255;
                }
            }
        }
        Utils.copyToMat(maskData, maskMat);


        if (mode == modeType.original)
        {
            rgbaMat.copyTo(outputMat, maskMat);

            Imgproc.putText(outputMat, "ORIGINAL MODE " + texture.width + "x" + texture.height, new Point(5, texture.height - 5), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 0, 0, 255), 3);
        }
        else if (mode == modeType.sepia)
        {
            Core.transform(rgbaMat, rgbaMat, sepiaKernel);

            rgbaMat.copyTo(outputMat, maskMat);

            Imgproc.putText(outputMat, "SEPIA MODE " + texture.width + "x" + texture.height, new Point(5, texture.height - 5), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 0, 0, 255), 3);
        }
        else if (mode == modeType.pixelize)
        {
            Imgproc.resize(rgbaMat, pixelizeIntermediateMat, pixelizeSize0, 0.1, 0.1, Imgproc.INTER_NEAREST);
            Imgproc.resize(pixelizeIntermediateMat, rgbaMat, rgbaMat.size(), 0.0, 0.0, Imgproc.INTER_NEAREST);

            rgbaMat.copyTo(outputMat, maskMat);

            Imgproc.putText(outputMat, "PIXELIZE MODE" + texture.width + "x" + texture.height, new Point(5, texture.height - 5), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 0, 0, 255), 3);
        }
        else if (mode == modeType.comic)
        {
            Imgproc.cvtColor(rgbaMat, comicGrayMat, Imgproc.COLOR_RGBA2GRAY);

            comicBgMat.copyTo(comicDstMat);

            Imgproc.GaussianBlur(comicGrayMat, comicLineMat, new Size(3, 3), 0);


            Utils.copyFromMat(comicGrayMat, comicGrayPixels);

            for (int i = 0; i < comicGrayPixels.Length; i++)
            {
                comicMaskPixels [i] = 0;

                if (comicGrayPixels [i] < 70)
                {
                    comicGrayPixels [i] = 0;

                    comicMaskPixels [i] = 1;
                }
                else if (70 <= comicGrayPixels [i] && comicGrayPixels [i] < 120)
                {
                    comicGrayPixels [i] = 100;
                }
                else
                {
                    comicGrayPixels [i] = 255;

                    comicMaskPixels [i] = 1;
                }
            }


            Utils.copyToMat(comicGrayPixels, comicGrayMat);

            Utils.copyToMat(comicMaskPixels, comicMaskMat);

            comicGrayMat.copyTo(comicDstMat, comicMaskMat);


            Imgproc.Canny(comicLineMat, comicLineMat, 20, 120);

            comicLineMat.copyTo(comicMaskMat);

            Core.bitwise_not(comicLineMat, comicLineMat);

            comicLineMat.copyTo(comicDstMat, comicMaskMat);


            Imgproc.cvtColor(comicDstMat, rgbaMat, Imgproc.COLOR_GRAY2RGBA);


            rgbaMat.copyTo(outputMat, maskMat);

            Imgproc.putText(outputMat, "COMIC MODE " + texture.width + "x" + texture.height, new Point(5, texture.height - 5), Imgproc.FONT_HERSHEY_PLAIN, 4.0, new Scalar(255, 0, 0, 255), 3);
        }

        Utils.matToTexture(outputMat, texture);
    }
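The sepiaKernel used by Core.transform above is not initialized in this snippet. Assuming RGBA channel order, it would be a 4x4 CV_32F matrix whose rows mix the input channels with the classic sepia weights while passing alpha through; a sketch under that assumption (the exact coefficients in the original project may differ):

// Hypothetical sepiaKernel setup for the SEPIA mode above (OpenCVForUnity).
// Each output channel is the dot product of a row with the input (R, G, B, A).
sepiaKernel = new Mat(4, 4, CvType.CV_32F);
sepiaKernel.put(0, 0, 0.393f, 0.769f, 0.189f, 0f); // output R
sepiaKernel.put(1, 0, 0.349f, 0.686f, 0.168f, 0f); // output G
sepiaKernel.put(2, 0, 0.272f, 0.534f, 0.131f, 0f); // output B
sepiaKernel.put(3, 0, 0f, 0f, 0f, 1f);             // output A (pass-through)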
Exemple #30
0
        private void MapColorToDepth(DepthFrame depthFrame, Shared <Image> colorImage)
        {
            const int colorImageWidth  = 1920;
            const int colorImageHeight = 1080;

            if (!this.configuration.OutputColorToCameraMapping && !this.configuration.OutputRGBD)
            {
                return;
            }

            ushort[] depthData = new ushort[depthFrame.FrameDescription.LengthInPixels];
            depthFrame.CopyFrameDataToArray(depthData);

            if (this.configuration.OutputColorToCameraMapping)
            {
                // Writing out a mapping from color space to camera space
                CameraSpacePoint[] colorToCameraMapping = new CameraSpacePoint[colorImageWidth * colorImageHeight];
                this.kinectSensor.CoordinateMapper.MapColorFrameToCameraSpace(depthData, colorToCameraMapping);
                var time = this.pipeline.GetCurrentTimeFromElapsedTicks(depthFrame.RelativeTime.Ticks);
                this.ColorToCameraMapper.Post(colorToCameraMapping, time);
            }

            if (this.configuration.OutputRGBD)
            {
                unsafe
                {
                    DepthSpacePoint[] depthSpacePoints = new DepthSpacePoint[colorImageWidth * colorImageHeight];
                    this.kinectSensor.CoordinateMapper.MapColorFrameToDepthSpace(depthData, depthSpacePoints);
                    using (var rgbd = ImagePool.GetOrCreate(colorImageWidth, colorImageHeight, Imaging.PixelFormat.RGBA_64bpp))
                    {
                        byte *srcRow      = (byte *)colorImage.Resource.ImageData.ToPointer();
                        byte *dstRow      = (byte *)rgbd.Resource.ImageData.ToPointer();
                        int   depthWidth  = depthFrame.FrameDescription.Width;
                        int   depthHeight = depthFrame.FrameDescription.Height;
                        for (int y = 0; y < colorImage.Resource.Height; y++)
                        {
                            byte *  srcCol = srcRow;
                            ushort *dstCol = (ushort *)dstRow;
                            int     offset = y * colorImageWidth;
                            for (int x = 0; x < colorImage.Resource.Width; x++)
                            {
                                dstCol[0] = (ushort)(srcCol[2] << 8);
                                dstCol[1] = (ushort)(srcCol[1] << 8);
                                dstCol[2] = (ushort)(srcCol[0] << 8);
                                DepthSpacePoint pt = depthSpacePoints[offset];
                                if (pt.X >= 0 && pt.X < depthWidth && pt.Y >= 0 && pt.Y < depthHeight)
                                {
                                    dstCol[3] = depthData[((int)pt.Y * depthWidth) + (int)pt.X];
                                }
                                else
                                {
                                    dstCol[3] = 0;
                                }

                                dstCol += 4;
                                srcCol += colorImage.Resource.BitsPerPixel / 8;
                                offset++;
                            }

                            srcRow += colorImage.Resource.Stride;
                            dstRow += rgbd.Resource.Stride;
                        }

                        var time = this.pipeline.GetCurrentTimeFromElapsedTicks(depthFrame.RelativeTime.Ticks);
                        this.RGBDImage.Post(rgbd, time);
                    }
                }
            }
        }
Exemple #31
0
        private void MultiFrameSourceReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs ex)
        {
            __AvatarData__ = new List <byte>();

            int depthWidth  = 0;
            int depthHeight = 0;

            int colorWidth  = 0;
            int colorHeight = 0;

            int bodyIndexWidth  = 0;
            int bodyIndexHeight = 0;

            bool multiSourceFrameProcessed = false;
            bool colorFrameProcessed       = false;
            bool depthFrameProcessed       = false;
            bool bodyIndexFrameProcessed   = false;
            bool bodyFrameProcessed        = false;

            MultiSourceFrame multiSourceFrame = ex.FrameReference.AcquireFrame();

            if (multiSourceFrame != null)
            {
                using (_frameCounter.Increment())
                {
                    using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
                    {
                        using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
                        {
                            using (BodyIndexFrame bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame())
                            {
                                if (depthFrame != null)
                                {
                                    FrameDescription depthFrameDescription = depthFrame.FrameDescription;
                                    depthWidth  = depthFrameDescription.Width;
                                    depthHeight = depthFrameDescription.Height;

                                    if ((depthWidth * depthHeight) == this.depthFrameData.Length)
                                    {
                                        depthFrame.CopyFrameDataToArray(this.depthFrameData);

                                        depthFrameProcessed = true;
                                    }

                                    if (colorFrame != null)
                                    {
                                        FrameDescription colorFrameDescription = colorFrame.FrameDescription;
                                        colorWidth  = colorFrameDescription.Width;
                                        colorHeight = colorFrameDescription.Height;

                                        if ((colorWidth * colorHeight * this.bytesPerPixel) == this.colorFrameData.Length)
                                        {
                                            if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                                            {
                                                colorFrame.CopyRawFrameDataToArray(this.colorFrameData);
                                            }
                                            else
                                            {
                                                colorFrame.CopyConvertedFrameDataToArray(this.colorFrameData, ColorImageFormat.Bgra);
                                            }

                                            colorFrameProcessed = true;
                                        }
                                    }

                                    if (bodyIndexFrame != null)
                                    {
                                        FrameDescription bodyIndexFrameDescription = bodyIndexFrame.FrameDescription;
                                        bodyIndexWidth  = bodyIndexFrameDescription.Width;
                                        bodyIndexHeight = bodyIndexFrameDescription.Height;

                                        if ((bodyIndexWidth * bodyIndexHeight) == this.bodyIndexFrameData.Length)
                                        {
                                            bodyIndexFrame.CopyFrameDataToArray(this.bodyIndexFrameData);

                                            bodyIndexFrameProcessed = true;
                                        }
                                    }
                                    multiSourceFrameProcessed = true;
                                }
                            }

                            using (BodyFrame bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
                            {
                                if (bodyFrame != null)
                                {
                                    if (this.bodies == null)
                                    {
                                        this.bodies = new Body[bodyFrame.BodyCount];
                                    }
                                    bodyFrame.GetAndRefreshBodyData(this.bodies);
                                    bodyFrameProcessed = true;
                                }
                            }
                        }
                    }
                }

                if (multiSourceFrameProcessed && depthFrameProcessed && colorFrameProcessed && bodyIndexFrameProcessed && bodyFrameProcessed)
                {
                    _connectionLine.Stroke = _tcp.Connected ? Brushes.Green : Brushes.Red;

                    this.displayFrame = new byte[depthWidth * depthHeight * this.bytesPerPixel];

                    this.coordinateMapper.MapDepthFrameToColorSpace(this.depthFrameData, this.colorPoints);
                    this.coordinateMapper.MapDepthFrameToCameraSpace(this.depthFrameData, this.cameraPoints);

                    Array.Clear(displayFrame, 0, displayFrame.Length);

                    int step = 1;

                    int depthWidthIndex  = 0;
                    int depthHeightIndex = 0;
                    for (int depthIndex = 0; depthIndex < depthFrameData.Length; depthIndex += step)
                    {
                        byte player = this.bodyIndexFrameData[depthIndex];

                        bool? c   = OnlyPlayersMenuItem.IsChecked;
                        bool  val = c ?? false;
                        if (!val || player != 0xff)
                        //if (!val || player == 0)
                        {
                            CameraSpacePoint p          = this.cameraPoints[depthIndex];
                            ColorSpacePoint  colorPoint = this.colorPoints[depthIndex];

                            // make sure the depth pixel maps to a valid point in color space
                            int colorX = (int)Math.Floor(colorPoint.X + 0.5);
                            int colorY = (int)Math.Floor(colorPoint.Y + 0.5);

                            // destination index in the display frame for this depth pixel
                            int displayIndex = depthIndex * this.bytesPerPixel;

                            if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight) && p.Z > 0)
                            {
                                // calculate index into color array
                                int colorIndex = ((colorY * colorWidth) + colorX) * this.bytesPerPixel;

                                this.displayFrame[displayIndex]     = this.colorFrameData[colorIndex];     // B
                                this.displayFrame[displayIndex + 1] = this.colorFrameData[colorIndex + 1]; // G
                                this.displayFrame[displayIndex + 2] = this.colorFrameData[colorIndex + 2]; // R
                                this.displayFrame[displayIndex + 3] = this.colorFrameData[colorIndex + 3]; // A


                                if (player != 0xff && (!(Double.IsInfinity(p.X)) && !(Double.IsInfinity(p.Y)) && !(Double.IsInfinity(p.Z))))
                                {
                                    #region Void Compression Algorithm

                                    //Int16 depth = Convert.ToInt16(Math.Round((Decimal)(p.Z * 1000.0f), 0));
                                    ushort depth = depthFrameData[depthIndex];
                                    __ZDepth0__ = (byte)(depth % 256);
                                    __ZDepth1__ = (byte)(depth / 256);
                                    __Blue__    = this.colorFrameData[colorIndex];
                                    __Green__   = this.colorFrameData[colorIndex + 1];
                                    __Red__     = this.colorFrameData[colorIndex + 2];

                                    //Int16 d1 = (Int16)__ZDepth0__;
                                    //Int16 d2 = (Int16)(__ZDepth1__ * 256);
                                    //int d3 = d1 + d2;

                                    //if (shouldwrite)
                                    {
                                        __index0__ = (byte)(depthWidthIndex % 256);
                                        __index1__ = (byte)(depthWidthIndex / 256);
                                        __AvatarData__.Add(__index0__);
                                        __AvatarData__.Add(__index1__);
                                    }
                                    __AvatarData__.Add(__Blue__);
                                    __AvatarData__.Add(__Green__);
                                    __AvatarData__.Add(__Red__);
                                    __AvatarData__.Add(__ZDepth0__);
                                    __AvatarData__.Add(__ZDepth1__);
                                    #endregion

                                    //shouldwrite = false;
                                }
                                else
                                {
                                    //shouldwrite = true;
                                }
                            }
                            else
                            {
                                this.displayFrame[displayIndex]     = 0;
                                this.displayFrame[displayIndex + 1] = 0;
                                this.displayFrame[displayIndex + 2] = 0;
                                this.displayFrame[displayIndex + 3] = 100;
                            }
                        }


                        // End of a depth row: reset the column index and append the 5000 row-delimiter marker
                        if (++depthWidthIndex == depthWidth)
                        {
                            depthWidthIndex = 0;
                            ++depthHeightIndex;

                            int  newline = 5000;
                            byte nl0     = (byte)(newline % 256);
                            byte nl1     = (byte)(newline / 256);
                            __AvatarData__.Add(nl0);
                            __AvatarData__.Add(nl1);
                        }
                    }


                    int numOfElements = __AvatarData__.Count;
                    if (numOfElements > 0)
                    {
                        if (_tcp.Connected)
                        {
                            new Thread(() =>
                            {
                                Thread.CurrentThread.IsBackground = true;

                                try
                                {
                                    byte[] s = BitConverter.GetBytes(numOfElements); // [4]
                                    for (int i = s.Length - 1; i >= 0; i--)
                                    {
                                        __AvatarData__.Insert(0, s[i]);
                                    }
                                    _tcp.write(__AvatarData__.ToArray());
                                }
                                catch (Exception e)
                                {
                                    Console.WriteLine(e.Message);
                                    Console.WriteLine(e.StackTrace);
                                }
                            }).Start();
                        }
                    }

                    colorBitmap.WritePixels(
                        new Int32Rect(0, 0, depthWidth, depthHeight),
                        this.displayFrame,
                        depthWidth * bytesPerPixel,
                        0);
                }
            }
        }
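
The #region above packs each retained pixel as a seven-byte record (two column-index bytes, then B, G, R, then the depth value low byte first), appends the value 5000 as a two-byte row delimiter at the end of every depth row, and finally prefixes the payload with a four-byte length before it is written to the TCP client. The parser below is a hypothetical receiver-side sketch derived only from that layout; none of its names come from the sample, and it relies on a depth column index never reaching 5000, so a two-byte peek distinguishes the row delimiter from a record.

// Hypothetical receiver-side parser for the stream built above. Assumptions:
// a four-byte length prefix (as produced by BitConverter.GetBytes on the sender),
// then 7-byte records [colLo, colHi, B, G, R, depthLo, depthHi], with the ushort
// value 5000 appended (low byte first) at the end of every depth row.
using System;
using System.Collections.Generic;

public struct AvatarPoint
{
    public int Row;       // depth row, counted from the row delimiters
    public int Column;    // depth column, reconstructed from colLo/colHi
    public byte B, G, R;  // colour sample mapped onto the depth pixel
    public ushort Depth;  // depth in millimetres (low byte + 256 * high byte)
}

public static class AvatarStreamParser
{
    private const ushort RowDelimiter = 5000;

    public static List<AvatarPoint> Parse(byte[] packet)
    {
        var points = new List<AvatarPoint>();
        int length = BitConverter.ToInt32(packet, 0);      // payload byte count
        int offset = 4;
        int end = Math.Min(packet.Length, 4 + length);
        int row = 0;

        while (offset + 2 <= end)
        {
            ushort next = (ushort)(packet[offset] + packet[offset + 1] * 256);
            if (next == RowDelimiter)
            {
                row++;          // end of a depth row
                offset += 2;
                continue;
            }

            if (offset + 7 > end)
            {
                break;          // truncated record
            }
            points.Add(new AvatarPoint
            {
                Row = row,
                Column = next,
                B = packet[offset + 2],
                G = packet[offset + 3],
                R = packet[offset + 4],
                Depth = (ushort)(packet[offset + 5] + packet[offset + 6] * 256)
            });
            offset += 7;
        }
        return points;
    }
}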
        public void ProcessBackgroundNew(DepthFrame depthFrame, ColorFrame colorFrame, BodyIndexFrame bodyIndexFrame)
        {
            var depthWidth  = _displayWidth;
            var depthHeight = _displayHeight;

            var bodyIndexWidth  = bodyIndexFrame.FrameDescription.Width;
            var bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;

            if (((depthWidth * depthHeight) == _depthData.Length) && ((_colorWidth * _colorHeight * BYTES_PER_PIXEL) == _colorData.Length) && ((bodyIndexWidth * bodyIndexHeight) == _bodyData.Length))
            {
                depthFrame.CopyFrameDataToArray(_depthData);

                if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                {
                    colorFrame.CopyRawFrameDataToArray(_colorData);
                }
                else
                {
                    colorFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Bgra);
                }

                bodyIndexFrame.CopyFrameDataToArray(_bodyData);

                _coordinateMapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);

                Array.Clear(_displayPixels, 0, _displayPixels.Length);

                for (var y = 0; y < depthHeight; ++y)
                {
                    for (var x = 0; x < depthWidth; ++x)
                    {
                        var depthIndex = (y * depthWidth) + x;

                        var player = _bodyData[depthIndex];

                        if (player == 0xff)
                        {
                            continue;
                        }
                        var colorPoint = _colorPoints[depthIndex];

                        var colorX = (int)Math.Floor(colorPoint.X + 0.5);
                        var colorY = (int)Math.Floor(colorPoint.Y + 0.5);

                        if ((colorX >= 0) && (colorX < _colorWidth) && (colorY >= 0) && (colorY < _colorHeight))
                        {
                            var colorIndex   = ((colorY * _colorWidth) + colorX) * BYTES_PER_PIXEL;
                            var displayIndex = depthIndex * BYTES_PER_PIXEL;

                            _displayPixels[displayIndex + 0] = _colorData[colorIndex];
                            _displayPixels[displayIndex + 1] = _colorData[colorIndex + 1];
                            _displayPixels[displayIndex + 2] = _colorData[colorIndex + 2];
                            _displayPixels[displayIndex + 3] = 0xff;
                        }
                    }
                }

                _bitmap.Lock();

                Marshal.Copy(_displayPixels, 0, _bitmap.BackBuffer, _displayPixels.Length);
                _bitmap.AddDirtyRect(new Int32Rect(0, 0, depthWidth, depthHeight));

                _bitmap.Unlock();
            }
        }
Exemple #33
0
        /// <summary>
        /// Handle the processing when Kinect frame arrived
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="e"></param>
        private void MultiSourceFrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            if (isDownApplication)
            {
                Application.Current.Shutdown();
            }

            MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

            if (multiSourceFrame != null)
            {
                using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
                {
                    using (BodyFrame bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
                    {
                        using (DrawingContext drawingContext = drawingGroup.Open())
                        {
                            if (depthFrame != null && bodyFrame != null)
                            {
                                // Refresh the foreground of the 2D top view used for positioning
                                Plot.RefreshForegroundCanvas(Canvas_Position_Foreground, activities);

                                // Find templates
                                if (isFindingTemplate)
                                {
                                    ushort[] depthFrameData = new ushort[depthFrame.FrameDescription.Height * depthFrame.FrameDescription.Width];
                                    depthFrame.CopyFrameDataToArray(depthFrameData);

                                    cameraSpacePoints = new CameraSpacePoint[depthFrame.FrameDescription.Height * depthFrame.FrameDescription.Width];
                                    kinectSensor.CoordinateMapper.MapDepthFrameToCameraSpace(depthFrameData, cameraSpacePoints);

                                    TemplateDetector.heightLow          = -2.4f;
                                    TemplateDetector.heightHigh         = -1.9f;
                                    TemplateDetector.canvas_width       = Canvas_Position_Background.Width;
                                    TemplateDetector.canvas_height      = Canvas_Position_Background.Height;
                                    TemplateDetector.canvas_environment = Canvas_Position_Environment;

                                    // AsyncTask
                                    BackgroundWorker worker = new BackgroundWorker();
                                    worker.WorkerReportsProgress = true;
                                    worker.DoWork             += TemplateDetector.DoInBackgrond;
                                    worker.ProgressChanged    += TemplateDetector.OnProgress;
                                    worker.RunWorkerCompleted += TemplateDetector.OnPostExecute;
                                    worker.RunWorkerAsync();

                                    isFindingTemplate = false;
                                }

                                // Display depth frame
                                // Uncomment to enable the display for height segmentation result
                                //if (!isHeightSegmented)
                                if (true)
                                {
                                    drawingContext.DrawImage(Transformation.ToBitmap(depthFrame, depthFramePixels, true),
                                                             new Rect(0.0, 0.0, kinectSensor.DepthFrameSource.FrameDescription.Width, kinectSensor.DepthFrameSource.FrameDescription.Height));
                                }
                                else
                                {
                                    drawingContext.DrawImage(Transformation.ToBitmap(depthFrame, segmentedDepthFramePixels, false),
                                                             new Rect(0.0, 0.0, kinectSensor.DepthFrameSource.FrameDescription.Width, kinectSensor.DepthFrameSource.FrameDescription.Height));
                                }

                                // Display the height-based top view
                                if (TemplateDetector.isDrawDone)
                                {
                                    using (DrawingContext drawingContext_heightview = drawingGroup_topView.Open())
                                    {
                                        drawingContext_heightview.DrawImage(Transformation.ToBitmap(TemplateDetector.area_width, TemplateDetector.area_height, TemplateDetector.pixels),
                                                                            new Rect(0.0, 0.0, TemplateDetector.area_width, TemplateDetector.area_height));

                                        foreach (Template t in TemplateDetector.templates)
                                        {
                                            drawingContext_heightview.DrawRectangle(null, new Pen(t.Brush, 2),
                                                                                    new Rect(new Point(t.TopLeft.X, t.TopLeft.Y), new Size(t.Width, t.Height)));
                                        }
                                    }

                                    TemplateDetector.isDrawDone = false;
                                }

                                // Load raw body joints info from Kinect
                                bodyFrame.GetAndRefreshBodyData(bodies);

                                // Update each person's information from the raw joints
                                for (int i = 0; i < kinectSensor.BodyFrameSource.BodyCount; ++i)
                                {
                                    if (persons[i] == null)
                                    {
                                        persons[i] = new Person();
                                    }

                                    ulong trackingId = bodies[i].TrackingId;
                                    if (trackingId != gestureDetectorList[i].TrackingId)
                                    {
                                        gestureDetectorList[i].TrackingId = trackingId;
                                        gestureDetectorList[i].IsPaused   = trackingId == 0;
                                    }

                                    if (bodies[i].IsTracked)
                                    {
                                        // Update tracking status
                                        persons[i].IsTracked = true;

                                        persons[i].ID = bodies[i].TrackingId;

                                        // Assign color to person in the top view for positioning
                                        persons[i].Color = Plot.BodyColors[i];

                                        // Get the person's 3D position in the camera's coordinate system
                                        CameraSpacePoint headPositionCamera = bodies[i].Joints[JointType.Head].Position; // Meter

                                        // Convert to a 3D position in the horizontal (ground) coordinate system
                                        CameraSpacePoint headPositionGround = Transformation.RotateBackFromTilt(TILT_ANGLE, true, headPositionCamera);

                                        // Convert to 2D top view position on canvas
                                        Transformation.ConvertGroundSpaceToPlane(headPositionGround, persons[i]);

                                        // Determine body orientation using shoulder joints
                                        CameraSpacePoint leftShoulderPositionGround  = Transformation.RotateBackFromTilt(TILT_ANGLE, true, bodies[i].Joints[JointType.ShoulderLeft].Position);
                                        CameraSpacePoint rightShoulderPositionGround = Transformation.RotateBackFromTilt(TILT_ANGLE, true, bodies[i].Joints[JointType.ShoulderRight].Position);
                                        BodyOrientation.DecideOrientation(leftShoulderPositionGround, rightShoulderPositionGround, persons[i],
                                                                          Transformation.CountZeroInRec(depthFramePixels, kinectSensor.CoordinateMapper.MapCameraPointToDepthSpace(headPositionCamera),
                                                                                                        16, kinectSensor.DepthFrameSource.FrameDescription.Width), Canvas_Position_Foreground);
                                    }
                                    else
                                    {
                                        persons[i].IsTracked = false;
                                    }
                                }

                                DrawPeopleOnDepth(drawingContext);
                                DrawPeopleOnCanvas();
                                DetermineSystemStatus();
                                DrawSystemStatus();

                                // Recognize and record activities when recording requirements are satisfied
                                if (isRecording)
                                {
                                    CheckActivity();
                                    DrawActivityOnCanvas();
                                    Record();
                                }
                            }
                        }
                    }
                }
            }
        }
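
Transformation.RotateBackFromTilt is referenced above but not shown. As a rough sketch only: correcting for a known sensor tilt usually amounts to rotating each camera-space point about the sensor's X axis by the mounting angle. The helper below is a hypothetical illustration with an assumed degrees-based angle and sign convention, not the project's actual implementation.

// Hypothetical sketch of a tilt correction: rotate a camera-space point about the
// sensor's X axis. Whether the angle is in degrees and which sign corresponds to
// "back to ground" are assumptions; the real Transformation helper may differ.
using System;
using Microsoft.Kinect;

public static class TiltCorrection
{
    public static CameraSpacePoint RotateBackFromTilt(double tiltDegrees, bool toGround, CameraSpacePoint p)
    {
        double theta = (toGround ? tiltDegrees : -tiltDegrees) * Math.PI / 180.0;
        float cos = (float)Math.Cos(theta);
        float sin = (float)Math.Sin(theta);

        // Rotation about the X axis: X is unchanged, Y and Z mix.
        return new CameraSpacePoint
        {
            X = p.X,
            Y = p.Y * cos - p.Z * sin,
            Z = p.Y * sin + p.Z * cos
        };
    }
}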
        void Update()
        {
            // If the sensor reference does not exist or it is not ready, ensure that the frame
            // arrived flags are reset and then cancel the update
            if (!DoesSensorExist() || !IsSensorReady())
            {
                // If the frame handler is not already subscribed to the frame arrived event, do
                // this and then flip the flag
                if (!subscribedFrameHandler)
                {
                    multiSourceReader.MultiSourceFrameArrived += FirstFrameArrivedHandler;
                    subscribedFrameHandler = true;
                }

                if (firstFrameArrived)
                {
                    firstFrameArrived = false;
                }

                return;
            }

            // Otherwise, if the sensor is available and ready but the first frame is yet to arrive,
            // simply return until it has arrived
            if (!firstFrameArrived)
            {
                return;
            }

            // If all above checks have passed, the current frame of incoming data can be processed and
            // made available to other classes
            MultiSourceFrame newMultiFrame = multiSourceReader.AcquireLatestFrame();

            if (newMultiFrame != null)
            {
                ColorFrame colourFrame = newMultiFrame.ColorFrameReference.AcquireFrame();
                if (colourFrame != null)
                {
                    DepthFrame depthFrame = newMultiFrame.DepthFrameReference.AcquireFrame();
                    if (depthFrame != null)
                    {
                        // If all frame references are valid, first generate the 2D texture from the current
                        // colour image by copying the frame pixel data into the byte array. Then, load
                        // this into the texture and apply the changes to render them
                        colourFrame.CopyConvertedFrameDataToArray(colourData, ColorImageFormat.Rgba);
                        ColourTexture.LoadRawTextureData(colourData);
                        ColourTexture.Apply();

                        // Copy the current depth frame data into the ushort array
                        depthFrame.CopyFrameDataToArray(DepthData);

                        // Finally, once all data has been copied, dispose of all frames and dereference
                        // them to free resources for the next incoming frame
                        depthFrame.Dispose();
                        depthFrame = null;
                    }

                    colourFrame.Dispose();
                    colourFrame = null;
                }

                newMultiFrame = null;
            }
        }
        private void ProcessFrameData(MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrameReference frameReference   = e.FrameReference;
            MultiSourceFrame          multiSourceFrame = null;
            DepthFrame depthFrame = null;
            ColorFrame colorFrame = null;

            try
            {
                multiSourceFrame = frameReference.AcquireFrame();
                if (multiSourceFrame != null)
                {
                    lock (rawDataLock)
                    {
                        ColorFrameReference colorFrameReference = multiSourceFrame.ColorFrameReference;
                        DepthFrameReference depthFrameReference = multiSourceFrame.DepthFrameReference;
                        colorFrame = colorFrameReference.AcquireFrame();
                        depthFrame = depthFrameReference.AcquireFrame();

                        if ((depthFrame != null) && (colorFrame != null))
                        {
                            FrameDescription colorFrameDescription = colorFrame.FrameDescription;
                            int colorWidth  = colorFrameDescription.Width;
                            int colorHeight = colorFrameDescription.Height;
                            if ((colorWidth * colorHeight * sizeof(int)) == colorImagePixels.Length)
                            {
                                colorFrame.CopyConvertedFrameDataToArray(colorImagePixels, ColorImageFormat.Bgra);
                            }

                            FrameDescription depthFrameDescription = depthFrame.FrameDescription;
                            int depthWidth  = depthFrameDescription.Width;
                            int depthHeight = depthFrameDescription.Height;

                            if ((depthWidth * depthHeight) == depthImagePixels.Length)
                            {
                                depthFrame.CopyFrameDataToArray(depthImagePixels);
                            }
                        }
                    }
                }
                CaptureCurrent = false; //We got both color and depth, everything went ok, stop trying to capture this image
            }
            catch (Exception)
            {
                // ignore if the frame is no longer available
            }
            finally
            {
                // MultiSourceFrame, DepthFrame, ColorFrame, BodyIndexFrame are IDisposable
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                    depthFrame = null;
                }
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                    colorFrame = null;
                }
                if (multiSourceFrame != null)
                {
                    multiSourceFrame = null;
                }
            }
        }
Exemple #36
0
        private void Reader_MultiSourceFrameArrived(MultiSourceFrameReader sender, MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

            // If the Frame has expired by the time we process this event, return.
            if (multiSourceFrame == null)
            {
                return;
            }
            DepthFrame     depthFrame           = null;
            ColorFrame     colorFrame           = null;
            InfraredFrame  infraredFrame        = null;
            BodyFrame      bodyFrame            = null;
            BodyIndexFrame bodyIndexFrame       = null;
            IBuffer        depthFrameDataBuffer = null;
            IBuffer        bodyIndexFrameData   = null;
            // COM interface for unsafe byte manipulation
            IBufferByteAccess bufferByteAccess = null;

            switch (CurrentDisplayFrameType)
            {
            case DisplayFrameType.Infrared:
                using (infraredFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame())
                {
                    ShowInfraredFrame(infraredFrame);
                }
                break;

            case DisplayFrameType.Color:
                using (colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
                {
                    ShowColorFrame(colorFrame);
                }
                break;

            case DisplayFrameType.Depth:
                using (depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
                {
                    ShowDepthFrame(depthFrame);
                }
                break;

            case DisplayFrameType.BodyMask:
                // Use try/finally so the frames and buffers are always cleaned up
                try
                {
                    depthFrame     = multiSourceFrame.DepthFrameReference.AcquireFrame();
                    bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();
                    colorFrame     = multiSourceFrame.ColorFrameReference.AcquireFrame();
                    if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
                    {
                        return;
                    }

                    // Access the depth frame data directly via LockImageBuffer to avoid making a copy
                    depthFrameDataBuffer = depthFrame.LockImageBuffer();
                    this.coordinateMapper.MapColorFrameToDepthSpaceUsingIBuffer(depthFrameDataBuffer, this.colorMappedToDepthPoints);
                    // Process Color
                    colorFrame.CopyConvertedFrameDataToBuffer(this.bitmap.PixelBuffer, ColorImageFormat.Bgra);
                    // Access the body index frame data directly via LockImageBuffer to avoid making a copy
                    bodyIndexFrameData = bodyIndexFrame.LockImageBuffer();
                    ShowMappedBodyFrame(depthFrame.FrameDescription.Width, depthFrame.FrameDescription.Height, bodyIndexFrameData, bufferByteAccess);
                }
                finally
                {
                    if (depthFrame != null)
                    {
                        depthFrame.Dispose();
                    }
                    if (colorFrame != null)
                    {
                        colorFrame.Dispose();
                    }
                    if (bodyIndexFrame != null)
                    {
                        bodyIndexFrame.Dispose();
                    }

                    if (depthFrameDataBuffer != null)
                    {
                        // We must force a release of the IBuffer in order to ensure that we have dropped all references to it.
                        System.Runtime.InteropServices.Marshal.ReleaseComObject(depthFrameDataBuffer);
                    }
                    if (bodyIndexFrameData != null)
                    {
                        System.Runtime.InteropServices.Marshal.ReleaseComObject(bodyIndexFrameData);
                    }
                    if (bufferByteAccess != null)
                    {
                        System.Runtime.InteropServices.Marshal.ReleaseComObject(bufferByteAccess);
                    }
                }
                break;

            case DisplayFrameType.BodyJoints:
                using (bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
                {
                    ShowBodyJoints(bodyFrame);
                }
                break;

            case DisplayFrameType.BackgroundRemoved:
                // Use try/finally so the frames are always cleaned up
                try
                {
                    depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
                    colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
                    if ((depthFrame == null) || (colorFrame == null))
                    {
                        return;
                    }
                    depthFrame.CopyFrameDataToArray(depthFrameData);
                    this.coordinateMapper.MapColorFrameToDepthSpace(depthFrameData, this.colorMappedToDepthPoints);
                    // Process Color.
                    colorFrame.CopyConvertedFrameDataToBuffer(this.bitmap.PixelBuffer, ColorImageFormat.Bgra);

                    ShowMappedColorBackgroundRemoved(colorMappedToDepthPoints, depthFrameData, depthFrame.FrameDescription);
                }
                finally
                {
                    if (depthFrame != null)
                    {
                        depthFrame.Dispose();
                    }
                    if (colorFrame != null)
                    {
                        colorFrame.Dispose();
                    }
                }
                break;

            case DisplayFrameType.FaceOnColor:
                using (colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
                {
                    ShowColorFrame(colorFrame);
                    this.faceManager.DrawLatestFaceResults(this.FacePointsCanvas, this.faceFrameFeatures);
                }
                break;

            case DisplayFrameType.FaceOnInfrared:
                using (infraredFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame())
                {
                    ShowInfraredFrame(infraredFrame);
                    DrawFaceOnInfrared();
                }
                break;

            case DisplayFrameType.FaceGame:
                FaceGameLookUpdate();
                break;

            default:
                break;
            }
        }
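
The bufferByteAccess variable in the handler above is the COM interface commonly used in WinRT samples to reach the raw bytes behind an IBuffer; its declaration is not included in this snippet. It is typically declared along the following lines (compiling with /unsafe), though the exact form used by this project is an assumption here.

// Typical declaration of the IBufferByteAccess COM interface used to obtain a raw
// byte pointer from an IBuffer (e.g. the locked depth or body index buffers above).
using System;
using System.Runtime.InteropServices;

[ComImport]
[Guid("905a0fef-bc53-11df-8c49-001e4fc686da")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
interface IBufferByteAccess
{
    unsafe void Buffer(out byte* pByte); // returns a pointer to the buffer's first byte
}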
Exemple #37
0
        /// <summary>
        /// Process the depth frames and update UI
        /// </summary>
        private void OnDepthFrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            DepthFrameReference refer = e.FrameReference;

            if (refer == null)
            {
                return;
            }

            DepthFrame frame = refer.AcquireFrame();

            if (frame == null)
            {
                return;
            }

            using (frame)
            {
                FrameDescription frameDesc = frame.FrameDescription;

                if (((frameDesc.Width * frameDesc.Height) == _depthData.Length) && (frameDesc.Width == _depthBitmap.PixelWidth) && (frameDesc.Height == _depthBitmap.PixelHeight))
                {
                    // Copy depth frames
                    frame.CopyFrameDataToArray(_depthData);

                    // Get min & max depth
                    ushort minDepth = frame.DepthMinReliableDistance;
                    ushort maxDepth = frame.DepthMaxReliableDistance;

                    // Adjust visualisation
                    int colorPixelIndex = 0;
                    for (int i = 0; i < _depthData.Length; ++i)
                    {
                        // Get depth value
                        ushort depth = _depthData[i];

                        if (depth == 0)
                        {
                            _depthPixels[colorPixelIndex++] = 41;
                            _depthPixels[colorPixelIndex++] = 239;
                            _depthPixels[colorPixelIndex++] = 242;
                        }
                        else if (depth < minDepth || depth > maxDepth)
                        {
                            _depthPixels[colorPixelIndex++] = 25;
                            _depthPixels[colorPixelIndex++] = 0;
                            _depthPixels[colorPixelIndex++] = 255;
                        }
                        else
                        {
                            double gray = (Math.Floor((double)depth / 250) * 12.75);

                            _depthPixels[colorPixelIndex++] = (byte)gray;
                            _depthPixels[colorPixelIndex++] = (byte)gray;
                            _depthPixels[colorPixelIndex++] = (byte)gray;
                        }

                        // Skip the fourth (padding) byte of the output pixel
                        ++colorPixelIndex;
                    }

                    // Copy output to bitmap
                    _depthBitmap.WritePixels(
                        new Int32Rect(0, 0, frameDesc.Width, frameDesc.Height),
                        _depthPixels,
                        frameDesc.Width * _bytePerPixel,
                        0);
                }
            }
        }
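
For reference, the grey conversion in the loop above quantises depth into 250 mm bands and scales each band by 12.75, so roughly 0-5 m is spread over 0-255. The small demo below, which is not part of the original sample, prints a few values computed the same way.

// Quick check of the banded grey mapping used above: depths are quantised into
// 250 mm bands and each band index is scaled by 12.75.
using System;

class DepthGreyDemo
{
    static void Main()
    {
        foreach (ushort depth in new ushort[] { 500, 1000, 2500, 4500 })
        {
            byte gray = (byte)(Math.Floor((double)depth / 250) * 12.75);
            Console.WriteLine($"{depth} mm -> gray {gray}"); // e.g. 1000 mm -> gray 51
        }
    }
}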
        private void ProcessFrame(DepthFrame frame)
        {
            int width = frame.FrameDescription.Width;
            int height = frame.FrameDescription.Height;

            //ushort minDepth = 0;
            ushort minDepth = frame.DepthMinReliableDistance;

            ushort maxDepth = ushort.MaxValue;
            //ushort maxDepth = frame.DepthMaxReliableDistance;

            ushort[] depthData = new ushort[width * height];

            int stride = (width * format.BitsPerPixel + 7) / 8;
            byte[] pixelData = new byte[stride * height];

            frame.CopyFrameDataToArray(depthData);
            
            int colorIndex = 0;
            for (int depthIndex = 0; depthIndex < depthData.Length; ++depthIndex)
            {
                ushort depth = depthData[depthIndex];
                byte intensity = (byte)(depth >= minDepth && depth <= maxDepth ? depth : 0); // note: the cast keeps only the low 8 bits of the depth value

                pixelData[colorIndex++] = intensity; // Blue
                pixelData[colorIndex++] = intensity; // Green
                pixelData[colorIndex++] = intensity; // Red

                ++colorIndex;
            }

            if (depthBitmap == null)
            {
                this.depthBitmap = new WriteableBitmap(displayWidth, displayHeight, 96.0, 96.0, format, null);
            }

            depthBitmap.Lock();

            Marshal.Copy(pixelData, 0, depthBitmap.BackBuffer, pixelData.Length);
            depthBitmap.AddDirtyRect(new Int32Rect(0, 0, displayWidth, displayHeight));

            depthBitmap.Unlock();

            this.depthData = depthData;
        }
Exemple #39
0
        // multi frame reader event handler
        private void ReaderMultiFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // Get a reference to the multi-frame
            var reference = e.FrameReference.AcquireFrame();

            // depth
            using (DepthFrame depthFrame = reference.DepthFrameReference.AcquireFrame())
            {
                string label_coords_blob = ""; // text for the blob coordinates label
                if (depthFrame != null)
                {
                    FrameDescription depthFrameDescription = depthFrame.FrameDescription;
                    int width  = depthFrameDescription.Width;
                    int height = depthFrameDescription.Height;

                    var depthData = new ushort[width * height];
                    depthFrame.CopyFrameDataToArray(depthData);
                    this.depthBitmap.WritePixels(depthRect, depthData, width * (int)depthFrameDescription.BytesPerPixel, 0);

                    // depthData -> camera space data
                    CameraSpacePoint[] cs_points = new CameraSpacePoint[width * height];
                    _sensor.CoordinateMapper.MapDepthFrameToCameraSpace(depthData, cs_points);

                    // calculate camera space coordinates of each marker (blob)
                    // plan: move the loop below into its own function (a sketch of such a helper follows this handler)
                    float[,] coord_blobs_center = new float[list_arr_index.Count(), 3]; // 2-D array of blob center coordinates
                    //label_sample.Content = list_arr_index.Count().ToString();
                    int i_blob = 0;                                                     // blob index
                    foreach (int[] arr_index in list_arr_index)
                    {
                        // for each blob, extract the x, y, z coordinates from its camera space points into arrays, then average them
                        float[] coord_blob_center = new float[3]; // holds the center coordinate of the blob (reflective marker)

                        // select the camera space points corresponding to this blob
                        CameraSpacePoint[] cs_points_blob = new CameraSpacePoint[arr_index.Length]; // declare the camera space array
                        // lists of the x, y, z coordinates
                        List <float> list_x_cs_points_blob = new List <float>();
                        List <float> list_y_cs_points_blob = new List <float>();
                        List <float> list_z_cs_points_blob = new List <float>();

                        // averages of the x, y, z coordinates
                        float x_coord_cs_points_blob = 0;
                        float y_coord_cs_points_blob = 0;
                        float z_coord_cs_points_blob = 0;

                        // clear the lists, just in case
                        list_x_cs_points_blob.Clear();
                        list_y_cs_points_blob.Clear();
                        list_z_cs_points_blob.Clear();

                        // for loop
                        int i_points_blob = 0; // index into the blob's cs_points
                        //int i_coord_blob = 0; // index of the coordinate within the blob
                        foreach (int i_point in arr_index)
                        {
                            // arr_index: indexes of the camera space points belonging to this blob
                            // gather the blob's camera space points
                            cs_points_blob[i_points_blob] = cs_points[i_point];
                            i_points_blob += 1;
                            // build the lists of x, y, z coordinates, skipping infinity values
                            if (!Double.IsInfinity(cs_points[i_point].X))
                            {
                                list_x_cs_points_blob.Add(cs_points[i_point].X);
                                list_y_cs_points_blob.Add(cs_points[i_point].Y);
                                list_z_cs_points_blob.Add(cs_points[i_point].Z);
                                // accumulate the coordinate sums
                                x_coord_cs_points_blob += cs_points[i_point].X;
                                y_coord_cs_points_blob += cs_points[i_point].Y;
                                z_coord_cs_points_blob += cs_points[i_point].Z;
                            }
                        }
                        // convert the lists to arrays
                        float[] arr_x_cs_points_blob = list_x_cs_points_blob.ToArray();
                        float[] arr_y_cs_points_blob = list_y_cs_points_blob.ToArray();
                        float[] arr_z_cs_points_blob = list_z_cs_points_blob.ToArray();

                        // compute the blob's center coordinate from cs_points_blob

                        // compute the fraction of valid (non-infinity) points
                        float ratio_valid_points_blob = (float)arr_x_cs_points_blob.Length /
                                                        (float)arr_index.Length; // fraction of the blob's points that were not infinity

                        // if the blob has any valid (non-infinity) points, compute its center coordinate
                        if (ratio_valid_points_blob > 0.0)
                        {
                            // divide the accumulated sums by the number of valid points
                            x_coord_cs_points_blob = x_coord_cs_points_blob / (float)arr_x_cs_points_blob.Count();
                            y_coord_cs_points_blob = y_coord_cs_points_blob / (float)arr_y_cs_points_blob.Count(); // denominators are all the same
                            z_coord_cs_points_blob = z_coord_cs_points_blob / (float)arr_z_cs_points_blob.Count(); // denominators are all the same
                        }
                        else
                        {
                            x_coord_cs_points_blob = 0;
                            y_coord_cs_points_blob = 0;
                            z_coord_cs_points_blob = 0;
                        }
                        coord_blob_center = new float[]
                        {
                            x_coord_cs_points_blob,
                            y_coord_cs_points_blob,
                            z_coord_cs_points_blob
                        };
                        // store coord_blob_center in the 2-D array and build the label_coords string
                        for (int i_xyz = 0; i_xyz < 3; i_xyz++)
                        {
                            coord_blobs_center[i_blob, i_xyz] = coord_blob_center[i_xyz];
                        }

                        label_coords_blob +=
                            string.Format("X: {0:+000.0;-000.0;+   0.0}, ", coord_blob_center[0] * 100) +
                            string.Format("Y: {0:+000.0;-000.0;+   0.0}, ", coord_blob_center[1] * 100) +
                            string.Format("Z: {0:+000.0;-000.0;+   0.0}\n", coord_blob_center[2] * 100);

                        i_blob += 1;
                    }

                    // display coord_blobs_center on screen
                    label_coords.Content = label_coords_blob;
                }
            }

            // IR
            using (InfraredFrame infraredFrame = reference.InfraredFrameReference.AcquireFrame())
            {
                if (infraredFrame != null)
                {
                    FrameDescription infraredFrameDescription = infraredFrame.FrameDescription;
                    int width  = infraredFrameDescription.Width;
                    int height = infraredFrameDescription.Height;

                    //ushort[] infraredData = new ushort[width * height];
                    // http://www.naturalsoftware.jp/entry/2014/07/25/020750
                    var infraredData = new ushort[width * height]; // ushort array
                    infraredFrame.CopyFrameDataToArray(infraredData);
                    this.infraredBitmap.Lock();
                    this.infraredBitmap.WritePixels(infraredRect, infraredData, width * (int)infraredFrameDescription.BytesPerPixel, 0);
                    //depthImage.WritePixels(depthRect, depthBuffer, depthStride, 0);// template
                    this.infraredBitmap.Unlock();
                    ColorImage.Source = this.infraredBitmap;

                    // OpenCV: count the blobs in the infrared bitmap
                    CountBlobs(this.infraredBitmap);
                }
            }
        }
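
As noted in the comment inside the handler, the per-blob averaging loop is a candidate for extraction into its own function. The helper below is a hypothetical refactoring sketch that mirrors the loop's behaviour (skip points whose coordinates came back as infinity, return zeros when a blob has no valid points); it is not part of the original example.

// Hypothetical helper: compute a blob's centre from its camera space points,
// mirroring the averaging loop above. The original filters on X only, since an
// invalid depth pixel maps to infinity; a blob with no valid points yields (0, 0, 0).
using Microsoft.Kinect;

public static class BlobCentroid
{
    public static float[] Compute(CameraSpacePoint[] csPoints, int[] blobIndexes)
    {
        float sumX = 0, sumY = 0, sumZ = 0;
        int validCount = 0;

        foreach (int i in blobIndexes)
        {
            CameraSpacePoint p = csPoints[i];
            if (float.IsInfinity(p.X))
            {
                continue; // invalid depth pixels map to infinity
            }
            sumX += p.X;
            sumY += p.Y;
            sumZ += p.Z;
            validCount++;
        }

        if (validCount == 0)
        {
            return new float[] { 0f, 0f, 0f };
        }
        return new float[] { sumX / validCount, sumY / validCount, sumZ / validCount };
    }
}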
        /// <summary>
        /// Converts a depth frame to the corresponding System.Windows.Media.Imaging.BitmapSource and removes the background (green-screen effect).
        /// </summary>
        /// <param name="depthFrame">The specified depth frame.</param>
        /// <param name="colorFrame">The specified color frame.</param>
        /// <param name="bodyIndexFrame">The specified body index frame.</param>
        /// <returns>The corresponding System.Windows.Media.Imaging.BitmapSource representation of the image.</returns>
        public BitmapSource GreenScreen(ColorFrame colorFrame, DepthFrame depthFrame, BodyIndexFrame bodyIndexFrame)
        {
            int colorWidth  = colorFrame.FrameDescription.Width;
            int colorHeight = colorFrame.FrameDescription.Height;

            int depthWidth  = depthFrame.FrameDescription.Width;
            int depthHeight = depthFrame.FrameDescription.Height;

            int bodyIndexWidth  = bodyIndexFrame.FrameDescription.Width;
            int bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;

            if (_displayPixels == null)
            {
                _depthData     = new ushort[depthWidth * depthHeight];
                _bodyData      = new byte[bodyIndexWidth * bodyIndexHeight];
                _colorData     = new byte[colorWidth * colorHeight * BYTES_PER_PIXEL];
                _displayPixels = new byte[depthWidth * depthHeight * BYTES_PER_PIXEL];
                _colorPoints   = new ColorSpacePoint[depthWidth * depthHeight];
                _bitmap        = new WriteableBitmap(depthWidth, depthHeight, DPI, DPI, FORMAT, null);
            }

            if (((depthWidth * depthHeight) == _depthData.Length) && ((colorWidth * colorHeight * BYTES_PER_PIXEL) == _colorData.Length) && ((bodyIndexWidth * bodyIndexHeight) == _bodyData.Length))
            {
                depthFrame.CopyFrameDataToArray(_depthData);

                if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                {
                    colorFrame.CopyRawFrameDataToArray(_colorData);
                }
                else
                {
                    colorFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Bgra);
                }

                bodyIndexFrame.CopyFrameDataToArray(_bodyData);

                CoordinateMapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);

                Array.Clear(_displayPixels, 0, _displayPixels.Length);

                for (int y = 0; y < depthHeight; ++y)
                {
                    for (int x = 0; x < depthWidth; ++x)
                    {
                        int depthIndex = (y * depthWidth) + x;

                        byte player = _bodyData[depthIndex];

                        if (player != 0xff)
                        {
                            ColorSpacePoint colorPoint = _colorPoints[depthIndex];

                            int colorX = (int)Math.Floor(colorPoint.X + 0.5);
                            int colorY = (int)Math.Floor(colorPoint.Y + 0.5);

                            if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight))
                            {
                                int colorIndex   = ((colorY * colorWidth) + colorX) * BYTES_PER_PIXEL;
                                int displayIndex = depthIndex * BYTES_PER_PIXEL;

                                //_displayPixels[displayIndex + 0] = _colorData[colorIndex];
                                //_displayPixels[displayIndex + 1] = _colorData[colorIndex + 1];
                                //_displayPixels[displayIndex + 2] = _colorData[colorIndex + 2];
                                _displayPixels[displayIndex + 0] = System.Drawing.Color.Blue.B;
                                _displayPixels[displayIndex + 1] = System.Drawing.Color.Blue.G;
                                _displayPixels[displayIndex + 2] = System.Drawing.Color.Blue.R;
                                _displayPixels[displayIndex + 3] = 255;
                            }
                        }
                    }
                }

                _bitmap.Lock();

                Marshal.Copy(_displayPixels, 0, _bitmap.BackBuffer, _displayPixels.Length);
                _bitmap.AddDirtyRect(new Int32Rect(0, 0, depthWidth, depthHeight));

                _bitmap.Unlock();
            }

            return(_bitmap);
        }
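
A possible caller for GreenScreen, following the acquire-and-dispose pattern used throughout these examples: acquire the colour, depth and body index frames from a MultiSourceFrame and hand them over. This is only a sketch; the field names (_greenScreen for an instance of the class above, camera for an Image control) are assumptions.

// Hypothetical caller wiring GreenScreen to a MultiSourceFrameReader event.
private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
{
    MultiSourceFrame reference = e.FrameReference.AcquireFrame();
    if (reference == null)
    {
        return;
    }

    using (ColorFrame colorFrame = reference.ColorFrameReference.AcquireFrame())
    using (DepthFrame depthFrame = reference.DepthFrameReference.AcquireFrame())
    using (BodyIndexFrame bodyIndexFrame = reference.BodyIndexFrameReference.AcquireFrame())
    {
        if (colorFrame != null && depthFrame != null && bodyIndexFrame != null)
        {
            // GreenScreen returns the WriteableBitmap it keeps updating.
            camera.Source = _greenScreen.GreenScreen(colorFrame, depthFrame, bodyIndexFrame);
        }
    }
}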
        public static BitmapSource SliceDepthImageWithoutPlane(this DepthFrame image, Floor floor, CoordinateMapper coordinateMapper, float planePos, int min = 20, int max = 1000)
        {
            ushort[] _depthData = new ushort[512 * 424];
            image.CopyFrameDataToArray(_depthData);
            CameraSpacePoint[] depthMappedToCameraPoints = new CameraSpacePoint[512 * 424];
            coordinateMapper.MapDepthFrameToCameraSpace(
                _depthData,
                depthMappedToCameraPoints);

            CameraSpacePoint s;
            double           dist;

            var tmFrameDescription = image.FrameDescription;

            int width  = tmFrameDescription.Width;  //image.Width;
            int height = tmFrameDescription.Height; // image.Height;

            //var depthFrame = image.Image.Bits;
            //short[] rawDepthData = new short[tmFrameDescription.LengthInPixels]; //new short[image.PixelDataLength];
            ushort[] rawDepthData = new ushort[tmFrameDescription.LengthInPixels];

            //image.CopyPixelDataTo(rawDepthData);
            image.CopyFrameDataToArray(rawDepthData);


            var pixels = new byte[height * width * 4];

            const int BlueIndex  = 0;
            const int GreenIndex = 1;
            const int RedIndex   = 2;

            for (int depthIndex = 0, colorIndex = 0;
                 depthIndex < rawDepthData.Length && colorIndex < pixels.Length;
                 depthIndex++, colorIndex += 4)
            {
                s    = depthMappedToCameraPoints[depthIndex];
                dist = floor.DistanceFrom(s);

                //if (dist > -0.2f)
                if (dist > planePos)
                {
                    // Calculate the distance represented by the two depth bytes
                    //int depth = rawDepthData[depthIndex] >> DepthImageFrame.PlayerIndexBitmaskWidth;
                    //int depth = rawDepthData[depthIndex] >> image.DepthMinReliableDistance;
                    int depth = rawDepthData[depthIndex];
                    // Map the distance to an intensity that can be represented in RGB
                    var intensity = CalculateIntensityFromDistance(depth);

                    if (depth > min && depth < max)
                    {
                        // Apply the intensity to the color channels
                        pixels[colorIndex + BlueIndex]  = intensity; //blue
                        pixels[colorIndex + GreenIndex] = intensity; //green
                        pixels[colorIndex + RedIndex]   = intensity; //red
                    } /* else
                       * {
                       * Console.WriteLine("Plane : " + depthIndex);
                       * }*/
                }
            }

            return(BitmapSource.Create(width, height, 96, 96, PixelFormats.Bgr32, null, pixels, width * 4));
        }
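
CalculateIntensityFromDistance is called above but not shown. The stand-in below is purely illustrative: a linear near-bright/far-dark mapping over an assumed 500-4500 mm range, which may well differ from the real helper.

// Hypothetical stand-in for the CalculateIntensityFromDistance helper used above:
// linearly map a depth in millimetres onto 0..255, clamping outside the range.
private static byte CalculateIntensityFromDistance(int depthMillimetres,
                                                   int minDepth = 500,
                                                   int maxDepth = 4500)
{
    if (depthMillimetres <= minDepth) return 255;   // nearest points drawn brightest
    if (depthMillimetres >= maxDepth) return 0;     // farthest points fade to black

    double t = (double)(maxDepth - depthMillimetres) / (maxDepth - minDepth);
    return (byte)(t * 255);
}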
        private void ProcessFrames(DepthFrame depthFrame, ColorFrame colorFrame, BodyIndexFrame bodyIndexFrame, BodyFrame bodyFrame)
        {
            FrameDescription depthFrameDescription = depthFrame.FrameDescription;
            FrameDescription colorFrameDescription = colorFrame.FrameDescription;
            FrameDescription bodyIndexFrameDescription = bodyIndexFrame.FrameDescription;

            int bodyIndexWidth = bodyIndexFrameDescription.Width;
            int bodyIndexHeight = bodyIndexFrameDescription.Height;


            // The ImageModel object is used to transfer Kinect data into the DataFlow routines.
            ImageModel imageModel = new ImageModel()
            {
                DepthWidth = depthFrameDescription.Width,
                DepthHeight = depthFrameDescription.Height,
                ColorWidth = colorFrameDescription.Width,
                ColorHeight = colorFrameDescription.Height,
                ShowTrails = _vm.LeaveTrails,
                PersonFill = _vm.PersonFill,
                MaxDistance = _vm.BackgroundDistance
            };
            imageModel.ColorFrameData = new byte[imageModel.ColorWidth * imageModel.ColorHeight * this.bytesPerPixel];

            imageModel.DisplayPixels = new byte[_PreviousFrameDisplayPixels.Length];
            imageModel.BodyIndexFrameData = new byte[imageModel.DepthWidth * imageModel.DepthHeight];
            imageModel.ColorPoints = new ColorSpacePoint[imageModel.DepthWidth * imageModel.DepthHeight];
            imageModel.BytesPerPixel = bytesPerPixel;
            imageModel.Bodies = new Body[this.kinectSensor.BodyFrameSource.BodyCount];
            bodyFrame.GetAndRefreshBodyData(imageModel.Bodies);
            imageModel.DepthData = new ushort[imageModel.DepthWidth * imageModel.DepthHeight];
            
            depthFrame.CopyFrameDataToArray(imageModel.DepthData);
            depthFrame.CopyFrameDataToArray(this.DepthFrameData);
            
            if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
            {
                colorFrame.CopyRawFrameDataToArray(imageModel.ColorFrameData);
            }
            else
            {
                colorFrame.CopyConvertedFrameDataToArray(imageModel.ColorFrameData, ColorImageFormat.Bgra);
            }
            imageModel.PixelFormat = PixelFormats.Bgra32;

            _ColorBitmap.WritePixels(new Int32Rect(0, 0, imageModel.ColorWidth, imageModel.ColorHeight),
                                          imageModel.ColorFrameData,
                                          imageModel.ColorWidth * imageModel.BytesPerPixel,
                                          0);


            //RenderTargetBitmap renderBitmap = new RenderTargetBitmap((int)CompositeImage.ActualWidth, (int)CompositeImage.ActualHeight, 96.0, 96.0, PixelFormats.Pbgra32);
            //DrawingVisual dv = new DrawingVisual();
            //VisualBrush brush = new VisualBrush(CompositeImage);

            //foreach(Body body in _bodies)
            //{
            //    if (body.IsTracked)
            //    {
            //        Joint joint = body.Joints[JointType.HandRight];
            //        using (DrawingContext dc = dv.RenderOpen())
            //        {

            //            dc.DrawRectangle(brush, null, new Rect(new Point(), new Size(CompositeImage.ActualWidth, CompositeImage.ActualHeight)));
            //            ImageBrush brush2 = new ImageBrush(_pointerBitmap);
            //            brush2.Opacity = 1.0;
            //            dc.DrawRectangle(brush2, null, new Rect(new Point(0, CompositeImage.ActualHeight - _Overlay.Height), new Size(_pointerBitmap.Width, _pointerBitmap.Height)));
            //        }
            //    }
            //}

            //ConvertIRDataToByte();

            ImagePreview.Source = _ColorBitmap;


            bodyIndexFrame.CopyFrameDataToArray(imageModel.BodyIndexFrameData);

            this.coordinateMapper.MapDepthFrameToColorSpace(DepthFrameData, imageModel.ColorPoints);

            if (_vm.LeaveTrails)
            {
                Array.Copy(this._PreviousFrameDisplayPixels, imageModel.DisplayPixels, this._PreviousFrameDisplayPixels.Length);
            }


            try
            {
                //Send the imageModel to the DataFlow transformer
                _ImageTransformer.Post(imageModel);
            }
            catch (Exception ex)
            {
#if DEBUG
                Console.WriteLine(ex);
#endif
            }


        }
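
The frames above are handed off to _ImageTransformer via Post, i.e. a TPL Dataflow block. The actual pipeline is not shown in the sample; as a rough sketch under that assumption, it could be a TransformBlock that turns an ImageModel into display pixels off the UI thread, linked to an ActionBlock that writes the result back on the UI thread. ProcessImageModel and WriteToDisplayBitmap are hypothetical names.

// Hypothetical shape of the _ImageTransformer pipeline used above.
using System;
using System.Threading.Tasks.Dataflow;
using System.Windows.Threading;

public TransformBlock<ImageModel, byte[]> CreateImageTransformer(Dispatcher uiDispatcher)
{
    var transformer = new TransformBlock<ImageModel, byte[]>(
        model => ProcessImageModel(model),                          // heavy per-frame work (assumed helper)
        new ExecutionDataflowBlockOptions { BoundedCapacity = 1 }); // drop frames rather than queue forever

    var presenter = new ActionBlock<byte[]>(pixels =>
        uiDispatcher.Invoke(() => WriteToDisplayBitmap(pixels)));   // marshal back to the UI thread (assumed helper)

    transformer.LinkTo(presenter);
    return transformer;
}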
Exemple #43
0
        private void MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            var reference = e.FrameReference.AcquireFrame();

            // process color frame
            using (ColorFrame colorFrame = reference.ColorFrameReference.AcquireFrame())
            {
                if (colorFrame != null)
                {
                    FrameDescription colorFrameDescription = colorFrame.FrameDescription;

                    using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                    {
                        this.rawColorBitmap.Lock();

                        // verify data and write the new color frame data to the display bitmap
                        if ((colorFrameDescription.Width == this.rawColorBitmap.PixelWidth) && (colorFrameDescription.Height == this.rawColorBitmap.PixelHeight))
                        {
                            colorFrame.CopyConvertedFrameDataToIntPtr(
                                this.rawColorBitmap.BackBuffer,
                                (uint)(colorFrameDescription.Width * colorFrameDescription.Height * 4),
                                ColorImageFormat.Bgra);

                            this.rawColorBitmap.AddDirtyRect(new Int32Rect(0, 0, this.rawColorBitmap.PixelWidth, this.rawColorBitmap.PixelHeight));
                        }

                        this.rawColorBitmap.Unlock();
                    }
                }
            }

            // process depth frame
            bool depthFrameProcessed = false;

            // query depth frame
            using (DepthFrame depthFrame = reference.DepthFrameReference.AcquireFrame())
            {
                if (depthFrame != null)
                {
                    // the fastest way to process the depth frame data is to directly access
                    // the underlying buffer
                    using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                    {
                        // verify data and write the new depth frame data to the display bitmap
                        if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) == (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)) &&
                            (this.depthFrameDescription.Width == this.depthBitmap.PixelWidth) && (this.depthFrameDescription.Height == this.depthBitmap.PixelHeight))
                        {
                            // Note: In order to see the full range of depth (including the less reliable far field depth)
                            // we are setting maxDepth to the extreme potential depth threshold
                            ushort maxDepth = ushort.MaxValue;

                            // If you wish to filter by reliable depth distance, uncomment the following line:
                            //// maxDepth = depthFrame.DepthMaxReliableDistance

                            this.ProcessDepthFrameData(depthBuffer.UnderlyingBuffer, depthBuffer.Size, depthFrame.DepthMinReliableDistance, maxDepth);
                            depthFrameProcessed = true;

                            // copy the depth data so the color image can be aligned to it
                            ushort[] depthFrameData = new ushort[depthFrameDescription.Width * depthFrameDescription.Height];
                            depthFrame.CopyFrameDataToArray(depthFrameData);
                            MapColorToDepth(depthFrameData);
                        }
                    }
                }
            }

            if (depthFrameProcessed)
            {
                this.RenderDepthPixels();
            }

            // save frames
            //new Task(() => SaveFrame(string.Format("{0}{1}_color.png", saveRoot, frameId), colorBitmap)).Start();
            //new Task(() => SaveFrame(string.Format("{0}{1}_depth.png", saveRoot, frameId), depthBitmap)).Start();
            allColorImgs.Add(colorBitmap.Clone());
            allDepthImgs.Add(depthBitmap.Clone());

            frameId++;
        }
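MapColorToDepth is called above but its body is not shown. The sketch below is one plausible shape for it, not the author's implementation: it maps each depth pixel into color space with the SDK's CoordinateMapper and copies the matching BGRA pixel into a depth-sized buffer. The coordinateMapper, colorFrameDescription, colorPixels and alignedColorPixels fields are assumptions.

        private void MapColorToDepth(ushort[] depthFrameData)
        {
            int depthWidth = this.depthFrameDescription.Width;
            int depthHeight = this.depthFrameDescription.Height;

            // one ColorSpacePoint per depth pixel
            ColorSpacePoint[] colorPoints = new ColorSpacePoint[depthWidth * depthHeight];
            this.coordinateMapper.MapDepthFrameToColorSpace(depthFrameData, colorPoints);

            for (int depthIndex = 0; depthIndex < depthFrameData.Length; ++depthIndex)
            {
                int colorX = (int)Math.Floor(colorPoints[depthIndex].X + 0.5);
                int colorY = (int)Math.Floor(colorPoints[depthIndex].Y + 0.5);

                if (colorX >= 0 && colorX < this.colorFrameDescription.Width &&
                    colorY >= 0 && colorY < this.colorFrameDescription.Height)
                {
                    // copy the BGRA pixel that lines up with this depth pixel
                    int colorIndex = ((colorY * this.colorFrameDescription.Width) + colorX) * 4;
                    Array.Copy(this.colorPixels, colorIndex, this.alignedColorPixels, depthIndex * 4, 4);
                }
            }
        }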
        public ImageSource ToBitmap(DepthFrame frame)
        {
            int width = frame.FrameDescription.Width;
            int height = frame.FrameDescription.Height;
            PixelFormat format = PixelFormats.Bgr32;

            ushort minDepth = frame.DepthMinReliableDistance;
            ushort maxDepth = frame.DepthMaxReliableDistance;

            ushort[] pixelData = new ushort[width * height];
            byte[] pixels = new byte[width * height * (format.BitsPerPixel + 7) / 8];

            frame.CopyFrameDataToArray(pixelData);

            int colorIndex = 0;
            for (int depthIndex = 0; depthIndex < pixelData.Length; ++depthIndex)
            {
                ushort depth = pixelData[depthIndex];

                // NOTE: the cast keeps only the low 8 bits of the millimetre depth value,
                // so the grey level wraps every 256 mm (the typical banding of this sample).
                byte intensity = (byte)(depth >= minDepth && depth <= maxDepth ? depth : 0);

                pixels[colorIndex++] = intensity; // Blue
                pixels[colorIndex++] = intensity; // Green
                pixels[colorIndex++] = intensity; // Red

                ++colorIndex;
            }

            int stride = width * format.BitsPerPixel / 8;

            return BitmapSource.Create(width, height, 96, 96, format, null, pixels, stride);
        }
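A hedged usage sketch for ToBitmap above: open a DepthFrameReader on the sensor (e.g. KinectSensor.GetDefault()) and push each arriving frame through it. StartDepthPreview and the "camera" Image control are assumed names.

        private void StartDepthPreview(KinectSensor sensor)
        {
            sensor.Open();

            DepthFrameReader depthReader = sensor.DepthFrameSource.OpenReader();
            depthReader.FrameArrived += (s, args) =>
            {
                using (DepthFrame frame = args.FrameReference.AcquireFrame())
                {
                    if (frame != null)
                    {
                        camera.Source = ToBitmap(frame);   // render the depth frame as a greyscale bitmap
                    }
                }
            };
        }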
        /// <summary>
        /// Converts a depth frame to the corresponding System.Windows.Media.Imaging.BitmapSource and removes the background (green-screen effect).
        /// </summary>
        /// <param name="depthFrame">The specified depth frame.</param>
        /// <param name="colorFrame">The specified color frame.</param>
        /// <param name="bodyIndexFrame">The specified body index frame.</param>
        /// <returns>The corresponding System.Windows.Media.Imaging.BitmapSource representation of image.</returns>
        public BitmapSource GreenScreen(ColorFrame colorFrame, DepthFrame depthFrame, BodyIndexFrame bodyIndexFrame)
        {
            int colorWidth = colorFrame.FrameDescription.Width;
            int colorHeight = colorFrame.FrameDescription.Height;

            int depthWidth = depthFrame.FrameDescription.Width;
            int depthHeight = depthFrame.FrameDescription.Height;

            int bodyIndexWidth = bodyIndexFrame.FrameDescription.Width;
            int bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;

            if (_displayPixels == null)
            {
                _depthData = new ushort[depthWidth * depthHeight];
                _bodyData = new byte[depthWidth * depthHeight];
                _colorData = new byte[colorWidth * colorHeight * BYTES_PER_PIXEL];
                _displayPixels = new byte[depthWidth * depthHeight * BYTES_PER_PIXEL];
                _colorPoints = new ColorSpacePoint[depthWidth * depthHeight];
                _bitmap = new WriteableBitmap(depthWidth, depthHeight, DPI, DPI, FORMAT, null);
            }

            if (((depthWidth * depthHeight) == _depthData.Length) && ((colorWidth * colorHeight * BYTES_PER_PIXEL) == _colorData.Length) && ((bodyIndexWidth * bodyIndexHeight) == _bodyData.Length))
            {
                depthFrame.CopyFrameDataToArray(_depthData);

                if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                {
                    colorFrame.CopyRawFrameDataToArray(_colorData);
                }
                else
                {
                    colorFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Bgra);
                }

                bodyIndexFrame.CopyFrameDataToArray(_bodyData);

                _coordinateMapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);

                Array.Clear(_displayPixels, 0, _displayPixels.Length);

                for (int y = 0; y < depthHeight; ++y)
                {
                    for (int x = 0; x < depthWidth; ++x)
                    {
                        int depthIndex = (y * depthWidth) + x;

                        byte player = _bodyData[depthIndex];

                        if (player != 0xff)
                        {
                            ColorSpacePoint colorPoint = _colorPoints[depthIndex];

                            int colorX = (int)Math.Floor(colorPoint.X + 0.5);
                            int colorY = (int)Math.Floor(colorPoint.Y + 0.5);

                            if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight))
                            {
                                int colorIndex = ((colorY * colorWidth) + colorX) * BYTES_PER_PIXEL;
                                int displayIndex = depthIndex * BYTES_PER_PIXEL;

                                // colorIndex is computed above but not used here: instead of copying
                                // the mapped color pixel, this variant paints the player silhouette
                                // as translucent white (the stray "79 195 247" looks like a leftover alternative tint)
                                _displayPixels[displayIndex + 0] = 255;
                                _displayPixels[displayIndex + 1] = 255;
                                _displayPixels[displayIndex + 2] = 255;
                                _displayPixels[displayIndex + 3] = 127;
                            }
                        }
                    }
                }

                _bitmap.Lock();

                Marshal.Copy(_displayPixels, 0, _bitmap.BackBuffer, _displayPixels.Length);
                _bitmap.AddDirtyRect(new Int32Rect(0, 0, depthWidth, depthHeight));

                _bitmap.Unlock();
            }

            return _bitmap;
        }
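One way to drive GreenScreen above is from a MultiSourceFrameReader opened for Color, Depth and BodyIndex. The sketch below is not part of the original sample; the reader wiring and the "camera" Image control are assumptions.

        void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrame reference = e.FrameReference.AcquireFrame();
            if (reference == null) return;

            using (ColorFrame colorFrame = reference.ColorFrameReference.AcquireFrame())
            using (DepthFrame depthFrame = reference.DepthFrameReference.AcquireFrame())
            using (BodyIndexFrame bodyIndexFrame = reference.BodyIndexFrameReference.AcquireFrame())
            {
                // all three frames are needed for the mapping and the body mask
                if (colorFrame != null && depthFrame != null && bodyIndexFrame != null)
                {
                    camera.Source = GreenScreen(colorFrame, depthFrame, bodyIndexFrame);
                }
            }
        }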
Exemple #46
0
        private void ShowDepthFrame(DepthFrame depthFrame)
        {
            bool depthFrameProcessed = false;
            ushort minDepth = 0;
            ushort maxDepth = 0;

            if (depthFrame != null)
            {
                FrameDescription depthFrameDescription = depthFrame.FrameDescription;

                // verify data and write the new depth frame data to the display bitmap
                // (the length check reuses infraredFrameData, which has the same 512x424 resolution)
                if (((depthFrameDescription.Width * depthFrameDescription.Height)
                    == this.infraredFrameData.Length) &&
                    (depthFrameDescription.Width == this.bitmap.PixelWidth) &&
                    (depthFrameDescription.Height == this.bitmap.PixelHeight))
                {
                    // Copy the pixel data from the image to a temporary array
                    depthFrame.CopyFrameDataToArray(this.depthFrameData);

                    minDepth = depthFrame.DepthMinReliableDistance;
                    maxDepth = depthFrame.DepthMaxReliableDistance;
                    //maxDepth = 8000;

                    depthFrameProcessed = true;
                }
            }

            // we got a frame, convert and render
            if (depthFrameProcessed)
            {
                ConvertDepthDataToPixels(minDepth, maxDepth);
                RenderPixelArray(this.depthPixels);
            }
        }
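ConvertDepthDataToPixels and RenderPixelArray are not shown in this example. The helper below is a sketch of what the conversion step might look like (an assumption, not the original code): it scales each depth value into a 0-255 grey level and writes it into the depthPixels byte buffer.

        private void ConvertDepthDataToPixels(ushort minDepth, ushort maxDepth)
        {
            for (int i = 0; i < this.depthFrameData.Length; ++i)
            {
                ushort depth = this.depthFrameData[i];

                // clamp out-of-range readings to 0 and spread the reliable range over one byte
                byte intensity = (byte)(depth >= minDepth && depth <= maxDepth
                    ? (depth - minDepth) * 255 / (maxDepth - minDepth)
                    : 0);

                this.depthPixels[i] = intensity;
            }
        }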
Exemple #47
0
        /// <summary>
        /// Device-specific implementation of Update.
        /// Updates data buffers of all active channels with data of current frame.
        /// </summary>
        /// <remarks>This method is implicitly called by <see cref="Camera.Update"/> inside a camera lock.</remarks>
        /// <seealso cref="Camera.Update"/>
        protected override void UpdateImpl()
        {
            // TODO: This method can yield poorly synchronized channels. If necessary, find a mechanism that refreshes frames that were already fetched while waiting for others that are not yet available.
            MultiSourceFrame multiSourceFrame       = null;
            bool             bodyIndexRequired      = IsChannelActive(CustomChannelNames.BodyIndex);
            bool             depthRequired          = IsChannelActive(ChannelNames.Distance) || IsChannelActive(ChannelNames.Point3DImage);
            bool             amplitudeRequired      = IsChannelActive(ChannelNames.Amplitude);
            bool             colorRequired          = IsChannelActive(ChannelNames.Color);
            bool             longExposureIRRequired = IsChannelActive(CustomChannelNames.LongExposureIR);

            do
            {
                if (!dataAvailable.WaitOne(UpdateTimeoutMilliseconds))
                {
                    throw ExceptionBuilder.BuildFromID(typeof(MetriCam2Exception), this, 005);
                }

                lock (newFrameLock)
                {
                    try
                    {
                        if (multiFrameReference != null)
                        {
                            multiSourceFrame = multiFrameReference.AcquireFrame();
                        }
                    }
                    catch (Exception)
                    {
                        // ignore if the frame is no longer available
                        continue; // rather than rethrowing
                    }
                }

                try
                {
                    // fetch depth?
                    if (depthRequired)
                    {
                        DepthFrameReference depthFrameReference = multiSourceFrame.DepthFrameReference;
                        if (depthFrameReference != null)
                        {
                            // always synchronize on depth frames if possible.
                            if (lastTimeStamp == GetAbsoluteTimeStamp(depthFrameReference.RelativeTime.Ticks))
                            {
                                continue;
                            }
                            using (DepthFrame depthFrame = depthFrameReference.AcquireFrame())
                            {
                                if (depthFrame == null)
                                {
                                    continue;
                                }

                                depthFrameDescription = depthFrame.FrameDescription;
                                int depthWidth  = depthFrameDescription.Width;
                                int depthHeight = depthFrameDescription.Height;
                                if ((depthWidth * depthHeight) == this.depthFrameData.Length)
                                {
                                    lock (this.depthFrameData)
                                    {
                                        depthFrame.CopyFrameDataToArray(this.depthFrameData);
                                        lastTimeStamp  = GetAbsoluteTimeStamp(depthFrameReference.RelativeTime.Ticks);
                                        timestampDepth = lastTimeStamp;
                                    }
                                    depthRequired = false;
                                }
                            }
                        }
                    }

                    // fetch IR?
                    if (amplitudeRequired)
                    {
                        InfraredFrameReference irFrameReference = multiSourceFrame.InfraredFrameReference;
                        if (irFrameReference != null)
                        {
                            // If depth data is inactive, synchronize on IR frames. If depth and IR are inactive, we synchronize on color frames.
                            if (!(IsChannelActive(ChannelNames.Distance) || IsChannelActive(ChannelNames.Point3DImage)) && lastTimeStamp == GetAbsoluteTimeStamp(irFrameReference.RelativeTime.Ticks))
                            {
                                continue;
                            }

                            using (InfraredFrame irFrame = irFrameReference.AcquireFrame())
                            {
                                if (irFrame == null)
                                {
                                    continue;
                                }

                                FrameDescription irFrameDescription = irFrame.FrameDescription;
                                int irWidth  = irFrameDescription.Width;
                                int irHeight = irFrameDescription.Height;
                                if ((irWidth * irHeight) == this.irFrameData.Length)
                                {
                                    lock (this.irFrameData)
                                    {
                                        irFrame.CopyFrameDataToArray(this.irFrameData);
                                        lastTimeStamp = GetAbsoluteTimeStamp(irFrameReference.RelativeTime.Ticks);
                                        timestampIR   = lastTimeStamp;
                                    }
                                    amplitudeRequired = false;
                                }
                            }
                        }
                    }

                    // (always) fetch body frame
                    BodyFrameReference bodyFrameReference = multiSourceFrame.BodyFrameReference;
                    if (bodyFrameReference != null)
                    {
                        using (BodyFrame bodyFrame = bodyFrameReference.AcquireFrame())
                        {
                            if (bodyFrame != null)
                            {
                                this.bodies = new Body[bodyFrame.BodyCount];
                                // the enclosing using already disposes the frame, so no nested using is needed
                                bodyFrame.GetAndRefreshBodyData(this.bodies);
                            }
                            else
                            {
                                // TODO: check if channel is activated.
                            }
                        }
                    }

                    // fetch color?
                    if (colorRequired)
                    {
                        ColorFrameReference colorFrameReference = multiSourceFrame.ColorFrameReference;
                        if (colorFrameReference == null)
                        {
                            continue;
                        }
                        // If depth and IR data is inactive, synchronize on color frames. If color, depth and IR are inactive, we don't care for synchronization.
                        if (!(IsChannelActive(ChannelNames.Distance) || IsChannelActive(ChannelNames.Point3DImage) || IsChannelActive(ChannelNames.Amplitude)) && lastTimeStamp == GetAbsoluteTimeStamp(colorFrameReference.RelativeTime.Ticks))
                        {
                            continue;
                        }

                        using (ColorFrame colorFrame = colorFrameReference.AcquireFrame())
                        {
                            //FrameDescription colorFrameDescription = colorFrame.FrameDescription;
                            //int cWidth = colorFrameDescription.Width;
                            //int cHeight = colorFrameDescription.Height;
                            if (colorFrame != null)
                            {
                                using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                                {
                                    lock (this.colorFrameData)
                                    {
                                        colorFrame.CopyConvertedFrameDataToArray(this.colorFrameData, ColorImageFormat.Bgra);
                                        lastTimeStamp  = GetAbsoluteTimeStamp(colorFrameReference.RelativeTime.Ticks);
                                        timestampColor = lastTimeStamp;
                                    }
                                }
                                colorRequired = false;
                            }
                        }
                    }

                    // fetch long exposure IR? (these frames are independent of the regular IR images
                    // but arrive at the same rate, so every new frame also carries one.)
                    if (longExposureIRRequired)
                    {
                        LongExposureInfraredFrameReference longExposureIRFrameRef = multiSourceFrame.LongExposureInfraredFrameReference;
                        using (LongExposureInfraredFrame longIRFrame = longExposureIRFrameRef.AcquireFrame())
                        {
                            if (longIRFrame == null)
                            {
                                continue;
                            }

                            int longIRWidth  = longIRFrame.FrameDescription.Width;
                            int longIRHeight = longIRFrame.FrameDescription.Height;
                            if (longExposureIRData == null || (longIRWidth * longIRHeight) != longExposureIRData.Length)
                            {
                                longExposureIRData = new ushort[longIRWidth * longIRHeight];
                            }
                            longIRFrame.CopyFrameDataToArray(longExposureIRData);
                            longExposureIRRequired = false;
                        }
                    }

                    // fetch body index frames?
                    if (bodyIndexRequired)
                    {
                        BodyIndexFrameReference bodyIndexFrameRef = multiSourceFrame.BodyIndexFrameReference;
                        using (BodyIndexFrame bodyIndexFrame = bodyIndexFrameRef.AcquireFrame())
                        {
                            if (bodyIndexFrame == null)
                            {
                                log.Debug("bodyIndexFrame is NULL.");
                                continue;
                            }

                            int bodyIndexWidth  = bodyIndexFrame.FrameDescription.Width;
                            int bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;
                            if (bodyIndexData == null || (bodyIndexWidth * bodyIndexHeight) != bodyIndexData.Length)
                            {
                                bodyIndexData = new byte[bodyIndexWidth * bodyIndexHeight];
                            }
                            bodyIndexFrame.CopyFrameDataToArray(bodyIndexData);
                            bodyIndexRequired = false;
                        }
                    }
                }
                catch (Exception)
                {
                    // ignore if the frame is no longer available
                }
                finally
                {
                    multiSourceFrame = null;
                }
            } while (depthRequired || colorRequired || bodyIndexRequired || longExposureIRRequired || amplitudeRequired);
        }