/// <summary>
        /// Copies the pixel data of the supplied Kinect color frame into the
        /// backing <see cref="WriteableBitmap"/>, allocating the bitmap and the
        /// pixel buffer on first use.
        /// </summary>
        /// <param name="frame">The specified Kinect color frame.</param>
        public void Update(ColorFrame frame)
        {
            if (Bitmap == null)
            {
                FrameDescription description = frame.FrameDescription;
                _width = description.Width;
                _height = description.Height;
                _pixels = new byte[_width * _height * 4];
                Bitmap = new WriteableBitmap(_width, _height, 96.0, 96.0, PixelFormats.Bgr32, null);
            }

            // Raw BGRA frames can be copied verbatim; any other format is converted.
            if (frame.RawColorImageFormat == ColorImageFormat.Bgra)
            {
                frame.CopyRawFrameDataToArray(_pixels);
            }
            else
            {
                frame.CopyConvertedFrameDataToArray(_pixels, ColorImageFormat.Bgra);
            }

            // Write into the back buffer while the bitmap is locked, then mark
            // the whole surface dirty so WPF re-renders it.
            Bitmap.Lock();
            Marshal.Copy(_pixels, 0, Bitmap.BackBuffer, _pixels.Length);
            Bitmap.AddDirtyRect(new Int32Rect(0, 0, _width, _height));
            Bitmap.Unlock();
        }
 /// <summary>
 /// Writes the color frame's BGRA pixels into the backing bitmap, if a frame is available.
 /// </summary>
 public void Update(ColorFrame frame)
 {
     if (frame == null)
     {
         return;
     }

     _bitmap.Lock();
     frame.CopyConvertedFrameDataToIntPtr(_bitmap.BackBuffer, (uint)_bytes.Length, ColorImageFormat.Bgra);
     _bitmap.AddDirtyRect(_dirtyRect);
     _bitmap.Unlock();
 }
Beispiel #3
0
 /// <summary>
 /// Converts a Kinect color frame into a ROS sensor_msgs/Image (RGBA8 encoding).
 /// </summary>
 static ROS_CS.sensor_msgs.Image GetColorImageFromRaw(ColorFrame new_color_frame)
 {
     FrameDescription description = new_color_frame.FrameDescription;

     var color_image = new ROS_CS.sensor_msgs.Image();
     color_image.header.frame_id = "kinect2_color_optical_frame";
     color_image.header.stamp = KinectTimestampsToROS(new_color_frame.RelativeTime);
     color_image.is_bigendian = 0;
     color_image.height = (uint)description.Height;
     color_image.width = (uint)description.Width;
     // 4 bytes per pixel (RGBA8).
     color_image.step = (uint)description.Width * 4;
     color_image.encoding = "rgba8";

     byte[] color_data = new byte[color_image.step * color_image.height];
     new_color_frame.CopyConvertedFrameDataToArray(color_data, ColorImageFormat.Rgba);
     color_image.data.AddRange(color_data);

     return color_image;
 }
        /// <summary>
        /// Copies the color frame into the supplied bitmap's back buffer.
        /// When <paramref name="withLock"/> is true the bitmap is locked,
        /// marked dirty and unlocked; otherwise the caller manages locking.
        /// </summary>
        public void BuildColorBitmap(ColorFrame colorFrame, LargeFrameBitmap bitmap, bool withLock)
        {
            WriteableBitmap outBitmap = bitmap.Bitmap;
            ValidateBitmap(outBitmap, Frame.COLOR_WIDTH, Frame.COLOR_HEIGHT);

            if (withLock)
            {
                outBitmap.Lock();
            }

            // Direct copy of converted BGRA pixels into the back buffer.
            uint byteCount = (uint)(Frame.COLOR_PIXELS * FrameBitmap.BYTES_PER_PIXEL);
            colorFrame.CopyConvertedFrameDataToIntPtr(outBitmap.BackBuffer, byteCount, ColorImageFormat.Bgra);

            if (withLock)
            {
                outBitmap.AddDirtyRect(new Int32Rect(0, 0, Frame.COLOR_WIDTH, Frame.COLOR_HEIGHT));
                outBitmap.Unlock();
            }
        }
Beispiel #5
0
        /// <summary>
        /// Captures a Kinect color frame into a raw BGRA recording frame.
        /// </summary>
        public RecordColorFrame(ColorFrame frame)
        {
            this.Codec = Codecs.RawColor;
            this.FrameType = FrameTypes.Color;
            this.RelativeTime = frame.RelativeTime;

            FrameDescription description = frame.FrameDescription;
            this.Width = description.Width;
            this.Height = description.Height;

            // 4 bytes per pixel (BGRA).
            this.FrameDataSize = this.Width * this.Height * 4;
            this._frameData = new Byte[this.FrameDataSize];

            // Copy raw data when it is already BGRA, otherwise convert.
            if (frame.RawColorImageFormat != ColorImageFormat.Bgra)
            {
                frame.CopyConvertedFrameDataToArray(_frameData, ColorImageFormat.Bgra);
            }
            else
            {
                frame.CopyRawFrameDataToArray(_frameData);
            }
        }
Beispiel #6
0
        /// <summary>
        /// Copies a color frame into the WriteableBitmap and shows it,
        /// skipping frames whose size does not match the bitmap.
        /// </summary>
        private void ShowColorFrame(ColorFrame colorFrame)
        {
            if (colorFrame == null)
            {
                return;
            }

            FrameDescription description = colorFrame.FrameDescription;

            // Verify the data matches the bitmap before writing.
            bool sizeMatches = description.Width == this.bitmap.PixelWidth &&
                               description.Height == this.bitmap.PixelHeight;
            if (!sizeMatches)
            {
                return;
            }

            if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
            {
                colorFrame.CopyRawFrameDataToBuffer(this.bitmap.PixelBuffer);
            }
            else
            {
                colorFrame.CopyConvertedFrameDataToBuffer(this.bitmap.PixelBuffer, ColorImageFormat.Bgra);
            }

            this.bitmap.Invalidate();
            FrameDisplayImage.Source = this.bitmap;
        }
        /// <summary>
        /// Converts a Kinect color frame into a cached <see cref="BitmapSource"/>.
        /// The shared bitmap and pixel buffer are allocated on first use and
        /// reused for all subsequent frames.
        /// </summary>
        /// <param name="frame">The specified Kinect color frame.</param>
        /// <returns>The shared bitmap updated with this frame's pixels.</returns>
        public static BitmapSource ToBitmap(this ColorFrame frame)
        {
            if (_bitmap == null)
            {
                _width  = frame.FrameDescription.Width;
                _height = frame.FrameDescription.Height;
                _pixels = new byte[_width * _height * Constants.BYTES_PER_PIXEL];
                _bitmap = new WriteableBitmap(_width, _height,
                                              Constants.DPI, Constants.DPI, Constants.FORMAT, null);
            }

            // Raw BGRA frames can be copied directly; otherwise convert.
            if (frame.RawColorImageFormat == ColorImageFormat.Bgra)
            {
                frame.CopyRawFrameDataToArray(_pixels);
            }
            else
            {
                frame.CopyConvertedFrameDataToArray(_pixels, ColorImageFormat.Bgra);
            }

            if (_bitmap.TryLock(waitingDuration))
            {
                try
                {
                    Marshal.Copy(_pixels, 0, _bitmap.BackBuffer, _pixels.Length);
                    _bitmap.AddDirtyRect(new Int32Rect(0, 0, _width, _height));
                }
                catch (Exception)
                {
                    // Best-effort: drop the frame rather than crash the render path.
                    Debug.WriteLine("Frame loss", "Warning");
                }
                finally
                {
                    // BUG FIX: previously Unlock() was skipped when Marshal.Copy or
                    // AddDirtyRect threw, leaving the bitmap permanently locked.
                    _bitmap.Unlock();
                }
            }

            return _bitmap;
        }
Beispiel #8
0
        //private bool ProcessDepthFrameOld(MultiSourceFrame reference)
        //{
        //    // Depth
        //    using (var frame = reference.DepthFrameReference.AcquireFrame())
        //    {

        //        if (frame != null)
        //        {
        //            DepthFrame frameDepth = frame;
        //            if (PointCloudScannerSettings.ScannerMode == ScannerMode.Depth || PointCloudScannerSettings.ScannerMode == ScannerMode.Color_Depth || PointCloudScannerSettings.ScannerMode == ScannerMode.Color_Depth_3DDisplay)
        //            {
        //                this.DepthMetaData = new DepthMetaData(frameDepth, false);

        //                if (!PointCloudScannerSettings.BackgroundRemoved)
        //                {


        //                    if (PointCloudScannerSettings.CutFrames)
        //                        this.DepthMetaData.FrameData = DepthMetaData.CutDepth(this.DepthMetaData.FrameData, PointCloudScannerSettings.CutFrameMaxDistance, PointCloudScannerSettings.CutFrameMinDistance, ref numberOfCutPoints);

        //                    //this.pictureBoxDepth.Image = this.bitmapDepth.Update_Gray(DepthMetaData.FrameData);
        //                    if (this.openGLPart.ShowingIn3DControl)
        //                    {
        //                        openGLCounter++;
        //                        if (openGLCounter == this.openGLRefreshAt)
        //                        {
        //                            openGLCounter = 0;
        //                            this.UpdateOpenGLControl();

        //                        }

        //                    }
        //                    else
        //                    {
        //                        this.bitmapDepth.Update_Gray(DepthMetaData.FrameData);
        //                        if (PointCloudScannerSettings.ScannerMode != ScannerMode.Color_Depth_3DDisplay)
        //                            this.pictureBoxDepth.Refresh();

        //                    }
        //                }
        //                else
        //                {
        //                    backgroundRemovalTool.DepthFrameData_RemoveBackground(this.DepthMetaData, this.BodyMetaData);
        //                    if (PointCloudScannerSettings.CutFrames)
        //                        this.DepthMetaData.FrameData = DepthMetaData.CutDepth(this.DepthMetaData.FrameData, PointCloudScannerSettings.CutFrameMaxDistance, PointCloudScannerSettings.CutFrameMinDistance, ref numberOfCutPoints);

        //                    //System.Drawing.Image im = ImageUtils.UpdateFromByteArray_Color(bitmapColor, ColorMetaData.Pixels);
        //                    if (!PointCloudScannerSettings.ShowOpenGLWindow)
        //                    {
        //                        this.bitmapDepth.Update_Gray(DepthMetaData.Pixels);
        //                        if (PointCloudScannerSettings.ScannerMode != ScannerMode.Color_Depth_3DDisplay)
        //                            this.pictureBoxDepth.Refresh();
        //                    }

        //                }
        //                if (PointCloudScannerSettings.InterpolateFrames)
        //                {
        //                    CalculateInterpolatedPixels();
        //                }

        //            }
        //            return true;


        //        }

        //    }

        //    return false;
        //}

        /// <summary>
        /// Acquires and processes the color frame of a multi-source frame.
        /// Returns true when a color frame was available.
        /// </summary>
        private bool ProcessColorFrame(MultiSourceFrame reference)
        {
            // Color
            using (ColorFrame frame = reference.ColorFrameReference.AcquireFrame())
            {
                if (frame == null)
                {
                    return false;
                }

                this.ColorMetaData = new ColorMetaData(frame);

                bool colorModeActive =
                    PointCloudScannerSettings.ScannerMode == ScannerMode.Color ||
                    PointCloudScannerSettings.ScannerMode == ScannerMode.Color_Depth;

                // Only refresh the on-screen preview when the OpenGL window is not shown.
                if (colorModeActive && !PointCloudScannerSettings.ShowOpenGLWindow)
                {
                    bitmapColor.Update_Color(ColorMetaData.Pixels);
                    this.pictureBoxColor.Invalidate(false);
                }

                return true;
            }
        }
Beispiel #9
0
        /// <summary>
        /// Event handler executed when the Kinect has acquired a color image.
        /// </summary>
        /// <param name="sender">
        /// The object that raised the event; here, the Kinect.
        /// </param>
        /// <param name="e">
        /// Event data, containing the color frame reference.
        /// </param>
        void ColorFrameReader_FrameArrived(object sender, ColorFrameArrivedEventArgs e)
        {
            // BUG FIX: the frame was previously released with an explicit
            // Dispose() at the end of the method, so it leaked whenever an
            // exception was thrown in between. The using block guarantees
            // disposal on every path.
            using (ColorFrame colorFrame = e.FrameReference.AcquireFrame())
            {
                // The frame may occasionally be unavailable.
                if (colorFrame == null)
                {
                    return;
                }

                // Allocate a buffer for the pixel data:
                // height * width * bytes-per-pixel.
                byte[] colors = new byte[this.colorFrameDescription.Width
                                         * this.colorFrameDescription.Height
                                         * this.colorFrameDescription.BytesPerPixel];

                // Copy the pixel data into the buffer.
                colorFrame.CopyConvertedFrameDataToArray(colors, this.colorImageFormat);

                // Wrap the pixel data in a bitmap.
                BitmapSource bitmapSource
                    = BitmapSource.Create(this.colorFrameDescription.Width,
                                          this.colorFrameDescription.Height,
                                          96,
                                          96,
                                          PixelFormats.Bgra32,
                                          null,
                                          colors,
                                          this.colorFrameDescription.Width * (int)this.colorFrameDescription.BytesPerPixel);

                // Show it on the canvas.
                this.canvas.Background = new ImageBrush(bitmapSource);
            }
        }
Beispiel #10
0
        /// <summary>
        /// Copies each arriving color frame into the display bitmap and,
        /// while recording, appends a frozen clone to the encoder queue.
        /// </summary>
        private void Reader_ColorFrameArrived(object sender, ColorFrameArrivedEventArgs e)
        {
            // Capture the recording flag once so it is consistent for this frame.
            var currentrecord = record;

            // ColorFrame is IDisposable
            using (ColorFrame colorFrame = e.FrameReference.AcquireFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                FrameDescription description = colorFrame.FrameDescription;

                using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                {
                    this.colorBitmap.Lock();

                    // Verify data and write the new color frame data to the display bitmap.
                    bool sizeMatches = description.Width == this.colorBitmap.PixelWidth &&
                                       description.Height == this.colorBitmap.PixelHeight;
                    if (sizeMatches)
                    {
                        colorFrame.CopyConvertedFrameDataToIntPtr(
                            this.colorBitmap.BackBuffer,
                            (uint)(description.Width * description.Height * 4),
                            ColorImageFormat.Bgra);

                        this.colorBitmap.AddDirtyRect(new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight));
                    }

                    this.colorBitmap.Unlock();

                    if (currentrecord)
                    {
                        // Freeze the clone so it can be consumed from another thread.
                        var snapshot = this.colorBitmap.Clone();
                        snapshot.Freeze();
                        encoderframes.Add(snapshot);
                    }
                }
            }
        }
Beispiel #11
0
 // Token: 0x06002A18 RID: 10776 RVA: 0x000D6810 File Offset: 0x000D4C10
 /// <summary>
 /// Per-frame poll: uploads the latest Kinect color frame into the texture
 /// and copies the depth frame into the local buffer.
 /// </summary>
 private void Update()
 {
     if (this._Reader == null)
     {
         return;
     }

     MultiSourceFrame multiSourceFrame = this._Reader.AcquireLatestFrame();
     if (multiSourceFrame == null)
     {
         return;
     }

     // BUG FIX: the original called Dispose() manually, so the color and
     // depth frames leaked whenever a copy threw. using blocks guarantee
     // the frames are returned to the sensor on every path.
     using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
     {
         if (colorFrame == null)
         {
             return;
         }

         using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
         {
             if (depthFrame == null)
             {
                 return;
             }

             colorFrame.CopyConvertedFrameDataToArray(this._ColorData, ColorImageFormat.Rgba);
             this._ColorTexture.LoadRawTextureData(this._ColorData);
             this._ColorTexture.Apply();
             depthFrame.CopyFrameDataToArray(this._DepthData);
         }
     }
 }
Beispiel #12
0
        /// <summary>
        /// Copies each arriving color frame through an Emgu.CV Mat into the
        /// display bitmap (the Mat stage allows optional OpenCV processing
        /// such as grayscale/thresholding before display).
        /// </summary>
        private void Color_FrameArrived(object sender, ColorFrameArrivedEventArgs e)
        {
            using (ColorFrame colorFrame = e.FrameReference.AcquireFrame())
            {
                if (colorFrame == null)
                {
                    return;
                }

                if ((colorFrameDesc.Width != colorBitmap.PixelWidth) || (colorFrameDesc.Height != colorBitmap.PixelHeight))
                {
                    return;
                }

                // BUG FIX: the Mat was disposed manually and the bitmap unlocked
                // manually, so an exception mid-copy leaked the Mat and left the
                // bitmap locked. using + try/finally guarantee cleanup.
                using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                using (Mat img = new Mat(colorFrameDesc.Height, colorFrameDesc.Width, Emgu.CV.CvEnum.DepthType.Cv8U, 4))
                {
                    colorBitmap.Lock();
                    try
                    {
                        colorFrame.CopyConvertedFrameDataToIntPtr(img.DataPointer, (uint)(colorFrameDesc.Width * colorFrameDesc.Height * 4), ColorImageFormat.Bgra);

                        // To grayscale instead: CvInvoke.CvtColor(img, img, Bgra2Gray)
                        // (optionally CvInvoke.Threshold) and copy width*height bytes
                        // below instead of width*height*4.
                        CopyMemory(colorBitmap.BackBuffer, img.DataPointer, (uint)(colorFrameDesc.Width * colorFrameDesc.Height * 4));

                        colorBitmap.AddDirtyRect(new Int32Rect(0, 0, colorBitmap.PixelWidth, colorBitmap.PixelHeight));
                    }
                    finally
                    {
                        colorBitmap.Unlock();
                    }
                }
            }
        }
Beispiel #13
0
        // Check whether the recording has to be stopped: if no tracked face
        // has been talking for half a second's worth of frames, stop the
        // audio recorder.
        private void CheckStopRecording(ColorFrame colorframe)
        {
            bool isTalking = false;

            for (int faceIndex = 0; faceIndex < 6; faceIndex++)
            {
                FaceFrameResult face = _faceFrameResults[faceIndex];
                if (face == null)
                {
                    continue;
                }

                // Talking condition = mouth moved or mouth open (Yes or Maybe).
                bool moved = face.FaceProperties[FaceProperty.MouthMoved] == DetectionResult.Yes || face.FaceProperties[FaceProperty.MouthMoved] == DetectionResult.Maybe;
                bool open = face.FaceProperties[FaceProperty.MouthOpen] == DetectionResult.Yes || face.FaceProperties[FaceProperty.MouthOpen] == DetectionResult.Maybe;
                if (moved || open)
                {
                    isTalking = true;
                    break;
                }
            }

            // Count consecutive silent frames; reset on any talking.
            _frameStopTalking = isTalking ? 0 : _frameStopTalking + 1;

            // If nobody talked for half of the frame rate -> stop recording.
            if (_frameStopTalking > _framerate / 2f && _audioSource.IsRecording())
            {
                _audioSource.Stop();
            }
        }
        /// <summary>
        /// Serializes a color frame.
        /// </summary>
        /// <param name="frame">The specified color frame.</param>
        /// <returns>A binary representation of the frame.</returns>
        public static byte[] Serialize(this ColorFrame frame)
        {
            // Lazily allocate the shared bitmap and pixel buffer on first call.
            if (_colorBitmap == null)
            {
                _colorWidth  = frame.FrameDescription.Width;
                _colorHeight = frame.FrameDescription.Height;
                _colorStride = _colorWidth * Constants.PIXEL_FORMAT.BitsPerPixel / 8;
                _colorPixels = new byte[_colorHeight * _colorWidth * ((PixelFormats.Bgr32.BitsPerPixel + 7) / 8)];
                _colorBitmap = new WriteableBitmap(_colorWidth, _colorHeight, Constants.DPI, Constants.DPI, Constants.PIXEL_FORMAT, null);
            }

            // Copy raw data when already BGRA; otherwise convert.
            bool alreadyBgra = frame.RawColorImageFormat == ColorImageFormat.Bgra;
            if (alreadyBgra)
            {
                frame.CopyRawFrameDataToArray(_colorPixels);
            }
            else
            {
                frame.CopyConvertedFrameDataToArray(_colorPixels, ColorImageFormat.Bgra);
            }

            _colorBitmap.WritePixels(new Int32Rect(0, 0, _colorWidth, _colorHeight), _colorPixels, _colorStride, 0);

            return FrameSerializer.CreateBlob(_colorBitmap, Constants.CAPTURE_FILE_COLOR);
        }
Beispiel #15
0
        /// <summary>
        /// Writes the color frame into the bitmap and, on the first processed
        /// frame, sends the snapshot and shuts the color camera down.
        /// </summary>
        private void showColorFrame(ColorFrame colorFrame)
        {
            bool sizeMatches = this.currentFrameDescription.Width == this.colorBitmap.PixelWidth &&
                               this.currentFrameDescription.Height == colorBitmap.PixelHeight;

            if (sizeMatches)
            {
                if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                {
                    colorFrame.CopyRawFrameDataToArray(colorPixels);
                }
                else
                {
                    colorFrame.CopyConvertedFrameDataToArray(colorPixels, ColorImageFormat.Bgra);
                }

                colorPixels = this.alterBitmap(colorPixels, colorBitmap.PixelHeight, colorBitmap.PixelWidth);

                colorBitmap.WritePixels(new Int32Rect(0, 0, this.currentFrameDescription.Width, this.currentFrameDescription.Height), colorPixels, this.currentFrameDescription.Width * bytePerPixel, 0);

                if (colorRunCount == 1)
                {
                    // Only send when a complete 10-digit number is on file.
                    if (App.UserNumber.Length == 10)
                    {
                        this.SendTextWithImage();
                    }

                    this.closeColorCamera();
                }
            }

            colorRunCount++;
        }
Beispiel #16
0
 // Multi-source frame handler: draws the color, infrared and depth images.
 // The _bProcessing flag drops any frame that arrives while the previous
 // one is still being drawn (simple re-entrancy guard on the UI thread).
 private void Image_FrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
 {
     if (!_bProcessing)
     {
         try
         {
             _bProcessing = true;
             MultiSourceFrame frame = e.FrameReference.AcquireFrame();
             if (frame == null)
             {
                 return;
             }
             // Each sub-frame is acquired, drawn, and released immediately.
             using (ColorFrame cframe = frame.ColorFrameReference.AcquireFrame())
             {
                 DrawColorImage(cframe);
             }
             using (InfraredFrame iframe = frame.InfraredFrameReference.AcquireFrame())
             {
                 DrawInfraredImage(iframe);
             }
             using (DepthFrame dframe = frame.DepthFrameReference.AcquireFrame())
             {
                 DrawDepthImage(dframe);
             }
         }
         catch
         {
             // NOTE(review): deliberately swallows all draw errors so one bad
             // frame does not kill the feed — consider at least logging here.
         }
         finally
         {
             // Repaint and clear the guard even when the frame was null or a
             // draw failed, so the next frame can be processed.
             Refresh();
             Application.DoEvents();
             _bProcessing = false;
         }
     }
 }
Beispiel #17
0
        /// <summary>
        /// Runs one visual-saliency iteration on the latest color frame
        /// (timer/thread-pool callback). Serialized via lockThis.
        /// </summary>
        void saliencyEngine(object state)
        {
            lock (lockThis)
            {
                using (ColorFrame frame = colorRef.AcquireFrame())
                {
                    // It's possible that we skipped a frame or it is already gone
                    if (frame == null)
                    {
                        return;
                    }

                    // Copy the frame as BGRA (4 bytes per pixel) into the pinned
                    // buffer consumed by the saliency engine.
                    size = Convert.ToUInt32(frame.FrameDescription.Height * frame.FrameDescription.Width * 4);
                    frame.CopyConvertedFrameDataToIntPtr(kinect.Camera.PinnedImageBuffer, size, ColorImageFormat.Bgra);

                    // BUG FIX: removed the explicit frame.Dispose() that was here —
                    // the using block already disposes the frame, and the early
                    // call disposed it while still inside its own using scope.
                    colorRef = null;

                    UpdateVisualSaliency(vs, kinect.Camera.PinnedImageBuffer);
                    Spoint = GetSalientPoint(vs);

                    // Keep a rolling window of the last 10 salient points.
                    saliency[currentId % 10] = Spoint;
                    if (currentId == 10)
                    {
                        currentId = 0;
                    }
                    else
                    {
                        currentId++;
                    }

                    saliencySecondsTimer_Tick();
                }
            }
        }
        /// <summary>
        /// Builds a standalone BitmapSource from a Kinect color frame.
        /// </summary>
        private ImageSource ToBitmap(ColorFrame frame)
        {
            FrameDescription description = frame.FrameDescription;
            int width = description.Width;
            int height = description.Height;
            PixelFormat format = PixelFormats.Bgr32;

            int bytesPerPixel = (format.BitsPerPixel + 7) / 8;
            byte[] pixels = new byte[width * height * bytesPerPixel];

            // The Kinect delivers BGRA natively; convert only when necessary
            // (BGRA keeps the copy fast).
            if (frame.RawColorImageFormat == ColorImageFormat.Bgra)
            {
                frame.CopyRawFrameDataToArray(pixels);
            }
            else
            {
                frame.CopyConvertedFrameDataToArray(pixels, ColorImageFormat.Bgra);
            }

            int stride = width * format.BitsPerPixel / 8;
            return BitmapSource.Create(width, height, 96, 96, format, null, pixels, stride);
        }
Beispiel #19
0
        /// <summary>
        /// Records the frame's dimensions/format, copies its pixels into the
        /// shared buffer, and disposes the frame.
        /// </summary>
        private void processColorFrame(ColorFrame colorFrame)
        {
            if (colorFrame == null)
            {
                return;
            }

            Box dimensions = Box.With(colorFrame.FrameDescription.Width, colorFrame.FrameDescription.Height);
            frameResolutions[SourceType.COLOR] = dimensions;
            framePixelFormats[SourceType.COLOR] = PixelFormats.Bgr32;

            // Allocate the pixel buffer once (4 bytes per BGRA pixel).
            if (colorPixels == null)
            {
                colorPixels = new byte[dimensions.Area * 4];
            }

            if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
            {
                colorFrame.CopyRawFrameDataToArray(colorPixels);
            }
            else
            {
                colorFrame.CopyConvertedFrameDataToArray(colorPixels, ColorImageFormat.Bgra);
            }

            colorFrame.Dispose();
        }
Beispiel #20
0
    /// <summary>
    /// Polls the Astra color reader and raises NewColorFrameEvent for each
    /// frame index not yet seen.
    /// </summary>
    private void CheckColorReader()
    {
        // Assumes AstraUnityContext.Instance.IsUpdateAsyncComplete is already true

        ReaderFrame frame;
        if (!_readerColor.TryOpenFrame(0, out frame))
        {
            return;
        }

        using (frame)
        {
            ColorFrame colorFrame = frame.GetFrame<ColorFrame>();

            // Fire the event only for frames we have not seen yet.
            if (colorFrame != null && _lastColorFrameIndex != colorFrame.FrameIndex)
            {
                _lastColorFrameIndex = colorFrame.FrameIndex;
                NewColorFrameEvent.Invoke(colorFrame);
            }
        }
    }
Beispiel #21
0
        /// <summary>
        /// Dispatches the color, depth and body sub-frames of a multi-source
        /// frame to their respective handlers.
        /// </summary>
        void multiSourceFrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            var frame = e.FrameReference.AcquireFrame();
            if (frame == null)
            {
                return;
            }

            // Each using block releases the sub-frame so the sensor can reuse
            // its buffer; handlers receive null when a sub-frame is missing.
            using (ColorFrame colorFrame = frame.ColorFrameReference.AcquireFrame())
            {
                HandleColorFrame(colorFrame);
            }

            using (DepthFrame depthFrame = frame.DepthFrameReference.AcquireFrame())
            {
                HandleDepthFrame(depthFrame);
            }

            using (BodyFrame bodyFrame = frame.BodyFrameReference.AcquireFrame())
            {
                HandleBodyFrame(bodyFrame);
            }
        }
Beispiel #22
0
 /// <summary>
 /// Copies each arriving color frame into colorBitmap for display;
 /// no data processing is required.
 /// </summary>
 private void ColorFrameReader_FrameArrived(object sender, ColorFrameArrivedEventArgs e)
 {
     using (ColorFrame colorFrame = e.FrameReference.AcquireFrame())
     {
         if (colorFrame == null)
         {
             return;
         }

         FrameDescription description = colorFrame.FrameDescription;
         using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
         {
             colorBitmap.Lock();

             // Only copy when the frame matches the bitmap dimensions.
             bool sizeMatches = (description.Width == colorBitmap.PixelWidth) && (description.Height == colorBitmap.PixelHeight);
             if (sizeMatches)
             {
                 colorFrame.CopyConvertedFrameDataToIntPtr(
                     colorBitmap.BackBuffer,
                     (uint)(description.Width * description.Height * 4),
                     ColorImageFormat.Bgra);
                 colorBitmap.AddDirtyRect(new Int32Rect(0, 0, colorBitmap.PixelWidth, colorBitmap.PixelHeight));
             }

             colorBitmap.Unlock();
         }
     }
 }
Beispiel #23
0
        /// <summary>
        /// Shows the color, infrared and depth previews from a single
        /// multi-source frame.
        /// </summary>
        private void ReaderOnMultiSourceFrameArrived(object sender,
                                                     MultiSourceFrameArrivedEventArgs multiSourceFrameArrivedEventArgs)
        {
            if (multiSourceFrameArrivedEventArgs == null)
            {
                return;
            }

            // BUG FIX: the original called FrameReference.AcquireFrame() three
            // times (once per sub-frame). Each call can return null or a
            // different multi-source frame, which both risks a
            // NullReferenceException and leaks frames. Acquire it exactly once
            // and reuse it; using blocks also guarantee the sub-frames are
            // disposed even when a preview update throws.
            MultiSourceFrame multiSourceFrame = multiSourceFrameArrivedEventArgs.FrameReference.AcquireFrame();
            if (multiSourceFrame == null)
            {
                return;
            }

            // Color
            using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
            {
                if (colorFrame != null)
                {
                    colorPreview.Source = GetBitmapSourceFromFrame(colorFrame);
                }
            }

            // Infrared
            using (InfraredFrame infraredFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame())
            {
                if (infraredFrame != null)
                {
                    infraredPreview.Source = GetBitmapSourceFromFrame(infraredFrame);
                }
            }

            // Depth
            using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
            {
                if (depthFrame != null)
                {
                    depthPreview.Source = GetBitmapSourceFromFrame(depthFrame);
                }
            }
        }
        /// <summary>
        /// Writes the frame's converted BGRA pixels straight into the bitmap's
        /// back buffer, then releases the frame.
        /// </summary>
        private void processColorFrame(ColorFrame colorFrame)
        {
            colorFrame.CopyConvertedFrameDataToIntPtr(this.bitmap.BackBuffer, this.bitmapBackBufferSize, ColorImageFormat.Bgra);

            // We're done with the ColorFrame.
            colorFrame.Dispose();
        }
Beispiel #25
0
        /// <summary>
        /// Writes the color frame into the Writeable bitmap and displays it
        /// when the frame size matches the bitmap.
        /// </summary>
        private void ShowColorFrame(ColorFrame colorFrame)
        {
            bool processed = false;

            if (colorFrame != null)
            {
                FrameDescription description = colorFrame.FrameDescription;

                // verify data and write the new color frame data to the Writeable bitmap
                bool fits = description.Width == this.bitmap.PixelWidth &&
                            description.Height == this.bitmap.PixelHeight;
                if (fits)
                {
                    bool isRawBgra = colorFrame.RawColorImageFormat == ColorImageFormat.Bgra;
                    if (isRawBgra)
                    {
                        colorFrame.CopyRawFrameDataToBuffer(this.bitmap.PixelBuffer);
                    }
                    else
                    {
                        colorFrame.CopyConvertedFrameDataToBuffer(this.bitmap.PixelBuffer, ColorImageFormat.Bgra);
                    }

                    processed = true;
                }
            }

            if (processed)
            {
                this.bitmap.Invalidate();
                FrameDisplayImage.Source = this.bitmap;
            }
        }
Beispiel #26
0
 // A new color frame simply triggers a capture; the frame data itself is unused here.
 protected override void OnColorFrameReceived(ColorFrame frame) => CaptureFrame();
        /// <summary>
        /// Updates the bitmap with new frame data.
        /// </summary>
        /// <param name="frame">The specified Kinect color frame.</param>
        /// <param name="format">The specified color format.</param>
        public void Update(ColorFrame frame, ColorImageFormat format)
        {
            // Lazily create the bitmap, pixel buffer and pixel stream on the first frame.
            if (Bitmap == null)
            {
                FrameDescription description = frame.FrameDescription;
                _width = description.Width;
                _height = description.Height;
                _pixels = new byte[_width * _height * 4];
                Bitmap = new WriteableBitmap(_width, _height);
                _stream = Bitmap.PixelBuffer.AsStream();
            }

            // Raw BGRA data is copied directly; anything else is converted to
            // the requested format first.
            if (frame.RawColorImageFormat == ColorImageFormat.Bgra)
            {
                frame.CopyRawFrameDataToArray(_pixels);
            }
            else
            {
                frame.CopyConvertedFrameDataToArray(_pixels, format);
            }

            // Rewrite the whole pixel stream, then ask the bitmap to redraw.
            _stream.Seek(0, SeekOrigin.Begin);
            _stream.Write(_pixels, 0, _pixels.Length);

            Bitmap.Invalidate();
        }
Beispiel #28
0
        /// <summary>
        /// Update the Bitmap from the supplied <c>ColorFrame</c>.
        /// </summary>
        /// <param name="frame">The color frame to copy; ignored when null.</param>
        public void Update(ColorFrame frame)
        {
            if (frame != null)
            {
#if NETFX_CORE
                // WinRT path: copy via the managed byte buffer into the
                // bitmap's pixel stream, then invalidate to redraw.
                frame.CopyConvertedFrameDataToArray(_bytes, ColorImageFormat.Bgra);
                _stream.Seek(0, SeekOrigin.Begin);
                _stream.Write(_bytes, 0, _bytes.Length);
                _bitmap.Invalidate();
#else
                // WPF path: write directly into the WriteableBitmap's back
                // buffer while it is locked, then mark the precomputed dirty
                // region so WPF re-renders it.
                _bitmap.Lock();
                frame.CopyConvertedFrameDataToIntPtr(_bitmap.BackBuffer, (uint)_bytes.Length, ColorImageFormat.Bgra);
                _bitmap.AddDirtyRect(_dirtyRect);
                _bitmap.Unlock();
#endif
            }
        }
        /// <summary>
        /// Handles an arriving multi-source frame: acquires the color, depth, body and
        /// body-index frames, copies their data into the reusable member buffers, maps
        /// depth pixels into color space for body-masked pixels, optionally runs face
        /// detection (FACE_DETECTION build flag), forwards everything to the frame
        /// handler, and records a copy of the data when recording is enabled.
        /// All acquired frames are disposed in the finally block.
        /// </summary>
        /// <param name="sender">The multi-source frame reader raising the event.</param>
        /// <param name="e">Event arguments holding the multi-source frame reference.</param>
        private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // All-frame counter: only every framesToCapture-th frame is processed.
            this.frameCount++;
            if (this.frameCount % this.framesToCapture != 0)
            {
                return;
            }

            ColorFrame     colorFrame     = null;
            DepthFrame     depthFrame     = null;
            BodyFrame      bodyFrame      = null;
            BodyIndexFrame bodyIndexFrame = null;
            Body           body           = null;
            SkeletonOfBody skel_up        = new SkeletonOfBody(Constants.SKEL_UP_TOTAL_JOINTS);

            try
            {
                var frameReference = e.FrameReference.AcquireFrame();

                colorFrame     = frameReference.ColorFrameReference.AcquireFrame();
                depthFrame     = frameReference.DepthFrameReference.AcquireFrame();
                bodyFrame      = frameReference.BodyFrameReference.AcquireFrame();
                bodyIndexFrame = frameReference.BodyIndexFrameReference.AcquireFrame();

                // If any sub-frame expired before this handler ran, skip the whole frame.
                if (colorFrame == null || depthFrame == null || bodyFrame == null || bodyIndexFrame == null)
                {
                    return;
                }

                //--------------------------------------------
                // Get the color frame (converted to BGRA)
                //--------------------------------------------
                using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                {
                    colorFrame.CopyConvertedFrameDataToArray(this.buffColor32, ColorImageFormat.Bgra);
                } //End ColorFrame

                //--------------------------------------------
                // Get the depth frame
                //--------------------------------------------
                using (KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                {
                    depthFrame.CopyFrameDataToArray(this.buffDepth16);
                    //depthFrame.CopyFrameDataToArray(this.buffDepth16Copy);

                    // Multiplication by 20 only to turn the depth visually more perceptible
                    //int i = 0;
                    //Array.ForEach(this.buffDepth16Copy, (x) => { this.buffDepth16Copy[i++] = (ushort)(x * 20); });
                } //End DepthFrame

                //--------------------------------------------
                // Get the body index frame
                //--------------------------------------------
                using (KinectBuffer bodyIndexBuffer = bodyIndexFrame.LockImageBuffer())
                {
                    bodyIndexFrame.CopyFrameDataToArray(this.buffBodyIndex8);
                }

                //--------------------------------------------
                // Get the body frame
                //--------------------------------------------
                bodyFrame.GetAndRefreshBodyData(this.listBodies);
                //bodyFrame.FloorClipPlane.

                //--------------------------------------------
                // Map the depth frame to its color frame:
                // builds a depth-resolution BGRA image containing only pixels
                // that belong to a tracked body.
                //--------------------------------------------
                {
                    Array.Clear(this.buffColorSpacePoints, 0, this.buffColorSpacePoints.Length);
                    Array.Clear(this.buffMapDepthToColor32, 0, this.buffMapDepthToColor32.Length);

                    // Coordinate mapping: one ColorSpacePoint per depth pixel.
                    this.coordinateMapper.MapDepthFrameToColorSpace(this.buffDepth16, this.buffColorSpacePoints);

                    unsafe
                    {
                        fixed(ColorSpacePoint *depthMappedToColorPointsPointer = buffColorSpacePoints)
                        {
                            // Loop over each row and column of the color image
                            // Zero out any pixels that don't correspond to a body index
                            for (int idxDepth = 0; idxDepth < buffColorSpacePoints.Length; ++idxDepth)
                            {
                                float depthMappedToColorX = depthMappedToColorPointsPointer[idxDepth].X;
                                float depthMappedToColorY = depthMappedToColorPointsPointer[idxDepth].Y;

                                // The sentinel value is -inf, -inf, meaning that no depth pixel corresponds to this color pixel.
                                if (!float.IsNegativeInfinity(depthMappedToColorX) &&
                                    !float.IsNegativeInfinity(depthMappedToColorY))
                                {
                                    // Make sure the depth pixel maps to a valid point in color space
                                    // (round to nearest by adding 0.5 before truncation).
                                    int colorX = (int)(depthMappedToColorX + 0.5f);
                                    int colorY = (int)(depthMappedToColorY + 0.5f);

                                    // If the point is not valid, there is no body index there.
                                    if ((colorX >= 0) && (colorX < this.colorImageSize.Width) && (colorY >= 0) && (colorY < this.colorImageSize.Height))
                                    {
                                        int idxColor = (colorY * this.colorImageSize.Width) + colorX;

                                        // If we are tracking a body for the current pixel, save the depth data
                                        // (0xff in the body index buffer means "no body"; alpha channel is left cleared).
                                        if (this.buffBodyIndex8[idxDepth] != 0xff)
                                        {
                                            this.buffMapDepthToColor32[idxDepth * 4]     = this.buffColor32[idxColor * 4];     // B
                                            this.buffMapDepthToColor32[idxDepth * 4 + 1] = this.buffColor32[idxColor * 4 + 1]; // G
                                            this.buffMapDepthToColor32[idxDepth * 4 + 2] = this.buffColor32[idxColor * 4 + 2]; // R
                                        }
                                    }
                                }
                            }
                        }
                    } //End Unsafe
                }     //End Mapping


                //--------------------------------------------
                // Process the face of the default body
                //--------------------------------------------

                // Variable to save the detected face parameters (reset each frame).
                this.faceData = new FaceData(new BoxFace(0, 0, 0, 0), new BoxFace(0, 0, 0, 0));

#if FACE_DETECTION
                // Get the default body
                // Body body = this.listBodies.Where(b => b.IsTracked).FirstOrDefault();
                if (this.faceFrameSource.IsActive)
                {
                    // In our experiment we get the closest body
                    body = Util.GetClosestBody(this.listBodies);

                    if (body != null && body.IsTracked)
                    {
                        // Get the first skeleton
                        skel_up = Util.GetSkeletonUpperBody(this.Mapper, body);

                        // Draw skeleton joints
                        if (this.drawingDepthMarkups)
                        {
                            Util.WriteSkeletonOverFrame(this, VisTypes.Depth, skel_up, 2, ref this.buffMapDepthToColor32);
                            //Util.WriteSkeletonOverFrame(this, VisTypes.Depth, skeleton, 2, ref this.buffDepth16);
                        }

                        // Assign a tracking ID to the face source
                        this.faceFrameSource.TrackingId = body.TrackingId;

                        if (this.faceFrameResults != null)
                        {
                            var boxColor = this.faceFrameResults.FaceBoundingBoxInColorSpace;
                            var boxDepth = this.faceFrameResults.FaceBoundingBoxInInfraredSpace;

                            // If there are face results, then save data
                            // We save in a format of rectangle [x, y, width, height]
                            this.faceData.boxColor = new BoxFace(boxColor.Left, boxColor.Top, (boxColor.Right - boxColor.Left), (boxColor.Bottom - boxColor.Top));
                            this.faceData.boxDepth = new BoxFace(boxDepth.Left, boxDepth.Top, (boxDepth.Right - boxDepth.Left), (boxDepth.Bottom - boxDepth.Top));

                            // Draw the face
                            if (this.drawingDepthMarkups)
                            {
                                Util.WriteFaceOverFrame(this, VisTypes.Depth, faceData.boxDepth, 1, ref this.buffMapDepthToColor32);
                                //Util.WriteFaceOverFrame(this, VisTypes.Depth, faceData.boxDepth, 1, ref this.buffDepth16);
                            } //End Drawing
                        }     //End FaceResult
                    }         //End Body
                }
#endif

                // Update the data handler with all buffers for this frame.
                this.frameHandler(
                    this.buffColor32,
                    this.buffDepth16,
                    this.buffBodyIndex8,
                    this.buffMapDepthToColor32,
                    this.listBodies,
                    this.faceData
                    );

                // Buffers for the recorded copy of this frame (used only while recording).
                byte[]       _colorData     = null;
                ushort[]     _depthData     = null;
                byte[]       _bodyIndexData = null;
                IList <Body> _bodies        = null;

                //--------------------------------------------
                // Record the data
                //--------------------------------------------

                if (this.stateOfRecording)
                {
                    // 25-09-15
                    // Discard frames where the hand is not correctly tracked (i.e., the hand has a zero coordinate)
                    // To discard failures in hand tracking
                    if (skel_up.jointDepthSpace[(int)JointUpType.HandLeft].X == 0 || skel_up.jointDepthSpace[(int)JointUpType.HandLeft].Y == 0 ||
                        skel_up.jointDepthSpace[(int)JointUpType.HandRight].X == 0 || skel_up.jointDepthSpace[(int)JointUpType.HandRight].Y == 0)
                    {
                        Console.WriteLine("Neglect frame {0}", this.recordedFrames);
                        return;
                    }

                    // Storage data: fresh arrays per frame so the container owns independent copies.
                    _colorData     = new byte[this.buffColor32.Length];
                    _depthData     = new ushort[this.buffDepth16.Length];
                    _bodyIndexData = new byte[this.buffBodyIndex8.Length];
                    _bodies        = new Body[this.listBodies.Count];

                    colorFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Bgra);
                    depthFrame.CopyFrameDataToArray(_depthData);
                    bodyIndexFrame.CopyFrameDataToArray(_bodyIndexData);
                    bodyFrame.GetAndRefreshBodyData(_bodies);

                    // Increase the counter
                    this.recordedFrames++;

                    this.dataContainer.AddColor        = _colorData;
                    this.dataContainer.AddDepth        = _depthData;
                    this.dataContainer.AddBodyIndex    = _bodyIndexData;
                    this.dataContainer.AddListOfBodies = _bodies;
                    this.dataContainer.AddFaceData     = this.faceData;


                    // If the user only requires saving a fixed number of frames, stop when reached.
                    if (this.fixedFrames == this.recordedFrames)
                    {
                        this.stateOfRecording = false;
                    }
                }


                // Notice:
                // Array.Copy() --> how many elements to copy
                // Buffer.BlockCopy --> how many bytes to copy

                // Update Frame Rate
                UpdateGrabberFrameRate();
            }
            finally
            {
                // Keep the frame counter from growing without bound.
                if (this.frameCount > 100000000)
                {
                    this.frameCount = 0;
                }
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                }
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                }
                if (bodyFrame != null)
                {
                    bodyFrame.Dispose();
                }
                if (bodyIndexFrame != null)
                {
                    bodyIndexFrame.Dispose();
                }
            }
        }
Beispiel #30
0
        /// <summary>
        /// Extension helper that produces a green-screen bitmap from the given frames,
        /// delegating the work to the shared bitmap generator.
        /// </summary>
        /// <param name="bodyIndexFrame">The specified <see cref="BodyIndexFrame"/> identifying player pixels.</param>
        /// <param name="colorFrame">The specified <see cref="ColorFrame"/> supplying pixel data.</param>
        /// <param name="depthFrame">The specified <see cref="DepthFrame"/> used for coordinate mapping.</param>
        /// <returns>The generator's bitmap, refreshed with the new frame data.</returns>
        public static WriteableBitmap GreenScreen(this BodyIndexFrame bodyIndexFrame, ColorFrame colorFrame, DepthFrame depthFrame)
        {
            var generator = _greenScreenBitmapGenerator;
            generator.Update(colorFrame, depthFrame, bodyIndexFrame);
            return generator.Bitmap;
        }
Beispiel #31
0
        /// <summary>
        /// Handles a multi-source frame: acquires the depth, color and body frames and
        /// runs whichever processing steps are enabled in the streamer configuration.
        /// All acquired frames are disposed in the finally block.
        /// </summary>
        /// <param name="sender">The frame reader raising the event.</param>
        /// <param name="e">Event arguments holding the frame reference.</param>
        private void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // Nothing to do when no data stream at all is requested.
            if (!KinectStreamerConfig.ProvideBodyData && !KinectStreamerConfig.ProvideColorData && !KinectStreamerConfig.ProvideDepthData)
            {
                return;
            }

            depthFrame = null;
            colorFrame = null;
            bodyFrame = null;

            multiSourceFrame = e.FrameReference.AcquireFrame();

            // The frame may have expired before this handler ran.
            if (multiSourceFrame == null)
            {
                return;
            }

            // try/finally guarantees every acquired frame is disposed on all exit paths.
            try
            {
                depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
                colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
                bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame();

                // Bail out if any sub-frame expired; "finally" disposes the rest.
                if (depthFrame == null || colorFrame == null || bodyFrame == null)
                {
                    return;
                }

                // Run only the processing stages the configuration asks for.
                if (KinectStreamerConfig.ProvideColorData)
                {
                    ProcessColorData();
                }

                if (KinectStreamerConfig.ProvideDepthData)
                {
                    ProcessDepthData();
                }

                if (KinectStreamerConfig.ProvideBodyData)
                {
                    ProcessBodyData();
                }
            }
            finally
            {
                if (depthFrame != null) { depthFrame.Dispose(); }
                if (colorFrame != null) { colorFrame.Dispose(); }
                if (bodyFrame != null) { bodyFrame.Dispose(); }
            }
        }
        /// <summary>
        /// Builds a green-screen image: pixels belonging to tracked bodies are copied
        /// from the color frame into a depth-resolution bitmap; all other pixels are
        /// left cleared (background removed).
        /// </summary>
        /// <param name="depthFrame">The depth frame used to map depth pixels into color space.</param>
        /// <param name="colorFrame">The color frame supplying BGRA pixel data.</param>
        /// <param name="bodyIndexFrame">The body index frame marking player pixels (0xff = no player).</param>
        /// <returns>The WriteableBitmap holding the composited image.</returns>
        public WriteableBitmap GreenScreen(ColorFrame colorFrame, DepthFrame depthFrame, BodyIndexFrame bodyIndexFrame)
        {
            int colorWidth = colorFrame.FrameDescription.Width;
            int colorHeight = colorFrame.FrameDescription.Height;
            int depthWidth = depthFrame.FrameDescription.Width;
            int depthHeight = depthFrame.FrameDescription.Height;
            int bodyIndexWidth = bodyIndexFrame.FrameDescription.Width;
            int bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;

            // Lazily allocate all working buffers on the first call.
            if (_displayPixels == null)
            {
                int depthPixelCount = depthWidth * depthHeight;
                _depthData = new ushort[depthPixelCount];
                _bodyData = new byte[depthPixelCount];
                _colorData = new byte[colorWidth * colorHeight * Constants.BYTES_PER_PIXEL];
                _displayPixels = new byte[depthPixelCount * Constants.BYTES_PER_PIXEL];
                _colorPoints = new ColorSpacePoint[depthPixelCount];
                _bitmap = new WriteableBitmap(depthWidth, depthHeight);
                _stream = _bitmap.PixelBuffer.AsStream();
            }

            // Only process when the incoming frame sizes still match the allocated buffers.
            bool sizesMatch = ((depthWidth * depthHeight) == _depthData.Length)
                && ((colorWidth * colorHeight * Constants.BYTES_PER_PIXEL) == _colorData.Length)
                && ((bodyIndexWidth * bodyIndexHeight) == _bodyData.Length);

            if (sizesMatch)
            {
                depthFrame.CopyFrameDataToArray(_depthData);

                // Raw BGRA data is copied directly; anything else is converted.
                if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                {
                    colorFrame.CopyRawFrameDataToArray(_colorData);
                }
                else
                {
                    colorFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Bgra);
                }

                bodyIndexFrame.CopyFrameDataToArray(_bodyData);

                CoordinateMapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);

                Array.Clear(_displayPixels, 0, _displayPixels.Length);

                // Walk every depth pixel; its (x, y) position is implicit in the linear index.
                for (int depthIndex = 0; depthIndex < _depthData.Length; ++depthIndex)
                {
                    // 0xff means no tracked body at this depth pixel.
                    if (_bodyData[depthIndex] == 0xff)
                    {
                        continue;
                    }

                    ColorSpacePoint colorPoint = _colorPoints[depthIndex];
                    int colorX = (int)Math.Floor(colorPoint.X + 0.5);
                    int colorY = (int)Math.Floor(colorPoint.Y + 0.5);

                    // Skip depth pixels that map outside the color image.
                    if (colorX < 0 || colorX >= colorWidth || colorY < 0 || colorY >= colorHeight)
                    {
                        continue;
                    }

                    int colorIndex = ((colorY * colorWidth) + colorX) * Constants.BYTES_PER_PIXEL;
                    int displayIndex = depthIndex * Constants.BYTES_PER_PIXEL;

                    _displayPixels[displayIndex + 0] = _colorData[colorIndex];
                    _displayPixels[displayIndex + 1] = _colorData[colorIndex + 1];
                    _displayPixels[displayIndex + 2] = _colorData[colorIndex + 2];
                    _displayPixels[displayIndex + 3] = 0xff;
                }

                // Flush the composited pixels into the bitmap and request a redraw.
                _stream.Seek(0, SeekOrigin.Begin);
                _stream.Write(_displayPixels, 0, _displayPixels.Length);

                _bitmap.Invalidate();
            }

            return _bitmap;
        }
        /// <summary>
        /// Packages the data of the supplied frames into an ImageModel, refreshes the
        /// color preview bitmap on screen, and posts the model to the DataFlow image
        /// transformer for asynchronous processing.
        /// </summary>
        /// <param name="depthFrame">The depth frame whose data is copied into the model.</param>
        /// <param name="colorFrame">The color frame rendered to the preview and copied into the model.</param>
        /// <param name="bodyIndexFrame">The body index frame copied into the model.</param>
        /// <param name="bodyFrame">The body frame used to refresh the model's body list.</param>
        private void ProcessFrames(DepthFrame depthFrame, ColorFrame colorFrame, BodyIndexFrame bodyIndexFrame, BodyFrame bodyFrame)
        {



            FrameDescription depthFrameDescription = depthFrame.FrameDescription;
            FrameDescription colorFrameDescription = colorFrame.FrameDescription;
            FrameDescription bodyIndexFrameDescription = bodyIndexFrame.FrameDescription;




            // NOTE(review): these two locals are never read below — candidates for removal.
            int bodyIndexWidth = bodyIndexFrameDescription.Width;
            int bodyIndexHeight = bodyIndexFrameDescription.Height;


            // The ImageModel object is used to transfer Kinect data into the DataFlow routines.
            ImageModel imageModel = new ImageModel()
            {
                DepthWidth = depthFrameDescription.Width,
                DepthHeight = depthFrameDescription.Height,
                ColorWidth = colorFrameDescription.Width,
                ColorHeight = colorFrameDescription.Height,
                ShowTrails = _vm.LeaveTrails,
                PersonFill = _vm.PersonFill,
                MaxDistance = _vm.BackgroundDistance
            };
            // Fresh buffers per frame so the transformer owns independent copies.
            imageModel.ColorFrameData = new byte[imageModel.ColorWidth * imageModel.ColorHeight * this.bytesPerPixel];

            imageModel.DisplayPixels = new byte[_PreviousFrameDisplayPixels.Length];
            imageModel.BodyIndexFrameData = new byte[imageModel.DepthWidth * imageModel.DepthHeight];
            imageModel.ColorPoints = new ColorSpacePoint[imageModel.DepthWidth * imageModel.DepthHeight];
            imageModel.BytesPerPixel = bytesPerPixel;
            imageModel.Bodies = new Body[this.kinectSensor.BodyFrameSource.BodyCount];
            bodyFrame.GetAndRefreshBodyData(imageModel.Bodies);
            imageModel.DepthData = new ushort[imageModel.DepthWidth * imageModel.DepthHeight];
            
            // Depth data is copied twice: into the model and into the member buffer used for mapping below.
            depthFrame.CopyFrameDataToArray(imageModel.DepthData);
            depthFrame.CopyFrameDataToArray(this.DepthFrameData);
            
            // Raw BGRA data is copied directly; anything else is converted.
            if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
            {
                colorFrame.CopyRawFrameDataToArray(imageModel.ColorFrameData);
            }
            else
            {
                colorFrame.CopyConvertedFrameDataToArray(imageModel.ColorFrameData, ColorImageFormat.Bgra);
            }
            imageModel.PixelFormat = PixelFormats.Bgra32;


            // Render the color frame into the preview bitmap.
            _ColorBitmap.WritePixels(new Int32Rect(0, 0, imageModel.ColorWidth, imageModel.ColorHeight),
                                          imageModel.ColorFrameData,
                                          imageModel.ColorWidth * imageModel.BytesPerPixel,
                                          0);


            //RenderTargetBitmap renderBitmap = new RenderTargetBitmap((int)CompositeImage.ActualWidth, (int)CompositeImage.ActualHeight, 96.0, 96.0, PixelFormats.Pbgra32);
            //DrawingVisual dv = new DrawingVisual();
            //VisualBrush brush = new VisualBrush(CompositeImage);

            //foreach(Body body in _bodies)
            //{
            //    if (body.IsTracked)
            //    {
            //        Joint joint = body.Joints[JointType.HandRight];
            //        using (DrawingContext dc = dv.RenderOpen())
            //        {

            //            dc.DrawRectangle(brush, null, new Rect(new Point(), new Size(CompositeImage.ActualWidth, CompositeImage.ActualHeight)));
            //            ImageBrush brush2 = new ImageBrush(_pointerBitmap);
            //            brush2.Opacity = 1.0;
            //            dc.DrawRectangle(brush2, null, new Rect(new Point(0, CompositeImage.ActualHeight - _Overlay.Height), new Size(_pointerBitmap.Width, _pointerBitmap.Height)));
            //        }
            //    }
            //}

            //ConvertIRDataToByte();






            ImagePreview.Source = _ColorBitmap;


            bodyIndexFrame.CopyFrameDataToArray(imageModel.BodyIndexFrameData);

            // Map every depth pixel to its color-space coordinate for the transformer.
            this.coordinateMapper.MapDepthFrameToColorSpace(DepthFrameData, imageModel.ColorPoints);

            // Carry over last frame's pixels so body silhouettes accumulate ("trails").
            if (_vm.LeaveTrails)
            {
                Array.Copy(this._PreviousFrameDisplayPixels, imageModel.DisplayPixels, this._PreviousFrameDisplayPixels.Length);
            }


            try
            {
                //Send the imageModel to the DataFlow transformer
                _ImageTransformer.Post(imageModel);
            }
            catch (Exception ex)
            {
#if DEBUG
                Console.WriteLine(ex);
#endif
            }


        }
        /// <summary>
        /// Renders the given color frame into the display bitmap, provided the frame
        /// is non-null and its dimensions match the bitmap.
        /// </summary>
        /// <param name="colorFrame">The color frame to display; may be null.</param>
        private void ShowColorFrame(ColorFrame colorFrame)
        {
            if (colorFrame == null)
            {
                return;
            }

            FrameDescription description = colorFrame.FrameDescription;

            using (KinectBuffer buffer = colorFrame.LockRawImageBuffer())
            {
                this.colorBitmap.Lock();

                // Verify the frame matches the bitmap before writing (4 bytes per BGRA pixel).
                bool dimensionsMatch = (description.Width == this.colorBitmap.PixelWidth)
                    && (description.Height == this.colorBitmap.PixelHeight);

                if (dimensionsMatch)
                {
                    colorFrame.CopyConvertedFrameDataToIntPtr(
                        this.colorBitmap.BackBuffer,
                        (uint)(description.Width * description.Height * 4),
                        ColorImageFormat.Bgra);

                    this.colorBitmap.AddDirtyRect(new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight));
                }

                this.colorBitmap.Unlock();
            }
        }
        /// <summary>
        /// Queues a color frame for recording. Null frames are logged and skipped.
        /// </summary>
        /// <param name="frame">The color frame to record; may be null.</param>
        /// <exception cref="InvalidOperationException">Thrown when the recorder has not been started.</exception>
        public void RecordFrame(ColorFrame frame)
        {
            if (!_isStarted)
            {
                throw new InvalidOperationException("Cannot record frames unless the KinectRecorder is started.");
            }

            if (frame == null)
            {
                System.Diagnostics.Debug.WriteLine("!!! FRAME SKIPPED (Color in KinectRecorder)");
                return;
            }

            _recordQueue.Enqueue(new RPColorFrame(frame));
            System.Diagnostics.Debug.WriteLine("+++ Enqueued Color Frame ({0})", _recordQueue.Count);
        }
        /// <summary>
        /// Copies the given color frame into the display bitmap when the frame's
        /// dimensions match the bitmap. Null frames are ignored.
        /// </summary>
        /// <param name="colorFrame">The color frame to render; may be null.</param>
        private void ProcessColorFrame(ColorFrame colorFrame)
        {
            if (colorFrame == null)
            {
                return;
            }

            FrameDescription description = colorFrame.FrameDescription;

            using (KinectBuffer buffer = colorFrame.LockRawImageBuffer())
            {
                this.m_colorBitmap.Lock();

                // Verify the frame matches the bitmap before writing (4 bytes per BGRA pixel).
                if ((description.Width == this.m_colorBitmap.PixelWidth) && (description.Height == this.m_colorBitmap.PixelHeight))
                {
                    colorFrame.CopyConvertedFrameDataToIntPtr(
                        this.m_colorBitmap.BackBuffer,
                        (uint)(description.Width * description.Height * 4),
                        ColorImageFormat.Bgra);

                    // The whole bitmap is marked dirty; partial-region invalidation was considered but not used.
                    this.m_colorBitmap.AddDirtyRect(new Int32Rect(0, 0, this.m_colorBitmap.PixelWidth, this.m_colorBitmap.PixelHeight));
                }

                this.m_colorBitmap.Unlock();
            }
        }
        /// <summary>
        /// Converts a Kinect color frame into a BGR32 <see cref="BitmapSource"/>.
        /// </summary>
        /// <param name="frame">The color frame to convert.</param>
        /// <returns>An ImageSource holding the frame's pixels.</returns>
        private ImageSource ToBitmap(ColorFrame frame)
        {
            int width = frame.FrameDescription.Width;
            int height = frame.FrameDescription.Height;
            PixelFormat format = PixelFormats.Bgr32;
            int bytesPerPixel = (format.BitsPerPixel + 7) / 8;

            byte[] pixels = new byte[width * height * bytesPerPixel];

            // Raw BGRA frames can be copied directly; all other formats are converted.
            if (frame.RawColorImageFormat == ColorImageFormat.Bgra)
            {
                frame.CopyRawFrameDataToArray(pixels);
            }
            else
            {
                frame.CopyConvertedFrameDataToArray(pixels, ColorImageFormat.Bgra);
            }

            int stride = width * format.BitsPerPixel / 8;

            return BitmapSource.Create(width, height, 96, 96, format, null, pixels, stride);
        }
        /// <summary>
        /// Builds a green-screen image as a System.Drawing <see cref="Bitmap"/>:
        /// pixels belonging to tracked bodies are copied from the color frame into a
        /// depth-resolution bitmap; all other pixels are left cleared.
        /// </summary>
        /// <param name="depthFrame">The depth frame used to map depth pixels into color space.</param>
        /// <param name="colorFrame">The color frame supplying BGRA pixel data.</param>
        /// <param name="bodyIndexFrame">The body index frame marking player pixels (0xff = no player).</param>
        /// <returns>The Bitmap holding the composited image.</returns>
        public Bitmap GreenScreen(ColorFrame colorFrame, DepthFrame depthFrame, BodyIndexFrame bodyIndexFrame)
        {
            int colorWidth = colorFrame.FrameDescription.Width;
            int colorHeight = colorFrame.FrameDescription.Height;
            int depthWidth = depthFrame.FrameDescription.Width;
            int depthHeight = depthFrame.FrameDescription.Height;
            int bodyIndexWidth = bodyIndexFrame.FrameDescription.Width;
            int bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;

            // Lazily allocate all working buffers on the first call.
            if (_displayPixels == null)
            {
                int depthPixelCount = depthWidth * depthHeight;
                _depthData = new ushort[depthPixelCount];
                _bodyData = new byte[depthPixelCount];
                _colorData = new byte[colorWidth * colorHeight * Constants.BYTES_PER_PIXEL];
                _displayPixels = new byte[depthPixelCount * Constants.BYTES_PER_PIXEL];
                _colorPoints = new ColorSpacePoint[depthPixelCount];
                _bitmap = new Bitmap(depthWidth, depthHeight, Constants.FORMAT);
            }

            // Only process when the incoming frame sizes still match the allocated buffers.
            bool sizesMatch = ((depthWidth * depthHeight) == _depthData.Length)
                && ((colorWidth * colorHeight * Constants.BYTES_PER_PIXEL) == _colorData.Length)
                && ((bodyIndexWidth * bodyIndexHeight) == _bodyData.Length);

            if (sizesMatch)
            {
                depthFrame.CopyFrameDataToArray(_depthData);

                // Raw BGRA data is copied directly; anything else is converted.
                if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                {
                    colorFrame.CopyRawFrameDataToArray(_colorData);
                }
                else
                {
                    colorFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Bgra);
                }

                bodyIndexFrame.CopyFrameDataToArray(_bodyData);

                CoordinateMapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);

                Array.Clear(_displayPixels, 0, _displayPixels.Length);

                // Walk every depth pixel; its (x, y) position is implicit in the linear index.
                for (int depthIndex = 0; depthIndex < _depthData.Length; ++depthIndex)
                {
                    // 0xff means no tracked body at this depth pixel.
                    if (_bodyData[depthIndex] == 0xff)
                    {
                        continue;
                    }

                    ColorSpacePoint colorPoint = _colorPoints[depthIndex];
                    int colorX = (int)Math.Floor(colorPoint.X + 0.5);
                    int colorY = (int)Math.Floor(colorPoint.Y + 0.5);

                    // Skip depth pixels that map outside the color image.
                    if (colorX < 0 || colorX >= colorWidth || colorY < 0 || colorY >= colorHeight)
                    {
                        continue;
                    }

                    int colorIndex = ((colorY * colorWidth) + colorX) * Constants.BYTES_PER_PIXEL;
                    int displayIndex = depthIndex * Constants.BYTES_PER_PIXEL;

                    _displayPixels[displayIndex + 0] = _colorData[colorIndex];
                    _displayPixels[displayIndex + 1] = _colorData[colorIndex + 1];
                    _displayPixels[displayIndex + 2] = _colorData[colorIndex + 2];
                    _displayPixels[displayIndex + 3] = 0xff;
                }

                // Copy the composited pixels into the bitmap's locked pixel memory.
                BitmapData bitmapData = _bitmap.LockBits(new Rectangle(0, 0, depthWidth, depthHeight), ImageLockMode.ReadWrite, _bitmap.PixelFormat);
                Marshal.Copy(_displayPixels, 0, bitmapData.Scan0, _displayPixels.Length);

                _bitmap.UnlockBits(bitmapData);
            }

            return _bitmap;
        }
Beispiel #39
0
        /// <summary>
        /// Handles MultiSourceFrameReader frame-arrived events: acquires the frame(s)
        /// needed by the currently selected display mode and forwards them to the
        /// matching Show* renderer.
        /// </summary>
        /// <param name="sender">The reader that raised the event.</param>
        /// <param name="e">Event data holding the multi-source frame reference.</param>
        private void Reader_MultiSourceFrameArrived(MultiSourceFrameReader sender, MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

            // If the Frame has expired by the time we process this event, return.
            if (multiSourceFrame == null)
            {
                return;
            }
            DepthFrame     depthFrame           = null;
            ColorFrame     colorFrame           = null;
            InfraredFrame  infraredFrame        = null;
            BodyFrame      bodyFrame            = null;
            BodyIndexFrame bodyIndexFrame       = null;
            IBuffer        depthFrameDataBuffer = null;
            IBuffer        bodyIndexFrameData   = null;
            // Com interface for unsafe byte manipulation
            IBufferByteAccess bufferByteAccess = null;

            switch (CurrentDisplayFrameType)
            {
            // Single-source modes: the using block disposes the acquired frame immediately.
            case DisplayFrameType.Infrared:
                using (infraredFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame())
                {
                    ShowInfraredFrame(infraredFrame);
                }
                break;

            case DisplayFrameType.Color:
                using (colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
                {
                    ShowColorFrame(colorFrame);
                }
                break;

            case DisplayFrameType.Depth:
                using (depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
                {
                    ShowDepthFrame(depthFrame);
                }
                break;

            case DisplayFrameType.BodyMask:
                // Put in a try catch to utilise finally() and clean up frames
                try
                {
                    // All three frames are required; bail out (finally still disposes
                    // whatever was acquired) if any reference has expired.
                    depthFrame     = multiSourceFrame.DepthFrameReference.AcquireFrame();
                    bodyIndexFrame = multiSourceFrame.BodyIndexFrameReference.AcquireFrame();
                    colorFrame     = multiSourceFrame.ColorFrameReference.AcquireFrame();
                    if ((depthFrame == null) || (colorFrame == null) || (bodyIndexFrame == null))
                    {
                        return;
                    }

                    // Access the depth frame data directly via LockImageBuffer to avoid making a copy
                    depthFrameDataBuffer = depthFrame.LockImageBuffer();
                    this.coordinateMapper.MapColorFrameToDepthSpaceUsingIBuffer(depthFrameDataBuffer, this.colorMappedToDepthPoints);
                    // Process Color
                    colorFrame.CopyConvertedFrameDataToBuffer(this.bitmap.PixelBuffer, ColorImageFormat.Bgra);
                    // Access the body index frame data directly via LockImageBuffer to avoid making a copy
                    bodyIndexFrameData = bodyIndexFrame.LockImageBuffer();
                    ShowMappedBodyFrame(depthFrame.FrameDescription.Width, depthFrame.FrameDescription.Height, bodyIndexFrameData, bufferByteAccess);
                }
                finally
                {
                    // Dispose frames first, then force-release the COM buffers that
                    // were locked from them.
                    if (depthFrame != null)
                    {
                        depthFrame.Dispose();
                    }
                    if (colorFrame != null)
                    {
                        colorFrame.Dispose();
                    }
                    if (bodyIndexFrame != null)
                    {
                        bodyIndexFrame.Dispose();
                    }

                    if (depthFrameDataBuffer != null)
                    {
                        // We must force a release of the IBuffer in order to ensure that we have dropped all references to it.
                        System.Runtime.InteropServices.Marshal.ReleaseComObject(depthFrameDataBuffer);
                    }
                    if (bodyIndexFrameData != null)
                    {
                        System.Runtime.InteropServices.Marshal.ReleaseComObject(bodyIndexFrameData);
                    }
                    // NOTE(review): bufferByteAccess is never assigned in this method and is
                    // passed to ShowMappedBodyFrame by value, so this release is currently a
                    // no-op — confirm ShowMappedBodyFrame does not need the parameter by ref.
                    if (bufferByteAccess != null)
                    {
                        System.Runtime.InteropServices.Marshal.ReleaseComObject(bufferByteAccess);
                    }
                }
                break;

            case DisplayFrameType.BodyJoints:
                using (bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
                {
                    ShowBodyJoints(bodyFrame);
                }
                break;

            case DisplayFrameType.BackgroundRemoved:
                // Put in a try catch to utilise finally() and clean up frames
                try
                {
                    depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();
                    colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
                    if ((depthFrame == null) || (colorFrame == null))
                    {
                        return;
                    }
                    // Copy depth, map color pixels into depth space, then copy the
                    // converted BGRA color data into the display bitmap's buffer.
                    depthFrame.CopyFrameDataToArray(depthFrameData);
                    this.coordinateMapper.MapColorFrameToDepthSpace(depthFrameData, this.colorMappedToDepthPoints);
                    // Process Color.
                    colorFrame.CopyConvertedFrameDataToBuffer(this.bitmap.PixelBuffer, ColorImageFormat.Bgra);

                    ShowMappedColorBackgroundRemoved(colorMappedToDepthPoints, depthFrameData, depthFrame.FrameDescription);
                }
                finally
                {
                    if (depthFrame != null)
                    {
                        depthFrame.Dispose();
                    }
                    if (colorFrame != null)
                    {
                        colorFrame.Dispose();
                    }
                }
                break;

            default:
                break;
            }
        }
Beispiel #40
0
    /// <summary>
    /// Dispatches each frame type contained in the ready event to its listeners,
    /// skipping any frame whose index was already delivered.
    /// </summary>
    /// <param name="sender">The frame source that raised the event.</param>
    /// <param name="e">Event data carrying the composite frame.</param>
    private void FrameReady(object sender, FrameReadyEventArgs e)
    {
        frameReadyTime.Start();

        // Each section follows the same pattern: fetch the typed sub-frame,
        // and only raise the event when its index advanced past the last one seen.
        DepthFrame depth = e.Frame.GetFrame <DepthFrame>();
        if (depth != null && _lastDepthFrameIndex != depth.FrameIndex)
        {
            _lastDepthFrameIndex = depth.FrameIndex;
            NewDepthFrameEvent.Invoke(depth);
        }

        ColorFrame color = e.Frame.GetFrame <ColorFrame>();
        if (color != null && _lastColorFrameIndex != color.FrameIndex)
        {
            _lastColorFrameIndex = color.FrameIndex;
            NewColorFrameEvent.Invoke(color);
        }

        BodyFrame body = e.Frame.GetFrame <BodyFrame>();
        if (body != null && _lastBodyFrameIndex != body.FrameIndex)
        {
            _lastBodyFrameIndex = body.FrameIndex;
            // Body frames fan out to two listeners: the body data and its mask.
            NewBodyFrameEvent.Invoke(_bodyStream, body);
            NewBodyMaskEvent.Invoke(body.BodyMask);
        }

        MaskedColorFrame maskedColor = e.Frame.GetFrame <MaskedColorFrame>();
        if (maskedColor != null && _lastMaskedColorFrameIndex != maskedColor.FrameIndex)
        {
            _lastMaskedColorFrameIndex = maskedColor.FrameIndex;
            NewMaskedColorFrameEvent.Invoke(maskedColor);
        }

        ColorizedBodyFrame colorizedBody = e.Frame.GetFrame <ColorizedBodyFrame>();
        if (colorizedBody != null && _lastColorizedBodyFrameIndex != colorizedBody.FrameIndex)
        {
            _lastColorizedBodyFrameIndex = colorizedBody.FrameIndex;
            NewColorizedBodyFrameEvent.Invoke(colorizedBody);
        }

        _frameCount++;
        _frameReadyDirty = true;
        frameReadyTime.Stop();
    }
    /// <summary>
    /// Allocates the shared color frame from the sensor's color frame description,
    /// registers it with the add-on manager, and starts the stream stopwatch.
    /// </summary>
    private void StartColorStream() {
      // Describe the color output produced by the sensor.
      var description = Sensor.ColorFrameSource.FrameDescription;

      // Build the color buffer: 4 bytes per pixel (BGRA), capped at 15 fps.
      ColorFrame frame = Color = new ColorFrame {
        Width = description.Width,
        Height = description.Height,
        Pixels = new byte[description.LengthInPixels * 4],
        Stamp = new Timestamp(),
        Fps = 15
      };

      AddOnManager.GetInstance().InitFrame(Name, frame);
      Log(frame.ToString());

      // Start Watch
      ColorWatch = new StopwatchAvg();
    }
Beispiel #42
0
 /// <summary>
 /// Copies the latest Kinect color frame into the shared BGRA pixel buffer.
 /// </summary>
 /// <param name="frame">The color frame to copy; ignored when null.</param>
 static unsafe void ColorFrameReady(ColorFrame frame)
 {
     // Kinect frame references can expire and yield null; skip instead of
     // throwing a NullReferenceException (matches the other handlers in this file).
     if (frame == null)
     {
         return;
     }

     if (frame.RawColorImageFormat == ColorImageFormat.Bgra)
     {
         // Raw data is already BGRA: copy without conversion.
         frame.CopyRawFrameDataToArray(sensorColorFrameData);
     }
     else
     {
         // Convert the raw format (e.g. YUY2) to BGRA during the copy.
         frame.CopyConvertedFrameDataToArray(sensorColorFrameData, ColorImageFormat.Bgra);
     }
 }
Beispiel #43
0
        /// <summary>
        /// Combines a color, depth and body-index frame into a player-only image
        /// (green-screen effect): only pixels belonging to a tracked player are
        /// copied from the color frame into the returned bitmap.
        /// </summary>
        /// <param name="depthFrame">The specified depth frame.</param>
        /// <param name="colorFrame">The specified color frame.</param>
        /// <param name="bodyIndexFrame">The specified body index frame.</param>
        /// <returns>A System.Drawing.Bitmap at depth resolution containing only the player pixels.</returns>
        public Bitmap GreenScreen(ColorFrame colorFrame, DepthFrame depthFrame, BodyIndexFrame bodyIndexFrame)
        {
            int colorWidth  = colorFrame.FrameDescription.Width;
            int colorHeight = colorFrame.FrameDescription.Height;

            int depthWidth  = depthFrame.FrameDescription.Width;
            int depthHeight = depthFrame.FrameDescription.Height;

            int bodyIndexWidth  = bodyIndexFrame.FrameDescription.Width;
            int bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;

            // Lazily allocate all buffers on the first call, sized to the current frames.
            if (_displayPixels == null)
            {
                _depthData     = new ushort[depthWidth * depthHeight];
                _bodyData      = new byte[depthWidth * depthHeight];
                _colorData     = new byte[colorWidth * colorHeight * Constants.BYTES_PER_PIXEL];
                _displayPixels = new byte[depthWidth * depthHeight * Constants.BYTES_PER_PIXEL];
                _colorPoints   = new ColorSpacePoint[depthWidth * depthHeight];
                _bitmap        = new Bitmap(depthWidth, depthHeight, Constants.FORMAT);
            }

            // Only process when the incoming frame sizes still match the allocated buffers.
            if (((depthWidth * depthHeight) == _depthData.Length) && ((colorWidth * colorHeight * Constants.BYTES_PER_PIXEL) == _colorData.Length) && ((bodyIndexWidth * bodyIndexHeight) == _bodyData.Length))
            {
                depthFrame.CopyFrameDataToArray(_depthData);

                // Copy the color data as BGRA, converting only if the raw format differs.
                if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                {
                    colorFrame.CopyRawFrameDataToArray(_colorData);
                }
                else
                {
                    colorFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Bgra);
                }

                bodyIndexFrame.CopyFrameDataToArray(_bodyData);

                // For each depth pixel, find the matching color-space coordinate.
                CoordinateMapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);

                // Start from a fully transparent/black output each frame.
                Array.Clear(_displayPixels, 0, _displayPixels.Length);

                for (int y = 0; y < depthHeight; ++y)
                {
                    for (int x = 0; x < depthWidth; ++x)
                    {
                        int depthIndex = (y * depthWidth) + x;

                        byte player = _bodyData[depthIndex];

                        // 0xff marks "no player" in the body index frame; skip background.
                        if (player != 0xff)
                        {
                            ColorSpacePoint colorPoint = _colorPoints[depthIndex];

                            // Round the mapped color coordinate to the nearest pixel.
                            int colorX = (int)Math.Floor(colorPoint.X + 0.5);
                            int colorY = (int)Math.Floor(colorPoint.Y + 0.5);

                            // Mapped points can fall outside the color frame (or be NaN,
                            // which fails these comparisons); only copy in-bounds pixels.
                            if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight))
                            {
                                int colorIndex   = ((colorY * colorWidth) + colorX) * Constants.BYTES_PER_PIXEL;
                                int displayIndex = depthIndex * Constants.BYTES_PER_PIXEL;

                                // Copy B, G, R and force the pixel fully opaque.
                                _displayPixels[displayIndex + 0] = _colorData[colorIndex];
                                _displayPixels[displayIndex + 1] = _colorData[colorIndex + 1];
                                _displayPixels[displayIndex + 2] = _colorData[colorIndex + 2];
                                _displayPixels[displayIndex + 3] = 0xff;
                            }
                        }
                    }
                }

                // Blit the assembled pixel buffer into the reusable bitmap.
                BitmapData bitmapData = _bitmap.LockBits(new Rectangle(0, 0, depthWidth, depthHeight), ImageLockMode.ReadWrite, _bitmap.PixelFormat);
                Marshal.Copy(_displayPixels, 0, bitmapData.Scan0, _displayPixels.Length);

                _bitmap.UnlockBits(bitmapData);
            }

            return(_bitmap);
        }
        /// <summary>
        /// Handles a multi-source frame: renders the color frame to the display
        /// bitmap, runs an HSV in-range blob search to track the largest blob's
        /// centroid (target_x/target_y), and broadcasts depth and 2D-scan data.
        /// </summary>
        /// <param name="sender">The multi-source frame reader.</param>
        /// <param name="e">Event data holding the frame reference.</param>
        private void OnFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            double           utcTime          = (DateTime.UtcNow - new DateTime(1970, 1, 1)).TotalSeconds;
            MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

            // BUG FIX: the frame reference can expire before this handler runs, in
            // which case AcquireFrame returns null and the code below would throw.
            if (multiSourceFrame == null)
            {
                return;
            }

            using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
            using (DepthFrame depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame())
            {
                if (colorFrame != null)
                {
                    colorFrame.CopyConvertedFrameDataToArray(this.colorArray, ColorImageFormat.Bgra);

                    FrameDescription colorFrameDescription = colorFrame.FrameDescription;

                    using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                    {
                        this.colorBitmap.Lock();

                        // Verify size and write the new color frame data to the display bitmap.
                        if ((colorFrameDescription.Width == this.colorBitmap.PixelWidth) && (colorFrameDescription.Height == this.colorBitmap.PixelHeight))
                        {
                            colorFrame.CopyConvertedFrameDataToIntPtr(
                                this.colorBitmap.BackBuffer,
                                (uint)(colorFrameDescription.Width * colorFrameDescription.Height * 4),
                                ColorImageFormat.Bgra);

                            this.colorBitmap.AddDirtyRect(new Int32Rect(0, 0, this.colorBitmap.PixelWidth, this.colorBitmap.PixelHeight));
                        }

                        this.colorBitmap.Unlock();
                    }

                    // BUG FIX: the three IplImages wrap unmanaged memory and were never
                    // disposed, leaking native buffers on every frame. Dispose them
                    // deterministically with using blocks.
                    using (IplImage imgSrc = this.colorBitmap.ToIplImage())              // Source, BGR image.
                    using (IplImage imgGray = new IplImage(imgSrc.Size, BitDepth.U8, 1)) // Binary image that has the blobs.
                    using (IplImage imghsv = new IplImage(imgSrc.Size, BitDepth.U8, 3))  // HSV image for thresholding.
                    {
                        Cv.CvtColor(imgSrc, imghsv, ColorConversion.BgrToHsv);
                        Cv.InRange(imghsv, lower, upper, imgGray);

                        CvBlobs blobs = new CvBlobs();
                        blobs.Label(imgGray);

                        // Track the centroid of the largest blob above the minimum area.
                        int min_area = 1500;
                        foreach (KeyValuePair <int, CvBlob> item in blobs)
                        {
                            CvBlob blob = item.Value;
                            blob.CalcCentroid();
                            int val = blob.Area;
                            if (val > min_area)
                            {
                                min_area = val;
                                target_x = (int)(float)blob.Centroid.X;
                                target_y = (int)(float)blob.Centroid.Y;
                            }
                        }
                    }
                }

                if (depthFrame != null)
                {
                    // Copy depth data, append the UTC timestamp, derive the 2D scan,
                    // and broadcast both payloads.
                    depthFrame.CopyFrameDataToArray(this.depthArray);
                    System.Buffer.BlockCopy(this.depthArray, 0, this.byteDepthArray, 0, this.kinect.DepthFrameSource.FrameDescription.Height * this.kinect.DepthFrameSource.FrameDescription.Width * BYTES_PER_DEPTH_PIXEL);
                    System.Buffer.BlockCopy(BitConverter.GetBytes(utcTime), 0, this.byteDepthArray, this.kinect.DepthFrameSource.FrameDescription.Height * this.kinect.DepthFrameSource.FrameDescription.Width * BYTES_PER_DEPTH_PIXEL, sizeof(double));

                    calculateScanFromDepth(this.depthArray);
                    System.Buffer.BlockCopy(this.scan2DArray, 0, this.byteScan2DArray, 0, 6 * this.kinect.DepthFrameSource.FrameDescription.Width * 4 + 12);

                    this.scan2DConnector.Broadcast(this.byteScan2DArray);
                    this.depthConnector.Broadcast(this.byteDepthArray);
                }
            }
        }
Beispiel #45
0
        /// <summary>
        /// Renders the latest color frame into the display bitmap and draws the
        /// joints and bones of every tracked body, mapped into color space.
        /// </summary>
        /// <param name="sender">The multi-source frame reader.</param>
        /// <param name="e">Event data holding the frame reference.</param>
        private void MultiSourceFrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            bool             dataReceived     = false;
            MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

            // BUG FIX: AcquireFrame returns null when the reference has expired;
            // dereferencing it below would throw a NullReferenceException.
            if (multiSourceFrame == null)
            {
                return;
            }

            using (ColorFrame colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame())
            {
                if (colorFrame != null)
                {
                    using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                    {
                        colorBitmap.Lock();

                        // Only copy when the frame matches the bitmap dimensions.
                        if ((colorFrameDescription.Width == colorBitmap.PixelWidth) &&
                            (colorFrameDescription.Height == colorBitmap.PixelHeight))
                        {
                            colorFrame.CopyConvertedFrameDataToIntPtr(
                                colorBitmap.BackBuffer,
                                (uint)(colorFrameDescription.Width *
                                       colorFrameDescription.Height * 4),
                                ColorImageFormat.Bgra);
                            colorBitmap.AddDirtyRect(new Int32Rect(0, 0, colorBitmap.PixelWidth,
                                                                   colorBitmap.PixelHeight));
                        }
                        colorBitmap.Unlock();
                    }
                }
            }
            using (BodyFrame bodyFrame = multiSourceFrame.BodyFrameReference.AcquireFrame())
            {
                if (bodyFrame != null)
                {
                    if (bodies == null)
                    {
                        bodies = new Body[bodyFrame.BodyCount];
                    }
                    bodyFrame.GetAndRefreshBodyData(bodies);
                    dataReceived = true;
                }
            }
            if (dataReceived)
            {
                canvas.Children.Clear();

                int colorIndex = 0;

                foreach (Body body in bodies.Where(b => b.IsTracked))
                {
                    // NOTE(review): assumes bodyColors holds at least as many brushes
                    // as there are simultaneously tracked bodies — confirm its size.
                    SolidColorBrush colorBrush = bodyColors[colorIndex++];
                    Dictionary <JointType, Point> jointColorPoints = new Dictionary <JointType, Point>();

                    foreach (var joint in body.Joints)
                    {
                        CameraSpacePoint position = joint.Value.Position;
                        // Clamp depth to a small positive value so the mapper never
                        // receives a point behind the camera.
                        if (position.Z < 0)
                        {
                            position.Z = 0.1f;
                        }
                        ColorSpacePoint colorSpacePoint = coordinateMapper.MapCameraPointToColorSpace(position);

                        jointColorPoints[joint.Key] = new Point(colorSpacePoint.X, colorSpacePoint.Y);

                        if (joint.Value.TrackingState == TrackingState.Tracked)
                        {
                            DrawJoint(new Point(colorSpacePoint.X, colorSpacePoint.Y), new
                                      SolidColorBrush(Colors.Purple));
                        }
                        if (joint.Value.TrackingState == TrackingState.Inferred)
                        {
                            DrawJoint(new Point(colorSpacePoint.X, colorSpacePoint.Y), new
                                      SolidColorBrush(Colors.LightGray));
                        }
                    }

                    // BUG FIX: the bones loop previously ran INSIDE the joints loop,
                    // redrawing every bone once per joint and reading a partially
                    // populated jointColorPoints dictionary. Draw each bone exactly
                    // once per body, after all joint points have been mapped.
                    foreach (var bone in bones)
                    {
                        DrawBone(body.Joints, jointColorPoints, bone.Item1, bone.Item2, colorBrush);
                    }
                }
            }
        }
 /// <summary>
 /// Updates the bitmap with new frame data, defaulting to the BGRA color format.
 /// </summary>
 /// <param name="frame">The specified Kinect color frame.</param>
 public void Update(ColorFrame frame) => Update(frame, ColorImageFormat.Bgra);
        void GreenScreenMappingDepthToColorSplats(ref DepthFrame depthFrame, ref ColorFrame colorFrame, ref BodyIndexFrame bodyIndexFrame, int depthWidth, int depthHeight, int colorWidth, int colorHeight)
        {
            m_stopwatch.Restart();

            using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer()) {
                // Need to know the color space point for each depth space point, but this is much less data
                // and much faster to compute than mapping the other way
                m_coordinateMapper.MapDepthFrameToColorSpaceUsingIntPtr(
                    depthFrameData.UnderlyingBuffer,
                    depthFrameData.Size,
                    m_depthToColorSpacePoints);
            }

            m_depthMapTimer.Update(m_stopwatch.ElapsedMilliseconds);
            m_stopwatch.Restart();

            // We're done with the DepthFrame 
            depthFrame.Dispose();
            depthFrame = null;

            lock (m_displayPixels) { // [KinectThread] avoid racing display buffer refresh with render (can cause missing images)

                // have to clear the display pixels so we can copy only the BGRA image of the player(s)
                Array.Clear(m_displayPixels, 0, m_displayPixels.Length);

                unsafe {
                    fixed (byte* colorFrameDataPtr = &m_colorFrameData[0]) {
                        colorFrame.CopyConvertedFrameDataToIntPtr(new IntPtr(colorFrameDataPtr), (uint)m_colorFrameData.Length, ColorImageFormat.Bgra);
                    }
                }

                // done with the colorFrame
                colorFrame.Dispose();
                colorFrame = null;

                m_colorCopyTimer.Update(m_stopwatch.ElapsedMilliseconds);
                m_stopwatch.Restart();

                // We'll access the body index data directly to avoid a copy
                using (KinectBuffer bodyIndexData = bodyIndexFrame.LockImageBuffer()) {
                    unsafe {
                        byte* bodyIndexDataPointer = (byte*)bodyIndexData.UnderlyingBuffer;
                        uint bodyIndexDataLength = bodyIndexData.Size;

                        int colorMappedToDepthPointCount = m_colorToDepthSpacePoints.Length;

                        fixed (ColorSpacePoint* depthMappedToColorPointsPointer = m_depthToColorSpacePoints) {
                            fixed (byte* bitmapPixelsBytePointer = &m_displayPixels[0]) {
                                fixed (byte* sourcePixelsBytePointer = &m_colorFrameData[0]) {
                                    uint* bitmapPixelsPointer = (uint*)bitmapPixelsBytePointer;
                                    uint* sourcePixelsPointer = (uint*)sourcePixelsBytePointer;

                                    // We don't go all the way to the edge of the depth buffer, to eliminate a chance
                                    // that a splat will go outside the edge of the color buffer when mapped to color
                                    // space.  In the x direction this will never happen anyway since the depth FOV
                                    // is so much narrower than the color FOV.
                                    const int Margin = 2;
                                    for (int y = Margin; y < depthHeight - Margin; y++) {
                                        for (int x = 0; x < depthWidth; x++) {
                                            // Scan forwards until we find a non-0xff value in the body index data.
                                            int depthIndex = y * depthWidth + x;
                                            if (bodyIndexDataPointer[depthIndex] != 0xff) {
                                                int depthIndex2 = depthIndex;
                                                // We found the beginning of a horizontal run of player pixels.
                                                // Scan to the end.
                                                int runWidth;
                                                for (runWidth = 1; runWidth + x < depthWidth; runWidth++) {
                                                    depthIndex2++;
                                                    if (bodyIndexDataPointer[depthIndex2] == 0xff) {
                                                        break;
                                                    }
                                                }
                                                
                                                // Now splat from (x, y) to (x + runWidth, y)
                                                float depthMappedToColorLeftX = depthMappedToColorPointsPointer[depthIndex].X;
                                                float depthMappedToColorLeftY = depthMappedToColorPointsPointer[depthIndex].Y;
                                                float depthMappedToColorRightX = depthMappedToColorPointsPointer[depthIndex2 - 1].X;
                                                float depthMappedToColorRightY = depthMappedToColorPointsPointer[depthIndex2 - 1].Y;

                                                // Now copy color pixels along that rectangle.
                                                const int splatHMargin = 2; // X margin of splat rectangle in color pixels
                                                const int splatVMargin = 3; // Y margin of splat rectangle in color pixels
                                                int minX = (int)Math.Min(depthMappedToColorLeftX, depthMappedToColorRightX) - splatHMargin;
                                                int minY = (int)Math.Min(depthMappedToColorLeftY, depthMappedToColorRightY) - splatVMargin;
                                                int maxX = (int)Math.Max(depthMappedToColorLeftX, depthMappedToColorRightX) + splatHMargin;
                                                int maxY = (int)Math.Max(depthMappedToColorLeftY, depthMappedToColorRightY) + splatVMargin;

                                                // Some edge of screen situations can result in color space points that are negative or otherwise
                                                // actually outside the color space coordinate range.
                                                Clamp(ref minX, colorWidth - 1);
                                                Clamp(ref minY, colorHeight - 1);
                                                Clamp(ref maxX, colorWidth - 1);
                                                Clamp(ref maxY, colorHeight - 1);

                                                for (int colorY = minY; colorY < maxY; colorY++) {
                                                    int colorIndex = colorY * colorWidth + minX;
                                                    for (int colorX = minX; colorX < maxX; colorX++) {
                                                        bitmapPixelsPointer[colorIndex] = sourcePixelsPointer[colorIndex];
                                                        colorIndex++;
                                                    }
                                                }

                                                x += runWidth;                          
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }

                // Done with bodyIndexFrame
                bodyIndexFrame.Dispose();
                bodyIndexFrame = null;                
            }

            m_colorScanTimer.Update(m_stopwatch.ElapsedMilliseconds);
            m_stopwatch.Restart();

            m_displayTexture.SetData(m_displayPixels);

            m_textureSetDataTimer.Update(m_stopwatch.ElapsedMilliseconds);
            m_stopwatch.Restart();

            Spam.TopLine1 = string.Format("depth map: {0} msec; color copy: {1} msec; color scan: {2} msec; texture set: {3} msec",
                m_depthMapTimer.Average,
                m_colorCopyTimer.Average,
                m_colorScanTimer.Average,
                m_textureSetDataTimer.Average);
        }
        /// <summary>
        /// Updates the bitmap with new frame data: copies the raw frames into the
        /// reusable buffers, maps depth pixels to color space, and paints each
        /// body pixel with its corresponding color pixel.
        /// </summary>
        /// <param name="colorFrame">The specified color frame.</param>
        /// <param name="depthFrame">The specified depth frame.</param>
        /// <param name="bodyIndexFrame">The specified body index frame.</param>
        public override void Update(ColorFrame colorFrame, DepthFrame depthFrame, BodyIndexFrame bodyIndexFrame)
        {
            FrameDescription colorDesc = colorFrame.FrameDescription;
            FrameDescription depthDesc = depthFrame.FrameDescription;
            FrameDescription bodyIndexDesc = bodyIndexFrame.FrameDescription;

            // Lazily allocate all buffers on the very first frame.
            if (Bitmap == null)
            {
                InitBuffers(colorDesc, depthDesc, bodyIndexDesc);
            }

            // Only process when every incoming frame matches its buffer size.
            bool sizesMatch =
                depthDesc.Width * depthDesc.Height == _depthData.Length &&
                colorDesc.Width * colorDesc.Height * Constants.BYTES_PER_PIXEL == _colorData.Length &&
                bodyIndexDesc.Width * bodyIndexDesc.Height == _bodyData.Length;

            if (!sizesMatch)
            {
                return;
            }

            // Stage the raw frame data.
            depthFrame.CopyFrameDataToArray(_depthData);

            // Copy directly when the sensor already delivers BGRA; convert otherwise.
            if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
            {
                colorFrame.CopyRawFrameDataToArray(_colorData);
            }
            else
            {
                colorFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Bgra);
            }

            bodyIndexFrame.CopyFrameDataToArray(_bodyData);

            // Map every depth pixel to its corresponding color-space coordinate.
            CoordinateMapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);

            // Start from a cleared (transparent/black) output buffer.
            Array.Clear(Pixels, 0, Pixels.Length);

            int depthWidth = depthDesc.Width;
            int depthHeight = depthDesc.Height;
            int colorWidth = colorDesc.Width;
            int colorHeight = colorDesc.Height;

            for (int row = 0; row < depthHeight; ++row)
            {
                for (int col = 0; col < depthWidth; ++col)
                {
                    int depthIndex = row * depthWidth + col;

                    // 0xff marks "no tracked body" in the body index frame.
                    if (_bodyData[depthIndex] == 0xff)
                    {
                        continue;
                    }

                    ColorSpacePoint mapped = _colorPoints[depthIndex];
                    int colorX = (int)(mapped.X + 0.5);
                    int colorY = (int)(mapped.Y + 0.5);

                    // Mapped points can fall outside the color frame near edges.
                    bool insideColorFrame =
                        colorX >= 0 && colorX < colorWidth &&
                        colorY >= 0 && colorY < colorHeight;

                    if (!insideColorFrame)
                    {
                        continue;
                    }

                    int sourceIndex = (colorY * colorWidth + colorX) * Constants.BYTES_PER_PIXEL;
                    int targetIndex = depthIndex * Constants.BYTES_PER_PIXEL;

                    for (int b = 0; b < Constants.BYTES_PER_PIXEL; ++b)
                    {
                        Pixels[targetIndex + b] = _colorData[sourceIndex + b];
                    }
                }
            }

            UpdateBitmap();
        }
        /// <summary>
        /// Copies the latest depth and color frame data from a multi-source frame
        /// into the shared raw-data buffers, under the raw-data lock.
        /// </summary>
        /// <param name="e">Multi-source frame-arrived event arguments.</param>
        private void ProcessFrameData(MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrame multiFrame = null;
            DepthFrame depth = null;
            ColorFrame color = null;

            try
            {
                multiFrame = e.FrameReference.AcquireFrame();
                if (multiFrame == null)
                {
                    return;
                }

                lock (rawDataLock)
                {
                    color = multiFrame.ColorFrameReference.AcquireFrame();
                    depth = multiFrame.DepthFrameReference.AcquireFrame();

                    if (depth == null || color == null)
                    {
                        return;
                    }

                    // Copy the color data only when the frame matches the buffer size.
                    FrameDescription colorDesc = color.FrameDescription;
                    if (colorDesc.Width * colorDesc.Height * sizeof(int) == colorImagePixels.Length)
                    {
                        color.CopyConvertedFrameDataToArray(colorImagePixels, ColorImageFormat.Bgra);
                    }

                    // Same size guard for the depth data.
                    FrameDescription depthDesc = depth.FrameDescription;
                    if (depthDesc.Width * depthDesc.Height == depthImagePixels.Length)
                    {
                        depth.CopyFrameDataToArray(depthImagePixels);
                    }
                }
            }
            catch (Exception)
            {
                // Deliberately swallowed: the frame may no longer be available.
            }
            finally
            {
                // DepthFrame and ColorFrame are IDisposable and must be released
                // promptly so the sensor can recycle its internal buffers.
                if (depth != null)
                {
                    depth.Dispose();
                    depth = null;
                }
                if (color != null)
                {
                    color.Dispose();
                    color = null;
                }
                if (multiFrame != null)
                {
                    multiFrame = null;
                }
            }
        }
Beispiel #50
0
        /// <summary>
        /// Handles the multisource frame data arriving from the sensor.
        /// Converts the depth and infrared frames (and, when <c>generateColorImage</c>
        /// is set, the color frame) into EMGU images and publishes them via
        /// <c>OnEmguArgsProcessed</c>.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private unsafe void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // Create instance of EMGUargs which holds the output of data from the kinect
            EMGUargs emguArgs = new EMGUargs();
            MultiSourceFrameReference frameReference = e.FrameReference;
            // Variables initialized to null for easy check of camera failures
            // (the finally block below disposes whichever of them were acquired)
            MultiSourceFrame multiSourceFrame = null;
            InfraredFrame    infraredFrame    = null;
            ColorFrame       colorFrame       = null;
            DepthFrame       depthFrame       = null;

            // Acquire frame from the Kinect
            multiSourceFrame = frameReference.AcquireFrame();

            // If the Frame has expired by the time we process this event, return.
            if (multiSourceFrame == null)
            {
                return;
            }
            try
            {
                /*
                 * DepthSpacePoint dp = new DepthSpacePoint
                 * {
                 *  X = 50,
                 *  Y = 20
                 * };
                 * DepthSpacePoint[] dps = new DepthSpacePoint[] { dp };
                 * ushort[] depths = new ushort[] { 2000 };
                 * CameraSpacePoint[] ameraSpacePoints = new CameraSpacePoint[1];
                 *
                 * mapper.MapDepthPointsToCameraSpace(dps, depths, ameraSpacePoints);
                 */
                InfraredFrameReference infraredFrameReference = multiSourceFrame.InfraredFrameReference;
                infraredFrame = infraredFrameReference.AcquireFrame();

                DepthFrameReference depthFrameReference = multiSourceFrame.DepthFrameReference;
                depthFrame = depthFrameReference.AcquireFrame();

                // Check whether needed frames are available; an early return here
                // still runs the finally block, which disposes the acquired frames.
                if (infraredFrame == null || depthFrame == null)
                {
                    return;
                }

                // the fastest way to process the depth frame data is to directly access
                // the underlying buffer
                using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                {
                    // verify data and write the new depth frame data to the display bitmap
                    if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) ==
                         (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)))
                    {
                        // Conversion to needed EMGU image
                        Mat depthImage = this.ProcessDepthFrameData(depthFrame);

                        emguArgs.DepthImage          = depthImage;
                        emguArgs.DepthFrameDimension = new FrameDimension(depthFrameDescription.Width, depthFrameDescription.Height);
                    }

                    //BgrToDephtPixel(depthBuffer.UnderlyingBuffer, depthBuffer.Size);

                    // NOTE(review): the frame is disposed while its KinectBuffer is
                    // still locked; presumably safe because ProcessDepthFrameData has
                    // already copied the data — confirm against the SDK contract.
                    depthFrame.Dispose();
                    depthFrame = null;
                }

                // IR image
                FrameDescription infraredFrameDescription = infraredFrame.FrameDescription;

                // the fastest way to process the infrared frame data is to directly access
                // the underlying buffer
                using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
                {
                    // verify data and write the new infrared frame data to the display bitmap
                    if (((this.infraredFrameDescription.Width * this.infraredFrameDescription.Height) == (infraredBuffer.Size / this.infraredFrameDescription.BytesPerPixel)))
                    {
                        // Conversion to needed EMGU image
                        Mat infraredImage = this.ProcessInfaredFrameData(infraredFrame);
                        emguArgs.InfraredImage          = infraredImage;
                        emguArgs.InfraredFrameDimension = new FrameDimension(infraredFrameDescription.Width, infraredFrameDescription.Height);
                        //  infraredImage.Dispose();
                    }
                    infraredFrame.Dispose();
                    infraredFrame = null;

                    // Check as to whether or not the color image is needed for mainwindow view.
                    // NOTE(review): this color block runs inside the infrared buffer's
                    // using-scope even though the infrared frame is already disposed.
                    if (generateColorImage)
                    {
                        ColorFrameReference colorFrameReference = multiSourceFrame.ColorFrameReference;
                        colorFrame = colorFrameReference.AcquireFrame();
                        // An expired color frame skips the event entirely (early return);
                        // depth/infrared results computed above are discarded in that case.
                        if (colorFrame == null)
                        {
                            return;
                        }

                        // color image
                        FrameDescription colorFrameDescription = colorFrame.FrameDescription;

                        // the fastest way to process the color frame data is to directly access
                        // the underlying buffer; the lock is held while converting.
                        using (Microsoft.Kinect.KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                        {
                            // Conversion to needed EMGU image
                            Mat colorImage = this.ProcessColorFrameData(colorFrame);
                            emguArgs.Colorimage          = colorImage;
                            emguArgs.ColorFrameDimension = new FrameDimension(colorFrameDescription.Width, colorFrameDescription.Height);
                        }
                        // We're done with the colorFrame
                        colorFrame.Dispose();
                        colorFrame = null;
                    }
                }
                // Call the processing finished event for the conversion to EMGU images
                OnEmguArgsProcessed(emguArgs);
            }
            catch (Exception ex)
            {
                // ignore if the frame is no longer available; log for diagnostics
                Console.WriteLine("FRAME CHRASHED: " + ex.ToString());
            }
            finally
            {
                // generate event at send writeable bitmaps for each frame, and cleanup.
                // only generate event if the mainwindow is shown.

                // DepthFrame, ColorFrame are Disposable; these are non-null here only
                // when an exception or early return skipped the inline Dispose above.
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                    colorFrame = null;
                }
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                    depthFrame = null;
                }
                // infraredFrame is Disposable
                if (infraredFrame != null)
                {
                    infraredFrame.Dispose();
                    infraredFrame = null;
                }
                if (multiSourceFrame != null)
                {
                    multiSourceFrame = null;
                }
            }
        }
Beispiel #51
0
        /**
         * Listener function: receives the frames from the kinect sensor, identifies
         * the frame type selected by the current mode, and broadcasts the serialized
         * result to every connected client socket.
         * @method _reader_MultiSourceFrameArrived
         * @param {Object} sender
         * @param {MultiSourceFrameArrivedEventArgs} e
         * @private
         */
        private static void _reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // Get a reference to the multi-frame. AcquireFrame returns null when the
            // frame has already expired; bail out instead of dereferencing null.
            MultiSourceFrame reference = e.FrameReference.AcquireFrame();
            if (reference == null)
            {
                return;
            }

            // if returned frame is a colourframe, return the colour frame blob
            using (ColorFrame frame = reference.ColorFrameReference.AcquireFrame())
            {
                if (frame != null && _mode == Mode.Color)
                {
                    // serialize the data
                    var blob = frame.Serialize();

                    foreach (var socket in _clients)
                    {
                        socket.Send(blob);
                    }
                }
            }

            // if returned frame is a depthframe, return the depth frame blob
            using (DepthFrame frame = reference.DepthFrameReference.AcquireFrame())
            {
                if (frame != null && _mode == Mode.Depth)
                {
                    // serialize the data
                    var blob = frame.Serialize();

                    foreach (var socket in _clients)
                    {
                        socket.Send(blob);
                    }
                }
            }

            // If returned frame is a skeletal/body frame, populate the tracked bodies
            // into a list and return the result through the sockets as JSON.
            using (BodyFrame frame = reference.BodyFrameReference.AcquireFrame())
            {
                if (frame != null)
                {
                    frame.GetAndRefreshBodyData(_skeletons);

                    // fetch the bodies monitored in the frame
                    var users = _skeletons.Where(s => s.IsTracked).ToList();

                    if (users.Count > 0)
                    {
                        // serialize the data to be returned
                        string json = users.Serialize(_coordinateMapper, _mode);

                        foreach (var socket in _clients)
                        {
                            socket.Send(json);
                        }
                    }
                }
            }
        }
 /// <summary>
 /// Multi-source frame callback: delegates to the background-removal helper and
 /// stores the resulting image in <c>_displayPixels</c> for display.
 /// </summary>
 /// <param name="colorFrame">Color frame of the current multi-frame.</param>
 /// <param name="depthFrame">Depth frame of the current multi-frame.</param>
 /// <param name="biFrame">Body index frame of the current multi-frame.</param>
 static unsafe void MultiFrameReady(ColorFrame colorFrame, DepthFrame depthFrame, BodyIndexFrame biFrame)
 {
     _displayPixels = _bgRemove.GreenScreen(colorFrame, depthFrame, biFrame);
 }
        /// <summary>
        /// Renders a color frame into the local bitmap and optionally saves it
        /// to disk as a numbered JPG file.
        /// </summary>
        /// <param name="frame">The color frame to display.</param>
        /// <returns>A task that completes once the frame is displayed (and saved, if enabled).</returns>
        async Task DisplayColorFrame(ColorFrame frame)
        {
            FrameDescription desc = frame.FrameDescription;
            int requiredBytes = desc.Width * desc.Height * this.bytesPerPixel;

            // Reallocate the staging buffer only when the frame size changes.
            if (this.pixels.Length != requiredBytes)
            {
                pixels = new byte[requiredBytes];
            }

            // Copy directly when the sensor already delivers BGRA; convert otherwise.
            if (frame.RawColorImageFormat == ColorImageFormat.Bgra)
            {
                frame.CopyRawFrameDataToArray(this.pixels);
            }
            else
            {
                frame.CopyConvertedFrameDataToArray(this.pixels, ColorImageFormat.Bgra);
            }

            // write the data into the local bitmap
            this.videoImageSource.WritePixels(
                new Int32Rect(0, 0, desc.Width, desc.Height),
                this.pixels,
                desc.Width * this.bytesPerPixel,
                0);

            // Save frame to JPG?
            if (WriteJpgFiles)
            {
                CurrentJpgFrame += 1;
                string filename = VideoFolder + "\\" + FramePrefix + CurrentJpgFrame.ToString("0000") + ".jpg";

                // Snapshot the pixel buffer so the async save is unaffected by the next frame.
                byte[] snapshot = new byte[this.pixels.Length];
                Array.Copy(this.pixels, snapshot, this.pixels.Length);
                await SaveJpg(snapshot, desc.Width, desc.Height, desc.Width * this.bytesPerPixel, filename);
            }

            NotifyPropertyChanged("VideoImageSource");
        }
    /// <summary>
    /// Lazily initializes the shared color, depth and skeleton frame wrappers the
    /// first time a Kinect v1 frame set arrives; every subsequent call is a no-op.
    /// </summary>
    /// <param name="depthFrame">First depth frame, used to size the depth buffer.</param>
    /// <param name="colorFrame">First color frame, used to size color and skeleton frames.</param>
    /// <param name="skeletonFrame">First skeleton frame (only triggers initialization).</param>
    private void InitFrames(DepthImageFrame depthFrame, ColorImageFrame colorFrame, SkeletonFrame skeletonFrame) {
      // Only the very first call does any work.
      if (init) { return; }
      init = true;

      // ----- Color frame wrapper -----
      Color = new ColorFrame {
        Width = colorFrame.Width,
        Height = colorFrame.Height,
        Pixels = new byte[colorFrame.PixelDataLength],
        Stamp = new Timestamp(),
        Fps = FPS
      };
      AddOnManager.GetInstance().InitFrame(Name, Color);
      Log(Color.ToString());
      ColorFormat = colorFrame.Format;

      // ----- Depth frame wrapper -----
      Depth = new DepthFrame {
        Width = depthFrame.Width,
        Height = depthFrame.Height,
        Pixelss = new short[depthFrame.PixelDataLength],
        Stamp = new Timestamp()
      };
      AddOnManager.GetInstance().InitFrame(Name, Depth);
      Log(Depth.ToString());

      // Motion-detection task polling the depth frame at a configurable interval.
      var startDelay = TimeSpan.FromMilliseconds(200);
      var pollInterval = TimeSpan.FromMilliseconds(ConfigManager.GetInstance().Find("kinect_v1.motion.ms", 100));
      Task = new MotionTask(startDelay, pollInterval);
      Task.Device = "";
      Task.AddFrame(Depth);
      Task.Start();

      // ----- Skeleton frame wrapper (sized to the color image) -----
      Skeletons = new BodyFrame {
        Width = colorFrame.Width,
        Height = colorFrame.Height,
        RawData = new Skeleton[6],
        Bodies = new List<NBody>(6),
        Stamp = new Timestamp()
      };
      AddOnManager.GetInstance().InitFrame(Name, Skeletons);
      Log(Skeletons.ToString());
    }
        /// <summary>
        /// Converts a depth frame to the corresponding System.Windows.Media.Imaging.BitmapSource and removes the background (green-screen effect).
        /// Body pixels are painted as semi-transparent white; everything else stays cleared.
        /// </summary>
        /// <param name="colorFrame">The specified color frame.</param>
        /// <param name="depthFrame">The specified depth frame.</param>
        /// <param name="bodyIndexFrame">The specified body index frame.</param>
        /// <returns>The corresponding System.Windows.Media.Imaging.BitmapSource representation of image.</returns>
        public BitmapSource GreenScreen(ColorFrame colorFrame, DepthFrame depthFrame, BodyIndexFrame bodyIndexFrame)
        {
            FrameDescription colorDesc = colorFrame.FrameDescription;
            FrameDescription depthDesc = depthFrame.FrameDescription;
            FrameDescription bodyDesc = bodyIndexFrame.FrameDescription;

            int colorWidth = colorDesc.Width;
            int colorHeight = colorDesc.Height;
            int depthWidth = depthDesc.Width;
            int depthHeight = depthDesc.Height;

            // Lazily allocate all working buffers on the very first frame.
            if (_displayPixels == null)
            {
                int depthPixelCount = depthWidth * depthHeight;
                _depthData = new ushort[depthPixelCount];
                _bodyData = new byte[depthPixelCount];
                _colorData = new byte[colorWidth * colorHeight * BYTES_PER_PIXEL];
                _displayPixels = new byte[depthPixelCount * BYTES_PER_PIXEL];
                _colorPoints = new ColorSpacePoint[depthPixelCount];
                _bitmap = new WriteableBitmap(depthWidth, depthHeight, DPI, DPI, FORMAT, null);
            }

            // Only process when every incoming frame matches its buffer size.
            bool buffersMatch =
                depthWidth * depthHeight == _depthData.Length &&
                colorWidth * colorHeight * BYTES_PER_PIXEL == _colorData.Length &&
                bodyDesc.Width * bodyDesc.Height == _bodyData.Length;

            if (buffersMatch)
            {
                // Stage the raw frame data.
                depthFrame.CopyFrameDataToArray(_depthData);

                if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                {
                    colorFrame.CopyRawFrameDataToArray(_colorData);
                }
                else
                {
                    colorFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Bgra);
                }

                bodyIndexFrame.CopyFrameDataToArray(_bodyData);

                // Map each depth pixel into color space.
                _coordinateMapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);

                // Cleared pixels form the removed background.
                Array.Clear(_displayPixels, 0, _displayPixels.Length);

                for (int row = 0; row < depthHeight; ++row)
                {
                    for (int col = 0; col < depthWidth; ++col)
                    {
                        int depthIndex = (row * depthWidth) + col;

                        // 0xff means no tracked body at this depth pixel.
                        if (_bodyData[depthIndex] == 0xff)
                        {
                            continue;
                        }

                        ColorSpacePoint mapped = _colorPoints[depthIndex];
                        int colorX = (int)Math.Floor(mapped.X + 0.5);
                        int colorY = (int)Math.Floor(mapped.Y + 0.5);

                        // Mapped points near the frame edge can fall outside color space.
                        if (colorX < 0 || colorX >= colorWidth || colorY < 0 || colorY >= colorHeight)
                        {
                            continue;
                        }

                        // Paint the body silhouette as semi-transparent white (BGRA).
                        int displayIndex = depthIndex * BYTES_PER_PIXEL;
                        _displayPixels[displayIndex + 0] = 255;
                        _displayPixels[displayIndex + 1] = 255;
                        _displayPixels[displayIndex + 2] = 255;
                        _displayPixels[displayIndex + 3] = 127;
                        // 79 195 247
                    }
                }

                // Blit the staged pixels into the writeable bitmap.
                _bitmap.Lock();
                Marshal.Copy(_displayPixels, 0, _bitmap.BackBuffer, _displayPixels.Length);
                _bitmap.AddDirtyRect(new Int32Rect(0, 0, depthWidth, depthHeight));
                _bitmap.Unlock();
            }

            return _bitmap;
        }
Beispiel #56
0
        /// <summary>
        /// Initializes a new instance of the <see cref="ReplayColorFrame"/> class
        /// based on the specified <c>ColorFrame</c> and <c>byte</c> array.
        /// Only the frame's metadata is read; the pixel data comes from <paramref name="bytes"/>.
        /// </summary>
        /// <param name="frame">The frame supplying timing and dimension metadata.</param>
        /// <param name="bytes">The already-captured BGRA pixel data.</param>
        internal ReplayColorFrame(ColorFrame frame, byte[] bytes)
        {
            FrameDescription description = frame.FrameDescription;

            this.Codec = ColorCodecs.Raw;
            this.FrameType = FrameTypes.Color;
            this.RelativeTime = frame.RelativeTime;
            this.Width = description.Width;
            this.Height = description.Height;

            // BGRA is 4 bytes per pixel.
            this.FrameDataSize = this.Width * this.Height * 4;
            this._frameData = bytes;
        }
        /// <summary>
        /// Interprets the passed color frame and returns an image source (bitmap) which can be displayed in the UI.
        /// Code by Vangos Pterneas.
        /// </summary>
        /// <param name="frame">The Kinect color frame (typically 1920 x 1080).</param>
        /// <returns>A BGRA <see cref="BitmapSource"/> holding the frame's pixels.</returns>
        private ImageSource ToBitmap(ColorFrame frame)
        {
            //width and height of the color frame (should be 1920 x 1080)
            int width = frame.FrameDescription.Width;
            int height = frame.FrameDescription.Height;

            // Bytes per pixel for the output. Fix: the original sized this buffer from
            // PixelFormats.Bgr32 while creating the bitmap as Bgra32; both are 32 bpp so
            // the sizes coincided, but we now use Bgra32 consistently throughout.
            int bytesPerPixel = (PixelFormats.Bgra32.BitsPerPixel + 7) / 8;

            // One entry per pixel byte: width * height pixels, 4 bytes each (B, G, R, A).
            byte[] pixels = new byte[width * height * bytesPerPixel];

            // Copy the raw data when it is already BGRA; otherwise let the SDK convert.
            if (frame.RawColorImageFormat == ColorImageFormat.Bgra)
            {
                frame.CopyRawFrameDataToArray(pixels);
            }
            else
            {
                frame.CopyConvertedFrameDataToArray(pixels, ColorImageFormat.Bgra);
            }

            // Stride = number of bytes per scan line.
            int stride = width * bytesPerPixel;

            // Create and return the bitmap.
            return BitmapSource.Create(width, height, 96, 96, PixelFormats.Bgra32, null, pixels, stride);
        }
Beispiel #58
0
        /// <summary>
        /// Initializes a new instance of the <see cref="ReplayColorFrame"/> class
        /// based on the specified <c>ColorFrame</c>, capturing both its metadata
        /// and a BGRA copy of its pixel data.
        /// </summary>
        /// <param name="frame">The frame.</param>
        internal ReplayColorFrame(ColorFrame frame)
        {
            FrameDescription description = frame.FrameDescription;

            this.Codec = ColorCodecs.Raw;
            this.FrameType = FrameTypes.Color;
            this.RelativeTime = frame.RelativeTime;
            this.Width = description.Width;
            this.Height = description.Height;

            // BGRA is 4 bytes per pixel.
            this.FrameDataSize = this.Width * this.Height * 4;
            this._frameData = new Byte[this.FrameDataSize];

            // Avoid a conversion pass when the sensor already delivers BGRA.
            if (frame.RawColorImageFormat == ColorImageFormat.Bgra)
            {
                frame.CopyRawFrameDataToArray(_frameData);
            }
            else
            {
                frame.CopyConvertedFrameDataToArray(_frameData, ColorImageFormat.Bgra);
            }
        }
Beispiel #59
0
        /// <summary>
        /// This event will be called whenever the color Frame Reader receives a new frame
        /// </summary>
        /// <param name="sender">sender</param>
        /// <param name="e">args of the event</param>
        void Reader_FrameArrived(object sender, ColorFrameArrivedEventArgs e)
        {
            // Store the depth and the color frame
            DepthFrame depthFrame = null;
            ColorFrame colorFrame = null;

            // Store the state of the frame lock
            bool isColorBitmapLocked = false;
            bool isDepthBitmapLocked = false;

            // Acquire a new frame
            colorFrame = e.FrameReference.AcquireFrame();

            // If the frame has expired or is invalid, return
            if (colorFrame == null)
            {
                return;
            }

            // Using a try/finally structure allows us to liberate/dispose of the elements even if there was an error
            try
            {
                // ===============================
                // ColorFrame code block
                // ===============================

                FrameDescription colorDesc = colorFrame.FrameDescription;
                // Using an IDisposable buffer to work with the color frame. Will be disposed automatically at the end of the using block.
                using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                {
                    // Lock the colorBitmap while we write in it.
                    this.colorBitmap.Lock();
                    isColorBitmapLocked = true;

                    // Check for correct size
                    if (colorDesc.Width == this.colorBitmap.Width && colorDesc.Height == this.colorBitmap.Height)
                    {
                        //write the new color frame data to the display bitmap
                        colorFrame.CopyConvertedFrameDataToIntPtr(this.colorBitmap.BackBuffer, (uint)(colorDesc.Width * colorDesc.Height * BYTESPERPIXELS), ColorImageFormat.Bgra);

                        // Mark the entire buffer as dirty to refresh the display
                        this.colorBitmap.AddDirtyRect(new Int32Rect(0, 0, colorDesc.Width, colorDesc.Height));
                    }

                    // Unlock the colorBitmap
                    this.colorBitmap.Unlock();
                    isColorBitmapLocked = false;
                }



                // ================================================================
                // DepthFrame code block : À modifier et completer
                // Remarque : Beaucoup de code à modifer/ajouter dans cette partie
                // ================================================================



                using (KinectBuffer depthBuffer = colorFrame.LockRawImageBuffer())
                {
                    // Lock the depthBitmap while we write in it.
                    this.depthBitmap.Lock();
                    isDepthBitmapLocked = true;

                    //-----------------------------------------------------------
                    // Effectuer la correspondance espace Profondeur---Couleur
                    //-----------------------------------------------------------
                    //  Utiliser la ligne ci-dessous pour l'image de profondeur
                    Image <Gray, byte> depthImageGray = new Image <Gray, byte>(RAWCOLORWIDTH, RAWCOLORHEIGHT);

                    //-----------------------------------------------------------
                    // Traiter l'image de profondeur
                    //-----------------------------------------------------------


                    // Une fois traitée convertir l'image en Bgra
                    Image <Bgra, byte> depthImageBgra = depthImageGray.Convert <Bgra, byte>();

                    //---------------------------------------------------------------------------------------------------------
                    //  Modifier le code pour que depthBitmap contienne depthImageBgra au lieu du contenu trame couleur actuel
                    //---------------------------------------------------------------------------------------------------------
                    if (colorDesc.Width == this.colorBitmap.Width && colorDesc.Height == this.colorBitmap.Height)
                    {
                        colorFrame.CopyConvertedFrameDataToIntPtr(this.depthBitmap.BackBuffer, (uint)(colorDesc.Width * colorDesc.Height * BYTESPERPIXELS), ColorImageFormat.Bgra);

                        // Mark the entire buffer as dirty to refresh the display
                        this.depthBitmap.AddDirtyRect(new Int32Rect(0, 0, colorDesc.Width, colorDesc.Height));
                    }



                    // Unlock the colorBitmap
                    this.depthBitmap.Unlock();
                    isDepthBitmapLocked = false;
                }


                // We are done with the depthFrame, dispose of it
                // depthFrame.Dispose();
                depthFrame = null;
                // We are done with the ColorFrame, dispose of it
                colorFrame.Dispose();
                colorFrame = null;

                // ===============================
                // ===============================
            }
            finally
            {
                if (isColorBitmapLocked)
                {
                    this.colorBitmap.Unlock();
                }
                if (isDepthBitmapLocked)
                {
                    this.depthBitmap.Unlock();
                }
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                }
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                }
            }
        }
 /// <summary>
 /// Broadcasts the supplied color frame to every registered color data listener.
 /// Dispatch is skipped entirely while <c>ShouldSendEvents</c> is false.
 /// </summary>
 /// <param name="aColorFrame">The Kinect color frame to forward to listeners.</param>
 private void NotifyColorDataListenersOfUpdate(ColorFrame aColorFrame)
 {
     // Guard clause: bail out early when event dispatch is suppressed.
     if (!this.ShouldSendEvents)
     {
         return;
     }

     foreach (ColorDataListener listener in this.colorDataListeners)
     {
         listener.KinectManagerDidGetUpdatedColorData(this, aColorFrame);
     }
 }