Example #1
        public void Update()
        {
            if (reader != null)
            {
                InfraredFrame frame = reader.AcquireLatestFrame();
                if (frame != null)
                {
                    frame.CopyFrameDataToArray(FrameData);

                    int index = 0;
                    foreach (var ir in FrameData)
                    {
                        byte intensity = (byte)(ir >> 8);
                        for (int i = 0; i < 3; i++)
                        {
                            RawData[index++] = intensity;
                        }
                        RawData[index++] = 255; // Alpha
                    }

                    Texture.LoadRawTextureData(RawData);
                    Texture.Apply();

                    frame.Dispose();
                    frame = null;
                }
            }
        }
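Example #1 assumes fields created elsewhere (an InfraredFrameReader, the two buffers, and a Unity Texture2D). A minimal initialization sketch, with hypothetical field names matching the snippet and assuming the Kinect v2 Unity plugin (Windows.Kinect) plus UnityEngine, might look like this:

        private KinectSensor sensor;
        private InfraredFrameReader reader;
        private ushort[] FrameData;
        private byte[] RawData;
        public Texture2D Texture;

        void Start()
        {
            sensor = KinectSensor.GetDefault();
            if (sensor != null)
            {
                reader = sensor.InfraredFrameSource.OpenReader();
                FrameDescription desc = sensor.InfraredFrameSource.FrameDescription;

                // One ushort per IR pixel in, four bytes (RGBA) per pixel out.
                FrameData = new ushort[desc.LengthInPixels];
                RawData   = new byte[desc.LengthInPixels * 4];
                Texture   = new Texture2D(desc.Width, desc.Height, TextureFormat.RGBA32, false);

                if (!sensor.IsOpen)
                {
                    sensor.Open();
                }
            }
        }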
Example #2
    void updateInfrared(InfraredFrame infraredFrame)
    {
        if (infraredFrame != null)
        {
            infraredFrame.CopyFrameDataToArray(infraredRawData);

            Parallel.For(0, infraredHeight, y =>
            {
                for (int x = 0; x < infraredWidth; x++)
                {
                    int index      = y * infraredWidth + x;
                    byte intensity = (byte)(infraredRawData[index] >> 8);
                    infraredData[(index << 2) | 0] = intensity;
                    infraredData[(index << 2) | 1] = intensity;
                    infraredData[(index << 2) | 2] = intensity;
                    infraredData[(index << 2) | 3] = 255;
                }
            });

            infraredTexture.LoadRawTextureData(infraredData);
            infraredTexture.Apply();

            infraredFrame.Dispose();
            infraredFrame = null;
        }
    }
Example #3
        /// <summary>
        /// Store infrared image
        /// </summary>
        /// <param name="infraredFrame">infrared frame to be stored</param>
        /// <param name="frameNumber">frame number</param>
        public static void Handle_InfraredFrame(InfraredFrame infraredFrame, String frameNumber)
        {
            using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
            {
                BitmapSource bitmapSource = BitmapSource.Create(infraredWidth, infraredHeight, 96.0, 96.0,
                                                                PixelFormats.Gray16, null, infraredBuffer.UnderlyingBuffer, (int)infraredBuffer.Size, infraredWidth << 1);

                String infraredPath = FramesAndPaths.GetImageFilePath(FramesAndPaths.FileType.InfraredImage, frameNumber);
                bitmapSource.Save(infraredPath + ".jpg", ImageFormat.Jpeg);
            }
            // Release infraredFrame
            infraredFrame.Dispose();
        }
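The Save call on bitmapSource in the example above is not part of WPF's BitmapSource API, so the project presumably defines an extension method for it. A hedged sketch of such a helper, covering only the JPEG case used here (System.Windows.Media.Imaging, System.Drawing.Imaging and System.IO assumed), could be:

        // Hypothetical extension method assumed by the example above; only JPEG output is sketched.
        public static class BitmapSourceExtensions
        {
            public static void Save(this BitmapSource source, string path, ImageFormat format)
            {
                if (!ImageFormat.Jpeg.Equals(format))
                {
                    throw new NotSupportedException("Only JPEG output is sketched here.");
                }

                var encoder = new JpegBitmapEncoder();
                encoder.Frames.Add(BitmapFrame.Create(source));

                using (var stream = new FileStream(path, FileMode.Create))
                {
                    encoder.Save(stream);
                }
            }
        }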
Example #4
        private void processInfraredFrame(InfraredFrame infraredFrame)
        {
            if (infraredFrame != null)
            {
                Box dimensions = Box.With(infraredFrame.FrameDescription.Width, infraredFrame.FrameDescription.Height);
                frameResolutions[SourceType.INFRARED]  = dimensions;
                framePixelFormats[SourceType.INFRARED] = PixelFormats.Gray16;

                if (infraredPixels == null)
                {
                    infraredPixels = new UInt16[dimensions.Area];
                }

                infraredFrame.CopyFrameDataToArray(infraredPixels);
                infraredFrame.Dispose();
            }
        }
Example #5
            private void ProcessInfraredFrame(InfraredFrame infraredFrame)
            {
                if (infraredFrame != null)
                {
                    infraredFrame.CopyFrameDataToArray(_rawInfraredPixels);
                    infraredFrame.Dispose();

                    int outIndex = 0;
                    for (int index = 0; index < _rawInfraredPixels.Length; ++index)
                    {
                        UInt16 pixel     = _rawInfraredPixels[index];
                        byte   intensity = (byte)(pixel >> 8);

                        _displayableBuffers[SourceType.INFRARED][outIndex++] = intensity;
                        _displayableBuffers[SourceType.INFRARED][outIndex++] = intensity;
                        _displayableBuffers[SourceType.INFRARED][outIndex++] = intensity;
                        _displayableBuffers[SourceType.INFRARED][outIndex++] = 0xFF;
                    }
                }
            }
Example #6
 private void Update()
 {
     if (this._Reader != null)
     {
         InfraredFrame infraredFrame = this._Reader.AcquireLatestFrame();
         if (infraredFrame != null)
         {
             infraredFrame.CopyFrameDataToArray(this._Data);
             int num = 0;
             foreach (ushort num2 in this._Data)
             {
                 byte b = (byte)(num2 >> 8);
                 this._RawData[num++] = b;
                 this._RawData[num++] = b;
                 this._RawData[num++] = b;
                 this._RawData[num++] = byte.MaxValue;
             }
             this._Texture.LoadRawTextureData(this._RawData);
             this._Texture.Apply();
             infraredFrame.Dispose();
         }
     }
 }
Example #7
        private void ReaderOnMultiSourceFrameArrived(object sender,
                                                     MultiSourceFrameArrivedEventArgs multiSourceFrameArrivedEventArgs)
        {
            if (multiSourceFrameArrivedEventArgs == null)
            {
                return;
            }

            MultiSourceFrame multiSourceFrame = multiSourceFrameArrivedEventArgs.FrameReference.AcquireFrame();
            if (multiSourceFrame == null)
            {
                return;
            }

            // Acquire each source frame from the same MultiSourceFrame instead of
            // re-acquiring the multi-source frame once per reference.
            ColorFrame    colorFrame    = multiSourceFrame.ColorFrameReference.AcquireFrame();
            InfraredFrame infraredFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame();
            DepthFrame    depthFrame    = multiSourceFrame.DepthFrameReference.AcquireFrame();

            // Color
            if (colorFrame != null)
            {
                colorPreview.Source = GetBitmapSourceFromFrame(colorFrame);
                colorFrame.Dispose();
            }

            // Infrared
            if (infraredFrame != null)
            {
                infraredPreview.Source = GetBitmapSourceFromFrame(infraredFrame);
                infraredFrame.Dispose();
            }

            // Depth
            if (depthFrame != null)
            {
                depthPreview.Source = GetBitmapSourceFromFrame(depthFrame);
                depthFrame.Dispose();
            }
        }
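GetBitmapSourceFromFrame is a project helper that is not shown. For the infrared case, a minimal sketch of what such a conversion might look like (hypothetical overload, Gray16 output as in Example #3) is:

        // Hypothetical infrared-only variant of the helper used above.
        private BitmapSource GetBitmapSourceFromFrame(InfraredFrame frame)
        {
            FrameDescription description = frame.FrameDescription;
            ushort[] pixels = new ushort[description.LengthInPixels];
            frame.CopyFrameDataToArray(pixels);

            // Gray16 uses two bytes per pixel, so the stride is Width * 2.
            return BitmapSource.Create(description.Width, description.Height, 96.0, 96.0,
                                       PixelFormats.Gray16, null, pixels, description.Width * 2);
        }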
Example #8
        /// <summary>
        /// Handles the multisource frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private unsafe void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // Create an instance of EMGUargs, which holds the output data from the Kinect
            EMGUargs emguArgs = new EMGUargs();
            MultiSourceFrameReference frameReference = e.FrameReference;
            // Variables initialized to null for easy check of camera failures
            MultiSourceFrame multiSourceFrame = null;
            InfraredFrame    infraredFrame    = null;
            ColorFrame       colorFrame       = null;
            DepthFrame       depthFrame       = null;

            // Acquire frame from the Kinect
            multiSourceFrame = frameReference.AcquireFrame();

            // If the Frame has expired by the time we process this event, return.
            if (multiSourceFrame == null)
            {
                return;
            }
            try
            {
                /*
                 * DepthSpacePoint dp = new DepthSpacePoint
                 * {
                 *  X = 50,
                 *  Y = 20
                 * };
                 * DepthSpacePoint[] dps = new DepthSpacePoint[] { dp };
                 * ushort[] depths = new ushort[] { 2000 };
                 * CameraSpacePoint[] ameraSpacePoints = new CameraSpacePoint[1];
                 *
                 * mapper.MapDepthPointsToCameraSpace(dps, depths, ameraSpacePoints);
                 */
                InfraredFrameReference infraredFrameReference = multiSourceFrame.InfraredFrameReference;
                infraredFrame = infraredFrameReference.AcquireFrame();

                DepthFrameReference depthFrameReference = multiSourceFrame.DepthFrameReference;
                depthFrame = depthFrameReference.AcquireFrame();

                // Check whether the needed frames are available
                if (infraredFrame == null || depthFrame == null)
                {
                    return;
                }

                // the fastest way to process the depth frame data is to directly access
                // the underlying buffer
                using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                {
                    // verify the buffer size, then convert the depth frame data to an EMGU image
                    if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) ==
                         (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)))
                    {
                        // Conversion to needed EMGU image
                        Mat depthImage = this.ProcessDepthFrameData(depthFrame);

                        emguArgs.DepthImage          = depthImage;
                        emguArgs.DepthFrameDimension = new FrameDimension(depthFrameDescription.Width, depthFrameDescription.Height);
                    }

                    //BgrToDephtPixel(depthBuffer.UnderlyingBuffer, depthBuffer.Size);

                    depthFrame.Dispose();
                    depthFrame = null;
                }

                // IR image
                FrameDescription infraredFrameDescription = infraredFrame.FrameDescription;

                // the fastest way to process the infrared frame data is to directly access
                // the underlying buffer
                using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
                {
                    // verify the buffer size, then convert the infrared frame data to an EMGU image
                    if (((this.infraredFrameDescription.Width * this.infraredFrameDescription.Height) == (infraredBuffer.Size / this.infraredFrameDescription.BytesPerPixel)))
                    {
                        // Conversion to needed EMGU image
                        Mat infraredImage = this.ProcessInfaredFrameData(infraredFrame);
                        emguArgs.InfraredImage          = infraredImage;
                        emguArgs.InfraredFrameDimension = new FrameDimension(infraredFrameDescription.Width, infraredFrameDescription.Height);
                        //  infraredImage.Dispose();
                    }
                    infraredFrame.Dispose();
                    infraredFrame = null;

                    // Check whether the color image is needed for the main window view
                    if (generateColorImage)
                    {
                        ColorFrameReference colorFrameReference = multiSourceFrame.ColorFrameReference;
                        colorFrame = colorFrameReference.AcquireFrame();
                        if (colorFrame == null)
                        {
                            return;
                        }

                        // color image
                        FrameDescription colorFrameDescription = colorFrame.FrameDescription;

                        // the fastest way to process the color frame data is to directly access
                        // the underlying buffer
                        using (Microsoft.Kinect.KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                        {
                            // Conversion to needed EMGU image
                            Mat colorImage = this.ProcessColorFrameData(colorFrame);
                            emguArgs.Colorimage          = colorImage;
                            emguArgs.ColorFrameDimension = new FrameDimension(colorFrameDescription.Width, colorFrameDescription.Height);
                        }
                        // We're done with the colorFrame
                        colorFrame.Dispose();
                        colorFrame = null;
                    }
                }
                // Call the processing finished event for the conversion to EMGU images
                OnEmguArgsProcessed(emguArgs);
            }
            catch (Exception ex)
            {
                // ignore if the frame is no longer available
                Console.WriteLine("FRAME CHRASHED: " + ex.ToString());
            }
            finally
            {
                // generate an event to send writeable bitmaps for each frame, and clean up.
                // Only generate the event if the main window is shown.

                // DepthFrame, ColorFrame are Disposable.
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                    colorFrame = null;
                }
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                    depthFrame = null;
                }
                // infraredFrame is Disposable
                if (infraredFrame != null)
                {
                    infraredFrame.Dispose();
                    infraredFrame = null;
                }
                if (multiSourceFrame != null)
                {
                    multiSourceFrame = null;
                }
            }
        }
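The EMGU conversion helpers used above (ProcessDepthFrameData, ProcessInfaredFrameData, ProcessColorFrameData) are not shown. A hedged sketch of the infrared variant, assuming Emgu CV 3.x and the SDK's CopyFrameDataToIntPtr, might look like this:

        // Sketch of an infrared-to-Mat conversion in the spirit of ProcessInfaredFrameData;
        // assumes Emgu.CV and Emgu.CV.CvEnum are referenced.
        private Mat ProcessInfraredFrameDataSketch(InfraredFrame infraredFrame)
        {
            FrameDescription description = infraredFrame.FrameDescription;

            // One 16-bit channel per pixel, matching PixelFormats.Gray16.
            Mat infraredMat = new Mat(description.Height, description.Width, DepthType.Cv16U, 1);

            // Copy the raw IR data straight into the Mat's buffer (2 bytes per pixel).
            infraredFrame.CopyFrameDataToIntPtr(
                infraredMat.DataPointer,
                (uint)(description.Width * description.Height * sizeof(ushort)));

            return infraredMat;
        }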
Example #9
        /// <summary>
        /// Procedure invoked by Kinect when new data are available
        /// </summary>
        private void MultisourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            if (KinectSensor == null || Reader == null)
            {
                return;
            }

            // acquire frame data
            MultiSourceFrame multiSourceFrame = e.FrameReference.AcquireFrame();

            // if the Frame has expired by the time we process this event, return.
            if (multiSourceFrame == null)
            {
                return;
            }

            // Continue only if buffer is empty
            if (!Buffer.IsEmpty())
            {
                return;
            }

            // declare variables for data from sensor
            ColorFrame    colorFrame    = null;
            DepthFrame    depthFrame    = null;
            InfraredFrame infraredFrame = null;

            byte[]             colorFrameData                 = null;
            ushort[]           depthData                      = null;
            ushort[]           infraredData                   = null;
            DepthSpacePoint[]  pointsFromColorToDepth         = null;
            ColorSpacePoint[]  pointsFromDepthToColor         = null;
            CameraSpacePoint[] cameraSpacePointsFromDepthData = null;

            try
            {
                // get frames from sensor
                colorFrame    = multiSourceFrame.ColorFrameReference.AcquireFrame();
                depthFrame    = multiSourceFrame.DepthFrameReference.AcquireFrame();
                infraredFrame = multiSourceFrame.InfraredFrameReference.AcquireFrame();

                // If any frame has expired by the time we process this event, return.
                if (colorFrame == null || depthFrame == null || infraredFrame == null)
                {
                    return;
                }

                // use frame data to fill arrays
                colorFrameData = new byte[ColorFrameDescription.LengthInPixels * 4];
                depthData      = new ushort[DepthFrameDescription.LengthInPixels];
                infraredData   = new ushort[InfraredFrameDescription.LengthInPixels];

                colorFrame.CopyConvertedFrameDataToArray(colorFrameData, ColorImageFormat.Bgra);
                depthFrame.CopyFrameDataToArray(depthData);
                infraredFrame.CopyFrameDataToArray(infraredData);

                pointsFromColorToDepth         = new DepthSpacePoint[ColorFrameDescription.LengthInPixels];
                pointsFromDepthToColor         = new ColorSpacePoint[DepthFrameDescription.LengthInPixels];
                cameraSpacePointsFromDepthData = new CameraSpacePoint[DepthFrameDescription.LengthInPixels];

                using (KinectBuffer depthFrameData = depthFrame.LockImageBuffer())
                {
                    CoordinateMapper.MapColorFrameToDepthSpaceUsingIntPtr(
                        depthFrameData.UnderlyingBuffer,
                        depthFrameData.Size,
                        pointsFromColorToDepth);

                    CoordinateMapper.MapDepthFrameToColorSpaceUsingIntPtr(
                        depthFrameData.UnderlyingBuffer,
                        depthFrameData.Size,
                        pointsFromDepthToColor);

                    CoordinateMapper.MapDepthFrameToCameraSpaceUsingIntPtr(
                        depthFrameData.UnderlyingBuffer,
                        depthFrameData.Size,
                        cameraSpacePointsFromDepthData);
                }
            }
            finally
            {
                // dispose frames so that Kinect can continue processing
                colorFrame?.Dispose();
                depthFrame?.Dispose();
                infraredFrame?.Dispose();

                // send data further
                if (
                    colorFrameData != null &&
                    depthData != null &&
                    infraredData != null &&
                    cameraSpacePointsFromDepthData != null
                    )
                {
                    // store data to buffer and notify processing thread
                    Buffer.Store(
                        new KinectData(
                            colorFrameData,
                            depthData,
                            infraredData,
                            cameraSpacePointsFromDepthData,
                            pointsFromColorToDepth,
                            pointsFromDepthToColor
                            )
                        );

                    TrackingManager.SendKinectUpdate();
                }
            }
        }
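Example #9 relies on a multi-source reader and frame descriptions created elsewhere. A hedged setup sketch (property names taken from the snippet, everything else assumed; Buffer and TrackingManager stay project-specific) could be:

        // Hypothetical initialization for the KinectSensor, Reader, CoordinateMapper
        // and frame-description properties used by MultisourceFrameArrived.
        private void InitializeKinect()
        {
            KinectSensor = KinectSensor.GetDefault();

            Reader = KinectSensor.OpenMultiSourceFrameReader(
                FrameSourceTypes.Color | FrameSourceTypes.Depth | FrameSourceTypes.Infrared);
            Reader.MultiSourceFrameArrived += MultisourceFrameArrived;

            // Use a BGRA description so LengthInPixels * 4 matches the converted color buffer.
            ColorFrameDescription    = KinectSensor.ColorFrameSource.CreateFrameDescription(ColorImageFormat.Bgra);
            DepthFrameDescription    = KinectSensor.DepthFrameSource.FrameDescription;
            InfraredFrameDescription = KinectSensor.InfraredFrameSource.FrameDescription;
            CoordinateMapper         = KinectSensor.CoordinateMapper;

            KinectSensor.Open();
        }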
Example #10
        /// <summary>
        /// Update to get a new frame.
        /// This code is similar to the code in the Kinect SDK samples.
        /// </summary>
        private static void Update()
        {
            if (!isConnected)
            {
                return;
            }

            dataAvailable.WaitOne();

            MultiSourceFrame multiSourceFrame = null;
            DepthFrame       depthFrame       = null;
            InfraredFrame    irFrame          = null;
            BodyFrame        bodyFrame        = null;

            lock (updateLock)
            {
                try
                {
                    if (frameReference != null)
                    {
                        multiSourceFrame = frameReference.AcquireFrame();

                        if (multiSourceFrame != null)
                        {
                            DepthFrameReference    depthFrameReference = multiSourceFrame.DepthFrameReference;
                            InfraredFrameReference irFrameReference    = multiSourceFrame.InfraredFrameReference;
                            BodyFrameReference     bodyFrameReference  = multiSourceFrame.BodyFrameReference;

                            depthFrame = depthFrameReference.AcquireFrame();
                            irFrame    = irFrameReference.AcquireFrame();

                            if ((depthFrame != null) && (irFrame != null))
                            {
                                FrameDescription depthFrameDescription = depthFrame.FrameDescription;
                                FrameDescription irFrameDescription    = irFrame.FrameDescription;

                                int depthWidth  = depthFrameDescription.Width;
                                int depthHeight = depthFrameDescription.Height;
                                int irWidth     = irFrameDescription.Width;
                                int irHeight    = irFrameDescription.Height;

                                // verify the array sizes and copy the new frame data into the arrays
                                if (((depthWidth * depthHeight) == depthFrameData.Length) &&
                                    ((irWidth * irHeight) == irFrameData.Length))
                                {
                                    depthFrame.CopyFrameDataToArray(depthFrameData);
                                    irFrame.CopyFrameDataToArray(irFrameData);
                                }

                                if (bodyFrameReference != null)
                                {
                                    bodyFrame = bodyFrameReference.AcquireFrame();

                                    if (bodyFrame != null)
                                    {
                                        if (bodies == null || bodies.Length < bodyFrame.BodyCount)
                                        {
                                            bodies = new Body[bodyFrame.BodyCount];
                                        }
                                        using (bodyFrame)
                                        {
                                            bodyFrame.GetAndRefreshBodyData(bodies);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                catch (Exception)
                {
                    // ignore if the frame is no longer available
                }
                finally
                {
                    if (depthFrame != null)
                    {
                        depthFrame.Dispose();
                        depthFrame = null;
                    }

                    if (irFrame != null)
                    {
                        irFrame.Dispose();
                        irFrame = null;
                    }
                    if (bodyFrame != null)
                    {
                        bodyFrame.Dispose();
                        bodyFrame = null;
                    }
                    if (multiSourceFrame != null)
                    {
                        multiSourceFrame = null;
                    }
                }
            }
        }
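The static Update above blocks on dataAvailable and reads frameReference, which are expected to be filled in by a frame-arrived handler. A hypothetical handler matching that pattern (dataAvailable assumed to be an AutoResetEvent, frameReference a static MultiSourceFrameReference field) would be:

        // Records the latest frame reference and wakes the polling thread in Update().
        private static void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            frameReference = e.FrameReference;
            dataAvailable.Set();
        }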
Example #11
        private async void OnMultipleFramesArrivedHandler(MultiSourceFrame e)
        {
            BodyFrame     bodyFrame     = null;
            ColorFrame    colorFrame    = null;
            InfraredFrame infraredFrame = null;


            if (e == null)
            {
                return;
            }

            try
            {
                bodyFrame     = e.BodyFrameReference.AcquireFrame();
                colorFrame    = e.ColorFrameReference.AcquireFrame();
                infraredFrame = e.InfraredFrameReference.AcquireFrame();

                if ((bodyFrame == null) || (colorFrame == null) || (infraredFrame == null))
                {
                    return;
                }


                //ColorFrame
                using (colorFrame)
                {
                    //BodyFrame
                    await ProcessBodyFrame(bodyFrame, colorFrame);
                }

                // InfraredFrame
                //if (infraredFrame != null)
                //{
                //    // the fastest way to process the infrared frame data is to directly access
                //    // the underlying buffer
                //    using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
                //    {
                //        // verify data and write the new infrared frame data to the display bitmap
                //        if (((this.infraredFrameDescription.Width * this.infraredFrameDescription.Height) == (infraredBuffer.Size / this.infraredFrameDescription.BytesPerPixel)) &&
                //            (this.infraredFrameDescription.Width == this.infraredBitmap.PixelWidth) && (this.infraredFrameDescription.Height == this.infraredBitmap.PixelHeight))
                //        {
                //            this.ProcessInfraredFrameData(infraredBuffer.UnderlyingBuffer, infraredBuffer.Size);
                //        }
                //    }
                //}
            }
            finally
            {
                if (infraredFrame != null)
                {
                    infraredFrame.Dispose();
                }

                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                }

                if (bodyFrame != null)
                {
                    bodyFrame.Dispose();
                }
            }
        }
Example #12
    void UpdateKinect()
    {
        if (_Reader != null)
        {
            MultiSourceFrame frame = _Reader.AcquireLatestFrame();
            if (frame != null)
            {
                //color processing with depth
                ColorFrame colorFrame = frame.ColorFrameReference.AcquireFrame();
                if (colorFrame != null)
                {
                    DepthFrame depthFrame = frame.DepthFrameReference.AcquireFrame();
                    if (depthFrame != null)
                    {
                        colorFrame.CopyConvertedFrameDataToArray(_ColorRawData, ColorImageFormat.Rgba);
                        _ColorTexture.LoadRawTextureData(_ColorRawData);
                        _ColorTexture.Apply();

                        depthFrame.CopyFrameDataToArray(_DepthData);

                        depthFrame.Dispose();
                        depthFrame = null;
                    }

                    colorFrame.Dispose();
                    colorFrame = null;
                }

                //ir processing
                InfraredFrame irFrame = frame.InfraredFrameReference.AcquireFrame();
                if (irFrame != null)
                {
                    irFrame.CopyFrameDataToArray(_IRData);

                    int index = 0;
                    foreach (ushort ir in _IRData)
                    {
                        byte intensity = (byte)(ir >> 8);
                        _IRRawData[index++] = intensity;
                        _IRRawData[index++] = intensity;
                        _IRRawData[index++] = intensity;
                        _IRRawData[index++] = 255; // Alpha
                    }

                    //load raw data
                    _IRTexture.LoadRawTextureData(_IRRawData);
                    _IRTexture.Apply();

                    irFrame.Dispose();
                    irFrame = null;
                }


                //body processing
                BodyFrame bodyFrame = frame.BodyFrameReference.AcquireFrame();
                if (bodyFrame != null)
                {
                    if (_BodyData == null)
                    {
                        _BodyData = new Body[_Sensor.BodyFrameSource.BodyCount];
                    }
                    bodyFrame.GetAndRefreshBodyData(_BodyData);

                    bodyFrame.Dispose();
                    bodyFrame = null;
                }
                frame = null;
            }
        }
    }
Example #13
    public bool GetMultiSourceFrame(KinectInterop.SensorData sensorData)
    {
        if (multiSourceFrameReader != null)
        {
            multiSourceFrame = multiSourceFrameReader.AcquireLatestFrame();

            if (multiSourceFrame != null)
            {
                // try to get all frames at once
                msBodyFrame      = (sensorFlags & KinectInterop.FrameSource.TypeBody) != 0 ? multiSourceFrame.BodyFrameReference.AcquireFrame() : null;
                msBodyIndexFrame = (sensorFlags & KinectInterop.FrameSource.TypeBodyIndex) != 0 ? multiSourceFrame.BodyIndexFrameReference.AcquireFrame() : null;

                bool bAllSet =
                    ((sensorFlags & KinectInterop.FrameSource.TypeBody) == 0 || msBodyFrame != null) &&
                    ((sensorFlags & KinectInterop.FrameSource.TypeBodyIndex) == 0 || msBodyIndexFrame != null);


                if (!bAllSet)
                {
                    // release all frames
                    if (msBodyFrame != null)
                    {
                        msBodyFrame.Dispose();
                        msBodyFrame = null;
                    }

                    if (msBodyIndexFrame != null)
                    {
                        msBodyIndexFrame.Dispose();
                        msBodyIndexFrame = null;
                    }

                    if (msColorFrame != null)
                    {
                        msColorFrame.Dispose();
                        msColorFrame = null;
                    }

                    if (msDepthFrame != null)
                    {
                        msDepthFrame.Dispose();
                        msDepthFrame = null;
                    }

                    if (msInfraredFrame != null)
                    {
                        msInfraredFrame.Dispose();
                        msInfraredFrame = null;
                    }
                }
//				else
//				{
//					bool bNeedBody = (sensorFlags & KinectInterop.FrameSource.TypeBody) != 0;
//					bool bNeedBodyIndex = (sensorFlags & KinectInterop.FrameSource.TypeBodyIndex) != 0;
//					bool bNeedColor = (sensorFlags & KinectInterop.FrameSource.TypeColor) != 0;
//					bool bNeedDepth = (sensorFlags & KinectInterop.FrameSource.TypeDepth) != 0;
//					bool bNeedInfrared = (sensorFlags & KinectInterop.FrameSource.TypeInfrared) != 0;
//
//					bAllSet = true;
//				}
            }

            return(multiSourceFrame != null);
        }

        return(false);
    }