Example #1
        void useDepthFrame(DepthFrameReference depthFrameReference)
        {
            try
            {
                DepthFrame depthFrame = depthFrameReference.AcquireFrame();

                if (depthFrame != null)
                {
                    using (depthFrame)
                    {
                        depthFrame.CopyFrameDataToArray(this.irImagePixelData);

                        this.updateBitmap(depthFrame.FrameDescription.Width, depthFrame.FrameDescription.Height, this.irImagePixelData, true);

                        this.pictureBox1.Image = new Bitmap(this.colorImageBitmap, this.pictureBox1.Width, this.pictureBox1.Height);
                    }
                }
            }
            catch (Exception er)
            {
                string message = er.Message;
                Console.WriteLine(message);
                // Don't worry about empty frames.
            }
        }
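The buffer filled by CopyFrameDataToArray above has to match the depth frame's pixel count. A minimal sketch of how the irImagePixelData buffer might be allocated once at startup (only the field name comes from the example; the method and sensor parameter are assumptions):

        // Hypothetical one-time setup: size the reusable buffer from the sensor's depth frame description.
        private void InitDepthBuffer(KinectSensor sensor)
        {
            FrameDescription depthDescription = sensor.DepthFrameSource.FrameDescription;

            // One ushort per depth pixel (512 x 424 on the Kinect v2).
            this.irImagePixelData = new ushort[depthDescription.LengthInPixels];
        }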
Example #2
        /// <summary>
        /// Display the depth data when a frame is received
        /// </summary>
        private void DepthReader_FrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            DepthFrameReference frameReference = e.FrameReference;
            DepthFrame          frame          = frameReference.AcquireFrame();

            if (frame != null)
            {
                Dispatcher.Invoke(() => { depthRenderer.RenderDepthFrame(e); frame.Dispose(); });
            }
        }
Example #3
 private void HandleDepthFrame(DepthFrameReference reference)
 {
     DepthWatch.Again();
     using (var frame = reference.AcquireFrame()) {
         if (frame == null)
         {
             return;
         }
         frame.CopyFrameDataToArray(Depth.Pixels);
         Depth.Stamp.Time = System.DateTime.Now;
     }
     DepthWatch.Stop();
 }
Example #4
        /// <summary>
        /// Acquires depth data from the <see cref="multiSourceFrame"/>.
        /// </summary>
        /// <returns>
        /// Returns true if depth data was acquired correctly, false otherwise.
        /// </returns>
        public bool AcquireDepthData()
        {
            DepthFrameReference depthFrameReference = this.multiSourceFrame.DepthFrameReference;

            using (DepthFrame depthFrame = depthFrameReference.AcquireFrame())
            {
                if (depthFrame == null)
                {
                    this.depthData = null;
                    Log.Warn("The frame does not contain depth data.");
                    return(false);
                }

                FrameDescription depthFrameDescription = depthFrame.FrameDescription;
                this.depthData = new ushort[depthFrameDescription.LengthInPixels];
                depthFrame.CopyFrameDataToArray(this.depthData);
                return(true);
            }
        }
        /// <summary>
        /// Draws depth image data from the specified frame.
        /// </summary>
        /// <param name="frameReference">The reference to the depth frame that should be used.</param>
        private void useDepthImage(DepthFrameReference frameReference)
        {
            // Acquire the frame here and check that it was acquired properly; wrap it in a using block since it, too, is disposable.
            DepthFrame frame = frameReference.AcquireFrame();

            if (frame != null)
            {
                FrameDescription description = null;
                Bitmap           outputImage = null;
                using (frame)
                {
                    // Next get the frame's description and create an output bitmap image.
                    description = frame.FrameDescription;
                    outputImage = new Bitmap(description.Width, description.Height, PixelFormat.Format32bppArgb);

                    // Next, we create the raw data pointer for the bitmap, as well as the size of the image's data.
                    System.Drawing.Imaging.BitmapData imageData = outputImage.LockBits(new Rectangle(0, 0, outputImage.Width, outputImage.Height),
                                                                                       ImageLockMode.WriteOnly, outputImage.PixelFormat);
                    IntPtr imageDataPtr = imageData.Scan0;
                    int    size         = imageData.Stride * outputImage.Height;

                    // After this, we copy the image data into its array.  We then go through each pixel and shift the data down for the
                    // RGB values, as their normal values are too large.
                    frame.CopyFrameDataToArray(this.rawDepthPixelData);
                    byte[] rawData = new byte[description.Width * description.Height * 4];
                    int    i       = 0;
                    foreach (ushort point in this.rawDepthPixelData)
                    {
                        rawData[i++] = (byte)(point >> 6);
                        rawData[i++] = (byte)(point >> 4);
                        rawData[i++] = (byte)(point >> 2);
                        rawData[i++] = 255;
                    }
                    // Next, the new raw data is copied to the bitmap's data pointer, and the image is unlocked using its data.
                    System.Runtime.InteropServices.Marshal.Copy(rawData, 0, imageDataPtr, size);
                    outputImage.UnlockBits(imageData);
                }

                // Finally, the image is set for the preview picture box.
                this.previewPictureBox.Image = outputImage;
            }
        }
        void frameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // Try to get the frame from its reference.
            try
            {
                MultiSourceFrame frame = e.FrameReference.AcquireFrame();

                if (frame != null)
                {
                    try
                    {
                        // Then switch between the possible types of images to show, get its frame reference, then use it
                        // with the appropriate image.
                        switch (this.imageType)
                        {
                        case ImageType.Color:
                            ColorFrameReference colorFrameReference = frame.ColorFrameReference;
                            useRGBAImage(colorFrameReference);
                            break;

                        case ImageType.Depth:
                            DepthFrameReference depthFrameReference = frame.DepthFrameReference;
                            useDepthImage(depthFrameReference);
                            break;

                        case ImageType.IR:
                            InfraredFrameReference irFrameReference = frame.InfraredFrameReference;
                            useIRImage(irFrameReference);
                            break;
                        }
                    }
                    catch (Exception)
                    {
                        // Don't worry about exceptions for this demonstration.
                    }
                }
            }
            catch (Exception)
            {
                // Don't worry about exceptions for this demonstration.
            }
        }
        private void RenderFrame(object sender, DoWorkEventArgs e)
        {
            ProcessDepthFrameEventArgs args      = (ProcessDepthFrameEventArgs)e.Argument;
            DepthFrameReference        reference = args.frame;

            using (DepthFrame frame = reference.AcquireFrame())
            {
                if (frame != null)
                {
                    //////Debug.WriteLine("Depth frame arrived");
                    try
                    {
                        e.Result = new DFrame(frame);
                    }
                    catch (Exception)
                    {
                    }
                }
            }
        }
Example #8
        /// <summary>
        /// Renders the depth camera image from a depth frame.
        /// </summary>
        /// <param name="frameReference">The reference to the depth frame.</param>
        private void ShowDepthImage(DepthFrameReference frameReference)
        {
            // Try to acquire the current frame from the sensor
            DepthFrame frame = frameReference.AcquireFrame();

            if (frame != null)
            {
                FrameDescription description = null;
                Bitmap           outputImage = null;
                using (frame)
                {
                    // Get the frame description and create an output image for the depth picture box
                    description = frame.FrameDescription;
                    outputImage = new Bitmap(description.Width, description.Height, PixelFormat.Format32bppArgb);

                    // Create a pointer to the bitmap data and compute the size of the image data
                    BitmapData imageData = outputImage.LockBits(new Rectangle(0, 0, outputImage.Width, outputImage.Height),
                                                                ImageLockMode.WriteOnly, outputImage.PixelFormat);
                    IntPtr imageDataPtr = imageData.Scan0;
                    int    size         = imageData.Stride * outputImage.Height;

                    // Copy the depth data into the array, shifting each value down to byte range for the RGB channels
                    frame.CopyFrameDataToArray(this.rawDepthPixelData);
                    byte[] rawData = new byte[description.Width * description.Height * 4];
                    int    i       = 0;
                    foreach (ushort point in this.rawDepthPixelData)
                    {
                        rawData[i++] = (byte)(point >> 6);
                        rawData[i++] = (byte)(point >> 4);
                        rawData[i++] = (byte)(point >> 2);
                        rawData[i++] = 255;
                    }

                    // Finally, copy the buffer into the bitmap
                    System.Runtime.InteropServices.Marshal.Copy(rawData, 0, imageDataPtr, size);
                    outputImage.UnlockBits(imageData);
                }

                this.pictureBoxDeepCamera.Image = drawGestureStaticRectabgle(outputImage);
            }
        }
Example #9
        void multiSourceFrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrameReference msFrameReference = e.FrameReference;

            try
            {
                MultiSourceFrame msFrame = msFrameReference.AcquireFrame();

                if (msFrame != null)
                {
                    LongExposureInfraredFrameReference leirFrameReference = msFrame.LongExposureInfraredFrameReference;
                    InfraredFrameReference             irFrameReference   = msFrame.InfraredFrameReference;
                    ColorFrameReference colorFrameReference = msFrame.ColorFrameReference;
                    DepthFrameReference depthFrameReference = msFrame.DepthFrameReference;
                    BodyFrameReference  bodyFrameReference  = msFrame.BodyFrameReference;
                    switch (this.imageType)
                    {
                    case ImageType.Color:
                        useColorFrame(colorFrameReference);
                        break;

                    case ImageType.Depth:
                        useDepthFrame(depthFrameReference);
                        break;

                    case ImageType.IR:
                        useIRFrame(irFrameReference);
                        break;

                    case ImageType.LEIR:
                        useLIRFrame(leirFrameReference);
                        break;
                    }
                    useBodyFrame(bodyFrameReference);
                    //updatePulse(colorFrameReference, irFrameReference, bodyFrameReference);
                }
            }
            catch (Exception ex)
            {
            }
        }
Example #10
        public async void Update(DepthFrameReference frameReference)
        {
            bool processed = false;
            ushort minDepth = 0;
            ushort maxDepth = 0;
            using (var frame = frameReference.AcquireFrame())
            {
                if (frame != null)
                {
                    frame.CopyFrameDataToArray(_data);
                    minDepth = frame.DepthMinReliableDistance;
                    maxDepth = frame.DepthMaxReliableDistance;
                    processed = true;
                }
            }

            if (processed)
            {
                await UpdateAsync(_data, minDepth, maxDepth);
            }
        }
Example #11
        /// <summary>
        /// Update the Bitmap from the supplied <c>DepthFrameReference</c>.
        /// </summary>
        public async void Update(DepthFrameReference frameReference)
        {
            bool   processed = false;
            ushort minDepth  = 0;
            ushort maxDepth  = 0;

            using (var frame = frameReference.AcquireFrame())
            {
                if (frame != null)
                {
                    frame.CopyFrameDataToArray(_data);
                    minDepth  = frame.DepthMinReliableDistance;
                    maxDepth  = frame.DepthMaxReliableDistance;
                    processed = true;
                }
            }

            if (processed)
            {
                await UpdateAsync(_data, minDepth, maxDepth);
            }
        }
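Examples #10 and #11 expose the same async Update method; a possible way to drive it from a depth reader event (the wiring below is an assumption, only the Update signature comes from the examples):

        // Hypothetical wiring: open a depth reader and forward each arriving frame reference to Update().
        private void WireDepthReader(KinectSensor sensor)
        {
            DepthFrameReader reader = sensor.DepthFrameSource.OpenReader();
            reader.FrameArrived += (s, args) => this.Update(args.FrameReference);
        }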
Example #12
        /// <summary>
        /// Process the depth frames and update UI
        /// </summary>
        public void OnDepthFrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            DepthFrameReference refer = e.FrameReference;

            if (refer == null)
            {
                return;
            }

            DepthFrame frame = refer.AcquireFrame();

            if (frame == null)
            {
                return;
            }

            using (frame)
            {
                FrameDescription frameDesc = frame.FrameDescription;

                if (((frameDesc.Width * frameDesc.Height) == _depthData.Length) && (frameDesc.Width == _depthBitmap.PixelWidth) && (frameDesc.Height == _depthBitmap.PixelHeight))
                {
                    // Copy depth frames
                    frame.CopyFrameDataToArray(_depthData);

                    // Get min & max depth
                    ushort minDepth = frame.DepthMinReliableDistance;
                    ushort maxDepth = frame.DepthMaxReliableDistance;

                    // Adjust visualisation
                    int colorPixelIndex = 0;
                    for (int i = 0; i < _depthData.Length; ++i)
                    {
                        // Get depth value
                        ushort depth = _depthData[i];

                        if (depth == 0)
                        {
                            _depthPixels[colorPixelIndex++] = 41;
                            _depthPixels[colorPixelIndex++] = 239;
                            _depthPixels[colorPixelIndex++] = 242;
                        }
                        else if (depth < minDepth || depth > maxDepth)
                        {
                            _depthPixels[colorPixelIndex++] = 25;
                            _depthPixels[colorPixelIndex++] = 0;
                            _depthPixels[colorPixelIndex++] = 255;
                        }
                        else
                        {
                            double gray = (Math.Floor((double)depth / 250) * 12.75);

                            _depthPixels[colorPixelIndex++] = (byte)gray;
                            _depthPixels[colorPixelIndex++] = (byte)gray;
                            _depthPixels[colorPixelIndex++] = (byte)gray;
                        }

                        // Increment
                        ++colorPixelIndex;
                    }

                    // Copy output to bitmap
                    _depthBitmap.WritePixels(
                        new Int32Rect(0, 0, frameDesc.Width, frameDesc.Height),
                        _depthPixels,
                        frameDesc.Width * _bytePerPixel,
                        0);
                }
            }
        }
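As a quick check of the gray mapping above: Math.Floor(depth / 250) * 12.75 quantizes the depth into 250 mm bands, so a pixel at 1000 mm lands in band 4 and is rendered as gray level 4 * 12.75 = 51, while one near the Kinect v2's maximum reliable distance of roughly 4500 mm lands in band 18 and is rendered as about 229; deeper values never reach this branch because the preceding branch routes them to the fixed out-of-range color.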
Example #13
        /// <summary>
        /// Update to get a new frame.
        /// This code is similar to the code in the Kinect SDK samples.
        /// </summary>
        private static void Update()
        {
            if (!isConnected)
            {
                return;
            }

            dataAvailable.WaitOne();

            MultiSourceFrame multiSourceFrame = null;
            DepthFrame       depthFrame       = null;
            InfraredFrame    irFrame          = null;
            BodyFrame        bodyFrame        = null;

            lock (updateLock)
            {
                try
                {
                    if (frameReference != null)
                    {
                        multiSourceFrame = frameReference.AcquireFrame();

                        if (multiSourceFrame != null)
                        {
                            DepthFrameReference    depthFrameReference = multiSourceFrame.DepthFrameReference;
                            InfraredFrameReference irFrameReference    = multiSourceFrame.InfraredFrameReference;
                            BodyFrameReference     bodyFrameReference  = multiSourceFrame.BodyFrameReference;

                            depthFrame = depthFrameReference.AcquireFrame();
                            irFrame    = irFrameReference.AcquireFrame();

                            if ((depthFrame != null) && (irFrame != null))
                            {
                                FrameDescription depthFrameDescription = depthFrame.FrameDescription;
                                FrameDescription irFrameDescription    = irFrame.FrameDescription;

                                int depthWidth  = depthFrameDescription.Width;
                                int depthHeight = depthFrameDescription.Height;
                                int irWidth     = irFrameDescription.Width;
                                int irHeight    = irFrameDescription.Height;

                                // verify data and write the new registered frame data to the display bitmap
                                if (((depthWidth * depthHeight) == depthFrameData.Length) &&
                                    ((irWidth * irHeight) == irFrameData.Length))
                                {
                                    depthFrame.CopyFrameDataToArray(depthFrameData);
                                    irFrame.CopyFrameDataToArray(irFrameData);
                                }

                                if (bodyFrameReference != null)
                                {
                                    bodyFrame = bodyFrameReference.AcquireFrame();

                                    if (bodyFrame != null)
                                    {
                                        if (bodies == null || bodies.Length < bodyFrame.BodyCount)
                                        {
                                            bodies = new Body[bodyFrame.BodyCount];
                                        }
                                        using (bodyFrame)
                                        {
                                            bodyFrame.GetAndRefreshBodyData(bodies);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                catch (Exception)
                {
                    // ignore if the frame is no longer available
                }
                finally
                {
                    if (depthFrame != null)
                    {
                        depthFrame.Dispose();
                        depthFrame = null;
                    }

                    if (irFrame != null)
                    {
                        irFrame.Dispose();
                        irFrame = null;
                    }
                    if (bodyFrame != null)
                    {
                        bodyFrame.Dispose();
                        bodyFrame = null;
                    }
                    if (multiSourceFrame != null)
                    {
                        multiSourceFrame = null;
                    }
                }
            }
        }
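Update() above blocks on dataAvailable.WaitOne() until the reader signals new data; a minimal sketch of the other half of that handshake (the handler name is an assumption, and dataAvailable is assumed to be an AutoResetEvent or similar EventWaitHandle; frameReference is the field read by Update):

        // Hypothetical event handler: remember the latest reference and wake up Update().
        private static void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            frameReference = e.FrameReference;
            dataAvailable.Set();
        }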
Example #14
        /// <summary>
        /// Device-specific implementation of Update.
        /// Updates data buffers of all active channels with data of current frame.
        /// </summary>
        /// <remarks>This method is implicitly called by <see cref="Camera.Update"/> inside a camera lock.</remarks>
        /// <seealso cref="Camera.Update"/>
        protected override void UpdateImpl()
        {
            // TODO: This method can yield rather asynchronous channels. If necessary, try to find a mechanism that updates frames that have already been fetched while waiting for others that are not yet available.
            MultiSourceFrame multiSourceFrame       = null;
            bool             bodyIndexRequired      = IsChannelActive(CustomChannelNames.BodyIndex);
            bool             depthRequired          = IsChannelActive(ChannelNames.Distance) || IsChannelActive(ChannelNames.Point3DImage);
            bool             amplitudeRequired      = IsChannelActive(ChannelNames.Amplitude);
            bool             colorRequired          = IsChannelActive(ChannelNames.Color);
            bool             longExposureIRRequired = IsChannelActive(CustomChannelNames.LongExposureIR);

            do
            {
                if (!dataAvailable.WaitOne(UpdateTimeoutMilliseconds))
                {
                    throw ExceptionBuilder.BuildFromID(typeof(MetriCam2Exception), this, 005);
                }

                lock (newFrameLock)
                {
                    try
                    {
                        if (multiFrameReference != null)
                        {
                            multiSourceFrame = multiFrameReference.AcquireFrame();
                        }
                    }
                    catch (Exception)
                    {
                        // ignore if the frame is no longer available
                        continue;// throw
                    }
                }

                try
                {
                    // fetch depth?
                    if (depthRequired)
                    {
                        DepthFrameReference depthFrameReference = multiSourceFrame.DepthFrameReference;
                        if (depthFrameReference != null)
                        {
                            // Always synchronize on depth frames if possible.
                            if (lastTimeStamp == GetAbsoluteTimeStamp(depthFrameReference.RelativeTime.Ticks))
                            {
                                continue;
                            }
                            using (DepthFrame depthFrame = depthFrameReference.AcquireFrame())
                            {
                                if (depthFrame == null)
                                {
                                    continue;
                                }

                                depthFrameDescription = depthFrame.FrameDescription;
                                int depthWidth  = depthFrameDescription.Width;
                                int depthHeight = depthFrameDescription.Height;
                                if ((depthWidth * depthHeight) == this.depthFrameData.Length)
                                {
                                    lock (this.depthFrameData)
                                    {
                                        depthFrame.CopyFrameDataToArray(this.depthFrameData);
                                        lastTimeStamp  = GetAbsoluteTimeStamp(depthFrameReference.RelativeTime.Ticks);
                                        timestampDepth = lastTimeStamp;
                                    }
                                    depthRequired = false;
                                }
                            }
                        }
                    }

                    // fetch IR?
                    if (amplitudeRequired)
                    {
                        InfraredFrameReference irFrameReference = multiSourceFrame.InfraredFrameReference;
                        if (irFrameReference != null)
                        {
                            // If depth data is inactive, synchronize on IR frames. If depth and IR are inactive, we synchronize on color frames.
                            if (!(IsChannelActive(ChannelNames.Distance) || IsChannelActive(ChannelNames.Point3DImage)) && lastTimeStamp == GetAbsoluteTimeStamp(irFrameReference.RelativeTime.Ticks))
                            {
                                continue;
                            }

                            using (InfraredFrame irFrame = irFrameReference.AcquireFrame())
                            {
                                if (irFrame == null)
                                {
                                    continue;
                                }

                                FrameDescription irFrameDescription = irFrame.FrameDescription;
                                int irWidth  = irFrameDescription.Width;
                                int irHeight = irFrameDescription.Height;
                                if ((irWidth * irHeight) == this.irFrameData.Length)
                                {
                                    lock (this.irFrameData)
                                    {
                                        irFrame.CopyFrameDataToArray(this.irFrameData);
                                        lastTimeStamp = GetAbsoluteTimeStamp(irFrameReference.RelativeTime.Ticks);
                                        timestampIR   = lastTimeStamp;
                                    }
                                    amplitudeRequired = false;
                                }
                            }
                        }
                    }

                    // (always) fetch body frame
                    BodyFrameReference bodyFrameReference = multiSourceFrame.BodyFrameReference;
                    if (bodyFrameReference != null)
                    {
                        using (BodyFrame bodyFrame = bodyFrameReference.AcquireFrame())
                        {
                            if (bodyFrame != null)
                            {
                                this.bodies = new Body[bodyFrame.BodyCount];
                                using (bodyFrame)
                                {
                                    bodyFrame.GetAndRefreshBodyData(this.bodies);
                                }
                            }
                            else
                            {
                                // TODO: check if channel is activated.
                            }
                        }
                    }

                    // fetch color?
                    if (colorRequired)
                    {
                        ColorFrameReference colorFrameReference = multiSourceFrame.ColorFrameReference;
                        if (colorFrameReference == null)
                        {
                            continue;
                        }
                        // If depth and IR data is inactive, synchronize on color frames. If color, depth and IR are inactive, we don't care for synchronization.
                        if (!(IsChannelActive(ChannelNames.Distance) || IsChannelActive(ChannelNames.Point3DImage) || IsChannelActive(ChannelNames.Amplitude)) && lastTimeStamp == GetAbsoluteTimeStamp(colorFrameReference.RelativeTime.Ticks))
                        {
                            continue;
                        }

                        using (ColorFrame colorFrame = colorFrameReference.AcquireFrame())
                        {
                            //FrameDescription colorFrameDescription = colorFrame.FrameDescription;
                            //int cWidth = colorFrameDescription.Width;
                            //int cHeight = colorFrameDescription.Width;
                            if (colorFrame != null)
                            {
                                using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                                {
                                    lock (this.colorFrameData)
                                    {
                                        colorFrame.CopyConvertedFrameDataToArray(this.colorFrameData, ColorImageFormat.Bgra);
                                        lastTimeStamp  = GetAbsoluteTimeStamp(colorFrameReference.RelativeTime.Ticks);
                                        timestampColor = lastTimeStamp;
                                    }
                                }
                                colorRequired = false;
                            }
                        }
                    }

                    // fetch long exposure IR? (these are independent of the IR images and are acquired at the same rate, so every new
                    // frame also has one of these.)
                    if (longExposureIRRequired)
                    {
                        LongExposureInfraredFrameReference longExposureIRFrameRef = multiSourceFrame.LongExposureInfraredFrameReference;
                        using (LongExposureInfraredFrame longIRFrame = longExposureIRFrameRef.AcquireFrame())
                        {
                            if (longIRFrame == null)
                            {
                                continue;
                            }

                            int longIRWidth  = longIRFrame.FrameDescription.Width;
                            int longIRHeight = longIRFrame.FrameDescription.Height;
                            if (longExposureIRData == null || (longIRWidth * longIRHeight) != longExposureIRData.Length)
                            {
                                longExposureIRData = new ushort[longIRWidth * longIRHeight];
                            }
                            longIRFrame.CopyFrameDataToArray(longExposureIRData);
                            longExposureIRRequired = false;
                        }
                    }

                    // fetch body index frames?
                    if (bodyIndexRequired)
                    {
                        BodyIndexFrameReference bodyIndexFrameRef = multiSourceFrame.BodyIndexFrameReference;
                        using (BodyIndexFrame bodyIndexFrame = bodyIndexFrameRef.AcquireFrame())
                        {
                            if (bodyIndexFrame == null)
                            {
                                log.Debug("bodyIndexFrame is NULL.");
                                continue;
                            }

                            int bodyIndexWidth  = bodyIndexFrame.FrameDescription.Width;
                            int bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;
                            if (bodyIndexData == null || (bodyIndexWidth * bodyIndexHeight) != bodyIndexData.Length)
                            {
                                bodyIndexData = new byte[bodyIndexWidth * bodyIndexHeight];
                            }
                            bodyIndexFrame.CopyFrameDataToArray(bodyIndexData);
                            bodyIndexRequired = false;
                        }
                    }
                }
                catch (Exception)
                {
                    // ignore if the frame is no longer available
                }
                finally
                {
                    multiSourceFrame = null;
                }
            } while (depthRequired || colorRequired || bodyIndexRequired || longExposureIRRequired || amplitudeRequired);
        }
Example #15
        /// <summary>
        /// Handles the depth frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_FrameArrivedDepth(object sender, DepthFrameArrivedEventArgs e)
        {
            DepthFrameReference frameReference = e.FrameReference;

            try
            {
                DepthFrame frame = frameReference.AcquireFrame();

                if (frame != null)
                {
                    // DepthFrame is IDisposable
                    using (frame)
                    {
                        FrameDescription frameDescription = frame.FrameDescription;

                        // verify data and write the new depth frame data to the display bitmap
                        if (((frameDescription.Width * frameDescription.Height) == this.frameData.Length) &&
                            (frameDescription.Width == this.bitmapDepth.PixelWidth) && (frameDescription.Height == this.bitmapDepth.PixelHeight))
                        {
                            // Copy the pixel data from the image to a temporary array
                            frame.CopyFrameDataToArray(this.frameData);

                            // Get the min and max reliable depth for the current frame
                            ushort minDepth = frame.DepthMinReliableDistance;
                            ushort maxDepth = frame.DepthMaxReliableDistance;

                            // Convert the depth to RGB
                            int colorPixelIndex = 0;
                            for (int i = 0; i < this.frameData.Length; ++i)
                            {
                                // Get the depth for this pixel
                                ushort depth = this.frameData[i];

                                // To convert to a byte, we're discarding the most-significant
                                // rather than least-significant bits.
                                // We're preserving detail, although the intensity will "wrap."
                                // Values outside the reliable depth range are mapped to 0 (black).
                                byte intensity = (byte)(depth >= minDepth && depth <= maxDepth ? depth : 0);

                                // Write out blue byte
                                this.pixelsDepth[colorPixelIndex++] = intensity;

                                // Write out green byte
                                this.pixelsDepth[colorPixelIndex++] = intensity;

                                // Write out red byte
                                this.pixelsDepth[colorPixelIndex++] = intensity;

                                // We're outputting BGR, the last byte in the 32 bits is unused so skip it
                                // If we were outputting BGRA, we would write alpha here.
                                ++colorPixelIndex;

                                // Save depth to file
                            }


                            this.bitmapDepth.WritePixels(
                                new Int32Rect(0, 0, frameDescription.Width, frameDescription.Height),
                                this.pixelsDepth,
                                frameDescription.Width * this.cbytesPerPixel,
                                0);

                            //Save the depth data using a binary stream if the option is selected
                            if (depthRecO == true)
                            {
                                long milliseconds = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;

                                string filePath = depthFolder + '\\' + "depth" + milliseconds + ".bin";

                                using (FileStream streamDepth = new FileStream(filePath, FileMode.Create))
                                {
                                    using (BinaryWriter depthWriter = new BinaryWriter(streamDepth))
                                    {
                                        depthWriter.Write(this.pixelsDepth);
                                        depthWriter.Close();
                                    }
                                }

                                saveTimeStamp("time_depth", milliseconds);
                            }
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                // ignore if the frame is no longer available
            }
        }
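The recording branch above writes pixelsDepth verbatim to a per-frame .bin file; a hedged sketch of reading such a dump back (the method and its argument are hypothetical):

        // Hypothetical loader for the raw BGR(A) dumps written above; returns the bytes exactly as recorded.
        private static byte[] LoadDepthDump(string filePath)
        {
            using (FileStream streamDepth = new FileStream(filePath, FileMode.Open))
            using (BinaryReader depthReader = new BinaryReader(streamDepth))
            {
                return depthReader.ReadBytes((int)streamDepth.Length);
            }
        }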
        /// <summary>
        /// Draws depth image data from the specified frame.
        /// </summary>
        /// <param name="frameReference">The reference to the depth frame that should be used.</param>
        private void useDepthImage(DepthFrameReference frameReference)
        {
            // Acquire the frame here and check that it was acquired properly; wrap it in a using block since it, too, is disposable.
            DepthFrame frame = frameReference.AcquireFrame();

            if (frame != null)
            {
                FrameDescription description = null;
                Bitmap outputImage = null;
                using (frame)
                {
                    // Next get the frame's description and create an output bitmap image.
                    description = frame.FrameDescription;
                    outputImage = new Bitmap(description.Width, description.Height, PixelFormat.Format32bppArgb);

                    // Next, we create the raw data pointer for the bitmap, as well as the size of the image's data.
                    System.Drawing.Imaging.BitmapData imageData = outputImage.LockBits(new Rectangle(0, 0, outputImage.Width, outputImage.Height),
                        ImageLockMode.WriteOnly, outputImage.PixelFormat);
                    IntPtr imageDataPtr = imageData.Scan0;
                    int size = imageData.Stride * outputImage.Height;

                    // After this, we copy the image data into its array.  We then go through each pixel and shift the data down for the
                    // RGB values, as their normal values are too large.
                    frame.CopyFrameDataToArray(this.rawDepthPixelData);
                    byte[] rawData = new byte[description.Width * description.Height * 4];
                    int i = 0;
                    foreach (ushort point in this.rawDepthPixelData)
                    {
                        rawData[i++] = (byte)(point >> 6);
                        rawData[i++] = (byte)(point >> 4);
                        rawData[i++] = (byte)(point >> 2);
                        rawData[i++] = 255;
                    }
                    // Next, the new raw data is copied to the bitmap's data pointer, and the image is unlocked using its data.
                    System.Runtime.InteropServices.Marshal.Copy(rawData, 0, imageDataPtr, size);
                    outputImage.UnlockBits(imageData);
                }

                // Finally, the image is set for the preview picture box.
                this.previewPictureBox.Image = outputImage;
            }
        }
        private void OnDepthFrameArrived(object sender, DepthFrameArrivedEventArgs e)
        {
            int    af = 0, depth22 = 0;
            double distance           = 0;
            DepthFrameReference refer = e.FrameReference;

            if (refer == null)
            {
                return;
            }
            DepthFrame frame = refer.AcquireFrame();

            if (frame == null)
            {
                return;
            }

            using (frame)
            {
                FrameDescription frameDesc = frame.FrameDescription;
                if (((frameDesc.Width * frameDesc.Height) == _depthData.Length) && (frameDesc.Width == _depthBitmap.PixelWidth) && (frameDesc.Height == _depthBitmap.PixelHeight))
                {
                    uint size = frame.FrameDescription.LengthInPixels;

                    frame.CopyFrameDataToArray(_depthData);

                    ushort minDepth = frame.DepthMinReliableDistance;

                    int colorPixelIndex = 0;
                    for (int i = 0; i < _depthData.Length; i++)
                    {
                        ushort depth = _depthData[i];
                        if (depth < minDepth)
                        {
                            _depthPixels[colorPixelIndex++] = 0;
                            _depthPixels[colorPixelIndex++] = 0;
                            _depthPixels[colorPixelIndex++] = 0;
                        }
                        else if (depth > maxDepth)
                        {
                            _depthPixels[colorPixelIndex++] = 255;
                            _depthPixels[colorPixelIndex++] = 255;
                            _depthPixels[colorPixelIndex++] = 255;
                        }
                        else
                        {
                            double gray = (Math.Floor((double)depth / 250) * 12.75);
                            _depthPixels[colorPixelIndex++] = (byte)gray;
                            _depthPixels[colorPixelIndex++] = (byte)gray;
                            _depthPixels[colorPixelIndex++] = (byte)gray;
                        }
                        ++colorPixelIndex;
                    }
                    _depthBitmap.WritePixels(new Int32Rect(0, 0, frameDesc.Width, frameDesc.Height), _depthPixels, frameDesc.Width * _bytePerPixel, 0);

                    distance             = 260 + (200 * _kinect.DepthFrameSource.FrameDescription.Width);
                    af                   = (int)distance;
                    depth22              = _depthData[af];
                    distanceTextBox.Text = depth22.ToString();
                }
            }
        }
        void useDepthFrame(DepthFrameReference depthFrameReference)
        {
            try
            {
                DepthFrame depthFrame = depthFrameReference.AcquireFrame();

                if (depthFrame != null)
                {
                    using (depthFrame)
                    {
                        depthFrame.CopyFrameDataToArray(this.irImagePixelData);

                        this.updateBitmap(depthFrame.FrameDescription.Width, depthFrame.FrameDescription.Height, this.irImagePixelData, true);

                        this.pictureBox1.Image = new Bitmap(this.colorImageBitmap, this.pictureBox1.Width, this.pictureBox1.Height);
                    }
                }
            }
            catch (Exception er)
            {
                string message = er.Message;
                Console.WriteLine(message);
                // Don't worry about empty frames.
            }
        }
 private void HandleDepthFrame(DepthFrameReference reference) {
   DepthWatch.Again();
   using (var frame = reference.AcquireFrame()) {
     if (frame == null) return;
     frame.CopyFrameDataToArray(Depth.Pixels);
     Depth.Stamp.Time = System.DateTime.Now;
   }
   DepthWatch.Stop();
 }
        // Processes the frame data from the Kinect camera.
        // Since events are called synchronously, this would bottleneck and cause an issue with frame rate.
        // By threading, we process the info on separate threads, allowing execution to continue with the rest of the game.
        private void ProcessRGBVideo(ColorFrameReference aReference, BodyIndexFrameReference bifRef, DepthFrameReference depthRef)
        {
            using (ColorFrame colorImageFrame = aReference.AcquireFrame())
            {
                if (colorImageFrame != null)
                {
                    using (BodyIndexFrame bodyIndexFrame = bifRef.AcquireFrame())
                    {
                        if (bodyIndexFrame != null)
                        {
                            using (DepthFrame depthFrame = depthRef.AcquireFrame())
                            {
                                if (depthFrame != null)
                                {
                                    int depthHeight = depthFrame.FrameDescription.Height;
                                    int depthWidth = depthFrame.FrameDescription.Width;

                                    int colorHeight = colorImageFrame.FrameDescription.Height;
                                    int colorWidth = colorImageFrame.FrameDescription.Width;

                                    ushort[] _depthData = new ushort[depthFrame.FrameDescription.Width * depthFrame.FrameDescription.Height];
                                    byte[] _bodyData = new byte[bodyIndexFrame.FrameDescription.Width * bodyIndexFrame.FrameDescription.Height];
                                    byte[] _colorData = new byte[colorImageFrame.FrameDescription.Width * colorImageFrame.FrameDescription.Height * 4];
                                    ColorSpacePoint[] _colorPoints = new ColorSpacePoint[depthWidth * depthHeight];

                                    depthFrame.CopyFrameDataToArray(_depthData);
                                    bodyIndexFrame.CopyFrameDataToArray(_bodyData);
                                    colorImageFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Rgba);

                                    iSensor.CoordinateMapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);

                                    Color[] color = new Color[depthWidth * depthHeight];
                                    Color c;

                                    for (int y = 0; y < depthHeight; ++y)
                                    {
                                        for (int x = 0; x < depthWidth; ++x)
                                        {
                                            int depthIndex = (y * depthWidth) + x; // row-major index into the depth-sized arrays

                                            byte player = _bodyData[depthIndex];

                                            // Check whether this pixel belongs to a human.
                                            if (player != 0xff)
                                            {
                                                ColorSpacePoint colorPoint = _colorPoints[depthIndex];

                                                int colorX = (int)Math.Floor(colorPoint.X + 0.5);
                                                int colorY = (int)Math.Floor(colorPoint.Y + 0.5);
                                                int colorIndex = ((colorY * colorWidth) + colorX);

                                                if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight))
                                                {

                                                    int displayIndex = colorIndex * 4;

                                                    c = new Color(_colorData[displayIndex + 0], _colorData[displayIndex + 1], _colorData[displayIndex + 2], 0xff);
                                                    color[depthIndex] = c;
                                                }
                                            }
                                        }
                                    }

                                    if (iGraphicsDevice.IsDisposed) return;
                                    var video = new Texture2D(iGraphicsDevice, depthWidth, depthHeight);

                                    video.SetData(color);

                                    lock (iVideoLock)
                                    {
                                        iRGBVideo = video;
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        private void ProcessFrameData(MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrameReference frameReference   = e.FrameReference;
            MultiSourceFrame          multiSourceFrame = null;
            DepthFrame depthFrame = null;
            ColorFrame colorFrame = null;

            try
            {
                multiSourceFrame = frameReference.AcquireFrame();
                if (multiSourceFrame != null)
                {
                    lock (rawDataLock)
                    {
                        ColorFrameReference colorFrameReference = multiSourceFrame.ColorFrameReference;
                        DepthFrameReference depthFrameReference = multiSourceFrame.DepthFrameReference;
                        colorFrame = colorFrameReference.AcquireFrame();
                        depthFrame = depthFrameReference.AcquireFrame();

                        if ((depthFrame != null) && (colorFrame != null))
                        {
                            FrameDescription colorFrameDescription = colorFrame.FrameDescription;
                            int colorWidth  = colorFrameDescription.Width;
                            int colorHeight = colorFrameDescription.Height;
                            if ((colorWidth * colorHeight * sizeof(int)) == colorImagePixels.Length)
                            {
                                colorFrame.CopyConvertedFrameDataToArray(colorImagePixels, ColorImageFormat.Bgra);
                            }

                            FrameDescription depthFrameDescription = depthFrame.FrameDescription;
                            int depthWidth  = depthFrameDescription.Width;
                            int depthHeight = depthFrameDescription.Height;

                            if ((depthWidth * depthHeight) == depthImagePixels.Length)
                            {
                                depthFrame.CopyFrameDataToArray(depthImagePixels);
                            }
                        }
                    }
                }
            }
            catch (Exception)
            {
                // ignore if the frame is no longer available
            }
            finally
            {
                // MultiSourceFrame, DepthFrame, ColorFrame, BodyIndexFrame are IDisposable
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                    depthFrame = null;
                }
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                    colorFrame = null;
                }
                if (multiSourceFrame != null)
                {
                    multiSourceFrame = null;
                }
            }
        }
Example #22
        private void DepthReader_FrameArrived(DepthFrameArrivedEventArgs e)
        {
            DepthFrameReference frameReference = e.FrameReference;

            try
            {
                DepthFrame frame = frameReference.AcquireFrame();

                if (frame != null)
                {
                    // DepthFrame is IDisposable
                    using (frame)
                    {
                        FrameDescription frameDescription = frame.FrameDescription;

                        // verify data and write the new depth frame data to the display bitmap
                        if (((frameDescription.Width * frameDescription.Height) == frameData.Length) &&
                            (frameDescription.Width == bitmap.PixelWidth) && (frameDescription.Height == bitmap.PixelHeight))
                        {
                            // Copy the pixel data from the image to a temporary array
                            frame.CopyFrameDataToArray(frameData);

                            // Get the min and max reliable depth for the current frame
                            ushort minDepth = frame.DepthMinReliableDistance;
                            ushort maxDepth = frame.DepthMaxReliableDistance;

                            // Convert the depth to RGB
                            int colorPixelIndex = 0;
                            for (int i = 0; i < frameData.Length; ++i)
                            {
                                // Get the depth for this pixel
                                ushort depth = frameData[i];

                                // To convert to a byte, we're discarding the most-significant
                                // rather than least-significant bits.
                                // We're preserving detail, although the intensity will "wrap."
                                // Values outside the reliable depth range are mapped to 0 (black).
                                byte intensity = (byte)(depth >= minDepth && depth <= maxDepth ? depth : 0);

                                // Write out blue byte
                                pixels[colorPixelIndex++] = intensity;

                                // Write out green byte
                                pixels[colorPixelIndex++] = intensity;

                                // Write out red byte
                                pixels[colorPixelIndex++] = intensity;

                                // We're outputting BGR, the last byte in the 32 bits is unused so skip it
                                // If we were outputting BGRA, we would write alpha here.
                                ++colorPixelIndex;
                            }

                            bitmap.WritePixels(
                                new Int32Rect(0, 0, frameDescription.Width, frameDescription.Height),
                                pixels,
                                frameDescription.Width * cbytesPerPixel,
                                0);
                        }
                    }
                }
            }
            catch (Exception)
            {
                // ignore if the frame is no longer available
            }
        }
        /// <summary>
        /// Handles the multisource frame data arriving from the sensor
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private unsafe void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // Create instance of EMGUargs which holds the output of data from the kinect
            EMGUargs emguArgs = new EMGUargs();
            MultiSourceFrameReference frameReference = e.FrameReference;
            // Variables initialized to null for easy check of camera failures
            MultiSourceFrame multiSourceFrame = null;
            InfraredFrame    infraredFrame    = null;
            ColorFrame       colorFrame       = null;
            DepthFrame       depthFrame       = null;

            // Acquire frame from the Kinect
            multiSourceFrame = frameReference.AcquireFrame();

            // If the Frame has expired by the time we process this event, return.
            if (multiSourceFrame == null)
            {
                return;
            }
            try
            {
                /*
                 * DepthSpacePoint dp = new DepthSpacePoint
                 * {
                 *  X = 50,
                 *  Y = 20
                 * };
                 * DepthSpacePoint[] dps = new DepthSpacePoint[] { dp };
                 * ushort[] depths = new ushort[] { 2000 };
                 * CameraSpacePoint[] ameraSpacePoints = new CameraSpacePoint[1];
                 *
                 * mapper.MapDepthPointsToCameraSpace(dps, depths, ameraSpacePoints);
                 */
                InfraredFrameReference infraredFrameReference = multiSourceFrame.InfraredFrameReference;
                infraredFrame = infraredFrameReference.AcquireFrame();

                DepthFrameReference depthFrameReference = multiSourceFrame.DepthFrameReference;
                depthFrame = depthFrameReference.AcquireFrame();

                // Check whether needed frames are available
                if (infraredFrame == null || depthFrame == null)
                {
                    return;
                }

                // the fastest way to process the depth frame data is to directly access
                // the underlying buffer
                using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                {
                    // verify data and write the new depth frame data to the display bitmap
                    if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) ==
                         (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)))
                    {
                        // Conversion to needed EMGU image
                        Mat depthImage = this.ProcessDepthFrameData(depthFrame);

                        emguArgs.DepthImage          = depthImage;
                        emguArgs.DepthFrameDimension = new FrameDimension(depthFrameDescription.Width, depthFrameDescription.Height);
                    }

                    //BgrToDephtPixel(depthBuffer.UnderlyingBuffer, depthBuffer.Size);

                    depthFrame.Dispose();
                    depthFrame = null;
                }

                // IR image
                FrameDescription infraredFrameDescription = infraredFrame.FrameDescription;

                // the fastest way to process the infrared frame data is to directly access
                // the underlying buffer
                using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
                {
                    // verify the data size before converting the infrared frame to an EMGU image
                    if (((this.infraredFrameDescription.Width * this.infraredFrameDescription.Height) == (infraredBuffer.Size / this.infraredFrameDescription.BytesPerPixel)))
                    {
                        // Conversion to needed EMGU image
                        Mat infraredImage = this.ProcessInfaredFrameData(infraredFrame);
                        emguArgs.InfraredImage          = infraredImage;
                        emguArgs.InfraredFrameDimension = new FrameDimension(infraredFrameDescription.Width, infraredFrameDescription.Height);
                        //  infraredImage.Dispose();
                    }
                    infraredFrame.Dispose();
                    infraredFrame = null;

                    // Check whether the color image is needed for the main window view
                    if (generateColorImage)
                    {
                        ColorFrameReference colorFrameReference = multiSourceFrame.ColorFrameReference;
                        colorFrame = colorFrameReference.AcquireFrame();
                        if (colorFrame == null)
                        {
                            return;
                        }

                        // color image
                        FrameDescription colorFrameDescription = colorFrame.FrameDescription;

                        // the fastest way to process the color frame data is to directly access
                        // the underlying buffer
                        using (Microsoft.Kinect.KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                        {
                            // Conversion to needed EMGU image
                            Mat colorImage = this.ProcessColorFrameData(colorFrame);
                            emguArgs.Colorimage          = colorImage;
                            emguArgs.ColorFrameDimension = new FrameDimension(colorFrameDescription.Width, colorFrameDescription.Height);
                        }
                        // We're done with the colorFrame
                        colorFrame.Dispose();
                        colorFrame = null;
                    }
                }
                // Call the processing finished event for the conversion to EMGU images
                OnEmguArgsProcessed(emguArgs);
            }
            catch (Exception ex)
            {
                // ignore if the frame is no longer available
                Console.WriteLine("FRAME CHRASHED: " + ex.ToString());
            }
            finally
            {
                // Generate an event to send the converted images for each frame, then clean up.
                // The event is only generated if the main window is shown.

                // DepthFrame, ColorFrame are Disposable.
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                    colorFrame = null;
                }
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                    depthFrame = null;
                }
                // infraredFrame is Disposable
                if (infraredFrame != null)
                {
                    infraredFrame.Dispose();
                    infraredFrame = null;
                }
                // MultiSourceFrame is not IDisposable, so just drop the reference
                multiSourceFrame = null;
            }
        }
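The ProcessDepthFrameData helper called in the handler above is not included in this listing. A minimal sketch of what such a conversion could look like, assuming the Emgu.CV Mat type and the SDK's DepthFrame.CopyFrameDataToIntPtr overload (the body here is illustrative, not the original implementation):

        // Sketch only: wrap the 16-bit depth data in a single-channel EMGU Mat.
        private Mat ProcessDepthFrameData(DepthFrame depthFrame)
        {
            FrameDescription desc = depthFrame.FrameDescription;

            // 16-bit unsigned, one channel, same resolution as the depth stream
            Mat depthImage = new Mat(desc.Height, desc.Width, DepthType.Cv16U, 1);

            // Copy the raw ushort depth values straight into the Mat's buffer
            depthFrame.CopyFrameDataToIntPtr(
                depthImage.DataPointer,
                desc.LengthInPixels * desc.BytesPerPixel);

            return depthImage;
        }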
Exemple #24
0
        void frameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            try
            {
                MultiSourceFrame frame = e.FrameReference.AcquireFrame();

                if (frame != null)
                {
                    try
                    {
                        ColorFrameReference colorFrameReference = frame.ColorFrameReference;
                        useRGBAImage(colorFrameReference);
                        DepthFrameReference depthFrameReference = frame.DepthFrameReference;
                        ShowDepthImage(depthFrameReference);
                        InfraredFrameReference irFrameReference = frame.InfraredFrameReference;
                        ShowIRImage(irFrameReference);

                        // Computing the gesture position adds too much overhead, so we just draw a static rectangle for the gesture =(
                        //// Body
                        //using (var bodyFrame = frame.BodyFrameReference.AcquireFrame())
                        //{
                        //    if (bodyFrame != null)
                        //    {
                        //        _bodies = new Body[bodyFrame.BodyFrameSource.BodyCount];

                        //        bodyFrame.GetAndRefreshBodyData(_bodies);

                        //        foreach (var body in _bodies)
                        //        {
                        //            if (body.IsTracked)
                        //            {
                        //                var joint = body.Joints[JointType.HandRight];

                        //                if (joint.TrackingState == TrackingState.Tracked)
                        //                {
                        //                    // 3D space point
                        //                    CameraSpacePoint jointPosition = joint.Position;
                        //                    Image gesture;
                        //                    RectangleF colorRectangle = GetGestureFromJointPosition(jointPosition, out gesture);

                        //                    // color
                        //                    if (pictureBoxCameraColor.Image != null)
                        //                    {
                        //                        var gf = Graphics.FromImage(pictureBoxCameraColor.Image);
                        //                        gf.DrawRectangle(new Pen(Color.Red, 2),
                        //                            colorRectangle.Location.X, colorRectangle.Location.Y, colorRectangle.Width, colorRectangle.Height);
                        //                    }

                        //                    this.pictureBoxGesture.Image = gesture;
                        //                    this.btnAddGesture.Enabled = true;
                        //                    this.btnAddGesture.Focus();
                        //                } else
                        //                {
                        //                    this.btnAddGesture.Enabled = false;
                        //                    this.pictureBoxGesture.Image = null;
                        //                }
                        //            }
                        //        }
                        //    }
                        //}

                        // Crop a fixed region to the right of the image center where the gesture is expected
                        Bitmap    img    = this.pictureBoxCameraColor.Image as Bitmap;
                        Rectangle rec    = new Rectangle(img.Width / 2 + img.Width / 8 + 2, img.Height / 2 - img.Height / 4 + 2, img.Width / 8 + img.Width / 8 / 8 - 4, img.Height / 4 - 4);
                        Bitmap    target = new Bitmap(rec.Width, rec.Height);

                        using (Graphics g = Graphics.FromImage(target))
                        {
                            g.DrawImage(img, new Rectangle(0, 0, target.Width, target.Height),
                                        rec,
                                        GraphicsUnit.Pixel);
                        }

                        var gesture = target;
                        this.pictureBoxGesture.Image = gesture;
                    }
                    catch (Exception)
                    {
                        // Nothing...
                    }
                }
            }
            catch (Exception)
            {
                // Nothing...
            }
        }
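The commented-out body-tracking path above relies on a GetGestureFromJointPosition helper that is not part of this snippet. If it were re-enabled, projecting the hand joint into color space would typically go through the sensor's CoordinateMapper; a rough sketch under that assumption (the helper name and the 200-pixel crop size are illustrative):

        // Sketch only: map a 3D hand joint into color space and build a crop
        // rectangle around it. 'sensor' is the active KinectSensor; the size is arbitrary.
        private RectangleF GetHandRectangleInColorSpace(KinectSensor sensor, CameraSpacePoint jointPosition)
        {
            ColorSpacePoint colorPoint =
                sensor.CoordinateMapper.MapCameraPointToColorSpace(jointPosition);

            const float size = 200f;
            return new RectangleF(colorPoint.X - size / 2f, colorPoint.Y - size / 2f, size, size);
        }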