public async void GetBitmap(InfraredFrameReference frameReference)
        {
            // NOTE(review): async void — callers cannot await this or observe its
            // exceptions; acceptable only as an event-handler-style entry point.
            // Acquire the frame inside a using so the sensor buffer is released
            // before we await; a null frame means it expired and we do nothing.
            using (var infraredFrame = frameReference.AcquireFrame())
            {
                if (infraredFrame == null)
                {
                    return;
                }

                infraredFrame.CopyFrameDataToArray(_data);
            }

            // The frame is already disposed here; only the copied pixels are used.
            await UpdateAsync(_data);
        }
Exemple #2
0
        private void HandleInfraredFrame(InfraredFrameReference reference)
        {
            // While the task is in stand-by, reset the stopwatch and skip the frame.
            if (Task.StandBy)
            {
                InfraredWatch.Reset();
                return;
            }

            InfraredWatch.Again();

            using (var infraredFrame = reference.AcquireFrame())
            {
                // A null frame means it expired before acquisition.
                // NOTE(review): the early return below skips InfraredWatch.Stop() —
                // confirm the watch is intended to keep running in that case.
                if (infraredFrame == null)
                {
                    return;
                }

                // Copy the raw IR pixels and record the capture time.
                infraredFrame.CopyFrameDataToArray(Infrared.Pixels);
                Infrared.Stamp.Time = System.DateTime.Now;
            }

            InfraredWatch.Stop();
        }
Exemple #3
0
        /// <summary>
        /// Renders the infrared camera image from an infrared frame into the
        /// infrared picture box (grayscale, brightness-adjusted).
        /// </summary>
        /// <param name="frameReference">Reference to the infrared frame.</param>
        private void ShowIRImage(InfraredFrameReference frameReference)
        {
            // Try to acquire the current frame from the sensor; it may have expired.
            InfraredFrame frame = frameReference.AcquireFrame();

            if (frame != null)
            {
                using (frame)
                {
                    // Get the frame description and create the output bitmap.
                    FrameDescription description = frame.FrameDescription;
                    Bitmap outputImage = new Bitmap(description.Width, description.Height, PixelFormat.Format32bppArgb);

                    // Lock the bitmap bits so raw pixel data can be written directly.
                    System.Drawing.Imaging.BitmapData imageData = outputImage.LockBits(new Rectangle(0, 0, outputImage.Width, outputImage.Height),
                                                                                       ImageLockMode.WriteOnly, outputImage.PixelFormat);
                    IntPtr imageDataPtr = imageData.Scan0;
                    int    rowBytes     = description.Width * 4; // 4 bytes per BGRA pixel

                    // Copy the IR data, shift each 16-bit sample down to a byte and
                    // replicate it into B, G and R to produce a grayscale image.
                    frame.CopyFrameDataToArray(this.rawIRPixelData);
                    byte[] rawData = new byte[description.Width * description.Height * 4];

                    for (int i = 0; i < this.rawIRPixelData.Length; i++)
                    {
                        byte intensity = (byte)(this.rawIRPixelData[i] >> 8);

                        rawData[i * 4]     = intensity;
                        rawData[i * 4 + 1] = intensity;
                        rawData[i * 4 + 2] = intensity;
                        rawData[i * 4 + 3] = 255;
                    }

                    // BUGFIX: copy row by row, honoring the bitmap stride. The previous
                    // single Marshal.Copy of Stride * Height bytes over-reads rawData
                    // (ArgumentOutOfRangeException) and misaligns rows whenever the
                    // stride includes padding beyond Width * 4 bytes.
                    for (int y = 0; y < description.Height; y++)
                    {
                        System.Runtime.InteropServices.Marshal.Copy(rawData, y * rowBytes, imageDataPtr + y * imageData.Stride, rowBytes);
                    }
                    outputImage.UnlockBits(imageData);

                    // Test: a little brightness boost for the IR sensor output.
                    // 8.0f works from half a meter, as stated in the Kinect v2 documentation.
                    this.pictureBoxInfraredCamera.Image = drawGestureStaticRectabgle(AdjustBrightness(outputImage, 8.0f));
                }
            }
        }
Exemple #4
0
 private void InfraredFrameReader_FrameArrived(InfraredFrameReference infraredFrameReference)
 {
     // Acquire and dispose the infrared frame; a null result means it expired.
     using (InfraredFrame infraredFrame = infraredFrameReference.AcquireFrame())
     {
         if (infraredFrame == null)
         {
             return;
         }

         FrameDescription description = infraredFrame.FrameDescription;

         // Lock the Kinect buffer, copy the 16-bit IR data into a pooled image,
         // then post it on the stream with the pipeline-adjusted timestamp.
         using (KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
         using (var dest = ImagePool.GetOrCreate(description.Width, description.Height, Imaging.PixelFormat.Gray_16bpp))
         {
             uint byteCount = (uint)(description.Width * description.Height * 2); // 2 bytes per Gray_16bpp pixel
             infraredFrame.CopyFrameDataToIntPtr(dest.Resource.ImageData, byteCount);
             var time = this.pipeline.GetCurrentTimeFromElapsedTicks(infraredFrameReference.RelativeTime.Ticks);
             this.InfraredImage.Post(dest, time);
         }
     }
 }
        /// <summary>
        /// Draws infrared image data from the specified frame.
        /// </summary>
        /// <param name="frameReference">The reference to the infrared frame that should be used.</param>
        private void useIRImage(InfraredFrameReference frameReference)
        {
            // Actually acquire the frame here and check that it was properly acquired;
            // dispose it when done since it too is disposable.
            InfraredFrame frame = frameReference.AcquireFrame();

            if (frame != null)
            {
                using (frame)
                {
                    // Next get the frame's description and create an output bitmap image.
                    FrameDescription description = frame.FrameDescription;
                    Bitmap outputImage = new Bitmap(description.Width, description.Height, PixelFormat.Format32bppArgb);

                    // Lock the bitmap so its raw pixel data can be written directly.
                    System.Drawing.Imaging.BitmapData imageData = outputImage.LockBits(new Rectangle(0, 0, outputImage.Width, outputImage.Height),
                                                                                       ImageLockMode.WriteOnly, outputImage.PixelFormat);
                    IntPtr imageDataPtr = imageData.Scan0;
                    int    rowBytes     = description.Width * 4; // 4 bytes per BGRA pixel

                    // Copy the IR data, shift each 16-bit sample down to a byte and
                    // invert it around 128, writing the same value to B, G and R to
                    // produce an (inverted) grayscale image.
                    frame.CopyFrameDataToArray(this.rawIRPixelData);
                    byte[] rawData = new byte[description.Width * description.Height * 4];
                    int    i       = 0;
                    foreach (ushort point in this.rawIRPixelData)
                    {
                        byte value = (byte)(128 - (point >> 8));
                        rawData[i++] = value;
                        rawData[i++] = value;
                        rawData[i++] = value;
                        rawData[i++] = 255;
                    }

                    // BUGFIX: copy row by row, honoring the bitmap stride. The previous
                    // single Marshal.Copy of Stride * Height bytes over-reads rawData
                    // (ArgumentOutOfRangeException) and misaligns rows whenever the
                    // stride includes padding beyond Width * 4 bytes.
                    for (int y = 0; y < description.Height; y++)
                    {
                        System.Runtime.InteropServices.Marshal.Copy(rawData, y * rowBytes, imageDataPtr + y * imageData.Stride, rowBytes);
                    }
                    outputImage.UnlockBits(imageData);

                    // Finally, the image is set for the preview picture box.
                    this.previewPictureBox.Image = outputImage;
                }
            }
        }
        void frameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            try
            {
                // Acquire the multi-source frame; a null result means it expired.
                MultiSourceFrame frame = e.FrameReference.AcquireFrame();
                if (frame == null)
                {
                    return;
                }

                try
                {
                    // Dispatch to the renderer matching the currently selected image type,
                    // passing the appropriate per-stream frame reference.
                    switch (this.imageType)
                    {
                    case ImageType.Color:
                        useRGBAImage(frame.ColorFrameReference);
                        break;

                    case ImageType.Depth:
                        useDepthImage(frame.DepthFrameReference);
                        break;

                    case ImageType.IR:
                        useIRImage(frame.InfraredFrameReference);
                        break;
                    }
                }
                catch (Exception)
                {
                    // Don't worry about exceptions for this demonstration.
                }
            }
            catch (Exception)
            {
                // Don't worry about exceptions for this demonstration.
            }
        }
Exemple #7
0
        private void RenderFrame(object sender, DoWorkEventArgs e)
        {
            // The worker argument carries the infrared frame reference to render.
            var args = (ProcessInfraredFrameEventArgs)e.Argument;

            using (InfraredFrame frame = args.frame.AcquireFrame())
            {
                // A null frame means it expired before acquisition; produce no result.
                if (frame == null)
                {
                    return;
                }

                try
                {
                    // Package the frame data as the background worker's result.
                    e.Result = new IFrame(frame);
                }
                catch (Exception)
                {
                    // NOTE(review): failures are deliberately swallowed; the worker
                    // simply yields no result for this frame.
                }
            }
        }
Exemple #8
0
        /// <summary>
        /// Handles multi-source frames: renders the stream matching the selected
        /// image type, then always processes body data.
        /// </summary>
        /// <param name="sender">Object sending the event.</param>
        /// <param name="e">Event arguments carrying the frame reference.</param>
        void multiSourceFrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrameReference msFrameReference = e.FrameReference;

            try
            {
                MultiSourceFrame msFrame = msFrameReference.AcquireFrame();

                // A null frame means it expired before it could be acquired.
                if (msFrame != null)
                {
                    LongExposureInfraredFrameReference leirFrameReference = msFrame.LongExposureInfraredFrameReference;
                    InfraredFrameReference             irFrameReference   = msFrame.InfraredFrameReference;
                    ColorFrameReference colorFrameReference = msFrame.ColorFrameReference;
                    DepthFrameReference depthFrameReference = msFrame.DepthFrameReference;
                    BodyFrameReference  bodyFrameReference  = msFrame.BodyFrameReference;

                    // Render only the stream matching the currently selected image type.
                    switch (this.imageType)
                    {
                    case ImageType.Color:
                        useColorFrame(colorFrameReference);
                        break;

                    case ImageType.Depth:
                        useDepthFrame(depthFrameReference);
                        break;

                    case ImageType.IR:
                        useIRFrame(irFrameReference);
                        break;

                    case ImageType.LEIR:
                        useLIRFrame(leirFrameReference);
                        break;
                    }

                    // Body data is processed on every frame regardless of image type.
                    useBodyFrame(bodyFrameReference);
                    //updatePulse(colorFrameReference, irFrameReference, bodyFrameReference);
                }
            }
            catch (Exception ex)
            {
                // BUGFIX: the exception variable was declared but never used (CS0168)
                // and the failure was silently discarded. Frames legitimately expire,
                // so we only log here rather than crash the reader callback.
                Console.WriteLine(ex.Message);
            }
        }
        /// <summary>
        /// Handles the multisource frame data arriving from the sensor.
        /// Converts the depth, infrared and (optionally) color frames to EMGU images
        /// and raises <c>OnEmguArgsProcessed</c> with the results.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private unsafe void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // Create instance of EMGUargs which holds the output of data from the kinect
            EMGUargs emguArgs = new EMGUargs();
            MultiSourceFrameReference frameReference = e.FrameReference;
            // Variables initialized to null for easy check of camera failures and so
            // the finally block below can dispose whatever was actually acquired.
            MultiSourceFrame multiSourceFrame = null;
            InfraredFrame    infraredFrame    = null;
            ColorFrame       colorFrame       = null;
            DepthFrame       depthFrame       = null;

            // Acquire frame from the Kinect
            multiSourceFrame = frameReference.AcquireFrame();

            // If the Frame has expired by the time we process this event, return.
            if (multiSourceFrame == null)
            {
                return;
            }
            try
            {
                /*
                 * DepthSpacePoint dp = new DepthSpacePoint
                 * {
                 *  X = 50,
                 *  Y = 20
                 * };
                 * DepthSpacePoint[] dps = new DepthSpacePoint[] { dp };
                 * ushort[] depths = new ushort[] { 2000 };
                 * CameraSpacePoint[] ameraSpacePoints = new CameraSpacePoint[1];
                 *
                 * mapper.MapDepthPointsToCameraSpace(dps, depths, ameraSpacePoints);
                 */
                InfraredFrameReference infraredFrameReference = multiSourceFrame.InfraredFrameReference;
                infraredFrame = infraredFrameReference.AcquireFrame();

                DepthFrameReference depthFrameReference = multiSourceFrame.DepthFrameReference;
                depthFrame = depthFrameReference.AcquireFrame();

                // Check whether needed frames are avaliable
                // (the finally block disposes whichever of the two was acquired).
                if (infraredFrame == null || depthFrame == null)
                {
                    return;
                }

                // the fastest way to process the depth frame data is to directly access
                // the underlying buffer
                using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                {
                    // verify data and write the new depth frame data to the display bitmap
                    if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) ==
                         (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)))
                    {
                        // Conversion to needed EMGU image
                        Mat depthImage = this.ProcessDepthFrameData(depthFrame);

                        emguArgs.DepthImage          = depthImage;
                        emguArgs.DepthFrameDimension = new FrameDimension(depthFrameDescription.Width, depthFrameDescription.Height);
                    }

                    //BgrToDephtPixel(depthBuffer.UnderlyingBuffer, depthBuffer.Size);

                    // Dispose eagerly and null out so the finally block skips it.
                    depthFrame.Dispose();
                    depthFrame = null;
                }

                // IR image
                FrameDescription infraredFrameDescription = infraredFrame.FrameDescription;

                // the fastest way to process the infrared frame data is to directly access
                // the underlying buffer
                using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
                {
                    // verify data and write the new infrared frame data to the display bitmap
                    if (((this.infraredFrameDescription.Width * this.infraredFrameDescription.Height) == (infraredBuffer.Size / this.infraredFrameDescription.BytesPerPixel)))
                    {
                        // Conversion to needed EMGU image
                        Mat infraredImage = this.ProcessInfaredFrameData(infraredFrame);
                        emguArgs.InfraredImage          = infraredImage;
                        emguArgs.InfraredFrameDimension = new FrameDimension(infraredFrameDescription.Width, infraredFrameDescription.Height);
                        //  infraredImage.Dispose();
                    }
                    // Dispose eagerly and null out so the finally block skips it.
                    infraredFrame.Dispose();
                    infraredFrame = null;

                    // Check as to whether or not the color image is needed for mainwindow view
                    if (generateColorImage)
                    {
                        ColorFrameReference colorFrameReference = multiSourceFrame.ColorFrameReference;
                        colorFrame = colorFrameReference.AcquireFrame();
                        // NOTE(review): returning here skips OnEmguArgsProcessed even though
                        // depth and IR were already converted — confirm this is intended.
                        if (colorFrame == null)
                        {
                            return;
                        }

                        // color image
                        FrameDescription colorFrameDescription = colorFrame.FrameDescription;

                        // the fastest way to process the color frame data is to directly access
                        // the underlying buffer
                        using (Microsoft.Kinect.KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                        {
                            // Conversion to needed EMGU image
                            Mat colorImage = this.ProcessColorFrameData(colorFrame);
                            emguArgs.Colorimage          = colorImage;
                            emguArgs.ColorFrameDimension = new FrameDimension(colorFrameDescription.Width, colorFrameDescription.Height);
                        }
                        // We're done with the colorFrame
                        colorFrame.Dispose();
                        colorFrame = null;
                    }
                }
                // Call the processing finished event for the conversion to EMGU images
                OnEmguArgsProcessed(emguArgs);
            }
            catch (Exception ex)
            {
                // ignore if the frame is no longer available
                Console.WriteLine("FRAME CHRASHED: " + ex.ToString());
            }
            finally
            {
                // generate event at send writeable bitmaps for each frame, and cleanup.
                // only generate event if the mainwindow is shown.

                // DepthFrame, ColorFrame are Disposable.
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                    colorFrame = null;
                }
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                    depthFrame = null;
                }
                // infraredFrame is Disposable
                if (infraredFrame != null)
                {
                    infraredFrame.Dispose();
                    infraredFrame = null;
                }
                // NOTE(review): multiSourceFrame is only nulled, not disposed —
                // presumably it is not IDisposable in this SDK version; verify.
                if (multiSourceFrame != null)
                {
                    multiSourceFrame = null;
                }
            }
        }
    private void HandleInfraredFrame(InfraredFrameReference reference) {
      // While the task is in stand-by, reset the stopwatch and skip the frame.
      if (Task.StandBy) { InfraredWatch.Reset(); return; }

      InfraredWatch.Again();
      using (var frame = reference.AcquireFrame()) {
        // A null frame means it expired before acquisition.
        // NOTE(review): this early return skips InfraredWatch.Stop() below —
        // confirm the watch is intended to keep running in that case.
        if (frame == null) return;

        // Copy the raw IR pixels and record the capture time.
        frame.CopyFrameDataToArray(Infrared.Pixels);
        Infrared.Stamp.Time = System.DateTime.Now;
      }
      InfraredWatch.Stop();
    }
Exemple #11
0
        /// <summary>
        /// Experimental pulse estimation: samples the color and IR intensity at a
        /// point midway between the tracked body's head and neck, keeps rolling
        /// windows of recent hue/IR values, and updates the pulse label.
        /// </summary>
        /// <param name="colorFrameReference">Unused here; color is read from the cached bitmap.</param>
        /// <param name="irFrameReference">Reference used to copy IR pixel data.</param>
        /// <param name="bodyFrameReference">Reference used to locate the tracked body.</param>
        void updatePulse(ColorFrameReference colorFrameReference, InfraredFrameReference irFrameReference, BodyFrameReference bodyFrameReference)
        {
            // Current wall-clock time in milliseconds, used for pulse-rate timing.
            long currentTime = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;

            // IR frame dimensions; stay 0 if no IR frame was acquired this call.
            int width  = 0;
            int height = 0;

            try
            {
                InfraredFrame IRFrame = irFrameReference.AcquireFrame();

                if (IRFrame != null)
                {
                    using (IRFrame)
                    {
                        width  = IRFrame.FrameDescription.Width;
                        height = IRFrame.FrameDescription.Height;

                        IRFrame.CopyFrameDataToArray(this.irImagePixelData);
                    }
                }
            }
            catch (Exception er)
            {
                string message = er.Message;
                Console.WriteLine(message);
                // Don't worry about empty frames.
            }
            try
            {
                // Only proceed when a specific body index has been selected for tracking.
                if (this.bodyToTrack > -1)
                {
                    BodyFrame bodyFrame = bodyFrameReference.AcquireFrame();

                    if (bodyFrame != null)
                    {
                        using (bodyFrame)
                        {
                            bodyFrame.GetAndRefreshBodyData(this.bodies);

                            Body body = this.bodies[this.bodyToTrack];
                            if (body.IsTracked)
                            {
                                CameraSpacePoint headPosition = body.Joints[JointType.Head].Position;
                                CameraSpacePoint neckPosition = body.Joints[JointType.Neck].Position;

                                // Midpoint between head and neck in camera space.
                                float centerX = neckPosition.X - headPosition.X;
                                centerX = headPosition.X + (centerX / 2.0f);

                                float centerY = neckPosition.Y - headPosition.Y;
                                centerY = headPosition.Y + (centerY / 2.0f);

                                // Remap to [0, 1] — assumes camera-space coordinates lie
                                // in [-1, 1]; TODO confirm against the coordinate mapper.
                                centerX += 1.0f;
                                centerX /= 2.0f;

                                centerY += 1.0f;
                                centerY /= 2.0f;

                                if (this.colorImageBitmap != null)
                                {
                                    // Sample the color pixel at the normalized midpoint.
                                    Color c = this.colorImageBitmap.GetPixel((int)(centerX * this.colorImageBitmap.Width), (int)(centerY * this.colorImageBitmap.Height));

                                    // Keep a rolling window of the last 10 hue samples.
                                    hueValues.Enqueue(c.GetHue());
                                    if (hueValues.Count > 10)
                                    {
                                        hueValues.Dequeue();
                                    }

                                    if (hueValues.Count >= 10)
                                    {
                                        //this.pulseLabel.Text = "Pulse: " + ((float)c.GetHue() / (float)hueValues.Average());
                                        // A hue above the rolling average is counted as a beat.
                                        // NOTE(review): when pulses == 0 the division below yields
                                        // float Infinity (no exception) — confirm intended display.
                                        if (c.GetHue() > hueValues.Average())
                                        {
                                            this.pulseLabel.Text = "Pulse: " + ((float)(currentTime - lastTime) / (float)pulses);
                                            //this.pulseLabel.Text = "Pulse: 1";
                                            pulses += 1;
                                        }
                                        // Reset the beat counter every 5 seconds.
                                        if (currentTime - lastTime > 1000 * 5)
                                        {
                                            lastTime = currentTime;
                                            pulses   = 0;
                                        }
                                        Console.WriteLine("Hue Average: " + hueValues.Average());
                                    }
                                }

                                if (width > 0 && height > 0)
                                {
                                    // Sample the IR intensity at the same normalized midpoint
                                    // (row-major index: x + y * width).
                                    ushort irValue = this.irImagePixelData[(int)(centerX * width) + (int)(centerY * height) * width];

                                    // Keep a rolling window of the last 10 IR samples.
                                    irValues.Enqueue(irValue);
                                    if (irValues.Count > 10)
                                    {
                                        irValues.Dequeue();
                                    }

                                    if (irValues.Count >= 10)
                                    {
                                        //Console.WriteLine("IR Average: " + irValues.Average());
                                    }
                                }

                                //Console.WriteLine("Color: " + c.R + ", " + c.G + ", " + c.B);
                                //Console.WriteLine("Position:" + centerX + ", " + centerY);
                            }
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                string message = ex.Message;
                Console.WriteLine(message);
                // Don't worry about empty frames.
            }
        }
Exemple #12
0
        /// <summary>
        /// Device-specific implementation of Update.
        /// Updates data buffers of all active channels with data of current frame.
        /// </summary>
        /// <remarks>This method is implicitely called by <see cref="Camera.Update"/> inside a camera lock.</remarks>
        /// <seealso cref="Camera.Update"/>
        protected override void UpdateImpl()
        {
            // TODO: This method could yield rather asynchronous channels. If necessary: Try to find a mechanism that updates frames that are already fetched when waiting for others that are not yet available.
            MultiSourceFrame multiSourceFrame       = null;
            bool             bodyIndexRequired      = IsChannelActive(CustomChannelNames.BodyIndex);
            bool             depthRequired          = IsChannelActive(ChannelNames.Distance) || IsChannelActive(ChannelNames.Point3DImage);
            bool             amplitudeRequired      = IsChannelActive(ChannelNames.Amplitude);
            bool             colorRequired          = IsChannelActive(ChannelNames.Color);
            bool             longExposureIRRequired = IsChannelActive(CustomChannelNames.LongExposureIR);

            do
            {
                if (!dataAvailable.WaitOne(UpdateTimeoutMilliseconds))
                {
                    throw ExceptionBuilder.BuildFromID(typeof(MetriCam2Exception), this, 005);
                }

                lock (newFrameLock)
                {
                    try
                    {
                        if (multiFrameReference != null)
                        {
                            multiSourceFrame = multiFrameReference.AcquireFrame();
                        }
                    }
                    catch (Exception)
                    {
                        // ignore if the frame is no longer available
                        continue;// throw
                    }
                }

                try
                {
                    // fetch depth?
                    if (depthRequired)
                    {
                        DepthFrameReference depthFrameReference = multiSourceFrame.DepthFrameReference;
                        if (depthFrameReference != null)
                        {
                            // always synchornize on depth frames if possible.
                            if (lastTimeStamp == GetAbsoluteTimeStamp(depthFrameReference.RelativeTime.Ticks))
                            {
                                continue;
                            }
                            using (DepthFrame depthFrame = depthFrameReference.AcquireFrame())
                            {
                                if (depthFrame == null)
                                {
                                    continue;
                                }

                                depthFrameDescription = depthFrame.FrameDescription;
                                int depthWidth  = depthFrameDescription.Width;
                                int depthHeight = depthFrameDescription.Height;
                                if ((depthWidth * depthHeight) == this.depthFrameData.Length)
                                {
                                    lock (this.depthFrameData)
                                    {
                                        depthFrame.CopyFrameDataToArray(this.depthFrameData);
                                        lastTimeStamp  = GetAbsoluteTimeStamp(depthFrameReference.RelativeTime.Ticks);
                                        timestampDepth = lastTimeStamp;
                                    }
                                    depthRequired = false;
                                }
                            }
                        }
                    }

                    // fetch IR?
                    if (amplitudeRequired)
                    {
                        InfraredFrameReference irFrameReference = multiSourceFrame.InfraredFrameReference;
                        if (irFrameReference != null)
                        {
                            // If depth data is inactive, synchronize on IR frames. If depth and IR are inactive, we synchronize on color frames.
                            if (!(IsChannelActive(ChannelNames.Distance) || IsChannelActive(ChannelNames.Point3DImage)) && lastTimeStamp == GetAbsoluteTimeStamp(irFrameReference.RelativeTime.Ticks))
                            {
                                continue;
                            }

                            using (InfraredFrame irFrame = irFrameReference.AcquireFrame())
                            {
                                if (irFrame == null)
                                {
                                    continue;
                                }

                                FrameDescription irFrameDescription = irFrame.FrameDescription;
                                int irWidth  = irFrameDescription.Width;
                                int irHeight = irFrameDescription.Height;
                                if ((irWidth * irHeight) == this.irFrameData.Length)
                                {
                                    lock (this.irFrameData)
                                    {
                                        irFrame.CopyFrameDataToArray(this.irFrameData);
                                        lastTimeStamp = GetAbsoluteTimeStamp(irFrameReference.RelativeTime.Ticks);
                                        timestampIR   = lastTimeStamp;
                                    }
                                    amplitudeRequired = false;
                                }
                            }
                        }
                    }

                    // (always) fetch body frame
                    BodyFrameReference bodyFrameReference = multiSourceFrame.BodyFrameReference;
                    if (bodyFrameReference != null)
                    {
                        using (BodyFrame bodyFrame = bodyFrameReference.AcquireFrame())
                        {
                            if (bodyFrame != null)
                            {
                                this.bodies = new Body[bodyFrame.BodyCount];
                                using (bodyFrame)
                                {
                                    bodyFrame.GetAndRefreshBodyData(this.bodies);
                                }
                            }
                            else
                            {
                                // TODO: check if channel is activated.
                            }
                        }
                    }

                    // fetch color?
                    if (colorRequired)
                    {
                        ColorFrameReference colorFrameReference = multiSourceFrame.ColorFrameReference;
                        if (colorFrameReference == null)
                        {
                            continue;
                        }
                        // If depth and IR data is inactive, synchronize on color frames. If color, depth and IR are inactive, we don't care for synchronization.
                        if (!(IsChannelActive(ChannelNames.Distance) || IsChannelActive(ChannelNames.Point3DImage) || IsChannelActive(ChannelNames.Amplitude)) && lastTimeStamp == GetAbsoluteTimeStamp(colorFrameReference.RelativeTime.Ticks))
                        {
                            continue;
                        }

                        using (ColorFrame colorFrame = colorFrameReference.AcquireFrame())
                        {
                            //FrameDescription colorFrameDescription = colorFrame.FrameDescription;
                            //int cWidth = colorFrameDescription.Width;
                            //int cHeight = colorFrameDescription.Width;
                            if (colorFrame != null)
                            {
                                using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                                {
                                    lock (this.colorFrameData)
                                    {
                                        colorFrame.CopyConvertedFrameDataToArray(this.colorFrameData, ColorImageFormat.Bgra);
                                        lastTimeStamp  = GetAbsoluteTimeStamp(colorFrameReference.RelativeTime.Ticks);
                                        timestampColor = lastTimeStamp;
                                    }
                                }
                                colorRequired = false;
                            }
                        }
                    }

                    // fetch long exposure IR? (this is independent of the IR images and are acquired at the same rate, so every new frame also
                    // has one of these.)
                    if (longExposureIRRequired)
                    {
                        LongExposureInfraredFrameReference longExposureIRFrameRef = multiSourceFrame.LongExposureInfraredFrameReference;
                        using (LongExposureInfraredFrame longIRFrame = longExposureIRFrameRef.AcquireFrame())
                        {
                            if (longIRFrame == null)
                            {
                                continue;
                            }

                            int longIRWidth  = longIRFrame.FrameDescription.Width;
                            int longIRHeight = longIRFrame.FrameDescription.Height;
                            if (longExposureIRData == null || (longIRWidth * longIRHeight) != longExposureIRData.Length)
                            {
                                longExposureIRData = new ushort[longIRWidth * longIRHeight];
                            }
                            longIRFrame.CopyFrameDataToArray(longExposureIRData);
                            longExposureIRRequired = false;
                        }
                    }

                    // fetch body index frames?
                    if (bodyIndexRequired)
                    {
                        BodyIndexFrameReference bodyIndexFrameRef = multiSourceFrame.BodyIndexFrameReference;
                        using (BodyIndexFrame bodyIndexFrame = bodyIndexFrameRef.AcquireFrame())
                        {
                            if (bodyIndexFrame == null)
                            {
                                log.Debug("bodyIndexFrame is NULL.");
                                continue;
                            }

                            int bodyIndexWidth  = bodyIndexFrame.FrameDescription.Width;
                            int bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;
                            if (bodyIndexData == null || (bodyIndexWidth * bodyIndexHeight) != bodyIndexData.Length)
                            {
                                bodyIndexData = new byte[bodyIndexWidth * bodyIndexHeight];
                            }
                            bodyIndexFrame.CopyFrameDataToArray(bodyIndexData);
                            bodyIndexRequired = false;
                        }
                    }
                }
                catch (Exception)
                {
                    // ignore if the frame is no longer available
                }
                finally
                {
                    multiSourceFrame = null;
                }
            } while (depthRequired || colorRequired || bodyIndexRequired || longExposureIRRequired || amplitudeRequired);
        }
Exemple #13
0
        /// <summary>
        /// Handles a newly arrived multi-source frame: renders the color, depth and IR
        /// streams, then crops a fixed rectangle of the color image as the gesture preview.
        /// </summary>
        /// <param name="sender">The frame reader raising the event.</param>
        /// <param name="e">Event args carrying the multi-source frame reference.</param>
        void frameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            try
            {
                MultiSourceFrame frame = e.FrameReference.AcquireFrame();
                if (frame == null)
                {
                    // Frame expired before we could acquire it — skip this event.
                    return;
                }

                try
                {
                    // Dispatch each stream of the multi-source frame to its renderer.
                    useRGBAImage(frame.ColorFrameReference);
                    ShowDepthImage(frame.DepthFrameReference);
                    ShowIRImage(frame.InfraredFrameReference);

                    // NOTE: joint-based gesture localization (tracking the right hand via
                    // the body frame) was removed because computing the hand position per
                    // frame was too expensive; instead a static rectangle of the color
                    // image is cropped as the gesture region.

                    Bitmap img = this.pictureBoxCameraColor.Image as Bitmap;
                    if (img == null)
                    {
                        // Color image not rendered yet (or not a Bitmap) — nothing to crop.
                        return;
                    }

                    // Fixed crop rectangle: a region right of and around the image center.
                    Rectangle rec = new Rectangle(
                        img.Width / 2 + img.Width / 8 + 2,
                        img.Height / 2 - img.Height / 4 + 2,
                        img.Width / 8 + img.Width / 8 / 8 - 4,
                        img.Height / 4 - 4);
                    Bitmap target = new Bitmap(rec.Width, rec.Height);

                    using (Graphics g = Graphics.FromImage(target))
                    {
                        // Scale the cropped source region into the target bitmap.
                        g.DrawImage(img, new Rectangle(0, 0, target.Width, target.Height),
                                    rec,
                                    GraphicsUnit.Pixel);
                    }

                    this.pictureBoxGesture.Image = target;
                }
                catch (Exception ex)
                {
                    // The frame data or UI control may vanish mid-update; log instead of
                    // silently swallowing so real defects remain visible.
                    Console.WriteLine(ex.Message);
                }
            }
            catch (Exception ex)
            {
                // AcquireFrame can throw if the frame is no longer available; log and skip.
                Console.WriteLine(ex.Message);
            }
        }
Exemple #14
0
        /// <summary>
        /// Update to get a new frame.
        /// This code is similar to the code in the Kinect SDK samples.
        /// Blocks until the frame-arrived event signals <c>dataAvailable</c>, then copies
        /// the depth, infrared and (optionally) body data into the static buffers.
        /// </summary>
        private static void Update()
        {
            // No sensor connected — nothing to update.
            if (!isConnected)
            {
                return;
            }

            // Wait until the frame-arrived handler signals that a new frame exists.
            dataAvailable.WaitOne();

            MultiSourceFrame multiSourceFrame = null;
            DepthFrame       depthFrame       = null;
            InfraredFrame    irFrame          = null;
            BodyFrame        bodyFrame        = null;

            // Serialize frame processing so the shared buffers are not written concurrently.
            lock (updateLock)
            {
                try
                {
                    if (frameReference != null)
                    {
                        multiSourceFrame = frameReference.AcquireFrame();

                        if (multiSourceFrame != null)
                        {
                            DepthFrameReference    depthFrameReference = multiSourceFrame.DepthFrameReference;
                            InfraredFrameReference irFrameReference    = multiSourceFrame.InfraredFrameReference;
                            BodyFrameReference     bodyFrameReference  = multiSourceFrame.BodyFrameReference;

                            depthFrame = depthFrameReference.AcquireFrame();
                            irFrame    = irFrameReference.AcquireFrame();

                            // Only proceed when BOTH depth and IR frames were acquired;
                            // a partial pair is discarded for this update cycle.
                            if ((depthFrame != null) && (irFrame != null))
                            {
                                FrameDescription depthFrameDescription = depthFrame.FrameDescription;
                                FrameDescription irFrameDescription    = irFrame.FrameDescription;

                                int depthWidth  = depthFrameDescription.Width;
                                int depthHeight = depthFrameDescription.Height;
                                int irWidth     = irFrameDescription.Width;
                                int irHeight    = irFrameDescription.Height;

                                // verify data and write the new registered frame data to the display bitmap
                                // (sizes must match the preallocated buffers exactly, else skip the copy)
                                if (((depthWidth * depthHeight) == depthFrameData.Length) &&
                                    ((irWidth * irHeight) == irFrameData.Length))
                                {
                                    depthFrame.CopyFrameDataToArray(depthFrameData);
                                    irFrame.CopyFrameDataToArray(irFrameData);
                                }

                                if (bodyFrameReference != null)
                                {
                                    bodyFrame = bodyFrameReference.AcquireFrame();

                                    if (bodyFrame != null)
                                    {
                                        // Grow the bodies array only when needed; reuse otherwise.
                                        if (bodies == null || bodies.Length < bodyFrame.BodyCount)
                                        {
                                            bodies = new Body[bodyFrame.BodyCount];
                                        }
                                        // NOTE(review): this using block disposes bodyFrame here AND the
                                        // finally block disposes it again below — redundant but harmless
                                        // if Dispose is idempotent; consider removing one of the two.
                                        using (bodyFrame)
                                        {
                                            bodyFrame.GetAndRefreshBodyData(bodies);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
                catch (Exception)
                {
                    // ignore if the frame is no longer available
                }
                finally
                {
                    // Always release the acquired frames so the sensor can recycle them.
                    if (depthFrame != null)
                    {
                        depthFrame.Dispose();
                        depthFrame = null;
                    }

                    if (irFrame != null)
                    {
                        irFrame.Dispose();
                        irFrame = null;
                    }
                    if (bodyFrame != null)
                    {
                        bodyFrame.Dispose();
                        bodyFrame = null;
                    }
                    // NOTE(review): nulling a local in finally is a no-op (MultiSourceFrame
                    // has no Dispose to call); kept as-is to preserve the original code.
                    if (multiSourceFrame != null)
                    {
                        multiSourceFrame = null;
                    }
                }
            }
        }
        /// <summary>
        /// Copies the pixels of the referenced infrared frame into the shared buffers,
        /// renders them into the bitmap, and refreshes the preview picture box.
        /// </summary>
        /// <param name="irFrameReference">Reference to the infrared frame to consume.</param>
        void useIRFrame(InfraredFrameReference irFrameReference)
        {
            try
            {
                // Acquire inside the using statement so the frame is always released.
                using (InfraredFrame frame = irFrameReference.AcquireFrame())
                {
                    if (frame == null)
                    {
                        return;
                    }

                    FrameDescription desc = frame.FrameDescription;

                    // Copy the raw IR samples and render them into the shared bitmap.
                    frame.CopyFrameDataToArray(this.irImagePixelData);
                    this.updateBitmap(desc.Width, desc.Height, this.irImagePixelData, false);

                    // Snapshot the same frame's pixels into the "old" buffer —
                    // presumably kept for frame-to-frame comparison; confirm with callers.
                    frame.CopyFrameDataToArray(this.irImagePixelDataOld);

                    // Rescale the rendered bitmap to fit the preview control.
                    this.pictureBox1.Image = new Bitmap(this.colorImageBitmap, this.pictureBox1.Width, this.pictureBox1.Height);
                }
            }
            catch (Exception er)
            {
                // Don't worry about empty frames — just log and continue.
                string message = er.Message;
                Console.WriteLine(message);
            }
        }
        /// <summary>
        /// Heuristic pulse estimation: samples the color hue (and IR intensity) at a point
        /// between the tracked body's head and neck, keeps a sliding window of the last 10
        /// samples, and counts a "pulse" whenever the current hue exceeds the window average.
        /// </summary>
        /// <param name="colorFrameReference">Unused here — the hue is read from the already rendered <c>colorImageBitmap</c>.</param>
        /// <param name="irFrameReference">Reference used to grab the current IR pixel data.</param>
        /// <param name="bodyFrameReference">Reference used to locate the tracked body's head/neck joints.</param>
        void updatePulse(ColorFrameReference colorFrameReference, InfraredFrameReference irFrameReference, BodyFrameReference bodyFrameReference)
        {
            // Current wall-clock time in milliseconds (used for the pulse-rate window).
            long currentTime = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;

            // IR frame dimensions; stay 0 if no IR frame could be acquired this call.
            int width = 0;
            int height = 0;
            try
            {
                InfraredFrame IRFrame = irFrameReference.AcquireFrame();

                if (IRFrame != null)
                {
                    using (IRFrame)
                    {
                        width = IRFrame.FrameDescription.Width;
                        height = IRFrame.FrameDescription.Height;

                        IRFrame.CopyFrameDataToArray(this.irImagePixelData);
                    }
                }
            }
            catch (Exception er)
            {
                string message = er.Message;
                Console.WriteLine(message);
                // Don't worry about empty frames.
            }
            try
            {
                // Only sample when a body has been selected for tracking (index >= 0).
                if (this.bodyToTrack > -1)
                {
                    BodyFrame bodyFrame = bodyFrameReference.AcquireFrame();

                    if (bodyFrame != null)
                    {
                        using (bodyFrame)
                        {
                            bodyFrame.GetAndRefreshBodyData(this.bodies);

                            Body body = this.bodies[this.bodyToTrack];
                            if (body.IsTracked)
                            {
                                CameraSpacePoint headPosition = body.Joints[JointType.Head].Position;
                                CameraSpacePoint neckPosition = body.Joints[JointType.Neck].Position;

                                // Midpoint between head and neck (camera-space meters).
                                float centerX = neckPosition.X - headPosition.X;
                                centerX = headPosition.X + (centerX / 2.0f);

                                float centerY = neckPosition.Y - headPosition.Y;
                                centerY = headPosition.Y + (centerY / 2.0f);

                                // NOTE(review): this maps camera-space coordinates to [0,1]
                                // by assuming they lie in [-1,1] — a rough approximation,
                                // not a proper CoordinateMapper projection; TODO confirm.
                                centerX += 1.0f;
                                centerX /= 2.0f;

                                centerY += 1.0f;
                                centerY /= 2.0f;

                                if (this.colorImageBitmap != null)
                                {
                                    // Sample the color pixel at the estimated neck/head midpoint.
                                    Color c = this.colorImageBitmap.GetPixel((int)(centerX * this.colorImageBitmap.Width), (int)(centerY * this.colorImageBitmap.Height));

                                    // Sliding window of the last 10 hue samples.
                                    hueValues.Enqueue(c.GetHue());
                                    if (hueValues.Count > 10)
                                    {
                                        hueValues.Dequeue();
                                    }

                                    if (hueValues.Count >= 10)
                                    {
                                        //this.pulseLabel.Text = "Pulse: " + ((float)c.GetHue() / (float)hueValues.Average());
                                        // Count a pulse when the current hue is above the window average.
                                        if (c.GetHue() > hueValues.Average())
                                        {
                                            // NOTE(review): this shows elapsed-ms / pulse-count, not BPM,
                                            // and divides by `pulses` which is reset to 0 below — verify
                                            // the intended pulse-rate formula.
                                            this.pulseLabel.Text = "Pulse: " + ((float)(currentTime - lastTime) / (float)pulses);
                                            //this.pulseLabel.Text = "Pulse: 1";
                                            pulses += 1;
                                        }
                                        // Restart the counting window every 5 seconds.
                                        if (currentTime - lastTime > 1000 * 5)
                                        {
                                            lastTime = currentTime;
                                            pulses = 0;
                                        }
                                        Console.WriteLine("Hue Average: " + hueValues.Average());
                                    }
                                }

                                if (width > 0 && height > 0)
                                {
                                    // Same midpoint sampled in the IR image (row-major index).
                                    ushort irValue = this.irImagePixelData[(int)(centerX * width) + (int)(centerY * height) * width];

                                    // Sliding window of the last 10 IR samples (currently unused).
                                    irValues.Enqueue(irValue);
                                    if (irValues.Count > 10)
                                    {
                                        irValues.Dequeue();
                                    }

                                    if (irValues.Count >= 10)
                                    {
                                        //Console.WriteLine("IR Average: " + irValues.Average());
                                    }
                                }

                                //Console.WriteLine("Color: " + c.R + ", " + c.G + ", " + c.B);
                                //Console.WriteLine("Position:" + centerX + ", " + centerY);
                            }
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                string message = ex.Message;
                Console.WriteLine(message);
                // Don't worry about empty frames.
            }
        }
        /// <summary>
        /// Draws infrared image data from the specified frame as a grayscale bitmap
        /// in the preview picture box.
        /// </summary>
        /// <param name="frameReference">The reference to the infrared frame that should be used.</param>
        private void useIRImage(InfraredFrameReference frameReference)
        {
            // The frame is disposable, so acquire and use it inside a using statement.
            using (InfraredFrame frame = frameReference.AcquireFrame())
            {
                if (frame == null)
                {
                    return;
                }

                // Build an output bitmap matching the frame's dimensions.
                FrameDescription description = frame.FrameDescription;
                int width = description.Width;
                int height = description.Height;
                Bitmap outputImage = new Bitmap(width, height, PixelFormat.Format32bppArgb);

                // Lock the bitmap so raw BGRA bytes can be written straight into it.
                System.Drawing.Imaging.BitmapData imageData = outputImage.LockBits(
                    new Rectangle(0, 0, outputImage.Width, outputImage.Height),
                    ImageLockMode.WriteOnly, outputImage.PixelFormat);
                int size = imageData.Stride * outputImage.Height;

                // Copy the 16-bit IR samples, shift each down to a byte, and replicate the
                // value across B, G and R (alpha = 255) to produce a grayscale image, since
                // the raw values are too large for direct display.
                frame.CopyFrameDataToArray(this.rawIRPixelData);
                byte[] rawData = new byte[width * height * 4];
                for (int px = 0; px < this.rawIRPixelData.Length; px++)
                {
                    byte value = (byte)(128 - (this.rawIRPixelData[px] >> 8));
                    int offset = px * 4;
                    rawData[offset] = value;
                    rawData[offset + 1] = value;
                    rawData[offset + 2] = value;
                    rawData[offset + 3] = 255;
                }

                // Blit the converted bytes into the bitmap and unlock it.
                System.Runtime.InteropServices.Marshal.Copy(rawData, 0, imageData.Scan0, size);
                outputImage.UnlockBits(imageData);

                // Finally, hand the image to the preview picture box.
                this.previewPictureBox.Image = outputImage;
            }
        }