Exemple #1
0
        /// <summary>
        /// Copies the latest color frame into <c>Color.Pixels</c> (as BGRA) and
        /// stamps the copy time, while timing the whole operation with ColorWatch.
        /// </summary>
        private void HandleColorFrame(ColorFrameReference reference)
        {
            // Stand-by mode: stop timing and ignore the frame entirely.
            if (Task.StandBy)
            {
                ColorWatch.Reset();
                return;
            }

            ColorWatch.Again();
            using (var frame = reference.AcquireFrame())
            {
                // The frame may already be gone if we were too slow to acquire it.
                if (frame == null)
                {
                    return;
                }

                // A raw copy is cheaper; only convert when the source is not BGRA.
                if (frame.RawColorImageFormat != ColorImageFormat.Bgra)
                {
                    frame.CopyConvertedFrameDataToArray(Color.Pixels, ColorImageFormat.Bgra);
                }
                else
                {
                    frame.CopyRawFrameDataToArray(Color.Pixels);
                }

                Color.Stamp.Time = System.DateTime.Now;
            }
            ColorWatch.Stop();
        }
Exemple #2
0
 /// <summary>
 /// Updates the bitmap from the supplied <c>ColorFrameReference</c>.
 /// The acquired frame (possibly null) is disposed once the update completes.
 /// </summary>
 public void Update(ColorFrameReference frameReference)
 {
     using (var acquiredFrame = frameReference.AcquireFrame())
     {
         Update(acquiredFrame);
     }
 }
 /// <summary>
 /// Updates the bitmap from the supplied <c>ColorFrameReference</c>.
 /// NOTE(review): this is byte-for-byte identical to
 /// <see cref="Update(ColorFrameReference)"/>; it is kept for backward
 /// compatibility and now delegates to it to remove the duplication.
 /// The name is misleading (nothing is returned) — consider obsoleting it.
 /// </summary>
 public void GetBitmap(ColorFrameReference frameReference)
 {
     Update(frameReference);
 }
Exemple #4
0
        /// <summary>
        /// Copies the color frame behind <paramref name="colorFrameReference"/> into
        /// <c>colorImagePixelData</c>, refreshes <c>colorImageBitmap</c> via
        /// <c>updateBitmap</c>, and shows a scaled copy in <c>pictureBox1</c>.
        /// </summary>
        void useColorFrame(ColorFrameReference colorFrameReference)
        {
            try
            {
                using (ColorFrame colorFrame = colorFrameReference.AcquireFrame())
                {
                    if (colorFrame == null)
                    {
                        return;
                    }

                    // Raw copy when the data is already BGRA, otherwise convert.
                    if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                    {
                        colorFrame.CopyRawFrameDataToArray(this.colorImagePixelData);
                    }
                    else
                    {
                        colorFrame.CopyConvertedFrameDataToArray(this.colorImagePixelData, ColorImageFormat.Bgra);
                    }

                    this.updateBitmap(colorFrame.FrameDescription.Width, colorFrame.FrameDescription.Height, PixelFormat.Format32bppArgb, this.colorImagePixelData);

                    // Dispose the previously displayed bitmap: GDI+ bitmaps hold
                    // unmanaged memory, and overwriting the Image property every
                    // frame without disposing leaks it.
                    var oldImage = this.pictureBox1.Image;
                    this.pictureBox1.Image = new Bitmap(this.colorImageBitmap, this.pictureBox1.Width, this.pictureBox1.Height);
                    oldImage?.Dispose();
                }
            }
            catch (Exception er)
            {
                // Frames can vanish between the reference and AcquireFrame;
                // log the message and move on (don't worry about empty frames).
                Console.WriteLine(er.Message);
            }
        }
Exemple #5
0
        /// <summary>
        /// Renders the incoming color frame as the background brush of the
        /// window canvas (BGRA pixels, 96 DPI bitmap source).
        /// </summary>
        private void _ColorFrameHandler(ColorFrameReference frameRef)
        {
            using (ColorFrame frame = frameRef.AcquireFrame())
            {
                // Frame already recycled — nothing to draw.
                if (frame == null)
                {
                    return;
                }

                int frameWidth  = frame.FrameDescription.Width;
                int frameHeight = frame.FrameDescription.Height;

                // 32-bit pixels: round BitsPerPixel up to whole bytes.
                int bytesPerPixel = (PixelFormats.Bgr32.BitsPerPixel + 7) / 8;
                byte[] pixelBuffer = new byte[frameWidth * frameHeight * bytesPerPixel];

                // Copy raw when already BGRA, otherwise let the SDK convert.
                if (frame.RawColorImageFormat == ColorImageFormat.Bgra)
                {
                    frame.CopyRawFrameDataToArray(pixelBuffer);
                }
                else
                {
                    frame.CopyConvertedFrameDataToArray(pixelBuffer, ColorImageFormat.Bgra);
                }

                int rowStride = frameWidth * PixelFormats.Bgra32.BitsPerPixel / 8;

                // Paint the canvas background with the freshly built bitmap.
                var backgroundBrush = new ImageBrush
                {
                    ImageSource = BitmapSource.Create(frameWidth, frameHeight, 96, 96, PixelFormats.Bgr32, null, pixelBuffer, rowStride)
                };
                _windowCanvas.Background = backgroundBrush;
            }
        }
Exemple #6
0
        /// <summary>
        /// Process the infrared frames and update UI
        /// </summary>
        public void OnColorFrameArrived(object sender, ColorFrameArrivedEventArgs e)
        {
            // Get the reference to the color frame
            ColorFrameReference colorRef = e.FrameReference;

            if (colorRef == null)
            {
                return;
            }

            // Acquire frame for specific reference
            ColorFrame frame = colorRef.AcquireFrame();

            // It's possible that we skipped a frame or it is already gone
            if (frame == null)
            {
                return;
            }


            //SALIENCY
            uint size = Convert.ToUInt32(frame.FrameDescription.Height * frame.FrameDescription.Width * 4);

            frame.CopyConvertedFrameDataToIntPtr(PinnedImageBuffer, size, ColorImageFormat.Bgra);

            using (frame)
            {
                // Get frame description
                FrameDescription frameDesc = frame.FrameDescription;

                // Check if width/height matches
                if (frameDesc.Width == _colorBitmap.PixelWidth && frameDesc.Height == _colorBitmap.PixelHeight)
                {
                    // Copy data to array based on image format
                    if (frame.RawColorImageFormat == ColorImageFormat.Bgra)
                    {
                        frame.CopyRawFrameDataToArray(_colorPixels);
                    }
                    else
                    {
                        frame.CopyConvertedFrameDataToArray(_colorPixels, ColorImageFormat.Bgra);
                    }

                    // Copy output to bitmap
                    _colorBitmap.Dispatcher.BeginInvoke(System.Windows.Threading.DispatcherPriority.Normal,
                                                        new Action(delegate()
                    {
                        _colorBitmap.WritePixels(
                            new Int32Rect(0, 0, frameDesc.Width, frameDesc.Height),
                            _colorPixels,
                            frameDesc.Width * _bytePerPixel,
                            0);
                    }));
                }

                frameReady.Set();
            }
        }
Exemple #7
0
        /// <summary>
        /// Copies each arriving color frame into the display bitmap, and every
        /// third frame (while <c>flag</c> is set) triggers AnalyzeImage().
        /// </summary>
        private void OnColorFrameArrived(object sender, ColorFrameArrivedEventArgs e)
        {
            ColorFrameReference colorRef = e.FrameReference;
            if (colorRef == null)
            {
                return;
            }

            // A null frame simply means we were too late; skip quietly.
            ColorFrame frame = colorRef.AcquireFrame();
            if (frame == null)
            {
                return;
            }

            using (frame)
            {
                FrameDescription frameDesc = frame.FrameDescription;

                // Draw only when dimensions match the backing bitmap.
                bool sizeMatches = frameDesc.Width == _colorBitmap.PixelWidth
                                   && frameDesc.Height == _colorBitmap.PixelHeight;
                if (sizeMatches)
                {
                    // Convert only when the raw data is not already BGRA.
                    if (frame.RawColorImageFormat != ColorImageFormat.Bgra)
                    {
                        frame.CopyConvertedFrameDataToArray(_colorPixels, ColorImageFormat.Bgra);
                    }
                    else
                    {
                        frame.CopyRawFrameDataToArray(_colorPixels);
                    }

                    _colorBitmap.WritePixels(
                        new Int32Rect(0, 0, frameDesc.Width, frameDesc.Height),
                        _colorPixels,
                        frameDesc.Width * _bytePerPixel,
                        0);
                }
            }

            // When enabled, analyze every third frame.
            if (flag)
            {
                i++;
                if (i == 3)
                {
                    AnalyzeImage();
                    i = 0;
                }
            }
        }
Exemple #8
0
        /// <summary>
        /// Process the infrared frames and update UI
        /// </summary>
        /// <summary>
        /// Queues saliency processing for an arriving color frame (the old
        /// summary wrongly said "infrared"). The frame reference is stashed in
        /// the <c>colorRef</c> field and consumed later by a thread-pool work
        /// item (<c>saliencyEngine</c>).
        /// NOTE(review): <c>colorRef</c> is shared mutable state — a newer event
        /// can overwrite it before the queued item runs; confirm this is intended.
        /// </summary>
        public void OnColorFrameArrived(object sender, ColorFrameArrivedEventArgs e)
        {
            // Get the reference to the color frame
            colorRef = e.FrameReference;

            if (colorRef == null)
            {
                return;
            }


            ThreadPool.QueueUserWorkItem(saliencyEngine);
        }
Exemple #9
0
        /// <summary>
        /// BackgroundWorker handler: wraps the acquired color frame in a VFrame
        /// and returns it via <c>e.Result</c> (left null when no frame arrived).
        /// </summary>
        private void RenderFrame(object sender, DoWorkEventArgs e)
        {
            // Unpack the worker argument and pull the color frame reference from it.
            var args = (ProcessFrameEventArgs)e.Argument;
            ColorFrameReference reference = args.frame;

            using (ColorFrame frame = reference.AcquireFrame())
            {
                // A missing frame leaves e.Result unset (null).
                if (frame == null)
                {
                    return;
                }

                e.Result = new VFrame(frame);
            }
        }
        //public Thread StartRecognitionThread(Image img, uint frameID, Action<Dictionary<string, float>> callback)
        //{
        //    var t = new Thread(() => ThreadRecognition(img, frameID, callback));
        //    t.Priority = ThreadPriority.Highest;
        //    t.IsBackground = true;
        //    t.Start();
        //    return t;
        //}

        //private void ThreadRecognition(Image img, uint frameID, Action<Dictionary<string, float>> callback)
        //{
        //    if (img != null)
        //    {
        //        var probabilities = new Dictionary<string, float>();
        //        var rec = new Rectangle(img.Width / 2 + img.Width / 8 + 2, img.Height / 2 - img.Height / 4 + 2, img.Width / 4 - 4, img.Height / 4 - 4);
        //        var gesture = new Bitmap(rec.Width, rec.Height);

        //        using (var g = Graphics.FromImage(gesture))
        //        {
        //            g.DrawImage(img, new Rectangle(0, 0, gesture.Width, gesture.Height),
        //                                rec,
        //                                GraphicsUnit.Pixel);
        //        }

        //        var tmpGesturePath = Path.Combine("tmp", $"image_thread_{frameID}");

        //        gesture.Save(tmpGesturePath, ImageFormat.Jpeg);

        //        Tensor imageTensor = ImageIO.ReadTensorFromImageFile(tmpGesturePath, 299, 299, 250.0f, 1.0f / 250.0f);

        //        var inceptionGraph = new Inception(null, new string[] { this.GraphPath, this.LabelsPath }, null, "Mul", "final_result");

        //        float[] probability = inceptionGraph.Recognize(imageTensor);

        //        for (int index = 0; index < probability.Length; ++index)
        //        {
        //            probabilities.Add(inceptionGraph.Labels[index], probability[index]);
        //        }

        //        callback(probabilities);
        //    }
        //}


        /// <summary>
        /// Метод отрисовки обычной камеры из цветного фрейма
        /// </summary>
        /// <param name="frameReference">Ссылка на цветной фрейм.</param>
        /// <summary>
        /// Renders the plain RGB camera picture from a color frame into the
        /// color picture box, and keeps a clone in <c>sharedImage</c>.
        /// (Comments translated from Russian; frame disposal and bitmap unlock
        /// are now guaranteed on all paths.)
        /// </summary>
        /// <param name="frameReference">Reference to the color frame from the sensor.</param>
        private void useRGBAImage(ColorFrameReference frameReference)
        {
            // Acquire the frame inside a using so it is disposed even when
            // LockBits or the pixel copy throws (previously only the copy was covered).
            using (ColorFrame frame = frameReference.AcquireFrame())
            {
                if (frame == null)
                {
                    // No frame between two sensor ticks — nothing to draw.
                    return;
                }

                // Build the output bitmap for the color picture box from the frame description.
                FrameDescription description = frame.FrameDescription;
                var outputImage = new Bitmap(description.Width, description.Height, PixelFormat.Format32bppArgb);

                // Lock the bitmap bits for writing and compute the destination size.
                BitmapData imageData = outputImage.LockBits(new Rectangle(0, 0, outputImage.Width, outputImage.Height),
                                                            ImageLockMode.WriteOnly, outputImage.PixelFormat);
                try
                {
                    IntPtr imageDataPtr = imageData.Scan0;
                    int    size         = imageData.Stride * outputImage.Height;

                    // Copy the image data into the buffer — remember this is BGRA layout.
                    if (frame.RawColorImageFormat == ColorImageFormat.Bgra)
                    {
                        frame.CopyRawFrameDataToIntPtr(imageDataPtr, (uint)size);
                    }
                    else
                    {
                        frame.CopyConvertedFrameDataToIntPtr(imageDataPtr, (uint)size, ColorImageFormat.Bgra);
                    }
                }
                finally
                {
                    // Always unlock, even if the copy throws, so the bitmap stays usable.
                    outputImage.UnlockBits(imageData);
                }

                var img = drawGestureStaticRectabgle(outputImage);

                this.pictureBoxCameraColor.Image = img;

                this.sharedImage = img.Clone() as Image;
            }
        }
        /// <summary>
        /// Color reader event handler: copies the arriving frame's pixels into
        /// the display bitmap when the dimensions match.
        /// (Comments translated from French.)
        /// </summary>
        void LecteurCouleur_FrameArrived(object sender, ColorFrameArrivedEventArgs e)
        {
            // Grab the color frame reference from the event arguments.
            ColorFrameReference colorRef = e.FrameReference;
            if (colorRef == null)
            {
                return;
            }

            // Acquire the frame behind the reference; it may already be gone
            // if we landed between two frames.
            ColorFrame frame = colorRef.AcquireFrame();
            if (frame == null)
            {
                return;
            }

            using (frame)
            {
                FrameDescription frameDesc = frame.FrameDescription;

                // Only copy when the frame matches the display bitmap dimensions.
                if (frameDesc.Width != BitmapCouleur.PixelWidth || frameDesc.Height != BitmapCouleur.PixelHeight)
                {
                    return;
                }

                // Copy the data according to the image format.
                if (frame.RawColorImageFormat == ColorImageFormat.Bgra)
                {
                    frame.CopyRawFrameDataToArray(pixels);
                }
                else
                {
                    frame.CopyConvertedFrameDataToArray(pixels, ColorImageFormat.Bgra);
                }

                // Push the pixels into the bitmap bound to the display image.
                BitmapCouleur.WritePixels(
                    new Int32Rect(0, 0, frameDesc.Width, frameDesc.Height),
                    pixels,
                    frameDesc.Width * octetsParPixel,
                    0);
            }
        }
Exemple #12
0
        /// <summary>
        /// Acquires color data from the <see cref="multiSourceFrame"/>.
        /// </summary>
        /// <returns>
        /// Returns true if color data was acquired correctly, false otherwise.
        /// </returns>
        public bool AcquireColorData()
        {
            ColorFrameReference colorFrameReference = this.multiSourceFrame.ColorFrameReference;

            using (ColorFrame colorFrame = colorFrameReference.AcquireFrame())
            {
                if (colorFrame == null)
                {
                    this.colorData = null;
                    Log.Warn("The frame does not contain color data.");
                    return(false);
                }

                FrameDescription colorFrameDescription = colorFrame.FrameDescription;
                this.colorData = new byte[colorFrameDescription.LengthInPixels * BYTES_PER_PIXEL];
                colorFrame.CopyConvertedFrameDataToArray(this.colorData, ColorImageFormat.Rgba);
                return(true);
            }
        }
        /// <summary>
        /// Multi-source frame handler: dispatches the frame to the color, depth
        /// or IR renderer depending on the selected image type. Exceptions are
        /// deliberately swallowed for this demonstration.
        /// </summary>
        void frameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // A frame can disappear between the event and the acquire call.
            try
            {
                MultiSourceFrame frame = e.FrameReference.AcquireFrame();
                if (frame == null)
                {
                    return;
                }

                try
                {
                    // Route the frame reference to the handler matching the
                    // currently selected image type.
                    switch (this.imageType)
                    {
                    case ImageType.Color:
                        useRGBAImage(frame.ColorFrameReference);
                        break;

                    case ImageType.Depth:
                        useDepthImage(frame.DepthFrameReference);
                        break;

                    case ImageType.IR:
                        useIRImage(frame.InfraredFrameReference);
                        break;
                    }
                }
                catch (Exception)
                {
                    // Don't worry about exceptions for this demonstration.
                }
            }
            catch (Exception)
            {
                // Don't worry about exceptions for this demonstration.
            }
        }
        /// <summary>
        /// Draws color image data from the specified frame.
        /// </summary>
        /// <param name="frameReference">The reference to the color frame that should be used.</param>
        private void useRGBAImage(ColorFrameReference frameReference)
        {
            // Actually aquire the frame here and check that it was properly aquired, and use it again since it too is disposable.
            ColorFrame frame = frameReference.AcquireFrame();

            if (frame != null)
            {
                Bitmap outputImage = null;
                System.Drawing.Imaging.BitmapData imageData = null;
                // Next get the frame's description and create an output bitmap image.
                FrameDescription description = frame.FrameDescription;
                outputImage = new Bitmap(description.Width, description.Height, PixelFormat.Format32bppArgb);

                // Next, we create the raw data pointer for the bitmap, as well as the size of the image's data.
                imageData = outputImage.LockBits(new Rectangle(0, 0, outputImage.Width, outputImage.Height),
                                                 ImageLockMode.WriteOnly, outputImage.PixelFormat);
                IntPtr imageDataPtr = imageData.Scan0;
                int    size         = imageData.Stride * outputImage.Height;

                using (frame)
                {
                    // After this, we copy the image data directly to the buffer.  Note that while this is in BGRA format, it will be flipped due
                    // to the endianness of the data.
                    if (frame.RawColorImageFormat == ColorImageFormat.Bgra)
                    {
                        frame.CopyRawFrameDataToIntPtr(imageDataPtr, (uint)size);
                    }
                    else
                    {
                        frame.CopyConvertedFrameDataToIntPtr(imageDataPtr, (uint)size, ColorImageFormat.Bgra);
                    }
                }
                // Finally, unlock the output image's raw data again and create a new bitmap for the preview picture box.
                outputImage.UnlockBits(imageData);
                this.previewPictureBox.Image = outputImage;
            }
            else
            {
                Console.WriteLine("Lost frame");
            }
        }
Exemple #15
0
        /// <summary>
        /// Multi-source frame handler: extracts every per-source reference once,
        /// dispatches the selected image type to its renderer, and always
        /// processes body data.
        /// </summary>
        void multiSourceFrameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrameReference msFrameReference = e.FrameReference;

            try
            {
                MultiSourceFrame msFrame = msFrameReference.AcquireFrame();
                if (msFrame == null)
                {
                    return;
                }

                LongExposureInfraredFrameReference leirFrameReference = msFrame.LongExposureInfraredFrameReference;
                InfraredFrameReference             irFrameReference   = msFrame.InfraredFrameReference;
                ColorFrameReference colorFrameReference = msFrame.ColorFrameReference;
                DepthFrameReference depthFrameReference = msFrame.DepthFrameReference;
                BodyFrameReference  bodyFrameReference  = msFrame.BodyFrameReference;

                // Dispatch on the image type currently selected in the UI.
                switch (this.imageType)
                {
                case ImageType.Color:
                    useColorFrame(colorFrameReference);
                    break;

                case ImageType.Depth:
                    useDepthFrame(depthFrameReference);
                    break;

                case ImageType.IR:
                    useIRFrame(irFrameReference);
                    break;

                case ImageType.LEIR:
                    useLIRFrame(leirFrameReference);
                    break;
                }

                // Body data is always processed, regardless of the image type.
                useBodyFrame(bodyFrameReference);
                //updatePulse(colorFrameReference, irFrameReference, bodyFrameReference);
            }
            catch (Exception)
            {
                // Frames may vanish mid-acquire; skipping one frame is harmless,
                // so the exception is intentionally swallowed. (The unused 'ex'
                // local was removed to silence the compiler warning.)
            }
        }
Exemple #16
0
 /// <summary>
 /// Converts an arriving Kinect color frame into a pooled shared image, posts it
 /// on the <c>ColorImage</c> emitter, and returns an additional reference to the
 /// caller via <paramref name="colorImage"/>.
 /// </summary>
 /// <param name="colorFrameReference">Reference from which the color frame is acquired; may yield null.</param>
 /// <param name="colorImage">Receives an AddRef'd handle to the posted image, or null when no frame was available.</param>
 private void ColorFrameReader_FrameArrived(ColorFrameReference colorFrameReference, out Shared <Image> colorImage)
 {
     colorImage = null;
     using (ColorFrame colorFrame = colorFrameReference.AcquireFrame())
     {
         if (colorFrame != null)
         {
             FrameDescription colorFrameDescription = colorFrame.FrameDescription;
             // NOTE(review): the locked raw buffer is not referenced by the copy
             // below (CopyConvertedFrameDataToIntPtr reads from the frame itself) —
             // confirm whether LockRawImageBuffer is actually required here.
             using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
             {
                 // Pooled image is 32bpp; the frame is converted to BGRA on copy,
                 // hence width * height * 4 bytes.
                 using (var sharedImage = ImagePool.GetOrCreate(colorFrameDescription.Width, colorFrameDescription.Height, Imaging.PixelFormat.BGRX_32bpp))
                 {
                     colorFrame.CopyConvertedFrameDataToIntPtr(sharedImage.Resource.ImageData, (uint)(colorFrameDescription.Width * colorFrameDescription.Height * 4), ColorImageFormat.Bgra);
                     // AddRef keeps the pooled image alive for the caller after
                     // the using block releases this local reference.
                     colorImage = sharedImage.AddRef();
                     // Map the sensor's relative timestamp onto pipeline time.
                     var time = this.pipeline.GetCurrentTimeFromElapsedTicks(colorFrame.RelativeTime.Ticks);
                     this.ColorImage.Post(sharedImage, time);
                 }
             }
         }
     }
 }
Exemple #17
0
        /// <summary>
        /// Thread-pool work item: copies the stashed color frame into the pinned
        /// buffer, runs the visual-saliency update, and records the salient point
        /// in a 10-slot ring buffer.
        /// </summary>
        void saliencyEngine(object state)
        {
            // Serialize work items; several may be queued per frame.
            lock (lockThis)
            {
                // Another queued work item may already have consumed and cleared
                // colorRef; bail out instead of dereferencing null.
                ColorFrameReference reference = colorRef;
                if (reference == null)
                {
                    return;
                }

                using (ColorFrame frame = reference.AcquireFrame())
                {
                    // It's possible that we skipped a frame or it is already gone
                    if (frame == null)
                    {
                        return;
                    }

                    size = Convert.ToUInt32(frame.FrameDescription.Height * frame.FrameDescription.Width * 4);
                    frame.CopyConvertedFrameDataToIntPtr(kinect.Camera.PinnedImageBuffer, size, ColorImageFormat.Bgra);

                    // The explicit frame.Dispose() was removed: the using block
                    // already disposes the frame; disposing twice was redundant.
                    colorRef = null;

                    UpdateVisualSaliency(vs, kinect.Camera.PinnedImageBuffer);
                    Spoint = GetSalientPoint(vs);

                    // Ring buffer of the last 10 salient points.
                    saliency[currentId % 10] = Spoint;
                    if (currentId == 10)
                    {
                        currentId = 0;
                    }
                    else
                    {
                        currentId++;
                    }

                    saliencySecondsTimer_Tick();
                }
            }
        }
        /// <summary>
        /// Copies the color and depth pixels of an arriving multi-source frame
        /// into the shared image buffers, guarded by <c>rawDataLock</c>.
        /// Frames are disposed on every path; vanished frames are ignored.
        /// </summary>
        private void ProcessFrameData(MultiSourceFrameArrivedEventArgs e)
        {
            MultiSourceFrameReference frameReference = e.FrameReference;
            DepthFrame depthFrame = null;
            ColorFrame colorFrame = null;

            try
            {
                MultiSourceFrame multiSourceFrame = frameReference.AcquireFrame();
                if (multiSourceFrame == null)
                {
                    return;
                }

                // Guard the shared pixel buffers while copying.
                lock (rawDataLock)
                {
                    colorFrame = multiSourceFrame.ColorFrameReference.AcquireFrame();
                    depthFrame = multiSourceFrame.DepthFrameReference.AcquireFrame();

                    // Both frames must be present before any copy happens.
                    if (colorFrame == null || depthFrame == null)
                    {
                        return;
                    }

                    // Copy color only when the buffer length matches exactly.
                    FrameDescription colorDesc = colorFrame.FrameDescription;
                    if (colorDesc.Width * colorDesc.Height * sizeof(int) == colorImagePixels.Length)
                    {
                        colorFrame.CopyConvertedFrameDataToArray(colorImagePixels, ColorImageFormat.Bgra);
                    }

                    // Same size check for the depth buffer.
                    FrameDescription depthDesc = depthFrame.FrameDescription;
                    if (depthDesc.Width * depthDesc.Height == depthImagePixels.Length)
                    {
                        depthFrame.CopyFrameDataToArray(depthImagePixels);
                    }
                }
            }
            catch (Exception)
            {
                // ignore if the frame is no longer available
            }
            finally
            {
                // DepthFrame and ColorFrame are IDisposable; release them on all paths.
                depthFrame?.Dispose();
                colorFrame?.Dispose();
            }
        }
        /// <summary>
        /// Handles the color frame data arriving from the sensor: updates the FPS
        /// label at most once per second, writes the pixels into the display
        /// bitmap, and optionally hands the frame off to the RGB recorder.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_FrameArrivedColour(object sender, ColorFrameArrivedEventArgs e)
        {
            ColorFrameReference frameReference = e.FrameReference;

            // Record the wall-clock start time on the first frame.
            if (this.startTime == 0)
            {
                this.startTime = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;
            }
            try
            {
                ColorFrame frame = frameReference.AcquireFrame();
                if (frame != null)
                {
                    // ColorFrame is IDisposable
                    using (frame)
                    {
                        this.framesSinceUpdate++;
                        FrameDescription frameDescription = frame.FrameDescription;

                        // Update the status at most once per second.
                        if (DateTime.Now >= this.nextStatusUpdate)
                        {
                            // calculate fps based on frames seen since the last update
                            if (this.stopwatch.IsRunning)
                            {
                                this.stopwatch.Stop();
                                int fps        = (int)(this.framesSinceUpdate / this.stopwatch.Elapsed.TotalSeconds) / 2;
                                label2.Content = "FPS: " + fps;
                                this.stopwatch.Reset();
                            }
                            this.nextStatusUpdate = DateTime.Now + TimeSpan.FromSeconds(1);
                        }
                        if (!this.stopwatch.IsRunning)
                        {
                            this.framesSinceUpdate = 0;
                            this.stopwatch.Start();
                        }

                        // verify data and write the new color frame data to the display bitmap
                        if ((frameDescription.Width == bitmapRGB.PixelWidth) && (frameDescription.Height == bitmapRGB.PixelHeight))
                        {
                            if (frame.RawColorImageFormat == ColorImageFormat.Bgra)
                            {
                                frame.CopyRawFrameDataToArray(this.pixels);
                            }
                            else
                            {
                                frame.CopyConvertedFrameDataToArray(this.pixels, ColorImageFormat.Bgra);
                            }

                            int    width     = frameDescription.Width;
                            int    height    = frameDescription.Height;
                            int    bpp       = (int)frameDescription.BytesPerPixel;
                            // NOTE(review): the data copied above is BGRA, yet Rgba is what
                            // was historically passed to ToColorBitmapQ — confirm which
                            // format the recorder actually expects.
                            var    format    = ColorImageFormat.Rgba;
                            // Reference the shared pixel buffer directly; the old code
                            // allocated a throw-away array here and immediately replaced
                            // the reference with this.pixels. (Unused locals 'stride',
                            // 'writebpp' and 'milliseconds' were removed as well.)
                            byte[] pixelData = this.pixels;

                            bitmapRGB.WritePixels(
                                new Int32Rect(0, 0, frameDescription.Width, frameDescription.Height),
                                this.pixels,
                                frameDescription.Width * this.bytesPerPixel,
                                0);

                            //save rgb if it meets the criteria
                            if (rgbRecO == true)// && frameCountRGB == downSample)
                            {
                                Task.Factory.StartNew(() =>
                                {
                                    new RGBRecord().ToColorBitmapQ(height, width, bpp, format, pixelData);
                                });
                                frameCountRGB = 0;
                            }

                            if (frameCountRGB == downSample)
                            {
                                frameCountRGB = 0;
                            }
                            frameCountRGB = frameCountRGB + 1;
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                // ignore if the frame is no longer available
            }
        }
    /// <summary>
    /// Copies the latest color frame into Color.Pixels (as BGRA), stamping the
    /// copy time and timing the operation with ColorWatch.
    /// </summary>
    private void HandleColorFrame(ColorFrameReference reference)
    {
      // Stand-by mode: stop timing and ignore the frame.
      if (Task.StandBy)
      {
        ColorWatch.Reset();
        return;
      }

      ColorWatch.Again();
      using (var frame = reference.AcquireFrame())
      {
        if (frame == null)
        {
          return;
        }

        // Raw copy when already BGRA; otherwise ask the SDK to convert.
        if (frame.RawColorImageFormat != ColorImageFormat.Bgra)
        {
          frame.CopyConvertedFrameDataToArray(Color.Pixels, ColorImageFormat.Bgra);
        }
        else
        {
          frame.CopyRawFrameDataToArray(Color.Pixels);
        }

        Color.Stamp.Time = System.DateTime.Now;
      }
      ColorWatch.Stop();
    }
Exemple #21
0
        /// <summary>
        /// Acquires the color frame, copies its pixels into the shared BGRA buffer,
        /// rebuilds the image-frame-data bitmap, and raises ImageFrameArrived.
        /// </summary>
        /// <param name="colorFrameReference">Reference used to acquire the frame.</param>
        private void HandleColorFrame(ColorFrameReference colorFrameReference)
        {
            // ColorFrame is IDisposable; a null frame simply means it was skipped.
            using (ColorFrame frame = colorFrameReference.AcquireFrame())
            {
                if (frame == null)
                {
                    return;
                }

                // Raw copy when the sensor already delivers BGRA; convert otherwise.
                bool alreadyBgra = frame.RawColorImageFormat == ColorImageFormat.Bgra;
                if (alreadyBgra)
                {
                    frame.CopyRawFrameDataToArray(this.colorImageData);
                }
                else
                {
                    frame.CopyConvertedFrameDataToArray(this.colorImageData, ColorImageFormat.Bgra);
                }

                FrameDescription description = frame.FrameDescription;
                this.ifd.createBitmap(description.Width, description.Height, PixelFormat.Format32bppArgb, this.colorImageData);
                this.ImageFrameArrived(this, this.ifd);
            }
        }
        /// <summary>
        /// Process each color frame: copy its pixels into the display bitmap and,
        /// while recording, save a JPEG snapshot of the frame.
        /// </summary>
        /// <param name="sender">The color frame reader raising the event.</param>
        /// <param name="e">Event arguments carrying the frame reference.</param>
        /// <remarks>
        /// async void is acceptable here only because this is a top-level event handler.
        /// </remarks>
        private async void OnColorFrameArrived(object sender, ColorFrameArrivedEventArgs e)
        {
            // Get the reference to the color frame
            ColorFrameReference colorRef = e.FrameReference;

            if (colorRef == null)
            {
                return;
            }

            int width = 0;
            int height = 0;
            bool copied = false;

            // It's possible that we skipped a frame or it is already gone
            using (ColorFrame frame = colorRef.AcquireFrame())
            {
                if (frame == null)
                {
                    return;
                }

                // Get frame description
                FrameDescription frameDesc = frame.FrameDescription;

                // Check if width/height matches the display bitmap
                if (frameDesc.Width == _colorBitmap.PixelWidth && frameDesc.Height == _colorBitmap.PixelHeight)
                {
                    width = frameDesc.Width;
                    height = frameDesc.Height;

                    // Copy data to array based on image format
                    if (frame.RawColorImageFormat == ColorImageFormat.Bgra)
                    {
                        frame.CopyRawFrameDataToArray(_colorPixels);
                    }
                    else
                    {
                        frame.CopyConvertedFrameDataToArray(_colorPixels, ColorImageFormat.Bgra);
                    }

                    copied = true;
                }
            }
            // BUG FIX: the ColorFrame is now disposed *before* any awaits below, so the
            // sensor is not starved of frame buffers while the JPEG write is pending.
            // (Previously the frame was held open across the await.)

            if (!copied)
            {
                return;
            }

            // Copy output to bitmap
            _colorBitmap.WritePixels(
                new Int32Rect(0, 0, width, height),
                _colorPixels,
                width * _bytePerPixel,
                0);

            // Save image when recording
            if (_isRecording)
            {
                // Snapshot the pixel buffer so the next frame cannot overwrite it
                // while the asynchronous save is still running.
                byte[] imageData = new byte[_colorPixels.Length];
                Array.Copy(_colorPixels, imageData, _colorPixels.Length);

                // Save the image in the local folder
                await ImageProcessor.SaveJpegAsync(imageData, width, height, width * _bytePerPixel, TemporaryFolder.Text, string.Format("{0}_{1:000000}", _recordingID, _sequenceNr));

                // Increment the sequence number
                _sequenceNr++;
            }
        }
Exemple #23
0
        /// <summary>
        /// Handles the color frame data arriving from the sensor: updates the FPS
        /// status line, copies the pixels into the display bitmap, and (when
        /// recording is enabled and the down-sample counter matches) dumps the raw
        /// BGRA buffer to a timestamped .bin file on a background task.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private void Reader_FrameArrivedColour(object sender, ColorFrameArrivedEventArgs e)
        {
            ColorFrameReference frameReference = e.FrameReference;

            // Record the wall-clock start of the session on the first frame.
            if (this.startTime == 0)
            {
                this.startTime = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;
            }

            try
            {
                ColorFrame frame = frameReference.AcquireFrame();

                if (frame != null)
                {
                    // ColorFrame is IDisposable
                    using (frame)
                    {
                        this.framesSinceUpdate++;

                        FrameDescription frameDescription = frame.FrameDescription;

                        // update status unless last message is sticky for a while
                        if (DateTime.Now >= this.nextStatusUpdate)
                        {
                            // calculate fps based on last frame received
                            double fps = 0.0;

                            if (this.stopwatch.IsRunning)
                            {
                                this.stopwatch.Stop();
                                fps = (this.framesSinceUpdate / this.stopwatch.Elapsed.TotalSeconds) / 2;
                                this.stopwatch.Reset();
                            }

                            this.nextStatusUpdate = DateTime.Now + TimeSpan.FromSeconds(1);
                            // NOTE(review): stopwatch.Elapsed is zero right after Reset(), so the
                            // second format argument is effectively just startTime — confirm intent.
                            // (An unused local 'milliseconds' was also removed here.)
                            this.StatusText = string.Format(Properties.Resources.StandardStatusTextFormat, fps, this.startTime - this.stopwatch.Elapsed.TotalSeconds);
                        }

                        if (!this.stopwatch.IsRunning)
                        {
                            this.framesSinceUpdate = 0;
                            this.stopwatch.Start();
                        }

                        // verify data and write the new color frame data to the display bitmap
                        if ((frameDescription.Width == this.bitmapRGB.PixelWidth) && (frameDescription.Height == this.bitmapRGB.PixelHeight))
                        {
                            if (frame.RawColorImageFormat == ColorImageFormat.Bgra)
                            {
                                frame.CopyRawFrameDataToArray(this.pixels);
                            }
                            else
                            {
                                frame.CopyConvertedFrameDataToArray(this.pixels, ColorImageFormat.Bgra);
                            }

                            this.bitmapRGB.WritePixels(
                                new Int32Rect(0, 0, frameDescription.Width, frameDescription.Height),
                                this.pixels,
                                frameDescription.Width * this.bytesPerPixel,
                                0);

                            // save rgb if it meets the criteria
                            if (rgbRecO == true && frameCountRGB == downSample)
                            {
                                long milliseconds = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;

                                // NOTE(review): the lambda captures this.pixels, which the next frame
                                // overwrites; a torn image can be written if the task runs late —
                                // consider copying the buffer before queueing the save.
                                Task.Factory.StartNew(() =>
                                {
                                    string filePath = imageFolder + '\\' + "image" + milliseconds + ".bin";

                                    using (FileStream streamRGB = new FileStream(filePath, FileMode.Create))
                                    {
                                        using (BinaryWriter rgbWriter = new BinaryWriter(streamRGB))
                                        {
                                            // The redundant rgbWriter.Close() was removed; the
                                            // using block already disposes (and closes) the writer.
                                            rgbWriter.Write(this.pixels);
                                        }
                                    }
                                });

                                frameCountRGB = 0;

                                saveTimeStamp("time_rgb", milliseconds);
                            }

                            if (frameCountRGB == downSample)
                            {
                                frameCountRGB = 0;
                            }

                            frameCountRGB = frameCountRGB + 1;
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
                // ignore if the frame is no longer available
            }
        }
Exemple #24
0
        /// <summary>
        /// Experimental pulse estimator: samples the color pixel between the tracked
        /// body's head and neck joints each frame, tracks a rolling window of 10 hue
        /// values, and counts a "pulse" whenever the current hue exceeds the window
        /// average. Also samples the IR value at the same point into a rolling window.
        /// </summary>
        /// <param name="colorFrameReference">Unused here; color is read from this.colorImageBitmap.</param>
        /// <param name="irFrameReference">Source of the IR frame sampled at the head/neck midpoint.</param>
        /// <param name="bodyFrameReference">Source of the body data for joint positions.</param>
        void updatePulse(ColorFrameReference colorFrameReference, InfraredFrameReference irFrameReference, BodyFrameReference bodyFrameReference)
        {
            // Wall-clock time in ms, used for the pulse-rate window below.
            long currentTime = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;

            // IR frame dimensions; stay 0 if no IR frame was available this tick.
            int width  = 0;
            int height = 0;

            try
            {
                InfraredFrame IRFrame = irFrameReference.AcquireFrame();

                if (IRFrame != null)
                {
                    using (IRFrame)
                    {
                        width  = IRFrame.FrameDescription.Width;
                        height = IRFrame.FrameDescription.Height;

                        IRFrame.CopyFrameDataToArray(this.irImagePixelData);
                    }
                }
            }
            catch (Exception er)
            {
                string message = er.Message;
                Console.WriteLine(message);
                // Don't worry about empty frames.
            }
            try
            {
                // bodyToTrack is an index into this.bodies; -1 means no body selected.
                if (this.bodyToTrack > -1)
                {
                    BodyFrame bodyFrame = bodyFrameReference.AcquireFrame();

                    if (bodyFrame != null)
                    {
                        using (bodyFrame)
                        {
                            bodyFrame.GetAndRefreshBodyData(this.bodies);

                            Body body = this.bodies[this.bodyToTrack];
                            if (body.IsTracked)
                            {
                                CameraSpacePoint headPosition = body.Joints[JointType.Head].Position;
                                CameraSpacePoint neckPosition = body.Joints[JointType.Neck].Position;

                                // Midpoint between head and neck (camera space).
                                float centerX = neckPosition.X - headPosition.X;
                                centerX = headPosition.X + (centerX / 2.0f);

                                float centerY = neckPosition.Y - headPosition.Y;
                                centerY = headPosition.Y + (centerY / 2.0f);

                                // Remap to [0,1] — assumes camera-space coords lie in
                                // [-1,1]; TODO confirm, values outside that range would
                                // index past the bitmap/IR buffer below.
                                centerX += 1.0f;
                                centerX /= 2.0f;

                                centerY += 1.0f;
                                centerY /= 2.0f;

                                if (this.colorImageBitmap != null)
                                {
                                    Color c = this.colorImageBitmap.GetPixel((int)(centerX * this.colorImageBitmap.Width), (int)(centerY * this.colorImageBitmap.Height));

                                    // Rolling window of the last 10 hue samples.
                                    hueValues.Enqueue(c.GetHue());
                                    if (hueValues.Count > 10)
                                    {
                                        hueValues.Dequeue();
                                    }

                                    if (hueValues.Count >= 10)
                                    {
                                        //this.pulseLabel.Text = "Pulse: " + ((float)c.GetHue() / (float)hueValues.Average());
                                        // Heuristic: hue above the window average counts as a beat.
                                        // NOTE(review): when pulses == 0 this is a float division by
                                        // zero (Infinity displayed) — confirm intended.
                                        if (c.GetHue() > hueValues.Average())
                                        {
                                            this.pulseLabel.Text = "Pulse: " + ((float)(currentTime - lastTime) / (float)pulses);
                                            //this.pulseLabel.Text = "Pulse: 1";
                                            pulses += 1;
                                        }
                                        // Reset the counting window every 5 seconds.
                                        if (currentTime - lastTime > 1000 * 5)
                                        {
                                            lastTime = currentTime;
                                            pulses   = 0;
                                        }
                                        Console.WriteLine("Hue Average: " + hueValues.Average());
                                    }
                                }

                                if (width > 0 && height > 0)
                                {
                                    // Sample the IR pixel at (row = centerY*height, col = centerX*width).
                                    ushort irValue = this.irImagePixelData[(int)(centerX * width) + (int)(centerY * height) * width];

                                    // Rolling window of the last 10 IR samples (currently unused).
                                    irValues.Enqueue(irValue);
                                    if (irValues.Count > 10)
                                    {
                                        irValues.Dequeue();
                                    }

                                    if (irValues.Count >= 10)
                                    {
                                        //Console.WriteLine("IR Average: " + irValues.Average());
                                    }
                                }

                                //Console.WriteLine("Color: " + c.R + ", " + c.G + ", " + c.B);
                                //Console.WriteLine("Position:" + centerX + ", " + centerY);
                            }
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                string message = ex.Message;
                Console.WriteLine(message);
                // Don't worry about empty frames.
            }
        }
        /// <summary>
        /// Draws color image data from the specified frame.
        /// </summary>
        /// <param name="frameReference">The reference to the color frame that should be used.</param>
        private void useRGBAImage(ColorFrameReference frameReference)
        {
            // The frame is IDisposable; the using block guarantees it is released
            // even if the pixel copy throws.
            using (ColorFrame frame = frameReference.AcquireFrame())
            {
                if (frame == null)
                {
                    Console.WriteLine("Lost frame");
                    return;
                }

                // Create an output bitmap matching the frame dimensions.
                FrameDescription description = frame.FrameDescription;
                Bitmap outputImage = new Bitmap(description.Width, description.Height, PixelFormat.Format32bppArgb);

                // Lock the bitmap's raw pixel buffer for direct writing.
                System.Drawing.Imaging.BitmapData imageData = outputImage.LockBits(
                    new Rectangle(0, 0, outputImage.Width, outputImage.Height),
                    ImageLockMode.WriteOnly, outputImage.PixelFormat);
                try
                {
                    IntPtr imageDataPtr = imageData.Scan0;
                    int size = imageData.Stride * outputImage.Height;

                    // Copy the image data directly to the buffer.  Note that while this is
                    // in BGRA format, it will be flipped due to the endianness of the data.
                    if (frame.RawColorImageFormat == ColorImageFormat.Bgra)
                    {
                        frame.CopyRawFrameDataToIntPtr(imageDataPtr, (uint)size);
                    }
                    else
                    {
                        frame.CopyConvertedFrameDataToIntPtr(imageDataPtr, (uint)size, ColorImageFormat.Bgra);
                    }
                }
                finally
                {
                    // BUG FIX: unlock in finally so the bitmap is never left locked
                    // (and leaked) if the frame copy throws.
                    outputImage.UnlockBits(imageData);
                }

                // BUG FIX: dispose the previously displayed bitmap to avoid leaking
                // a GDI handle on every frame.
                Image oldImage = this.previewPictureBox.Image;
                this.previewPictureBox.Image = outputImage;
                if (oldImage != null)
                {
                    oldImage.Dispose();
                }
            }
        }
        /// <summary>
        /// Handles arrival of a multi-source frame. Only the color stream is
        /// consumed: it is forwarded to useRGBAImage for display. The body-tracking
        /// and TensorFlow gesture-recognition experiments that used to live here
        /// were dead (fully commented-out) code and have been removed.
        /// </summary>
        /// <param name="sender">The multi-source frame reader raising the event.</param>
        /// <param name="e">Event arguments carrying the frame reference.</param>
        void frameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            try
            {
                MultiSourceFrame frame = e.FrameReference.AcquireFrame();

                if (frame == null)
                {
                    // The frame was skipped or is already gone.
                    return;
                }

                try
                {
                    ColorFrameReference colorFrameReference = frame.ColorFrameReference;
                    useRGBAImage(colorFrameReference);
                }
                catch (Exception ex)
                {
                    // BUG FIX: was a silent swallow ('var s = ex;'); frames can expire
                    // mid-processing, so log and continue instead of hiding the error.
                    Console.WriteLine(ex.Message);
                }
            }
            catch (Exception ex)
            {
                // BUG FIX: was an empty catch; the frame reference itself can already
                // be gone — ignore, but log so failures are visible.
                Console.WriteLine(ex.Message);
            }
        }
        /// <summary>
        /// Experimental pulse estimator: samples the color pixel between the tracked
        /// body's head and neck joints each frame, tracks a rolling window of 10 hue
        /// values, and counts a "pulse" whenever the current hue exceeds the window
        /// average. Also samples the IR value at the same point into a rolling window.
        /// </summary>
        /// <param name="colorFrameReference">Unused here; color is read from this.colorImageBitmap.</param>
        /// <param name="irFrameReference">Source of the IR frame sampled at the head/neck midpoint.</param>
        /// <param name="bodyFrameReference">Source of the body data for joint positions.</param>
        void updatePulse(ColorFrameReference colorFrameReference, InfraredFrameReference irFrameReference, BodyFrameReference bodyFrameReference)
        {
            // Wall-clock time in ms, used for the pulse-rate window below.
            long currentTime = DateTime.Now.Ticks / TimeSpan.TicksPerMillisecond;

            // IR frame dimensions; stay 0 if no IR frame was available this tick.
            int width = 0;
            int height = 0;
            try
            {
                InfraredFrame IRFrame = irFrameReference.AcquireFrame();

                if (IRFrame != null)
                {
                    using (IRFrame)
                    {
                        width = IRFrame.FrameDescription.Width;
                        height = IRFrame.FrameDescription.Height;

                        IRFrame.CopyFrameDataToArray(this.irImagePixelData);
                    }
                }
            }
            catch (Exception er)
            {
                string message = er.Message;
                Console.WriteLine(message);
                // Don't worry about empty frames.
            }
            try
            {
                // bodyToTrack is an index into this.bodies; -1 means no body selected.
                if (this.bodyToTrack > -1)
                {
                    BodyFrame bodyFrame = bodyFrameReference.AcquireFrame();

                    if (bodyFrame != null)
                    {
                        using (bodyFrame)
                        {
                            bodyFrame.GetAndRefreshBodyData(this.bodies);

                            Body body = this.bodies[this.bodyToTrack];
                            if (body.IsTracked)
                            {
                                CameraSpacePoint headPosition = body.Joints[JointType.Head].Position;
                                CameraSpacePoint neckPosition = body.Joints[JointType.Neck].Position;

                                // Midpoint between head and neck (camera space).
                                float centerX = neckPosition.X - headPosition.X;
                                centerX = headPosition.X + (centerX / 2.0f);

                                float centerY = neckPosition.Y - headPosition.Y;
                                centerY = headPosition.Y + (centerY / 2.0f);

                                // Remap to [0,1] — assumes camera-space coords lie in
                                // [-1,1]; TODO confirm, values outside that range would
                                // index past the bitmap/IR buffer below.
                                centerX += 1.0f;
                                centerX /= 2.0f;

                                centerY += 1.0f;
                                centerY /= 2.0f;

                                if (this.colorImageBitmap != null)
                                {
                                    Color c = this.colorImageBitmap.GetPixel((int)(centerX * this.colorImageBitmap.Width), (int)(centerY * this.colorImageBitmap.Height));

                                    // Rolling window of the last 10 hue samples.
                                    hueValues.Enqueue(c.GetHue());
                                    if (hueValues.Count > 10)
                                    {
                                        hueValues.Dequeue();
                                    }

                                    if (hueValues.Count >= 10)
                                    {
                                        //this.pulseLabel.Text = "Pulse: " + ((float)c.GetHue() / (float)hueValues.Average());
                                        // Heuristic: hue above the window average counts as a beat.
                                        // NOTE(review): when pulses == 0 this is a float division by
                                        // zero (Infinity displayed) — confirm intended.
                                        if (c.GetHue() > hueValues.Average())
                                        {
                                            this.pulseLabel.Text = "Pulse: " + ((float)(currentTime - lastTime) / (float)pulses);
                                            //this.pulseLabel.Text = "Pulse: 1";
                                            pulses += 1;
                                        }
                                        // Reset the counting window every 5 seconds.
                                        if (currentTime - lastTime > 1000 * 5)
                                        {
                                            lastTime = currentTime;
                                            pulses = 0;
                                        }
                                        Console.WriteLine("Hue Average: " + hueValues.Average());
                                    }
                                }

                                if (width > 0 && height > 0)
                                {
                                    // Sample the IR pixel at (row = centerY*height, col = centerX*width).
                                    ushort irValue = this.irImagePixelData[(int)(centerX * width) + (int)(centerY * height) * width];

                                    // Rolling window of the last 10 IR samples (currently unused).
                                    irValues.Enqueue(irValue);
                                    if (irValues.Count > 10)
                                    {
                                        irValues.Dequeue();
                                    }

                                    if (irValues.Count >= 10)
                                    {
                                        //Console.WriteLine("IR Average: " + irValues.Average());
                                    }
                                }

                                //Console.WriteLine("Color: " + c.R + ", " + c.G + ", " + c.B);
                                //Console.WriteLine("Position:" + centerX + ", " + centerY);
                            }
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                string message = ex.Message;
                Console.WriteLine(message);
                // Don't worry about empty frames.
            }
        }
        /// <summary>
        /// Copies the color frame's pixels (converted to BGRA when needed) into the
        /// shared buffer, refreshes the backing bitmap, and shows a scaled copy in
        /// pictureBox1.
        /// </summary>
        /// <param name="colorFrameReference">Reference used to acquire the frame.</param>
        void useColorFrame(ColorFrameReference colorFrameReference)
        {
            try
            {
                using (ColorFrame colorFrame = colorFrameReference.AcquireFrame())
                {
                    if (colorFrame == null)
                    {
                        // Frame was skipped or is already gone.
                        return;
                    }

                    if (colorFrame.RawColorImageFormat == ColorImageFormat.Bgra)
                    {
                        colorFrame.CopyRawFrameDataToArray(this.colorImagePixelData);
                    }
                    else
                    {
                        colorFrame.CopyConvertedFrameDataToArray(this.colorImagePixelData, ColorImageFormat.Bgra);
                    }

                    this.updateBitmap(colorFrame.FrameDescription.Width, colorFrame.FrameDescription.Height, PixelFormat.Format32bppArgb, this.colorImagePixelData);

                    // BUG FIX: dispose the previously displayed scaled bitmap so a GDI
                    // handle is not leaked on every frame.
                    Image oldPreview = this.pictureBox1.Image;
                    this.pictureBox1.Image = new Bitmap(this.colorImageBitmap, this.pictureBox1.Width, this.pictureBox1.Height);
                    if (oldPreview != null)
                    {
                        oldPreview.Dispose();
                    }
                }
            }
            catch (Exception er)
            {
                string message = er.Message;
                Console.WriteLine(message);
                // Don't worry about empty frames.
            }
        }
        //Processes the Frame data from the Kinect camera into a player-masked RGB texture.
        //Since events are called synchronously, this would bottleneck and cause an issue with framerate.
        //By threading, we process the info on separate threads, allowing execution to continue with the rest of the game.
        //For every depth pixel that belongs to a tracked body, the mapped color pixel is copied;
        //all other pixels stay transparent (default Color). The result is published to iRGBVideo.
        private void ProcessRGBVideo(ColorFrameReference aReference, BodyIndexFrameReference bifRef, DepthFrameReference depthRef)
        {
            // Acquire all three frames up front; a null frame means it expired before we got here.
            using (ColorFrame colorImageFrame = aReference.AcquireFrame())
            {
                if (colorImageFrame == null)
                {
                    return;
                }
                using (BodyIndexFrame bodyIndexFrame = bifRef.AcquireFrame())
                {
                    if (bodyIndexFrame == null)
                    {
                        return;
                    }
                    using (DepthFrame depthFrame = depthRef.AcquireFrame())
                    {
                        if (depthFrame == null)
                        {
                            return;
                        }

                        int depthHeight = depthFrame.FrameDescription.Height;
                        int depthWidth = depthFrame.FrameDescription.Width;

                        int colorHeight = colorImageFrame.FrameDescription.Height;
                        int colorWidth = colorImageFrame.FrameDescription.Width;

                        ushort[] _depthData = new ushort[depthWidth * depthHeight];
                        byte[] _bodyData = new byte[bodyIndexFrame.FrameDescription.Width * bodyIndexFrame.FrameDescription.Height];
                        byte[] _colorData = new byte[colorWidth * colorHeight * 4];     // 4 bytes per pixel (RGBA)
                        ColorSpacePoint[] _colorPoints = new ColorSpacePoint[depthWidth * depthHeight];

                        depthFrame.CopyFrameDataToArray(_depthData);
                        bodyIndexFrame.CopyFrameDataToArray(_bodyData);
                        colorImageFrame.CopyConvertedFrameDataToArray(_colorData, ColorImageFormat.Rgba);

                        // Map every depth pixel to its corresponding position in the color image.
                        iSensor.CoordinateMapper.MapDepthFrameToColorSpace(_depthData, _colorPoints);

                        Color[] color = new Color[depthWidth * depthHeight];

                        for (int y = 0; y < depthHeight; ++y)
                        {
                            for (int x = 0; x < depthWidth; ++x)
                            {
                                // BUG FIX: the row stride of a linear index is the frame WIDTH,
                                // not its height. The old code used depthHeight here, which
                                // scrambled the body mask on non-square frames (512x424).
                                int depthIndex = (y * depthWidth) + x;

                                byte player = _bodyData[depthIndex];

                                // 0xff marks "no tracked body" at this depth pixel.
                                if (player != 0xff)
                                {
                                    ColorSpacePoint colorPoint = _colorPoints[depthIndex];

                                    // Round the mapped (sub-pixel) coordinate to the nearest color pixel.
                                    int colorX = (int)Math.Floor(colorPoint.X + 0.5);
                                    int colorY = (int)Math.Floor(colorPoint.Y + 0.5);

                                    // Mapped points can fall outside the color frame (infinity for invalid depth).
                                    if ((colorX >= 0) && (colorX < colorWidth) && (colorY >= 0) && (colorY < colorHeight))
                                    {
                                        int displayIndex = ((colorY * colorWidth) + colorX) * 4;

                                        color[depthIndex] = new Color(_colorData[displayIndex + 0], _colorData[displayIndex + 1], _colorData[displayIndex + 2], 0xff);
                                    }
                                }
                            }
                        }

                        // The device may have been torn down while we were processing this frame.
                        if (iGraphicsDevice.IsDisposed) return;
                        var video = new Texture2D(iGraphicsDevice, depthWidth, depthHeight);

                        video.SetData(color);

                        // Publish under the lock so the render thread never observes a half-swapped texture.
                        lock (iVideoLock)
                        {
                            iRGBVideo = video;
                        }
                    }
                }
            }
        }
Exemple #30
0
        /// <summary>
        /// Device-specific implementation of Update.
        /// Updates data buffers of all active channels with data of current frame.
        /// Loops until every active channel has received data for the current frame,
        /// synchronizing the channels on a shared timestamp (depth first, then IR, then color).
        /// </summary>
        /// <remarks>This method is implicitly called by <see cref="Camera.Update"/> inside a camera lock.</remarks>
        /// <seealso cref="Camera.Update"/>
        protected override void UpdateImpl()
        {
            // TODO: This method could yield rather asynchronous channels. If necessary: Try to find a mechanism that updates frames that are already fetched when waiting for others that are not yet available.
            MultiSourceFrame multiSourceFrame       = null;
            // Snapshot which channels still need data; each flag is cleared once its buffer is filled.
            bool             bodyIndexRequired      = IsChannelActive(CustomChannelNames.BodyIndex);
            bool             depthRequired          = IsChannelActive(ChannelNames.Distance) || IsChannelActive(ChannelNames.Point3DImage);
            bool             amplitudeRequired      = IsChannelActive(ChannelNames.Amplitude);
            bool             colorRequired          = IsChannelActive(ChannelNames.Color);
            bool             longExposureIRRequired = IsChannelActive(CustomChannelNames.LongExposureIR);

            do
            {
                // Block until the frame-arrived handler signals new data (or time out).
                if (!dataAvailable.WaitOne(UpdateTimeoutMilliseconds))
                {
                    throw ExceptionBuilder.BuildFromID(typeof(MetriCam2Exception), this, 005);
                }

                lock (newFrameLock)
                {
                    try
                    {
                        if (multiFrameReference != null)
                        {
                            multiSourceFrame = multiFrameReference.AcquireFrame();
                        }
                    }
                    catch (Exception)
                    {
                        // ignore if the frame is no longer available
                        continue;// wait for the next frame instead of throwing
                    }
                }

                try
                {
                    // fetch depth?
                    if (depthRequired)
                    {
                        DepthFrameReference depthFrameReference = multiSourceFrame.DepthFrameReference;
                        if (depthFrameReference != null)
                        {
                            // always synchronize on depth frames if possible:
                            // a repeated timestamp means we already consumed this frame.
                            if (lastTimeStamp == GetAbsoluteTimeStamp(depthFrameReference.RelativeTime.Ticks))
                            {
                                continue;
                            }
                            using (DepthFrame depthFrame = depthFrameReference.AcquireFrame())
                            {
                                if (depthFrame == null)
                                {
                                    continue;
                                }

                                depthFrameDescription = depthFrame.FrameDescription;
                                int depthWidth  = depthFrameDescription.Width;
                                int depthHeight = depthFrameDescription.Height;
                                // Only copy if the preallocated buffer matches the frame size.
                                if ((depthWidth * depthHeight) == this.depthFrameData.Length)
                                {
                                    lock (this.depthFrameData)
                                    {
                                        depthFrame.CopyFrameDataToArray(this.depthFrameData);
                                        lastTimeStamp  = GetAbsoluteTimeStamp(depthFrameReference.RelativeTime.Ticks);
                                        timestampDepth = lastTimeStamp;
                                    }
                                    depthRequired = false;
                                }
                            }
                        }
                    }

                    // fetch IR?
                    if (amplitudeRequired)
                    {
                        InfraredFrameReference irFrameReference = multiSourceFrame.InfraredFrameReference;
                        if (irFrameReference != null)
                        {
                            // If depth data is inactive, synchronize on IR frames. If depth and IR are inactive, we synchronize on color frames.
                            if (!(IsChannelActive(ChannelNames.Distance) || IsChannelActive(ChannelNames.Point3DImage)) && lastTimeStamp == GetAbsoluteTimeStamp(irFrameReference.RelativeTime.Ticks))
                            {
                                continue;
                            }

                            using (InfraredFrame irFrame = irFrameReference.AcquireFrame())
                            {
                                if (irFrame == null)
                                {
                                    continue;
                                }

                                FrameDescription irFrameDescription = irFrame.FrameDescription;
                                int irWidth  = irFrameDescription.Width;
                                int irHeight = irFrameDescription.Height;
                                // Only copy if the preallocated buffer matches the frame size.
                                if ((irWidth * irHeight) == this.irFrameData.Length)
                                {
                                    lock (this.irFrameData)
                                    {
                                        irFrame.CopyFrameDataToArray(this.irFrameData);
                                        lastTimeStamp = GetAbsoluteTimeStamp(irFrameReference.RelativeTime.Ticks);
                                        timestampIR   = lastTimeStamp;
                                    }
                                    amplitudeRequired = false;
                                }
                            }
                        }
                    }

                    // (always) fetch body frame
                    BodyFrameReference bodyFrameReference = multiSourceFrame.BodyFrameReference;
                    if (bodyFrameReference != null)
                    {
                        using (BodyFrame bodyFrame = bodyFrameReference.AcquireFrame())
                        {
                            if (bodyFrame != null)
                            {
                                this.bodies = new Body[bodyFrame.BodyCount];
                                using (bodyFrame)
                                {
                                    bodyFrame.GetAndRefreshBodyData(this.bodies);
                                }
                            }
                            else
                            {
                                // TODO: check if channel is activated.
                            }
                        }
                    }

                    // fetch color?
                    if (colorRequired)
                    {
                        ColorFrameReference colorFrameReference = multiSourceFrame.ColorFrameReference;
                        if (colorFrameReference == null)
                        {
                            continue;
                        }
                        // If depth and IR data is inactive, synchronize on color frames. If color, depth and IR are inactive, we don't care for synchronization.
                        if (!(IsChannelActive(ChannelNames.Distance) || IsChannelActive(ChannelNames.Point3DImage) || IsChannelActive(ChannelNames.Amplitude)) && lastTimeStamp == GetAbsoluteTimeStamp(colorFrameReference.RelativeTime.Ticks))
                        {
                            continue;
                        }

                        using (ColorFrame colorFrame = colorFrameReference.AcquireFrame())
                        {
                            //FrameDescription colorFrameDescription = colorFrame.FrameDescription;
                            //int cWidth = colorFrameDescription.Width;
                            //int cHeight = colorFrameDescription.Width;
                            if (colorFrame != null)
                            {
                                // Lock the raw buffer for the duration of the conversion/copy.
                                using (KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                                {
                                    lock (this.colorFrameData)
                                    {
                                        colorFrame.CopyConvertedFrameDataToArray(this.colorFrameData, ColorImageFormat.Bgra);
                                        lastTimeStamp  = GetAbsoluteTimeStamp(colorFrameReference.RelativeTime.Ticks);
                                        timestampColor = lastTimeStamp;
                                    }
                                }
                                colorRequired = false;
                            }
                        }
                    }

                    // fetch long exposure IR? (this is independent of the IR images and are acquired at the same rate, so every new frame also
                    // has one of these.)
                    if (longExposureIRRequired)
                    {
                        LongExposureInfraredFrameReference longExposureIRFrameRef = multiSourceFrame.LongExposureInfraredFrameReference;
                        using (LongExposureInfraredFrame longIRFrame = longExposureIRFrameRef.AcquireFrame())
                        {
                            if (longIRFrame == null)
                            {
                                continue;
                            }

                            int longIRWidth  = longIRFrame.FrameDescription.Width;
                            int longIRHeight = longIRFrame.FrameDescription.Height;
                            // Unlike depth/IR above, this buffer is (re)allocated lazily on size change.
                            if (longExposureIRData == null || (longIRWidth * longIRHeight) != longExposureIRData.Length)
                            {
                                longExposureIRData = new ushort[longIRWidth * longIRHeight];
                            }
                            longIRFrame.CopyFrameDataToArray(longExposureIRData);
                            longExposureIRRequired = false;
                        }
                    }

                    // fetch body index frames?
                    if (bodyIndexRequired)
                    {
                        BodyIndexFrameReference bodyIndexFrameRef = multiSourceFrame.BodyIndexFrameReference;
                        using (BodyIndexFrame bodyIndexFrame = bodyIndexFrameRef.AcquireFrame())
                        {
                            if (bodyIndexFrame == null)
                            {
                                log.Debug("bodyIndexFrame is NULL.");
                                continue;
                            }

                            int bodyIndexWidth  = bodyIndexFrame.FrameDescription.Width;
                            int bodyIndexHeight = bodyIndexFrame.FrameDescription.Height;
                            // Lazily (re)allocate the buffer on size change.
                            if (bodyIndexData == null || (bodyIndexWidth * bodyIndexHeight) != bodyIndexData.Length)
                            {
                                bodyIndexData = new byte[bodyIndexWidth * bodyIndexHeight];
                            }
                            bodyIndexFrame.CopyFrameDataToArray(bodyIndexData);
                            bodyIndexRequired = false;
                        }
                    }
                }
                catch (Exception)
                {
                    // ignore if the frame is no longer available
                }
                finally
                {
                    // Drop the reference so the next iteration acquires a fresh frame.
                    multiSourceFrame = null;
                }
            } while (depthRequired || colorRequired || bodyIndexRequired || longExposureIRRequired || amplitudeRequired);
        }
        /// <summary>
        /// Handles the multisource frame data arriving from the sensor.
        /// Converts the depth and infrared frames (and, optionally, the color frame)
        /// into EMGU <c>Mat</c> images and raises <c>OnEmguArgsProcessed</c> with the result.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        private unsafe void Reader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            // Create instance of EMGUargs which holds the output of data from the kinect
            EMGUargs emguArgs = new EMGUargs();
            MultiSourceFrameReference frameReference = e.FrameReference;
            // Variables initialized to null for easy check of camera failures
            MultiSourceFrame multiSourceFrame = null;
            InfraredFrame    infraredFrame    = null;
            ColorFrame       colorFrame       = null;
            DepthFrame       depthFrame       = null;

            // Acquire frame from the Kinect
            multiSourceFrame = frameReference.AcquireFrame();

            // If the Frame has expired by the time we process this event, return.
            if (multiSourceFrame == null)
            {
                return;
            }
            try
            {
                /*
                 * DepthSpacePoint dp = new DepthSpacePoint
                 * {
                 *  X = 50,
                 *  Y = 20
                 * };
                 * DepthSpacePoint[] dps = new DepthSpacePoint[] { dp };
                 * ushort[] depths = new ushort[] { 2000 };
                 * CameraSpacePoint[] ameraSpacePoints = new CameraSpacePoint[1];
                 *
                 * mapper.MapDepthPointsToCameraSpace(dps, depths, ameraSpacePoints);
                 */
                InfraredFrameReference infraredFrameReference = multiSourceFrame.InfraredFrameReference;
                infraredFrame = infraredFrameReference.AcquireFrame();

                DepthFrameReference depthFrameReference = multiSourceFrame.DepthFrameReference;
                depthFrame = depthFrameReference.AcquireFrame();

                // Check whether needed frames are available; either may expire independently.
                // (The finally block below disposes whichever one was acquired.)
                if (infraredFrame == null || depthFrame == null)
                {
                    return;
                }

                // the fastest way to process the depth frame data is to directly access
                // the underlying buffer
                using (Microsoft.Kinect.KinectBuffer depthBuffer = depthFrame.LockImageBuffer())
                {
                    // verify data and write the new depth frame data to the display bitmap
                    if (((this.depthFrameDescription.Width * this.depthFrameDescription.Height) ==
                         (depthBuffer.Size / this.depthFrameDescription.BytesPerPixel)))
                    {
                        // Conversion to needed EMGU image
                        Mat depthImage = this.ProcessDepthFrameData(depthFrame);

                        emguArgs.DepthImage          = depthImage;
                        emguArgs.DepthFrameDimension = new FrameDimension(depthFrameDescription.Width, depthFrameDescription.Height);
                    }

                    //BgrToDephtPixel(depthBuffer.UnderlyingBuffer, depthBuffer.Size);

                    // Dispose eagerly and null out so the finally block does not double-dispose.
                    depthFrame.Dispose();
                    depthFrame = null;
                }

                // IR image
                FrameDescription infraredFrameDescription = infraredFrame.FrameDescription;

                // the fastest way to process the infrared frame data is to directly access
                // the underlying buffer
                using (Microsoft.Kinect.KinectBuffer infraredBuffer = infraredFrame.LockImageBuffer())
                {
                    // verify data and write the new infrared frame data to the display bitmap
                    if (((this.infraredFrameDescription.Width * this.infraredFrameDescription.Height) == (infraredBuffer.Size / this.infraredFrameDescription.BytesPerPixel)))
                    {
                        // Conversion to needed EMGU image
                        Mat infraredImage = this.ProcessInfaredFrameData(infraredFrame);
                        emguArgs.InfraredImage          = infraredImage;
                        emguArgs.InfraredFrameDimension = new FrameDimension(infraredFrameDescription.Width, infraredFrameDescription.Height);
                        //  infraredImage.Dispose();
                    }
                    // Dispose eagerly and null out so the finally block does not double-dispose.
                    infraredFrame.Dispose();
                    infraredFrame = null;

                    // Check as to whether or not the color image is needed for mainwindow view
                    if (generateColorImage)
                    {
                        ColorFrameReference colorFrameReference = multiSourceFrame.ColorFrameReference;
                        colorFrame = colorFrameReference.AcquireFrame();
                        if (colorFrame == null)
                        {
                            // Color frame expired: skip this event entirely (no EMGU event is raised).
                            return;
                        }

                        // color image
                        FrameDescription colorFrameDescription = colorFrame.FrameDescription;

                        // the fastest way to process the color frame data is to directly access
                        // the underlying buffer
                        using (Microsoft.Kinect.KinectBuffer colorBuffer = colorFrame.LockRawImageBuffer())
                        {
                            // Conversion to needed EMGU image
                            Mat colorImage = this.ProcessColorFrameData(colorFrame);
                            emguArgs.Colorimage          = colorImage;
                            emguArgs.ColorFrameDimension = new FrameDimension(colorFrameDescription.Width, colorFrameDescription.Height);
                        }
                        // We're done with the colorFrame
                        colorFrame.Dispose();
                        colorFrame = null;
                    }
                }
                // Call the processing finished event for the conversion to EMGU images
                OnEmguArgsProcessed(emguArgs);
            }
            catch (Exception ex)
            {
                // ignore if the frame is no longer available
                Console.WriteLine("FRAME CHRASHED: " + ex.ToString());
            }
            finally
            {
                // generate event at send writeable bitmaps for each frame, and cleanup.
                // only generate event if the mainwindow is shown.

                // DepthFrame, ColorFrame are Disposable.
                if (colorFrame != null)
                {
                    colorFrame.Dispose();
                    colorFrame = null;
                }
                if (depthFrame != null)
                {
                    depthFrame.Dispose();
                    depthFrame = null;
                }
                // infraredFrame is Disposable
                if (infraredFrame != null)
                {
                    infraredFrame.Dispose();
                    infraredFrame = null;
                }
                // MultiSourceFrame is not IDisposable; just release the reference.
                if (multiSourceFrame != null)
                {
                    multiSourceFrame = null;
                }
            }
        }
Exemple #32
0
        /// <summary>
        /// Handles a multi-source frame from the Kinect: renders the color, depth and IR
        /// images, then crops a fixed rectangle out of the displayed color image and shows
        /// it as the current gesture preview.
        /// </summary>
        /// <param name="sender">object sending the event</param>
        /// <param name="e">event arguments</param>
        void frameReader_MultiSourceFrameArrived(object sender, MultiSourceFrameArrivedEventArgs e)
        {
            try
            {
                MultiSourceFrame frame = e.FrameReference.AcquireFrame();

                // A null frame means it expired before this handler ran.
                if (frame != null)
                {
                    try
                    {
                        ColorFrameReference colorFrameReference = frame.ColorFrameReference;
                        useRGBAImage(colorFrameReference);
                        DepthFrameReference depthFrameReference = frame.DepthFrameReference;
                        ShowDepthImage(depthFrameReference);
                        InfraredFrameReference irFrameReference = frame.InfraredFrameReference;
                        ShowIRImage(irFrameReference);

                        // Computing the gesture position from body joints is too expensive,
                        // so we statically crop a fixed rectangle for the gesture instead.
                        //// Body
                        //using (var bodyFrame = frame.BodyFrameReference.AcquireFrame())
                        //{
                        //    if (bodyFrame != null)
                        //    {
                        //        _bodies = new Body[bodyFrame.BodyFrameSource.BodyCount];

                        //        bodyFrame.GetAndRefreshBodyData(_bodies);

                        //        foreach (var body in _bodies)
                        //        {
                        //            if (body.IsTracked)
                        //            {
                        //                var joint = body.Joints[JointType.HandRight];

                        //                if (joint.TrackingState == TrackingState.Tracked)
                        //                {
                        //                    // 3D space point
                        //                    CameraSpacePoint jointPosition = joint.Position;
                        //                    Image gesture;
                        //                    RectangleF colorRectabgle = GetGestureFromJointPosition(jointPosition, out gesture);

                        //                    // color
                        //                    if (pictureBoxCameraColor.Image != null)
                        //                    {
                        //                        var gf = Graphics.FromImage(pictureBoxCameraColor.Image);
                        //                        gf.DrawRectangle(new Pen(Color.Red, 2),
                        //                            colorRectabgle.Location.X, colorRectabgle.Location.Y, colorRectabgle.Width, colorRectabgle.Height);
                        //                    }

                        //                    this.pictureBoxGesture.Image = gesture;
                        //                    this.btnAddGesture.Enabled = true;
                        //                    this.btnAddGesture.Focus();
                        //                } else
                        //                {
                        //                    this.btnAddGesture.Enabled = false;
                        //                    this.pictureBoxGesture.Image = null;
                        //                }
                        //            }
                        //        }
                        //    }
                        //}

                        // BUG FIX: before the first color frame has been rendered (or if the
                        // Image is not a Bitmap), this was null and the dereference below threw
                        // a NullReferenceException that the catch silently swallowed every frame.
                        Bitmap img = this.pictureBoxCameraColor.Image as Bitmap;
                        if (img == null)
                        {
                            return;
                        }

                        // Fixed crop region (relative to the displayed color image) that
                        // the user is expected to place their hand in.
                        Rectangle rec    = new Rectangle(img.Width / 2 + img.Width / 8 + 2, img.Height / 2 - img.Height / 4 + 2, img.Width / 8 + img.Width / 8 / 8 - 4, img.Height / 4 - 4);
                        Bitmap    target = new Bitmap(rec.Width, rec.Height);

                        using (Graphics g = Graphics.FromImage(target))
                        {
                            g.DrawImage(img, new Rectangle(0, 0, target.Width, target.Height),
                                        rec,
                                        GraphicsUnit.Pixel);
                        }

                        var gesture = target;
                        this.pictureBoxGesture.Image = gesture;
                    }
                    catch (Exception)
                    {
                        // Best effort: ignore frames that expire mid-processing.
                    }
                }
            }
            catch (Exception)
            {
                // Best effort: the frame reference may already be invalid.
            }
        }