Example No. 1
 void nui_ColorFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     // 32-bit per pixel, RGBA image
     PlanarImage Image = e.ImageFrame.Image;
     // video.Source = BitmapSource.Create(
     // Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null, Image.Bits, Image.Width * Image.BytesPerPixel);
 }
Example No. 2
 /// <summary>
 /// Handles the VideoFrameReady event of the kinectRunTime control.
 /// </summary>
 /// <param name="sender">The source of the event.</param>
 /// <param name="e">The <see cref="Microsoft.Research.Kinect.Nui.ImageFrameReadyEventArgs"/> instance containing the event data.</param>
 private void KinectRunTime_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     if (this.ImageFrameReady != null)
     {
         this.ImageFrameReady(this, e);
     }
 }
 void kinect_DepthFrameReady( object sender, ImageFrameReadyEventArgs e )
 {
     var source = e.ImageFrame.Image;
     image1.Source = BitmapSource.Create( source.Width, source.Height, 96, 96,
             PixelFormats.Gray16, null, ConvertGrayScale( source ).Bits,
             source.Width * source.BytesPerPixel );
 }
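The ConvertGrayScale helper used above is not shown on this page. A minimal sketch of one plausible implementation, assuming the Gray16 output is produced by stretching the sensor's 13-bit depth values (stored little-endian, two bytes per pixel) to the full 16-bit range; the method body is an assumption, not this example's actual code:

 // Sketch only: stretch each 13-bit depth value to 16 bits so it can be displayed as Gray16.
 PlanarImage ConvertGrayScale( PlanarImage source )
 {
     for ( int i = 0; i + 1 < source.Bits.Length; i += 2 ) {
         int depth = ((source.Bits[i + 1] << 8) | source.Bits[i]) & 0x1FFF; // little-endian, 13 significant bits
         int gray  = depth << 3;                                            // scale up to the Gray16 range

         source.Bits[i]     = (byte)(gray & 0xFF);
         source.Bits[i + 1] = (byte)(gray >> 8);
     }
     return source;
 }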
Example No. 4
 void runtime_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     if (this.VideoFrameReady != null)
     {
         this.VideoFrameReady(this, e);
     }
 }
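Forwarding handlers like this one (and the near-identical ones in Examples No. 2 and No. 24) only work if the wrapper class re-declares the event it republishes and subscribes its handler to the Kinect runtime. A minimal sketch of the assumed declaration and wiring (member names are assumptions, not taken from the example):

 // Assumed members of the wrapper class that republishes the runtime's event.
 public event EventHandler<ImageFrameReadyEventArgs> VideoFrameReady;

 private void HookRuntime(Runtime runtime)
 {
     // Forward every video frame raised by the Kinect runtime to our own subscribers.
     runtime.VideoFrameReady += this.runtime_VideoFrameReady;
 }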
Example No. 5
        /// <summary>Fires when a depth frame is ready.</summary>
        protected void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            // Convert depth frame to video frame to render it
            PlanarImage Image = e.ImageFrame.Image;
            byte[] convertedDepthFrame = util.convertDepthFrame(Image.Bits, ref depthFrame32);
            image2.Source = BitmapSource.Create(Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null, convertedDepthFrame, Image.Width * 4);

            // Clear extraneous canvas elements (bones)
            if (canvas1.Children.Count > 9 && handler.JointHistory[(int)JointID.HandLeft].Count > 0 && handler.JointHistory[(int)JointID.HandRight].Count > 0)
                canvas1.Children.RemoveRange(9, canvas1.Children.Count - 9);

            // Draw hand tracking lines and circle
            /*if (handler.JointHistory[(int)JointID.HandLeft].Count > 0)
                DrawCircle(handler.JointHistory[(int)JointID.HandLeft].Last());
            if (handler.JointHistory[(int)JointID.HandRight].Count > 0)
                DrawCircle(handler.JointHistory[(int)JointID.HandRight].Last());
            for (int i = 0; i < handler.JointHistory[(int)JointID.HandLeft].Count - 1; i++)
                DrawLine(handler.JointHistory[(int)JointID.HandLeft][i], handler.JointHistory[(int)JointID.HandLeft][i + 1]);
            for (int i = 0; i < handler.JointHistory[(int)JointID.HandRight].Count - 1; i++)
                DrawLine(handler.JointHistory[(int)JointID.HandRight][i], handler.JointHistory[(int)JointID.HandRight][i + 1]);*/

            // Calculate FPS
            ++totalFrames;
             if (lastTime < DateTime.Now.AddSeconds(-1))
             {
                 int frameDiff = totalFrames - lastFrames;
                 lastFrames = totalFrames;
                 lastTime = DateTime.Now;
                 Title = "KinectNUI - " + frameDiff.ToString() + " FPS";
             }
         }
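util.convertDepthFrame is not shown on this page. A minimal sketch of what such a conversion typically looks like for the DepthAndPlayerIndex format (16 bits per pixel: a 3-bit player index in the low bits, 13-bit depth above it), expanded into a reusable Bgr32 buffer; the method body is an assumption rather than this project's actual helper:

         // Sketch only: grey-scale the 13-bit depth into a Bgr32 buffer that BitmapSource.Create can display.
         byte[] convertDepthFrame(byte[] depthFrame16, ref byte[] depthFrame32)
         {
             if (depthFrame32 == null)
                 depthFrame32 = new byte[depthFrame16.Length / 2 * 4];

             for (int i16 = 0, i32 = 0; i16 + 1 < depthFrame16.Length; i16 += 2, i32 += 4)
             {
                 int  realDepth = (depthFrame16[i16 + 1] << 5) | (depthFrame16[i16] >> 3); // drop the 3-bit player index
                 byte intensity = (byte)(255 - (255 * realDepth / 0x0FFF));                // nearer = brighter

                 depthFrame32[i32 + 0] = intensity; // blue
                 depthFrame32[i32 + 1] = intensity; // green
                 depthFrame32[i32 + 2] = intensity; // red
                 // the fourth byte is unused padding in Bgr32
             }
             return depthFrame32;
         }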
Example No. 6
 public void nui_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     // Dump video stream to the Image element
     PlanarImage Image = e.ImageFrame.Image;
     image.Source = BitmapSource.Create(Image.Width, Image.Height, 96, 96,
         PixelFormats.Bgr32, null, Image.Bits, Image.Width * Image.BytesPerPixel);
 }
Example No. 7
        private void DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            CalculateFps();

            if (mShow.currentScene.mColorEffect != null)
            {
                int[] playercoded = mShow.currentScene.mColorEffect.doMagic(e.ImageFrame.Image.Width, e.ImageFrame.Image.Height, (byte[])(e.ImageFrame.Image.Bits));

                myTex = new Texture2D(GraphicsDevice, e.ImageFrame.Image.Width, e.ImageFrame.Image.Height);
                myTex.SetData(playercoded);
            }
            if (mShow.flashes != null)
            {
                for (int i = 0; i < mShow.flashes.Count; i++)
                {
                    if (mShow.flashes.ElementAt(i).active && mShow.flashes.ElementAt(i).colorEffect != null)
                    {
                        int[] playercoded = mShow.flashes.ElementAt(i).colorEffect.doMagic(e.ImageFrame.Image.Width, e.ImageFrame.Image.Height, (byte[])(e.ImageFrame.Image.Bits));

                        myTex = new Texture2D(GraphicsDevice, e.ImageFrame.Image.Width, e.ImageFrame.Image.Height);
                        myTex.SetData(playercoded);
                    }
                }
            }
        }
Example No. 8
        //------------
        // Depth image
        //------------
        void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            lock (this)
            {
                PlanarImage Image = e.ImageFrame.Image;
                //  byte[] convertedDepthFrame = convertDepthFrame(Image.Bits);

                int no = 0;

                this.depthColor    = new Color[Image.Height * Image.Width];
                this.texture_depth = new Texture2D(graphics.GraphicsDevice,
                                                   Image.Width, Image.Height);  // create the texture

                // grab the image data
                for (int y = 0; y < Image.Height; ++y)
                {     // y axis
                    for (int x = 0; x < Image.Width; ++x, no += 2)
                    { // x axis
                        int  n         = (y * Image.Width + x) * 2;
                        int  realDepth = (Image.Bits[n + 1] << 5) | (Image.Bits[n] >> 3);   // drop the 3-bit player index, keep the 13-bit depth
                        byte intensity = (byte)((255 - (255 * realDepth / 0x0fff)) / 2);
                        this.depthColor[y * Image.Width + x] = new Color(intensity, intensity, intensity);

                        // color-code each player
                        int playerIndex = Image.Bits[n] & 0x07;
                        if (playerIndex > 0)
                        {
                            Color labelColor = colors[playerIndex % ncolors];
                            this.depthColor[y * Image.Width + x] = new Color(labelColor.B * intensity / 256, labelColor.G * intensity / 256, labelColor.R * intensity / 256);
                        }
                    }
                }
                this.texture_depth.SetData(this.depthColor);    // write the data into the texture
            }
        }
        // Video Camera EventHandler
        void mKinectNUI_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage  frame = e.ImageFrame.Image;
            BitmapSource temp  = BitmapSource.Create(frame.Width, frame.Height, 96, 96, PixelFormats.Bgr32, null, frame.Bits, frame.Width * frame.BytesPerPixel);

            mWindow.KinectCameraPanelInMainWindow.VideoCameraWindow.Source = Resize(temp, frame.Width, frame.Height, mVideoX, mVideoY);
        }
Example No. 10
        public void Update(ImageFrameReadyEventArgs e)
        {
            if (depthFrame32 == null)
            {
                depthFrame32 = new byte[e.ImageFrame.Image.Width * e.ImageFrame.Image.Height * 4];
            }

            ConvertDepthFrame(e.ImageFrame.Image.Bits);

            if (DepthBitmap == null)
            {
                DepthBitmap = new WriteableBitmap(e.ImageFrame.Image.Width, e.ImageFrame.Image.Height, 96, 96, PixelFormats.Bgra32, null);
            }

            DepthBitmap.Lock();

            int stride = DepthBitmap.PixelWidth * DepthBitmap.Format.BitsPerPixel / 8;
            Int32Rect dirtyRect = new Int32Rect(0, 0, DepthBitmap.PixelWidth, DepthBitmap.PixelHeight);
            DepthBitmap.WritePixels(dirtyRect, depthFrame32, stride, 0);

            DepthBitmap.AddDirtyRect(dirtyRect);
            DepthBitmap.Unlock();

            RaisePropertyChanged(() => DepthBitmap);
        }
Example No. 11
        void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage Image = e.ImageFrame.Image;

            byte[] convertedDepthFrame = convertDepthFrame(Image.Bits);

            depth.Source = BitmapSource.Create(
                Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null, convertedDepthFrame, Image.Width * 4);

            ++totalFrames;

            DateTime cur = DateTime.Now;

            if (cur.Subtract(lastTime) > TimeSpan.FromSeconds(1))
            {
                int frameDiff = totalFrames - lastFrames;
                lastFrames = totalFrames;
                lastTime   = cur;
            }

            if (fsm.Current != null)
            {
                fsm.Current.ProcessDepth(e.ImageFrame);
            }
        }
        // "nui_DepthConfigStage" reads the first 100 images and keeps the greater and the lower depth values for the background segmentation
        void nui_DepthConfigStage(object sender, ImageFrameReadyEventArgs e)
        {
            currentDepthMatrix     = GenerateDepthBytes(e.ImageFrame);
            currentDepthImageFrame = e.ImageFrame;

            #region updateMiniMaxValues
            int matrixLength = currentDepthMatrix.Length;

            for (var i = 0; i < matrixLength; i++)
            {
                if (currentDepthMatrix[i] > initialMaxDepthMatrix[i])
                {
                    initialMaxDepthMatrix[i] = currentDepthMatrix[i];
                }
                else if (currentDepthMatrix[i] < initialMinDepthMatrix[i])
                {
                    initialMinDepthMatrix[i] = currentDepthMatrix[i];
                }
            }

            #endregion updateMiniMaxValues


            if (++depthTrainingImages > 99)
            {
                nui.DepthFrameReady -= new EventHandler <ImageFrameReadyEventArgs>(nui_DepthConfigStage);
                // Enable the calibration button after calibrating
                button1.IsEnabled    = true;
                nui.DepthFrameReady += new EventHandler <ImageFrameReadyEventArgs>(nui_DepthFrameReady);
            }
        }
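The min/max matrices collected here feed the background segmentation mentioned in the comment above the handler. A hedged sketch of one way such a test could look once calibration has finished (the method name and tolerance parameter are assumptions, not code from this project):

        // Sketch only: a pixel is treated as foreground when its depth falls outside
        // the [min, max] band recorded for that pixel during the first 100 frames.
        bool IsForegroundPixel(int pixelIndex, int currentDepth, int tolerance)
        {
            return currentDepth < initialMinDepthMatrix[pixelIndex] - tolerance
                || currentDepth > initialMaxDepthMatrix[pixelIndex] + tolerance;
        }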
Example No. 13
 void nui_ColorFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     // 32-bit per pixel, RGBA image
     PlanarImage Image = e.ImageFrame.Image;
     video.Source = BitmapSource.Create(
         Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null, Image.Bits, Image.Width * Image.BytesPerPixel);
 }
Example No. 14
        public void Update(ImageFrameReadyEventArgs e)
        {
            if (depthFrame32 == null)
            {
                depthFrame32 = new byte[e.ImageFrame.Image.Width * e.ImageFrame.Image.Height * 4];
            }

            ConvertDepthFrame(e.ImageFrame.Image.Bits);

            if (DepthBitmap == null)
            {
                DepthBitmap = new WriteableBitmap(e.ImageFrame.Image.Width, e.ImageFrame.Image.Height, 96, 96, PixelFormats.Bgra32, null);
            }

            DepthBitmap.Lock();

            int       stride    = DepthBitmap.PixelWidth * DepthBitmap.Format.BitsPerPixel / 8;
            Int32Rect dirtyRect = new Int32Rect(0, 0, DepthBitmap.PixelWidth, DepthBitmap.PixelHeight);

            DepthBitmap.WritePixels(dirtyRect, depthFrame32, stride, 0);

            DepthBitmap.AddDirtyRect(dirtyRect);
            DepthBitmap.Unlock();

            RaisePropertyChanged(() => DepthBitmap);
        }
Example No. 15
        void DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage PImage = e.ImageFrame.Image;

            //  int x = PImage.Width / 2;
            //  int y = PImage.Height / 2;
            //  int d = getValue(PImage, x, y);
            //  MessageBox.Show(d.ToString());

            int temp = 0;

            int[] count = new int[0x1FFF / 4 + 1];
            for (int i = 0; i < PImage.Bits.Length; i += 2)
            {
                temp = (PImage.Bits[i + 1] << 8 | PImage.Bits[i]) & 0x1FFF;
                count[temp >> 2]++;
                temp             <<= 2;
                PImage.Bits[i]     = (byte)(temp & 0xFF);
                PImage.Bits[i + 1] = (byte)(temp >> 8);
            }
            chart1.Series[0].Points.Clear();
            for (int i = 1; i < (0x1FFF / 4); i++)
            {
                chart1.Series[0].Points.Add(count[i]);
            }
            Application.DoEvents();
            pictureBox1.Image = DepthToBitmap(PImage);
        }
Example No. 16
        void nui_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            //Manually create BitmapSource for Video
            PlanarImage imageData = e.ImageFrame.Image;

            image1.Source = BitmapSource.Create(imageData.Width, imageData.Height, 96, 96, PixelFormats.Bgr32, null, imageData.Bits, imageData.Width * imageData.BytesPerPixel);
        }
Example No. 17
        void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            byte[] ColoredBytes = GenerateColoredBytes(e.ImageFrame);

            PlanarImage  image  = e.ImageFrame.Image;
            BitmapSource bmpSrc = BitmapSource.Create(image.Width, image.Height, 96, 96, PixelFormats.Bgr32, null, ColoredBytes,
                                                      image.Width * PixelFormats.Bgr32.BitsPerPixel / 8);

            image1.Source = bmpSrc;
            if (capture && saveCounter % (skipFrame + 1) == 0)
            {
                String fileName = String.Format("{0:d5}.jpeg", fileNumber);
                try {
                    bmpSrc.Save(savePath + "\\" + fileName, ImageFormat.Jpeg);
                    fileNumber++;
                } catch (Exception) {
                    try {
                        System.IO.Directory.CreateDirectory(savePath);
                    }
                    catch (Exception) {
                        MessageBox.Show("Problem with saving image");
                        this.Close();
                    }
                }
            }
            saveCounter++;
        }
Example No. 18
        void nui_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage image = e.ImageFrame.Image;

            image1.Source = BitmapSource.Create(image.Width, image.Height, 96, 96, PixelFormats.Bgr32,
                                                null, image.Bits, image.Width * image.BytesPerPixel);
        }
Example No. 19
        private void FrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            var pImage = e.ImageFrame.Image;
            var bmap   = PImageToBitmap(pImage);

            pictureBoxVideo.Image = bmap;
        }
        //****************************//
        void nui_VideoFrameReady(object sender, ImageFrameReadyEventArgs evt)
        {
            PlanarImage imgKinect = evt.ImageFrame.Image;

            imageRGB.Source = BitmapSource.Create(imgKinect.Width, imgKinect.Height, 96, 96, PixelFormats.Bgr32,
                                                    null, imgKinect.Bits, imgKinect.Width * imgKinect.BytesPerPixel);
        }
Example No. 21
        private void DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage p = e.ImageFrame.Image;

            Color[] DepthColor = new Color[p.Height * p.Width];

            float maxDist = 4000;
            float minDist = 850;
            float distOffset = maxDist - minDist;

            depthImg = new Texture2D(GraphicsDevice, p.Width, p.Height);

            int index = 0;
            for (int y = 0; y < p.Height; y++)
            {
                for (int x = 0; x < p.Width; x++, index += 2)
                {
                    int n = (y * p.Width + x) * 2;
                    int distance = (p.Bits[n + 0] | p.Bits[n + 1] << 8);
                    if (y == 100)
                        Console.Write(distance + ", ");
                    byte intensity = (byte)(255 - (255 * Math.Max(distance - minDist, 0) / (distOffset)));
                    DepthColor[y * p.Width + x] = new Color(intensity, intensity, intensity);

                }
            }
            depthImg.SetData(DepthColor);
        }
Example No. 22
 void PSEyeSource_OnImageFrame(object sender, ImageFrameReadyEventArgs eventArgs)
 {
     if (this.NewFrame != null)
     {
         this.NewFrame(this, new NewFrameEventArgs(eventArgs.ImageFrame.Bitmap));
     }
 }
Example No. 23
        void kinect_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            // When a Kinect has just been unplugged, its InstanceIndex becomes -1
            Runtime kinect = sender as Runtime;

            if ((kinect != null) && (kinect.InstanceIndex >= 0))
            {
                PlanarImage source = e.ImageFrame.Image;
                Image       dest   = images[kinect.InstanceIndex];
                var         b      = BitmapSource.Create(source.Width, source.Height, 96, 96,
                                                         PixelFormats.Bgr32, null, source.Bits, source.Width * source.BytesPerPixel);
                dest.Source = b;

                int offset = 0;
                for (int y = 0; y < 480; y += mabiki)
                {
                    int index = (640 * bpp) * y;
                    for (int x = 0; x < (640 * bpp); x += (mabiki * bpp))
                    {
                        buf[offset++] = source.Bits[index + x];
                        buf[offset++] = source.Bits[index + x + 1];
                        buf[offset++] = source.Bits[index + x + 2];
                        buf[offset++] = source.Bits[index + x + 3];
                    }
                }

                server.SendTo(buf, iep);
            }
        }
Example No. 24
 void runtime_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     if (this.VideoFrameReady != null)
     {
         this.VideoFrameReady(this, e);
     }
 }
Example No. 25
        // draw depth frame and calculate FPS
        void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage Image = e.ImageFrame.Image;

            byte[] convertedDepthFrame = convertDepthFrame(Image.Bits);

            depth.Source = BitmapSource.Create(
                Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null, convertedDepthFrame, Image.Width * 4);

            ++totalFrames;

            DateTime cur = DateTime.Now;

            if (cur.Subtract(lastTime) > TimeSpan.FromSeconds(1))
            {
                int frameDiff = totalFrames - lastFrames;
                lastFrames     = totalFrames;
                lastTime       = cur;
                frameRate.Text = frameDiff.ToString() + " fps";
            }

            if (cur.Subtract(lastSkeletonTime) > TimeSpan.FromMilliseconds(200))
            {
                skeleton.Children.Clear();
                log.Clear();
            }

            convertedDepthFrame = null;
        }
Example No. 26
        /// <summary>
        /// This event handler is called by the NUI Kinect controller.
        /// </summary>
        /// <param name="sender">The object sender</param>
        /// <param name="e">An ImageFrameReadyEventArgs with all the information about the raw image data from the Kinect.</param>
        private void VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage rawImage = e.ImageFrame.Image;
            Bitmap      bmp      = PImageToBitmap(rawImage);

            OnImageReady(new ImageEventArgs(bmp));
        }
Example No. 27
        public void nui_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            // Dump video stream to the Image element
            PlanarImage Image = e.ImageFrame.Image;

            image.Source = BitmapSource.Create(Image.Width, Image.Height, 96, 96,
                                               PixelFormats.Bgr32, null, Image.Bits, Image.Width * Image.BytesPerPixel);
        }
Example No. 28
 void _runtime_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     if (chkDisplayVideo.IsChecked.Value)
     {
         PlanarImage imageData = e.ImageFrame.Image;
         imgVideoFrame.Source = BitmapSource.Create(imageData.Width, imageData.Height, 96, 96, PixelFormats.Bgr32, null, imageData.Bits, imageData.Width * imageData.BytesPerPixel);
     }
 }
        void runtime_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage image = e.ImageFrame.Image;

            BitmapSource source = BitmapSource.Create(image.Width, image.Height, 96, 96,
                PixelFormats.Gray16, null, image.Bits, image.Width * image.BytesPerPixel);
            depthImage.Source = source;
        }
        const int maxKinectCount = 1; //Change to 1 if you only want to view one at a time. Switching will be enabled.
        //Each Kinect needs to be in its own USB hub, otherwise it won't have enough USB bandwidth.
        //Currently only 1 Kinect per process can have SkeletalTracking working, but color and depth work for all.
        //KinectSDK TODO: enable a larger maxKinectCount (assuming your PC can dedicate a USB hub for each Kinect)
        #endregion Private state


        #region color and drawing tools
        void nui_ColorFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            Dispatcher.BeginInvoke((Action) delegate
            {
                // 32-bit per pixel, RGBA image
                colorImage = e.ImageFrame.Image;
            });
        }
Example No. 31
        private void NUIVideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            // Sets the kinect video image to the canvas
            PlanarImage image = e.ImageFrame.Image;

            CameraImage.Source = BitmapSource.Create(image.Width, image.Height, 96, 96, PixelFormats.Bgr32, null,
                                                     image.Bits, image.Width * image.BytesPerPixel);
        }
Example No. 32
        void kinect_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            var source = e.ImageFrame.Image;

            image1.Source = BitmapSource.Create(source.Width, source.Height, 96, 96,
                                                PixelFormats.Gray16, null, ConvertGrayScale(source).Bits,
                                                source.Width * source.BytesPerPixel);
        }
        void nui_ColorFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage image  = e.ImageFrame.Image;
            ImageFrame  i      = e.ImageFrame;
            Bitmap      bitmap = i.ToBitmap();
            String      info   = "Frame Number:\t" + i.FrameNumber.ToString() + "\nResoulution\t" + i.Resolution.ToString() + "\nTime Stamp\t" + i.Timestamp.ToString();

            video.refresh(info, bitmap);
        }
        void runtime_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            //pull out the video frame from the eventargs and load it into our image object
            PlanarImage  image  = e.ImageFrame.Image;
            BitmapSource source = BitmapSource.Create(image.Width, image.Height, 96, 96,
                                                      PixelFormats.Bgr32, null, image.Bits, image.Width * image.BytesPerPixel);

            videoImage.Source = source;
        }
Example No. 35
        void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            Byte[] ColoredBytes = GenerateColoredBytes(e.ImageFrame, currentVideoFrame);

            PlanarImage image = e.ImageFrame.Image;

            Depth_Image.Source = BitmapSource.Create(image.Width, image.Height, 96, 96, PixelFormats.Bgr32, null,
                                                     ColoredBytes, image.Width * PixelFormats.Bgr32.BitsPerPixel / 8);
        }
Example No. 36
        private void RuntimeVideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage image = e.ImageFrame.Image;

            byte[] pixels = image.Bits;

            // Convert the captured frame to a BitmapSource and use it as the Source of the Image control
            imgVideoFrame.Source = BitmapSource.Create(image.Width, image.Height, 96, 96, PixelFormats.Bgr32, null, pixels, image.Width * PixelFormats.Bgr32.BitsPerPixel / 8);
        }
Example No. 37
        void RuntimeColorFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            ColorImage.Source = e.ImageFrame.ToBitmapSource();

            if (_saveColorFrame)
            {
                _saveColorFrame = false;
                e.ImageFrame.ToBitmapSource().Save(DateTime.Now.ToString("yyyyMMddHHmmss") + "_color.jpg", ImageFormat.Jpeg);
            }
        }
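ToBitmapSource() and Save(...) here are extension methods from a helper library rather than the Kinect SDK itself (they look like the Coding4Fun Kinect toolkit extensions). A minimal sketch of an equivalent JPEG save using plain WPF types, in case the toolkit is not available:

        // Sketch only: encode a BitmapSource to a JPEG file, roughly what the Save extension does.
        // requires: using System.Windows.Media.Imaging;
        static void SaveAsJpeg(BitmapSource source, string path)
        {
            var encoder = new JpegBitmapEncoder();
            encoder.Frames.Add(BitmapFrame.Create(source));

            using (var stream = new System.IO.FileStream(path, System.IO.FileMode.Create))
            {
                encoder.Save(stream);
            }
        }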
Example No. 38
 void _runtime_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     if (chkDisplayDepth.IsChecked.Value)
     {
         if (this.WindowState == WindowState.Normal)
         {
             imgDepthFrame.Source = e.ImageFrame.ToBitmapSource();
         }
     }
 }
Example No. 39
        public BitmapSource Update(ImageFrameReadyEventArgs e)
        {
            PlanarImage Image = e.ImageFrame.Image;

            ColorBitmap = BitmapSource.Create(Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null, Image.Bits, Image.Width * Image.BytesPerPixel);

            RaisePropertyChanged(() => ColorBitmap);

            return ColorBitmap;
        }
Example No. 40
        public BitmapSource Update(ImageFrameReadyEventArgs e)
        {
            PlanarImage Image = e.ImageFrame.Image;

            ColorBitmap = BitmapSource.Create(Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null, Image.Bits, Image.Width * Image.BytesPerPixel);

            RaisePropertyChanged(() => ColorBitmap);

            return ColorBitmap;
        }
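RaisePropertyChanged(() => ColorBitmap) in this and the previous example implies an expression-based INotifyPropertyChanged helper in a view-model base class that is not shown. A minimal sketch of such a helper, assuming a conventional MVVM base class (the class name is an assumption):

        // Sketch only: raise PropertyChanged with the property name taken from the lambda expression.
        // requires: using System; using System.ComponentModel; using System.Linq.Expressions;
        public abstract class NotifierBase : INotifyPropertyChanged
        {
            public event PropertyChangedEventHandler PropertyChanged;

            protected void RaisePropertyChanged<T>(Expression<Func<T>> propertyExpression)
            {
                var member = propertyExpression.Body as MemberExpression;
                if (member == null)
                    throw new ArgumentException("The expression must reference a property.", "propertyExpression");

                var handler = PropertyChanged;
                if (handler != null)
                {
                    handler(this, new PropertyChangedEventArgs(member.Member.Name));
                }
            }
        }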
Example No. 41
        void RuntimeColorFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            ColorImage.Source = e.ImageFrame.ToBitmapSource();
			
            if (_saveColorFrame)
            {
                _saveColorFrame = false;
                e.ImageFrame.ToBitmapSource().Save(DateTime.Now.ToString("yyyyMMddHHmmss") + "_color.jpg", ImageFormat.Jpeg);
            }
        }
Example No. 42
        void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            //Convert depth information for a pixel into color information
            byte[] ColoredBytes = GenerateColoredBytes(e.ImageFrame);

            //create an image based on returned colors

            PlanarImage image = e.ImageFrame.Image;
            image1.Source = BitmapSource.Create(image.Width, image.Height, 96, 96, PixelFormats.Bgr32, null,
                ColoredBytes, image.Width * PixelFormats.Bgr32.BitsPerPixel / 8);
        }
Example No. 43
 void kinect_VideoFrameReady( object sender, ImageFrameReadyEventArgs e )
 {
      // When a Kinect has just been unplugged, its InstanceIndex becomes -1
     Runtime kinect = sender as Runtime;
     if ( (kinect != null) && (kinect.InstanceIndex >= 0) ) {
          PlanarImage source = e.ImageFrame.Image;
          Image dest = images[kinect.InstanceIndex];
          dest.Source = BitmapSource.Create( source.Width, source.Height, 96, 96,
              PixelFormats.Bgr32, null, source.Bits, source.Width * source.BytesPerPixel );
     }
 }
Example No. 44
 /// <summary>
 /// Event handler for when the RGB camera stream is ready.
 /// </summary>
 /// <param name="sender"></param>
 /// <param name="e"></param>
 private void nui_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     try
     {
         PlanarImage image = e.ImageFrame.Image;
         rgbImage.Source = BitmapSource.Create(image.Width, image.Height, 6, 6,
             PixelFormats.Bgr32, null, image.Bits, image.Width * image.BytesPerPixel);
     }
     catch (Exception ex)
     {
         rgbImage = null;
         MessageBox.Show(ex.Message);
     }
 }
        void ColorImageReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage planarImage = e.ImageFrame.Image;

            //An interopBitmap is a WPF construct that enables resetting the Bits of the image.
            //This is more efficient than doing a BitmapSource.Create call every frame.
            if (imageHelper == null)
            {
                imageHelper = new InteropBitmapHelper(planarImage.Width, planarImage.Height, planarImage.Bits);
                kinectColorImage.Source = imageHelper.InteropBitmap;
            }
            else
            {
                imageHelper.UpdateBits(planarImage.Bits);
            }
        }
Example No. 46
        public void OnDepthFrameReady(ImageFrameReadyEventArgs e)
        {
            lock (this)
            {
                if (GraphicsDevice == null)
                    return;
                if (depthTexture == null)
                {
                    depthTexture = new Texture2D(GraphicsDevice, e.ImageFrame.Image.Width, e.ImageFrame.Image.Height);
                }
                PlanarImage Image = e.ImageFrame.Image;
                ConvertDepthFrame(Image.Bits);

                GraphicsDevice.Textures[0] = null;
                depthTexture.SetData(depthFrame32);
            }
        }
Example No. 47
        private void DepthImageReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage planarImage = e.ImageFrame.Image;
            byte[] convertedDepthBits = convertDepthFrame(planarImage.Bits);

            //An interopBitmap is a WPF construct that enables resetting the Bits of the image.
            //This is more efficient than doing a BitmapSource.Create call every frame.
            if (imageHelper == null)
            {
                imageHelper = new InteropBitmapHelper(planarImage.Width, planarImage.Height, convertedDepthBits);
                kinectDepthImage.Source = imageHelper.InteropBitmap;
            }
            else
            {
                imageHelper.UpdateBits(convertedDepthBits);
            }

            calculateFrameRate();
        }
        unsafe void nuiRuntime_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            var image = e.ImageFrame.Image;
            BitmapData bitmapData = this.CurrentValue.LockBits(new System.Drawing.Rectangle(0, 0, this.Width, this.Height), ImageLockMode.WriteOnly, System.Drawing.Imaging.PixelFormat.Format24bppRgb);

            int pointer = 0;
            for (int y = 0; y < this.Height; y++)
            {
                byte* pDest = (byte*)bitmapData.Scan0.ToPointer() + y * bitmapData.Stride + bitmapData.Stride - 3;
                for (int x = 0; x < this.Width; x++, pointer += 2, pDest -= 3)
                {
                    int realDepth = image.Bits[pointer] | (image.Bits[pointer + 1] << 8);
                    byte intensity = (byte)(255 - (255 * realDepth / 0x0fff));
                    pDest[0] = intensity;
                    pDest[1] = intensity;
                    pDest[2] = intensity;
                }
            }
            this.CurrentValue.UnlockBits(bitmapData);
            this.OnNewDataAvailable();
        }
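The InteropBitmapHelper class referenced in several handlers on this page is not included here. The comments describe its purpose: allocate one shared-memory-backed bitmap and overwrite its bits each frame instead of calling BitmapSource.Create per frame. A hedged sketch of such a helper (the class name, P/Invoke plumbing, and Bgr32 assumption are mine, not this project's code):

        // Sketch only: a simplified InteropBitmap-backed helper for per-frame pixel updates.
        // requires: using System; using System.Runtime.InteropServices; using System.Windows.Interop; using System.Windows.Media;
        public class SimpleInteropBitmapHelper
        {
            [DllImport("kernel32.dll", SetLastError = true)]
            static extern IntPtr CreateFileMapping(IntPtr hFile, IntPtr lpAttributes, uint flProtect,
                                                   uint dwMaximumSizeHigh, uint dwMaximumSizeLow, string lpName);

            [DllImport("kernel32.dll", SetLastError = true)]
            static extern IntPtr MapViewOfFile(IntPtr hFileMappingObject, uint dwDesiredAccess,
                                               uint dwFileOffsetHigh, uint dwFileOffsetLow, uint dwNumberOfBytesToMap);

            const uint PAGE_READWRITE = 0x04;
            const uint FILE_MAP_ALL_ACCESS = 0xF001F;

            readonly int byteCount;
            readonly IntPtr mapView;

            public InteropBitmap InteropBitmap { get; private set; }

            public SimpleInteropBitmapHelper(int width, int height, byte[] initialBits)
            {
                byteCount = width * height * 4; // assumes Bgr32, 4 bytes per pixel

                // Back the bitmap with an anonymous shared-memory section that both this code and WPF can see.
                IntPtr section = CreateFileMapping(new IntPtr(-1), IntPtr.Zero, PAGE_READWRITE, 0, (uint)byteCount, null);
                mapView = MapViewOfFile(section, FILE_MAP_ALL_ACCESS, 0, 0, (uint)byteCount);

                InteropBitmap = (InteropBitmap)Imaging.CreateBitmapSourceFromMemorySection(
                    section, width, height, PixelFormats.Bgr32, width * 4, 0);

                UpdateBits(initialBits);
            }

            public void UpdateBits(byte[] bits)
            {
                // Copy the new frame into the shared section and ask WPF to redraw from it.
                Marshal.Copy(bits, 0, mapView, byteCount);
                InteropBitmap.Invalidate();
            }
        }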
Example No. 49
        /**
         * Called when new depth data is available
         */
        void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            //The raw array (in e.ImageFrame) contains data encoded in a special format.
            //The GenerateColoredBytes function takes care of building an array of directly accessible pixels.
            byte[] ColoredBytes = GenerateColoredBytes(e.ImageFrame);

            //Convert the data into a format that C# can display on screen.
            PlanarImage image = e.ImageFrame.Image;
            BitmapSource bmps = BitmapSource.Create(image.Width, image.Height, 96, 96, PixelFormats.Bgra32, null,
                ColoredBytes, image.Width * PixelFormats.Bgra32.BitsPerPixel / 8);

            //Display the recovered data on the preview image component.
            //Note: the image contains white pixels and transparent pixels, so normally nothing should be visible...
            //To work around this, a black canvas is placed underneath the image component and shows through the transparent pixels.
            preview.Source = bmps;
        }
Example No. 50
 void nui_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     //Manually create BitmapSource for Video
     PlanarImage imageData = e.ImageFrame.Image;
     image1.Source = BitmapSource.Create(imageData.Width, imageData.Height, 96, 96, PixelFormats.Bgr32, null, imageData.Bits, imageData.Width * imageData.BytesPerPixel);
 }
Example No. 51
        void kinectDevice_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage p = e.ImageFrame.Image;

            Color[] color = new Color[p.Height * p.Width];
            kinectRGBVideo = new Texture2D(mDevice, p.Width, p.Height);

            int index = 0;
            for (int y = 0; y < p.Height; y++)
            {
                for (int x = 0; x < p.Width; x++, index += 4)
                {
                    color[y * p.Width + x] = new Color(p.Bits[index + 2], p.Bits[index + 1], p.Bits[index + 0]);
                }
            }
            kinectRGBVideo.SetData<Color>(color);
        }
Example No. 52
 void nui_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     imgKinectCanvas.Source = e.ImageFrame.ToBitmapSource();
 }
Example No. 53
        void KinectruntimeVideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            var imageData = e.ImageFrame.Image;

            //imageData.Width, imageData.Height, 96, 96, PixelFormats.Bgr32, null, imageData.Bits, imageData.Width * imageData.BytesPerPixel
            var bsrc = BitmapSource.Create(imageData.Width, imageData.Height, 96, 96, PixelFormats.Bgr32, null, imageData.Bits,
                                imageData.Width * imageData.BytesPerPixel);

            //Convert Source to bitmap
            System.Drawing.Bitmap bitmap;
            using (MemoryStream outStream = new MemoryStream())
            {
                BitmapEncoder enc = new BmpBitmapEncoder();
                enc.Frames.Add(BitmapFrame.Create(bsrc));
                enc.Save(outStream);
                bitmap = new System.Drawing.Bitmap(outStream);
            }

            //Flip horizontally
            bitmap.RotateFlip(RotateFlipType.RotateNoneFlipX);

            lock (lock_video)
            { _lastVideoFrame = (Bitmap)bitmap.Clone(); }

            //lock (lock_viewarea)
            //{_viewarea = e.ImageFrame.ViewArea;}

            if (OnVideoFrame != null)
                OnVideoFrame(bitmap);
        }
Example No. 54
        void KinectruntimeDepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            //Calculate time between frames
            TimeSpan ts = DateTime.Now - _depthtime;
            _depthtime = DateTime.Now;
            DelayBetweenDepthFrames = (int)Math.Round(ts.TotalMilliseconds);

            PlanarImage img = e.ImageFrame.Image;
            int[] data = ConvertPlanarImageToDepthArray(img);

            lock (lock_depth)
            { _lastDepthFrame = (int[])data.Clone(); }

            if (OnDepthFrame != null)
                OnDepthFrame(data, img);
        }
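ConvertPlanarImageToDepthArray is not shown here. A minimal sketch of one way to unpack the 16-bit little-endian depth buffer into an int[], assuming a plain depth stream without a player index (the method body is an assumption):

        // Sketch only: combine each little-endian byte pair of the depth frame into one int value.
        static int[] ConvertPlanarImageToDepthArray(PlanarImage image)
        {
            var depths = new int[image.Width * image.Height];

            for (int i = 0, d = 0; i + 1 < image.Bits.Length && d < depths.Length; i += 2, d++)
            {
                depths[d] = image.Bits[i] | (image.Bits[i + 1] << 8);
            }
            return depths;
        }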
Example No. 55
 void kinectRuntime_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     //kinectDisplay.Source = streamManager.Update(e);
 }
        void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage Image = e.ImageFrame.Image;
            byte[] convertedDepthFrame = convertDepthFrame(Image.Bits);

            depth.Source = BitmapSource.Create(
                Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null, convertedDepthFrame, Image.Width * 4);

            ++totalFrames;

            DateTime cur = DateTime.Now;
            if (cur.Subtract(lastTime) > TimeSpan.FromSeconds(1))
            {
                int frameDiff = totalFrames - lastFrames;
                lastFrames = totalFrames;
                lastTime = cur;
                frameRate.Text = frameDiff.ToString() + " fps";
            }
        }
Example No. 57
 private void NUIVideoFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     PlanarImage image = e.ImageFrame.Image;
     CameraImage.Source = BitmapSource.Create(image.Width, image.Height, 96, 96, PixelFormats.Bgr32, null,
                                              image.Bits, image.Width * image.BytesPerPixel);
 }
Example No. 58
        void VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage p = e.ImageFrame.Image;

            Color[] color = new Color[p.Height * p.Width];
            colorImg = new Texture2D(GraphicsDevice, p.Width, p.Height);

            int index = 0;
            for (int y = 0; y < p.Height; y++)
            {
                for (int x = 0; x < p.Width; x++, index += 4)
                {
                    if (y != 250)
                    {
                        color[y * p.Width + x] = new Color(p.Bits[index + 2], p.Bits[index + 1], p.Bits[index + 0]);
                    }
                    else
                    {
                        color[y * p.Width + x] = new Color(255, 0, 0);
                    }
                }
            }

            colorImg.SetData(color);
        }
 void nui_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     //Automagically create BitmapSource for Video
     image1.Source = e.ImageFrame.ToBitmapSource();
 }
Example No. 60
        void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage Image = e.ImageFrame.Image;

            byte[] convertedDepthFrame = convertDepthFrame(e.ImageFrame);
            depthWidth = Image.Width;
            depthHeight = Image.Height;

            //An interopBitmap is a WPF construct that enables resetting the Bits of the image.
            //This is more efficient than doing a BitmapSource.Create call every frame.
            if (imageHelper == null)
            {
                imageHelper = new InteropBitmapHelper(Image.Width, Image.Height, convertedDepthFrame);
                kinectDepthImage.Source = imageHelper.InteropBitmap;
            }
            else
            {
                imageHelper.UpdateBits(convertedDepthFrame);
            }
        }