Example #1
0
        /// <summary>
        /// Handles the video-frame-ready event raised by the NUI Kinect controller:
        /// converts the raw frame to a Bitmap and forwards it to listeners.
        /// </summary>
        /// <param name="sender">The object that raised the event.</param>
        /// <param name="e">Event arguments carrying the raw image data from the Kinect.</param>
        private void VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage frame = e.ImageFrame.Image;

            OnImageReady(new ImageEventArgs(PImageToBitmap(frame)));
        }
Example #2
0
        void nui_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            // Build a BitmapSource by hand from the raw Bgr32 video frame.
            PlanarImage frame  = e.ImageFrame.Image;
            int         stride = frame.Width * frame.BytesPerPixel;

            image1.Source = BitmapSource.Create(frame.Width, frame.Height, 96, 96, PixelFormats.Bgr32, null, frame.Bits, stride);
        }
        // Builds a histogram of depth values, brightens the frame in place,
        // plots the histogram on chart1, and renders the frame in pictureBox1.
        void DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage frame = e.ImageFrame.Image;
            byte[]      bits  = frame.Bits;

            // Histogram over the 13-bit depth range, four depth values per bin.
            int[] histogram = new int[(0x1FFF / 4) + 1];

            for (int i = 0; i < bits.Length; i += 2)
            {
                // Little-endian 13-bit depth value for this pixel.
                int depthValue = ((bits[i + 1] << 8) | bits[i]) & 0x1FFF;
                histogram[depthValue >> 2]++;

                // Scale the depth up by four and write it back in place,
                // brightening the image before it is drawn below.
                int scaled = depthValue << 2;
                bits[i]     = (byte)(scaled & 0xFF);
                bits[i + 1] = (byte)(scaled >> 8);
            }

            chart1.Series[0].Points.Clear();
            for (int bin = 1; bin < (0x1FFF / 4); bin++)
            {
                chart1.Series[0].Points.Add(histogram[bin]);
            }

            Application.DoEvents();
            pictureBox1.Image = DepthToBitmap(frame);
        }
Example #4
0
 void nui_ColorFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     // The frame is a 32-bit-per-pixel RGBA image; rendering is currently disabled.
     PlanarImage frame = e.ImageFrame.Image;
     // video.Source = BitmapSource.Create(
     // frame.Width, frame.Height, 96, 96, PixelFormats.Bgr32, null, frame.Bits, frame.Width * frame.BytesPerPixel);
 }
Example #5
0
        /// <summary>
        /// Wraps the incoming YUV buffers in a PlanarImage, runs them through the
        /// converter, and pushes the result to the display surface.
        /// </summary>
        /// <param name="yBuffer">Luma plane (or the whole buffer for packed formats).</param>
        /// <param name="uBuffer">First chroma plane (unused for packed formats).</param>
        /// <param name="vBuffer">Second chroma plane (unused for NV12/packed formats).</param>
        public void Render(IntPtr yBuffer, IntPtr uBuffer, IntPtr vBuffer)
        {
            // Planar formats take one pointer per plane; packed formats take a
            // single interleaved buffer. Unknown alignments are silently ignored.
            PlanarImage source;

            if (this.pixelType == PixelAlignmentType.YV12 ||
                this.pixelType == PixelAlignmentType.I420)
            {
                // Fully planar: three separate planes.
                source = new PlanarImage(this.width, this.height, this.pixelType, new IntPtr[] { yBuffer, uBuffer, vBuffer });
            }
            else if (this.pixelType == PixelAlignmentType.NV12)
            {
                // Semi-planar: luma plane plus one interleaved chroma plane.
                source = new PlanarImage(this.width, this.height, this.pixelType, new IntPtr[] { yBuffer, uBuffer });
            }
            else if (this.pixelType == PixelAlignmentType.YUY2 ||
                     this.pixelType == PixelAlignmentType.UYVY ||
                     this.pixelType == PixelAlignmentType.BGRA ||
                     this.pixelType == PixelAlignmentType.ABGR ||
                     this.pixelType == PixelAlignmentType.RGB24)
            {
                // Packed formats: everything is interleaved in the first buffer.
                source = new PlanarImage(this.width, this.height, this.pixelType, yBuffer, this.frameSize);
            }
            else
            {
                return;
            }

            this.DisplayImage(this.converter.DoTheWork(source));
        }
Example #6
0
        /// <summary>
        /// Wraps a packed frame buffer, converts it, and displays the result.
        /// </summary>
        /// <param name="buffer">Pointer to the packed pixel data for one frame.</param>
        public void Render(IntPtr buffer)
        {
            var source = new PlanarImage(this.width, this.height, this.pixelType, buffer, this.frameSize);

            this.DisplayImage(this.converter.DoTheWork(source));
        }
        // Draw the depth frame and keep a running frames-per-second figure.
        void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage Image = e.ImageFrame.Image;

            byte[] convertedDepthFrame = convertDepthFrame(Image.Bits);

            depth.Source = BitmapSource.Create(
                Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null, convertedDepthFrame, Image.Width * 4);

            ++totalFrames;

            DateTime cur = DateTime.Now;

            // Refresh the FPS label roughly once per second.
            if (cur.Subtract(lastTime) > TimeSpan.FromSeconds(1))
            {
                int frameDiff = totalFrames - lastFrames;
                lastFrames     = totalFrames;
                lastTime       = cur;
                frameRate.Text = frameDiff.ToString() + " fps";
            }

            // Clear stale skeleton overlays once no skeleton data has arrived for 200 ms.
            if (cur.Subtract(lastSkeletonTime) > TimeSpan.FromMilliseconds(200))
            {
                skeleton.Children.Clear();
                log.Clear();
            }

            // Removed: assigning null to the local convertedDepthFrame here had no
            // effect — locals become unreachable when the method returns anyway.
        }
Example #8
0
        void kinect_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            // A Kinect's InstanceIndex becomes -1 the moment it is unplugged,
            // so bail out before touching any per-device state.
            Runtime kinect = sender as Runtime;
            if (kinect == null || kinect.InstanceIndex < 0)
            {
                return;
            }

            PlanarImage source = e.ImageFrame.Image;
            Image       dest   = images[kinect.InstanceIndex];

            dest.Source = BitmapSource.Create(source.Width, source.Height, 96, 96,
                                              PixelFormats.Bgr32, null, source.Bits, source.Width * source.BytesPerPixel);

            // Down-sample the 640x480 frame (every 'mabiki'-th pixel, 'bpp' bytes
            // per pixel) into buf, then stream the result out over the socket.
            int offset = 0;
            for (int y = 0; y < 480; y += mabiki)
            {
                int rowStart = (640 * bpp) * y;
                for (int x = 0; x < (640 * bpp); x += (mabiki * bpp))
                {
                    buf[offset++] = source.Bits[rowStart + x];
                    buf[offset++] = source.Bits[rowStart + x + 1];
                    buf[offset++] = source.Bits[rowStart + x + 2];
                    buf[offset++] = source.Bits[rowStart + x + 3];
                }
            }

            server.SendTo(buf, iep);
        }
Example #9
0
        // Colour-maps the depth frame, displays it, and optionally saves every
        // (skipFrame + 1)-th frame to disk as a numbered JPEG while capturing.
        void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            byte[] ColoredBytes = GenerateColoredBytes(e.ImageFrame);

            PlanarImage  image  = e.ImageFrame.Image;
            BitmapSource bmpSrc = BitmapSource.Create(image.Width, image.Height, 96, 96, PixelFormats.Bgr32, null, ColoredBytes,
                                                      image.Width * PixelFormats.Bgr32.BitsPerPixel / 8);

            image1.Source = bmpSrc;
            if (capture && saveCounter % (skipFrame + 1) == 0)
            {
                String fileName = String.Format("{0:d5}.jpeg", fileNumber);
                try {
                    bmpSrc.Save(savePath + "\\" + fileName, ImageFormat.Jpeg);
                    fileNumber++;
                } catch (Exception) {
                    // The most likely failure is a missing target directory: create
                    // it and retry the save once so this frame is not silently lost
                    // (the original created the directory but never retried).
                    try {
                        System.IO.Directory.CreateDirectory(savePath);
                        bmpSrc.Save(savePath + "\\" + fileName, ImageFormat.Jpeg);
                        fileNumber++;
                    }
                    catch (Exception) {
                        MessageBox.Show("Problem with saving image");
                        this.Close();
                    }
                }
            }
            saveCounter++;
        }
    /// <summary>
    /// Builds a WriteableBitmap that renders only pixels belonging to tracked
    /// players, by combining the runtime's depth stream (player masks) with its
    /// video stream (colour). Non-player pixels are written fully transparent.
    /// </summary>
    /// <param name="runtime">The Kinect runtime whose frames are consumed.</param>
    /// <param name="depthWidth">Expected width of the depth stream.</param>
    /// <param name="depthHeight">Expected height of the depth stream.</param>
    /// <returns>The live-updating bitmap; it refreshes on each video frame.</returns>
    public static WriteableBitmap CreateLivePlayerRenderer(this Runtime runtime, int depthWidth, int depthHeight)
    {
        PlanarImage     depthImage = new PlanarImage();
        WriteableBitmap target     = new WriteableBitmap(depthWidth, depthHeight, 96, 96, PixelFormats.Bgra32, null);
        var             depthRect  = new System.Windows.Int32Rect(0, 0, depthWidth, depthHeight);


        runtime.DepthFrameReady += (s, e) =>
        {
            depthImage = e.ImageFrame.Image;
            Debug.Assert(depthImage.Height == depthHeight && depthImage.Width == depthWidth);
        };

        runtime.VideoFrameReady += (s, e) =>
        {
            // don't do anything if we don't yet have a depth image
            if (depthImage.Bits == null)
            {
                return;
            }

            byte[] color      = e.ImageFrame.Image.Bits;
            int    colorWidth = e.ImageFrame.Image.Width;

            byte[] output = new byte[depthWidth * depthHeight * 4];

            // loop over each pixel in the depth image
            int outputIndex = 0;
            for (int depthY = 0, depthIndex = 0; depthY < depthHeight; depthY++)
            {
                for (int depthX = 0; depthX < depthWidth; depthX++, depthIndex += 2)
                {
                    // combine the 2 bytes of depth data representing this pixel
                    short depthValue = (short)(depthImage.Bits[depthIndex] | (depthImage.Bits[depthIndex + 1] << 8));

                    // the low 3 bits of the first depth byte hold the id of a
                    // tracked player for this pixel (0 means no player)
                    int player = depthImage.Bits[depthIndex] & 7;

                    // find a pixel in the color image which matches this coordinate from the depth image
                    int colorX, colorY;
                    runtime.NuiCamera.GetColorPixelCoordinatesFromDepthPixel(
                        e.ImageFrame.Resolution,
                        e.ImageFrame.ViewArea,
                        depthX, depthY,          // depth coordinate
                        depthValue,              // depth value
                        out colorX, out colorY); // color coordinate

                    // ensure that the calculated color location is within the bounds of the image
                    colorX = Math.Max(0, Math.Min(colorX, colorWidth - 1));
                    colorY = Math.Max(0, Math.Min(colorY, e.ImageFrame.Image.Height - 1));

                    // compute the source offset once (the original recomputed it
                    // for each of the three colour channels), then copy B, G, R
                    // and mask alpha by player membership
                    int colorIndex = 4 * (colorX + (colorY * colorWidth));
                    output[outputIndex++] = color[colorIndex + 0];
                    output[outputIndex++] = color[colorIndex + 1];
                    output[outputIndex++] = color[colorIndex + 2];
                    output[outputIndex++] = player > 0 ? (byte)255 : (byte)0;
                }
            }
            target.WritePixels(depthRect, output, depthWidth * PixelFormats.Bgra32.BitsPerPixel / 8, 0);
        };
        return(target);
    }
        void nui_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            // Present the raw Bgr32 video frame directly in image1.
            PlanarImage frame  = e.ImageFrame.Image;
            int         stride = frame.Width * frame.BytesPerPixel;

            image1.Source = BitmapSource.Create(frame.Width, frame.Height, 96, 96, PixelFormats.Bgr32,
                                                null, frame.Bits, stride);
        }
Example #12
0
        // Displays the converted depth frame, maintains the per-second frame
        // counters, and forwards the frame to the active FSM state.
        void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage Image = e.ImageFrame.Image;

            byte[] convertedDepthFrame = convertDepthFrame(Image.Bits);

            depth.Source = BitmapSource.Create(
                Image.Width, Image.Height, 96, 96, PixelFormats.Bgr32, null, convertedDepthFrame, Image.Width * 4);

            ++totalFrames;

            DateTime cur = DateTime.Now;

            // Reset the counters once per second. Nothing displays the rate here;
            // the original computed an unused frame delta, which was removed.
            if (cur.Subtract(lastTime) > TimeSpan.FromSeconds(1))
            {
                lastFrames = totalFrames;
                lastTime   = cur;
            }

            if (fsm.Current != null)
            {
                fsm.Current.ProcessDepth(e.ImageFrame);
            }
        }
Example #13
0
        //----------------------
        // Depth image handler
        //----------------------
        // Converts each 16-bit depth pixel into a grey intensity, tints pixels
        // belonging to a tracked player, and uploads the result as an XNA texture.
        void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            // NOTE(review): locking on 'this' is an anti-pattern (external code can
            // take the same lock); a private lock object would be safer, but that
            // change is outside this method. Left as-is.
            lock (this)
            {
                PlanarImage Image = e.ImageFrame.Image;
                //  byte[] convertedDepthFrame = convertDepthFrame(Image.Bits);

                // NOTE(review): 'no' is incremented in the inner loop but never read;
                // the byte offset is recomputed as 'n' instead.
                int no = 0;

                this.depthColor    = new Color[Image.Height * Image.Width];
                this.texture_depth = new Texture2D(graphics.GraphicsDevice,
                                                   Image.Width, Image.Height);  // create the texture

                // Walk the frame pixel by pixel.
                for (int y = 0; y < Image.Height; ++y)
                {     // y axis
                    for (int x = 0; x < Image.Width; ++x, no += 2)
                    { // x axis
                        // Depth is packed little-endian with the player index in the
                        // low 3 bits; drop those 3 bits to recover the 13-bit distance,
                        // then map it to an inverted grey intensity (near = bright).
                        int  n         = (y * Image.Width + x) * 2;
                        int  realDepth = (Image.Bits[n + 1] << 5) | (Image.Bits[n] >> 3);
                        byte intensity = (byte)((255 - (255 * realDepth / 0x0fff)) / 2);
                        this.depthColor[y * Image.Width + x] = new Color(intensity, intensity, intensity);

                        // Colour-code pixels per tracked player.
                        int playerIndex = Image.Bits[n] & 0x07;
                        if (playerIndex > 0)
                        {
                            // NOTE(review): the B/G/R components are passed to the
                            // colour constructor in reversed order — presumably a
                            // deliberate channel swap for this texture format; verify.
                            Color labelColor = colors[playerIndex % ncolors];
                            this.depthColor[y * Image.Width + x] = new Color(labelColor.B * intensity / 256, labelColor.G * intensity / 256, labelColor.R * intensity / 256);
                        }
                    }
                }
                this.texture_depth.SetData(this.depthColor);    // upload the pixel data
            }
        }
        // Video camera event handler: converts the frame to a BitmapSource,
        // resizes it, and shows it in the main window's camera panel.
        void mKinectNUI_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            PlanarImage frame = e.ImageFrame.Image;

            BitmapSource converted = BitmapSource.Create(frame.Width, frame.Height, 96, 96, PixelFormats.Bgr32, null,
                                                         frame.Bits, frame.Width * frame.BytesPerPixel);

            mWindow.KinectCameraPanelInMainWindow.VideoCameraWindow.Source = Resize(converted, frame.Width, frame.Height, mVideoX, mVideoY);
        }
Example #15
0
 /// <summary>
 /// Forwards the latest Kinect frame and its BitmapSource to the shared holder gGD.
 /// </summary>
 /// <param name="KinectImage">The raw planar image from the Kinect.</param>
 /// <param name="KinectSource">The same frame already converted to a BitmapSource.</param>
 public void UpdateKinectImages(PlanarImage KinectImage, BitmapSource KinectSource)
 {
     try
     {
         gGD.UpdateImages(KinectImage, KinectSource);
     }
     // NOTE(review): all exceptions are swallowed silently (best-effort update);
     // consider at least logging so failures are not invisible.
     catch { }
 }
Example #16
0
        private void NUIVideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            // Push the latest Kinect video frame onto the camera canvas.
            PlanarImage frame  = e.ImageFrame.Image;
            int         stride = frame.Width * frame.BytesPerPixel;

            CameraImage.Source = BitmapSource.Create(frame.Width, frame.Height, 96, 96, PixelFormats.Bgr32, null,
                                                     frame.Bits, stride);
        }
Example #17
0
        public void nui_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            // Dump the video stream straight into the Image element.
            PlanarImage frame = e.ImageFrame.Image;

            image.Source = BitmapSource.Create(frame.Width, frame.Height, 96, 96,
                                               PixelFormats.Bgr32, null, frame.Bits,
                                               frame.Width * frame.BytesPerPixel);
        }
Example #18
0
 void _runtime_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     // Skip rendering entirely when the "display video" checkbox is off.
     if (!chkDisplayVideo.IsChecked.Value)
     {
         return;
     }

     PlanarImage frame = e.ImageFrame.Image;
     imgVideoFrame.Source = BitmapSource.Create(frame.Width, frame.Height, 96, 96, PixelFormats.Bgr32, null, frame.Bits, frame.Width * frame.BytesPerPixel);
 }
Example #19
0
        /// <summary>
        /// Reads the 16-bit little-endian value stored at pixel (x, y) of a frame.
        /// </summary>
        /// <param name="PImage">Frame whose Bits hold BytesPerPixel bytes per pixel.</param>
        /// <param name="x">Pixel column.</param>
        /// <param name="y">Pixel row.</param>
        /// <returns>The combined (high byte &lt;&lt; 8) + low byte value for that pixel.</returns>
        int getValue(PlanarImage PImage, int x, int y)
        {
            // Compute the pixel's byte offset once — the original evaluated the
            // same index expression twice.
            int offset = (y * PImage.Width + x) * PImage.BytesPerPixel;

            return (PImage.Bits[offset + 1] << 8) + PImage.Bits[offset];
        }
        // Maximum number of Kinect sensors this process will drive simultaneously.
        const int maxKinectCount = 1; //Change to 1 if you only want to view one at a time. Switching will be enabled.
        //Each Kinect needs to be in its own USB hub, otherwise it won't have enough USB bandwidth.
        //Currently only 1 Kinect per process can have SkeletalTracking working, but color and depth work for all.
        //KinectSDK TODO: enable a larger maxKinectCount (assuming your PC can dedicate a USB hub for each Kinect)
        #endregion Private state


        #region color and drawing tools
        // Caches the latest colour frame. The assignment is marshalled through the
        // Dispatcher because Kinect events arrive on a worker thread while
        // colorImage is consumed on the UI thread.
        void nui_ColorFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            Dispatcher.BeginInvoke((Action) delegate
            {
                // 32-bit per pixel, RGBA image
                colorImage = e.ImageFrame.Image;
            });
        }
        // Refreshes the video panel with the latest colour frame plus a textual
        // summary (frame number, resolution, timestamp).
        void nui_ColorFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            // The original also captured an unused PlanarImage local; removed.
            ImageFrame i      = e.ImageFrame;
            Bitmap     bitmap = i.ToBitmap();
            // Fixed user-visible typo: "Resoulution" -> "Resolution".
            String     info   = "Frame Number:\t" + i.FrameNumber.ToString() + "\nResolution\t" + i.Resolution.ToString() + "\nTime Stamp\t" + i.Timestamp.ToString();

            video.refresh(info, bitmap);
        }
        void runtime_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            // Pull the video frame out of the event args and show it in the Image control.
            PlanarImage frame = e.ImageFrame.Image;

            videoImage.Source = BitmapSource.Create(frame.Width, frame.Height, 96, 96,
                                                    PixelFormats.Bgr32, null, frame.Bits,
                                                    frame.Width * frame.BytesPerPixel);
        }
Example #23
0
        void runtime_VideoFrameReady(object sender, Microsoft.Research.Kinect.Nui.ImageFrameReadyEventArgs e)
        {
            // Convert the raw Bgr32 frame into a BitmapSource and display it.
            PlanarImage frame  = e.ImageFrame.Image;
            int         stride = frame.Width * frame.BytesPerPixel;

            videoImage.Source = BitmapSource.Create(frame.Width, frame.Height, 96, 96,
                                                    PixelFormats.Bgr32, null, frame.Bits, stride);
        }
Example #24
0
        void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            // Colour-map the depth data (blending in the current video frame)
            // before rendering it into the Depth_Image control.
            Byte[] coloredBytes = GenerateColoredBytes(e.ImageFrame, currentVideoFrame);

            PlanarImage frame  = e.ImageFrame.Image;
            int         stride = frame.Width * PixelFormats.Bgr32.BitsPerPixel / 8;

            Depth_Image.Source = BitmapSource.Create(frame.Width, frame.Height, 96, 96, PixelFormats.Bgr32, null,
                                                     coloredBytes, stride);
        }
Example #25
0
        private void RuntimeVideoFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            // Convert the captured frame to a BitmapSource and use it as the
            // Source of the imgVideoFrame Image control.
            PlanarImage frame = e.ImageFrame.Image;

            imgVideoFrame.Source = BitmapSource.Create(frame.Width, frame.Height, 96, 96, PixelFormats.Bgr32, null,
                                                       frame.Bits, frame.Width * PixelFormats.Bgr32.BitsPerPixel / 8);
        }
Example #26
0
        /// <summary>
        /// The PlanarImage objects returned from Kinect are not exactly the same as Bitmap objects, so in order to work with the
        /// images using standard GDI+, this method converts the planar image into a bitmap.
        /// </summary>
        /// <param name="PImage">The PlanarImage object returned from the Kinect</param>
        /// <returns>The Bitmap object</returns>
        private Bitmap PImageToBitmap(PlanarImage PImage)
        {
            Bitmap     bmap     = new Bitmap(PImage.Width, PImage.Height, PixelFormat.Format32bppRgb);
            BitmapData bmapdata = bmap.LockBits(new Rectangle(0, 0, PImage.Width, PImage.Height), ImageLockMode.WriteOnly, bmap.PixelFormat);

            // Guarantee the bitmap is unlocked even if the copy throws — the
            // original would leave the bitmap permanently locked on failure.
            try
            {
                Marshal.Copy(PImage.Bits, 0, bmapdata.Scan0, PImage.Width * PImage.BytesPerPixel * PImage.Height);
            }
            finally
            {
                bmap.UnlockBits(bmapdata);
            }
            return(bmap);
        }
Example #27
0
        /// <summary>
        /// Rebuilds the colour bitmap from a newly arrived frame, notifies bindings,
        /// and returns the new bitmap.
        /// </summary>
        public BitmapSource Update(ImageFrameReadyEventArgs e)
        {
            PlanarImage frame  = e.ImageFrame.Image;
            int         stride = frame.Width * frame.BytesPerPixel;

            ColorBitmap = BitmapSource.Create(frame.Width, frame.Height, 96, 96, PixelFormats.Bgr32, null, frame.Bits, stride);

            RaisePropertyChanged(() => ColorBitmap);

            return ColorBitmap;
        }
Example #28
0
        /// <summary>
        /// Converts a Kinect PlanarImage into a GDI+ Bitmap by copying the raw
        /// frame bytes into a locked 32bpp bitmap buffer.
        /// </summary>
        /// <param name="pImage">The frame returned from the Kinect.</param>
        /// <returns>A new Bitmap containing the frame's pixel data.</returns>
        private static Bitmap PImageToBitmap(PlanarImage pImage)
        {
            var bmap     = new Bitmap(pImage.Width, pImage.Height, PixelFormat.Format32bppRgb);
            var bmapdata = bmap.LockBits(new Rectangle(0, 0, pImage.Width, pImage.Height), ImageLockMode.WriteOnly,
                                         bmap.PixelFormat);

            // Guarantee the bitmap is unlocked even if the copy throws — the
            // original would leave the bitmap permanently locked on failure.
            try
            {
                Marshal.Copy(pImage.Bits, 0, bmapdata.Scan0, pImage.Width * pImage.BytesPerPixel * pImage.Height);
            }
            finally
            {
                bmap.UnlockBits(bmapdata);
            }
            return bmap;
        }
Example #29
0
        /// <summary>
        /// Builds a WriteableBitmap that renders only pixels belonging to tracked
        /// players, combining the depth stream (player masks) with the video stream
        /// (colour). Non-player pixels are written fully transparent.
        /// </summary>
        /// <param name="runtime">The Kinect runtime whose frames are consumed.</param>
        /// <param name="depthWidth">Expected width of the depth stream.</param>
        /// <param name="depthHeight">Expected height of the depth stream.</param>
        /// <returns>The live-updating bitmap; it refreshes on each video frame.</returns>
        public static WriteableBitmap CreateLivePlayerRenderer(this Runtime runtime, int depthWidth, int depthHeight)
        {
            PlanarImage depthImage = new PlanarImage();
            WriteableBitmap target = new WriteableBitmap(depthWidth, depthHeight, 96, 96, PixelFormats.Bgra32, null);
            var depthRect = new System.Windows.Int32Rect(0, 0, depthWidth, depthHeight);

            runtime.DepthFrameReady += (s, e) =>
            {
                depthImage = e.ImageFrame.Image;
                Debug.Assert(depthImage.Height == depthHeight && depthImage.Width == depthWidth);
            };
            runtime.VideoFrameReady += (s, e) =>
            {
                // don't do anything if we don't yet have a depth image
                if (depthImage.Bits == null)
                    return;

                byte[] color = e.ImageFrame.Image.Bits;
                int colorWidth = e.ImageFrame.Image.Width;
                byte[] output = new byte[depthWidth * depthHeight * 4];

                // loop over each pixel in the depth image
                int outputIndex = 0;
                for (int depthY = 0, depthIndex = 0; depthY < depthHeight; depthY++)
                {
                    for (int depthX = 0; depthX < depthWidth; depthX++, depthIndex += 2)
                    {
                        // combine the 2 bytes of depth data representing this pixel
                        short depthValue = (short)(depthImage.Bits[depthIndex] | (depthImage.Bits[depthIndex + 1] << 8));

                        // the low 3 bits of the first depth byte hold the id of a
                        // tracked player for this pixel (0 means no player)
                        int player = depthImage.Bits[depthIndex] & 7;

                        // find a pixel in the color image which matches this coordinate from the depth image
                        int colorX, colorY;
                        runtime.NuiCamera.GetColorPixelCoordinatesFromDepthPixel(
                                                    e.ImageFrame.Resolution,
                                                    e.ImageFrame.ViewArea,
                                                    depthX, depthY, // depth coordinate
                                                    depthValue,  // depth value
                                                    out colorX, out colorY);  // color coordinate

                        // ensure that the calculated color location is within the bounds of the image
                        colorX = Math.Max(0, Math.Min(colorX, colorWidth - 1));
                        colorY = Math.Max(0, Math.Min(colorY, e.ImageFrame.Image.Height - 1));

                        // compute the source offset once (the original recomputed it
                        // for each colour channel), then copy B, G, R and mask alpha
                        // by player membership
                        int colorIndex = 4 * (colorX + (colorY * colorWidth));
                        output[outputIndex++] = color[colorIndex + 0];
                        output[outputIndex++] = color[colorIndex + 1];
                        output[outputIndex++] = color[colorIndex + 2];
                        output[outputIndex++] = player > 0 ? (byte)255 : (byte)0;
                    }
                }
                target.WritePixels(depthRect, output, depthWidth * PixelFormats.Bgra32.BitsPerPixel / 8, 0);
            };
            return target;
        }
Example #30
0
        /// <summary>
        /// Converts a Kinect PlanarImage into a GDI+ Bitmap by copying the raw
        /// frame bytes into a locked 32bpp bitmap buffer.
        /// </summary>
        /// <param name="pImage">The frame returned from the Kinect.</param>
        /// <returns>A new Bitmap containing the frame's pixel data.</returns>
        private static Bitmap PImageToBitmap(PlanarImage pImage)
        {
            var bmap     = new Bitmap(pImage.Width, pImage.Height, PixelFormat.Format32bppRgb);
            var bmapdata = bmap.LockBits(new Rectangle(0, 0, pImage.Width, pImage.Height), ImageLockMode.WriteOnly,
                                         bmap.PixelFormat);

            // Guarantee the bitmap is unlocked even if the copy throws — the
            // original would leave the bitmap permanently locked on failure.
            try
            {
                Marshal.Copy(pImage.Bits, 0, bmapdata.Scan0, pImage.Width * pImage.BytesPerPixel * pImage.Height);
            }
            finally
            {
                bmap.UnlockBits(bmapdata);
            }
            return(bmap);
        }
Example #31
0
        void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            // Map each depth pixel to a colour, then present the result as a bitmap.
            byte[] coloredBytes = GenerateColoredBytes(e.ImageFrame);

            PlanarImage frame  = e.ImageFrame.Image;
            int         stride = frame.Width * PixelFormats.Bgr32.BitsPerPixel / 8;

            image1.Source = BitmapSource.Create(frame.Width, frame.Height, 96, 96, PixelFormats.Bgr32, null,
                                                coloredBytes, stride);
        }
Example #32
0
        /// <summary>
        /// Copies a converted frame's first plane into the WriteableBitmap backing
        /// the display. Re-invokes itself on the dispatcher thread when called from
        /// a worker thread, since WPF bitmaps may only be touched by their owner.
        /// </summary>
        /// <param name="image">Frame whose first plane is blitted into the back buffer.</param>
        private void DisplayImage(PlanarImage image)
        {
            if (!this.imageSource.Dispatcher.CheckAccess())
            {
                // Marshal onto the UI thread, then bail out of this worker-thread call.
                this.imageSource.Dispatcher.Invoke((Action)(() => this.DisplayImage(image)));
                return;
            }

            // Standard WriteableBitmap update cycle: lock, copy, mark dirty, unlock.
            this.imageSource.Lock();
            Interop.Memcpy(this.imageSource.BackBuffer, image.Planes[0], image.PlaneSizes[0]);
            this.imageSource.AddDirtyRect(this.imageSourceRect);
            this.imageSource.Unlock();
        }
Example #33
0
        /// <summary>
        /// Wraps a raw 16-bit depth frame in a Gray16 BitmapSource without conversion.
        /// </summary>
        BitmapSource DepthToBitmapSource(PlanarImage PImage)
        {
            int stride = PImage.Width * PImage.BytesPerPixel;

            return BitmapSource.Create(PImage.Width, PImage.Height, 96, 96,
                                       PixelFormats.Gray16, null, PImage.Bits, stride);
        }
        // Converts the distance data to grey scale, modifying the frame in place.
        private static PlanarImage ConvertGrayScale( PlanarImage source )
        {
            // thanks : http://www.atmarkit.co.jp/fdotnet/kinectsdkbeta/kinectsdkbeta02/kinectsdkbeta02_02.html
            byte[] bits = source.Bits;

            for ( int i = 0; i < bits.Length; i += 2 ) {
                // Read the depth-only value (no player data), little-endian.
                ushort depth = (ushort)(bits[i] | (bits[i + 1] << 8));

                // Remap 0..0x0fff (the depth maximum) onto 0..0xffff, inverted
                // so that nearer pixels come out brighter.
                depth = (ushort)(0xffff - (0xffff * depth / 0x0fff));

                // Write the scaled value back into the frame.
                bits[i]     = (byte)(depth & 0xff);
                bits[i + 1] = (byte)((depth >> 8) & 0xff);
            }

            return source;
        }
Example #35
0
        /// <summary>
        /// Stores the latest Kinect frame and its BitmapSource for later consumption.
        /// </summary>
        public void UpdateImages(PlanarImage KinectImage, BitmapSource VideoSource)
        {
            gVideoImage  = KinectImage;
            gVideoSource = VideoSource;
        }
Example #36
0
 // Caches the latest raw frame and its BitmapSource form; conversion is done
 // by the ImageFrame.ToBitmapSource() extension rather than BitmapSource.Create.
 void gNUI_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     gVideoImage  = e.ImageFrame.Image;
     gVideoSource = e.ImageFrame.ToBitmapSource();
 }
Example #37
0
        /// <summary>
        /// Expands a 2-bytes-per-pixel depth frame into a flat per-pixel distance array.
        /// </summary>
        /// <param name="img">Depth frame; Bits holds two bytes per pixel in scan order.</param>
        /// <returns>Width * Height distances, one int per pixel, in scan order.</returns>
        private int[] ConvertPlanarImageToDepthArray(PlanarImage img)
        {
            byte[] raw    = img.Bits;
            int[]  result = new int[img.Width * img.Height];

            // Each output pixel consumes two consecutive depth bytes.
            for (int pixel = 0; pixel < result.Length; pixel++)
            {
                int byteIndex = pixel * 2;
                result[pixel] = GetDistance(raw[byteIndex], raw[byteIndex + 1]);
            }

            return result;
        }
Example #38
0
 //void nui_VideoFrameReady(object sender, ImageFrameReadyEventArgs e)
 //{
 //    PlanarImage image = e.ImageFrame.Image;
 //    image1.Source = e.ImageFrame.ToBitmapSource();
 //}
 // Caches the latest depth frame and displays it via the ToBitmapSource() extension.
 void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     planarImage = e.ImageFrame.Image;
     image1.Source = e.ImageFrame.ToBitmapSource();
 }
        /// <summary>
        /// Scans a depth/colour frame pair for a blue calibrator cylinder within a
        /// fixed depth band: one sweep from the top-left forward and one from the
        /// end of the buffer backward, reporting the midpoint found by each sweep.
        /// </summary>
        /// <param name="kinect">Runtime used to map depth pixels to colour pixels.</param>
        /// <param name="depth">Depth frame, 2 bytes per pixel.</param>
        /// <param name="color">Colour frame sampled via getColorFromDepth.</param>
        /// <param name="x1">Midpoint of the forward sweep, or -1 if not found.</param>
        /// <param name="y1">Midpoint Y of the forward sweep, or -1.</param>
        /// <param name="z1">Midpoint depth of the forward sweep, or -1.</param>
        /// <param name="x2">Midpoint of the backward sweep, or -1 if not found.</param>
        /// <param name="y2">Midpoint Y of the backward sweep, or -1.</param>
        /// <param name="z2">Midpoint depth of the backward sweep, or -1.</param>
        /// <returns>0 when both sweeps located the calibrator, -1 otherwise.</returns>
        int findCalibrator(Runtime kinect, PlanarImage depth, PlanarImage color, out int x1, out int y1, out int z1, out int x2, out int y2, out int z2)
        {
            x1 = -1;
            y1 = -1;
            z1 = -1;
            x2 = -1;
            y2 = -1;
            z2 = -1;

            //check from 1500mm to 2500mm for the calibrator
            const int minDepth = 1500;
            const int maxDepth = 2500;

            const int BLUEMIN = 50;
            const int REDMAX = 50;
            const int GREENMAX = 50;
            int[] start = { -1, -1, -1 };
            int[] end = { -1, -1, -1 };

            int height = depth.Height;
            int width = depth.Width;
            int i = 0;

            bool foundCalibrator = false;
            bool endloop = false;

            //traverse left to right and up to down to find the top centre of the calibrator
            for (int y = 0; y < height && !endloop; y++)
            {
                // NOTE(review): heightOffset is computed but never used (in both sweeps).
                int heightOffset = y*width;

                for (int x = 0; x < width && !endloop; x++)
                {
                    int z = getDistance(depth.Bits[i], depth.Bits[i+1]);
                    if ( z < maxDepth && z > minDepth){
                        //check for the colour
                        int red, green, blue;
                        // NOTE(review): 'distance' is computed but never used.
                        short distance = (short)(z << 3);

                        getColorFromDepth(kinect, depth, color, x, y, z, out red, out green, out blue);

                        if (!foundCalibrator) //check to see if we found the blue cylinder
                        {
                            // NOTE(review): unlike the backward sweep below, this sweep
                            // checks only the blue channel (no red/green thresholds) —
                            // verify whether the asymmetry is intentional.
                            if (blue > BLUEMIN)
                            {
                                System.Windows.MessageBox.Show("1-1 Kinect " + kinect.InstanceIndex + " - R:" + red + " G:" + green + " B:" + blue + " at x:" + x + " y:" + y + " z:" + z);

                                foundCalibrator = true;
                                start[0] = x;
                                start[1] = y;
                                start[2] = z;

                            }
                        }
                        else //check to see if we passed the blue cylinder
                        {
                            if (blue < BLUEMIN)
                            {
                                System.Windows.MessageBox.Show("1-2 Kinect " + kinect.InstanceIndex + " - R:" + red + " G:" + green + " B:" + blue + " at x:" + x + " y:" + y + " z:" + z);

                                end[0] = x;
                                end[1] = y;
                                end[2] = z;

                                getMidpoint(start, end, out x1, out y1, out z1);
                                endloop = true;
                            }
                        }

                    }

                    i +=2;
                }
            }

            //start from the end of the image and work backwards
            foundCalibrator = false;
            i = depth.Bits.Length- 2;

            // NOTE(review): this loop starts y at width-1 — presumably height-1 was
            // intended (the i>0 guard masks the error, but the y passed to
            // getColorFromDepth below is suspect). Verify before relying on x2/y2/z2.
            for (int y = width-1; y >= 0 && i>0; y--)
            {
                int heightOffset = y * width;

                for (int x = width -1; x >= 0; x--)
                {
                    int z = getDistance(depth.Bits[i], depth.Bits[i + 1]);
                    if (z < maxDepth && z > minDepth)
                    {
                        //System.Windows.MessageBox.Show("something found!");

                        //check for the colour
                        int red, green, blue;
                        short distance = (short)(z << 3);

                        getColorFromDepth(kinect, depth, color, x, y, z, out red, out green, out blue);

                        if (!foundCalibrator) //check to see if we found the blue cylinder
                        {
                            //System.Windows.MessageBox.Show("R:" + red + " G:" + green + " B:" + blue);

                            // NOTE(review): the second '&' is the non-short-circuit
                            // operator — harmless on bools but likely meant to be '&&'.
                            if (blue > BLUEMIN && red < REDMAX & green < GREENMAX)
                            {
                                System.Windows.MessageBox.Show("2-1 Kinect " + kinect.InstanceIndex + " - R:" + red + " G:" + green + " B:" + blue + " at x:" + x + " y:" + y + " z:" + z);

                                foundCalibrator = true;
                                start[0] = x;
                                start[1] = y;
                                start[2] = z;

                            }
                        }
                        else //check to see if we passed the blue cylinder
                        {
                            // NOTE(review): same '&' vs '&&' issue as above.
                            if (blue < BLUEMIN && red > REDMAX & green > GREENMAX)
                            {
                                System.Windows.MessageBox.Show("2-2 Kinect " + kinect.InstanceIndex + " - R:" + red + " G:" + green + " B:" + blue + " at x:" + x + " y:" + y + " z:" + z);

                                end[0] = x;
                                end[1] = y;
                                end[2] = z;

                                getMidpoint(start, end, out x2, out y2, out z2);
                                return 0;
                            }
                        }

                    }

                    i -= 2;
                }
            }

            return -1;
        }
Example #40
0
 // The FrameReady event handler for displaying the image.
 void ImageFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     // Cache the latest video frame; consumers read it from the field later.
     PlanarImage frame = e.ImageFrame.Image;
     videoimage = frame;
 }
Example #41
0
 /// <summary>
 /// Converts a raw Kinect PlanarImage (32bpp BGR byte buffer) into a GDI+ Bitmap.
 /// </summary>
 /// <param name="PImage">The source frame; Bits is assumed to hold Width * Height * BytesPerPixel bytes.</param>
 /// <returns>A new Bitmap containing a copy of the frame's pixel data.</returns>
 private Bitmap PImageToBitmap(PlanarImage PImage)
 {
     Bitmap bmap = new Bitmap(
         PImage.Width,
         PImage.Height,
         PixelFormat.Format32bppRgb);
     BitmapData bmapdata = bmap.LockBits(
         new Rectangle(0, 0, PImage.Width, PImage.Height),
         ImageLockMode.WriteOnly,
         bmap.PixelFormat);
     try
     {
         // Copy the whole frame into the locked bitmap memory.
         Marshal.Copy(PImage.Bits,
             0,
             bmapdata.Scan0,
             PImage.Width *
             PImage.BytesPerPixel *
             PImage.Height);
     }
     finally
     {
         // FIX: always unlock, even if Marshal.Copy throws — the original
         // left the bitmap locked on failure.
         bmap.UnlockBits(bmapdata);
     }
     return bmap;
 }
 /// <summary>
 /// Reads the R/G/B components of the color-frame pixel at (x, y).
 /// </summary>
 /// <param name="color">Packed color frame (BytesPerPixel bytes per pixel, row-major).</param>
 /// <param name="x">Pixel column.</param>
 /// <param name="y">Pixel row.</param>
 /// <param name="red">Byte at offset RED_IDX within the pixel.</param>
 /// <param name="green">Byte at offset GREEN_IDX within the pixel.</param>
 /// <param name="blue">Byte at offset BLUE_IDX within the pixel.</param>
 private void getColor(PlanarImage color, int x, int y, out int red, out int green, out int blue)
 {
     // BUG FIX: the original computed "x + y * color.Height", which both
     // ignores the bytes-per-pixel stride and uses Height instead of Width
     // as the row stride. The correct byte offset (matching getValue's
     // indexing elsewhere in this file) is:
     int i = (x + y * color.Width) * color.BytesPerPixel;
     red = color.Bits[i + RED_IDX];
     green = color.Bits[i + GREEN_IDX];
     blue = color.Bits[i + BLUE_IDX];
 }
Example #43
0
 // Stores the most recent color/video frame for later consumers.
 static void RuntimeVideoFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     PlanarImage frame = e.ImageFrame.Image;
     latestVideoImage = frame;
 }
Example #44
0
 // Stores the most recent depth frame for later consumers.
 static void RuntimeDepthFrameReady(object sender, ImageFrameReadyEventArgs e)
 {
     PlanarImage frame = e.ImageFrame.Image;
     latestDepthImage = frame;
 }
        // Maps a depth-image pixel (x, y) with depth z to the matching
        // color-image pixel, then reads that pixel's RGB components.
        private void getColorFromDepth(Runtime kinect, PlanarImage depth, PlanarImage color, int x, int y, int z, out int red, out int green, out int blue)
        {
            // Shift left by 3 to re-pack the depth value the way the SDK
            // expects (low 3 bits of a raw depth sample carry the player
            // index — see the "& 0x07" usage elsewhere in this file).
            short packedDepth = (short)(z << 3);

            int colorX;
            int colorY;
            kinect.NuiCamera.GetColorPixelCoordinatesFromDepthPixel(
                ImageResolution.Resolution640x480, ViewArea, x, y, packedDepth,
                out colorX, out colorY);

            getColor(color, colorX, colorY, out red, out green, out blue);
        }
        // Handles each incoming depth frame: caches it, renders it to the
        // "depth" image control, and updates the fps counter once per second.
        void nui_DepthFrameReady(object sender, ImageFrameReadyEventArgs e)
        {
            // Remember the raw frame and its view area for later processing.
            PlanarImage frame = e.ImageFrame.Image;
            ViewArea = e.ImageFrame.ViewArea;
            depthImage1 = frame;
            depth1Ready = true;

            // Convert the raw 16-bit depth samples into displayable Bgr32 pixels.
            byte[] displayPixels = convertDepthFrame(frame.Bits);
            depth.Source = BitmapSource.Create(
                frame.Width, frame.Height, 96, 96, PixelFormats.Bgr32, null,
                displayPixels, frame.Width * 4);

            // Frame-rate bookkeeping: refresh the label roughly once a second.
            ++totalFrames;
            DateTime now = DateTime.Now;
            if (now.Subtract(lastTime) > TimeSpan.FromSeconds(1))
            {
                int framesThisSecond = totalFrames - lastFrames;
                lastFrames = totalFrames;
                lastTime = now;
                frameRate.Text = framesThisSecond.ToString() + " fps";
            }
        }
Example #47
0
 /// <summary>
 /// Reads the 16-bit little-endian value stored at pixel (x, y) of a depth frame.
 /// </summary>
 /// <param name="PImage">Depth frame with BytesPerPixel bytes per pixel, row-major.</param>
 /// <param name="x">Pixel column.</param>
 /// <param name="y">Pixel row.</param>
 /// <returns>The raw 16-bit sample (high byte at offset+1, low byte at offset).</returns>
 int getValue(PlanarImage PImage, int x, int y)
 {
     // Compute the byte offset once; the original duplicated this
     // expression on both array accesses.
     int offset = x * PImage.BytesPerPixel + y * PImage.Width * PImage.BytesPerPixel;
     int d = PImage.Bits[offset + 1];
     d <<= 8;
     d += PImage.Bits[offset];
     return d;
 }
Example #48
0
 /// <summary>
 /// Converts a raw 16bpp depth frame into a GDI+ Bitmap (Format16bppRgb555).
 /// </summary>
 /// <param name="PImage">The source depth frame.</param>
 /// <returns>A new Bitmap containing a copy of the frame's pixel data.</returns>
 Bitmap DepthToBitmap(PlanarImage PImage)
 {
     Bitmap bmap = new Bitmap(PImage.Width, PImage.Height, System.Drawing.Imaging.PixelFormat.Format16bppRgb555);
     BitmapData bmapdata = bmap.LockBits(new Rectangle(0, 0, PImage.Width, PImage.Height), ImageLockMode.WriteOnly, bmap.PixelFormat);
     try
     {
         Marshal.Copy(PImage.Bits, 0, bmapdata.Scan0, PImage.Width * PImage.BytesPerPixel * PImage.Height);
     }
     finally
     {
         // FIX: always unlock, even if Marshal.Copy throws — the original
         // left the bitmap locked on failure.
         bmap.UnlockBits(bmapdata);
     }
     return bmap;
 }
        /// <summary>
        /// Runs Haar-cascade face detection on a Kinect color frame and pairs
        /// each detected face with the player index and depth value sampled at
        /// the face's centroid in the current depth frame.
        /// </summary>
        /// <param name="image">The 640x480 Bgr32 color frame to scan.</param>
        /// <returns>An ArrayList of FaceFrame objects; empty when no faces are found.</returns>
        public ArrayList detectFaces(PlanarImage image)
        {
            BitmapSource bmapSrc = BitmapSource.Create(image.Width, image.Height, 96, 96, PixelFormats.Bgr32, null, image.Bits, image.Width * image.BytesPerPixel);
            Bitmap bmap = BitmapFromSource(bmapSrc);

            Image<Bgr, Byte> currentFrame = new Image<Bgr, Byte>(bmap);

            Depth planarImage = mainWindow.curDepthPlanarImage;
            ArrayList list = new ArrayList();

            if (currentFrame != null)
            {
                // FIX: removed an unused Convert<Gray, byte>() call that did a
                // full grayscale conversion per frame and threw the result away,
                // plus unused depthFrame/depthFrame16/color locals and dead
                // commented-out code.
                var faces = currentFrame.DetectHaarCascade(
                                haarFaces,
                                1.4,
                                4,
                                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                                new System.Drawing.Size(currentFrame.Width / 8, currentFrame.Height / 8)
                                )[0];

                foreach (var face in faces)
                {
                    try
                    {
                        // Centroid of the face rectangle in color space (640x480).
                        int centroidXcolor = face.rect.X + (face.rect.Width / 2);
                        int centroidYcolor = face.rect.Y + (face.rect.Height / 2);

                        // Scale down to depth space (320x240).
                        int centroidXdepth = (centroidXcolor * 320) / 640;
                        int centroidYdepth = (centroidYcolor * 240) / 480;

                        byte[] depth = planarImage.Bits;
                        int width = planarImage.Width;

                        // Each depth pixel is 2 bytes: low 3 bits of the first
                        // byte hold the player index, the remaining 13 bits the
                        // depth value.
                        int index = (centroidYdepth * width + centroidXdepth) * 2;
                        int player = depth[index] & 0x07;
                        int depthValue = (depth[index + 1] << 5) | (depth[index] >> 3);

                        FaceFrame faceFrame = new FaceFrame(face.rect, depthValue, player);
                        list.Add(faceFrame);
                    }
                    catch (Exception) // FIX: unused catch variable removed
                    {
                        // Best-effort: keep processing remaining faces if one
                        // centroid falls outside the depth frame.
                        mainWindow.updateAppStatus("FaceDetection:  caught exception");
                    }

                }

            }
            return list;
        }