private async void Window_Loaded(object sender, RoutedEventArgs e)
{
    while (true)
    {
        // Get Capture Frame
        using (K4A.Capture capture = await Task.Run(() => this.device.GetCapture()))
        // Get Capture Image and Transformed Image
#if TO_COLOR
        using (K4A.Image color_image = capture.Color)
        using (K4A.Image depth_image = transformation.DepthImageToColorCamera(capture))
#else
        using (K4A.Image color_image = transformation.ColorImageToDepthCamera(capture))
        using (K4A.Image depth_image = capture.Depth)
#endif
        {
            // Get Color Buffer and Write Bitmap
            byte[] color_buffer = color_image.Memory.ToArray();
            color_bitmap.WritePixels(color_rect, color_buffer, color_stride, 0, 0);

            // Get Depth Buffer and Write Bitmap (scale 0-5000 mm into the 0-255 gray range)
            ushort[] depth_buffer = depth_image.GetPixels<ushort>().ToArray();
            depth_bitmap.WritePixels(depth_rect, Array.ConvertAll(depth_buffer, i => (byte)(i * (255.0 / 5000.0))), depth_stride, 0, 0);
        }
    }
}
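These Window_Loaded handlers rely on fields initialized elsewhere: the device, the transformation, and the WriteableBitmap objects with their rects and strides. A minimal setup sketch (assumed, not from the original source), using the alias `using K4A = Microsoft.Azure.Kinect.Sensor;` and the non-TO_COLOR path where both images are at depth-camera resolution:

// Assumed setup: open the device, start the cameras, and allocate the
// WriteableBitmap objects the handlers write into.
this.device = K4A.Device.Open();
this.device.StartCameras(new K4A.DeviceConfiguration
{
    ColorFormat            = K4A.ImageFormat.ColorBGRA32,
    ColorResolution        = K4A.ColorResolution.R720p,
    DepthMode              = K4A.DepthMode.NFOV_Unbinned,
    SynchronizedImagesOnly = true
});
this.transformation = this.device.GetCalibration().CreateTransformation();

int width  = this.device.GetCalibration().DepthCameraCalibration.ResolutionWidth;
int height = this.device.GetCalibration().DepthCameraCalibration.ResolutionHeight;

this.color_bitmap = new WriteableBitmap(width, height, 96, 96, PixelFormats.Bgra32, null);
this.color_rect   = new Int32Rect(0, 0, width, height);
this.color_stride = width * 4; // BGRA32 is 4 bytes per pixel

this.depth_bitmap = new WriteableBitmap(width, height, 96, 96, PixelFormats.Gray8, null);
this.depth_rect   = new Int32Rect(0, 0, width, height);
this.depth_stride = width;     // Gray8 is 1 byte per pixel

In the TO_COLOR path the bitmaps would instead be sized from ColorCameraCalibration, since both images are then in the color camera's perspective.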
Example #2
 private async void Window_Loaded(object sender, RoutedEventArgs e)
 {
     while (true)
     {
         // Get Capture Frame
         using (K4A.Capture capture = await Task.Run(() => this.device.GetCapture()))
             // Get Capture Image
             using (K4A.Image depth_image = capture.Depth)
             {
                 // Get Depth Buffer and Write Bitmap (scale 0-5000 mm into the 0-255 gray range)
                 ushort[] depth_buffer = depth_image.GetPixels<ushort>().ToArray();
                 depth_bitmap.WritePixels(depth_rect, Array.ConvertAll(depth_buffer, i => (byte)(i * (255.0 / 5000.0))), depth_stride, 0, 0);
             }
     }
 }
Example #3
 private async void Window_Loaded(object sender, RoutedEventArgs e)
 {
     while (true)
     {
         // Get Capture Frame
         using (K4A.Capture capture = await Task.Run(() => this.device.GetCapture()))
             // Get Capture Image
             using (K4A.Image infrared_image = capture.IR)
             {
                 // Get Infrared Buffer and Write Bitmap (scale the 16-bit IR values down by 0.5)
                 ushort[] infrared_buffer = infrared_image.GetPixels<ushort>().ToArray();
                 infrared_bitmap.WritePixels(infrared_rect, Array.ConvertAll(infrared_buffer, i => (byte)(i * 0.5)), infrared_stride, 0, 0);
             }
     }
 }
Example #4
        /// <summary>
        /// Captures a depth-enabled color image and returns it as a <see cref="Bitmap"/>.
        /// The picture is colorized red where there is no depth data, and green
        /// where the depth is greater than 1.5 meters.
        /// </summary>
        /// <param name="kinectSensor">The initialized Kinect sensor object</param>
        /// <returns>The picture from the color camera as a <see cref="Bitmap"/></returns>
        public static async Task<Bitmap> CreateDepthColorBitmapAsync(this Device kinectSensor)
        {
            try
            {
                //Declare calibration settings
                int colorWidth;
                int colorHeight;

                //Check for initialized Camera
                try
                {
                    colorWidth  = kinectSensor.GetCalibration().ColorCameraCalibration.ResolutionWidth;
                    colorHeight = kinectSensor.GetCalibration().ColorCameraCalibration.ResolutionHeight;
                }
                catch (Exception e)
                {
                    //Camera not initialized
                    throw new CameraNotInitializedException("The Camera was not initialized correctly.", e);
                }

                // Configure transformation module in order to transform depth enabled picture as bitmap
                using (Microsoft.Azure.Kinect.Sensor.Image transformedDepth = new Microsoft.Azure.Kinect.Sensor.Image(ImageFormat.Depth16, colorWidth, colorHeight))
                    using (Microsoft.Azure.Kinect.Sensor.Image outputColorImage = new Microsoft.Azure.Kinect.Sensor.Image(ImageFormat.ColorBGRA32, colorWidth, colorHeight))
                        using (Transformation transform = kinectSensor.GetCalibration().CreateTransformation())
                        {
                            // Wait for a capture on a thread pool thread
                            using (Capture capture = await Task.Run(() => kinectSensor.GetCapture()).ConfigureAwait(true))
                            {
                                // Compute the colorized output bitmap on a thread pool thread
                                Task<System.Drawing.Bitmap> createDepthColorBitmapTask = Task.Run(() =>
                                {
                                    // Transform the depth image to the perspective of the color camera
                                    transform.DepthImageToColorCamera(capture, transformedDepth);

                                    // Get Span<T> references to the pixel buffers for fast pixel access.
                                    Span<ushort> depthBuffer      = transformedDepth.GetPixels<ushort>().Span;
                                    Span<BGRA> colorBuffer        = capture.Color.GetPixels<BGRA>().Span;
                                    Span<BGRA> outputColorBuffer  = outputColorImage.GetPixels<BGRA>().Span;

                                    // Create an output color image with data from the depth image
                                    for (int i = 0; i < colorBuffer.Length; i++)
                                    {
                                        // The output image will be the same as the input color image,
                                        // but colorized with Red where there is no depth data, and Green
                                        // where there is depth data at more than 1.5 meters
                                        outputColorBuffer[i] = colorBuffer[i];
                                        if (depthBuffer[i] == 0)
                                        {
                                            outputColorBuffer[i].R = 255;
                                        }
                                        else if (depthBuffer[i] > 1500)
                                        {
                                            outputColorBuffer[i].G = 255;
                                        }
                                    }

                                    return outputColorImage.CreateBitmap();
                                });

                                // Wait for the bitmap and return it
                                var depthColorBitmap = await createDepthColorBitmapTask.ConfigureAwait(true);

                                return depthColorBitmap;
                            }
                        }
            }
            catch (Exception)
            {
                // Re-throw without resetting the stack trace ("throw e;" would lose it)
                throw;
            }
        }
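A hedged call-site sketch for this extension method; the device configuration and output file name below are illustrative assumptions, not part of the original:

// Hypothetical usage (inside an async method): start the cameras first, otherwise
// the method throws CameraNotInitializedException, then await the colorized bitmap.
using (Device device = Device.Open())
{
    device.StartCameras(new DeviceConfiguration
    {
        ColorFormat            = ImageFormat.ColorBGRA32,
        ColorResolution        = ColorResolution.R720p,
        DepthMode              = DepthMode.NFOV_Unbinned,
        SynchronizedImagesOnly = true
    });

    using (Bitmap depthColorBitmap = await device.CreateDepthColorBitmapAsync().ConfigureAwait(false))
    {
        depthColorBitmap.Save("depth_color.png"); // file name is illustrative only
    }

    device.StopCameras();
}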
Example #5
        static async Task Main()
        {
            using (var visualizerData = new VisualizerData())
            {
                var renderer = new PosSaver(visualizerData);

                renderer.StartVisualizationThread();

                // Open device.
                using (Device device = Device.Open())
                {
                    device.StartCameras(new DeviceConfiguration()
                    {
                        ColorFormat            = ImageFormat.ColorBGRA32,
                        ColorResolution        = ColorResolution.R720p,
                        DepthMode              = DepthMode.NFOV_Unbinned,
                        SynchronizedImagesOnly = true,
                        WiredSyncMode          = WiredSyncMode.Standalone,
                        CameraFPS              = FPS.FPS15
                    });

                    var deviceCalibration = device.GetCalibration();
                    var transformation    = deviceCalibration.CreateTransformation();
                    PointCloud.ComputePointCloudCache(deviceCalibration);

                    using (Tracker tracker = Tracker.Create(deviceCalibration, new TrackerConfiguration()
                    {
                        ProcessingMode = TrackerProcessingMode.Gpu, SensorOrientation = SensorOrientation.Default
                    }))
                    {
                        while (renderer.IsActive)
                        {
                            using (Capture sensorCapture = await Task.Run(() => device.GetCapture()).ConfigureAwait(true))
                            {
                                // Queue latest frame from the sensor.
                                tracker.EnqueueCapture(sensorCapture);
                                if (renderer.IsHuman)
                                {
                                    unsafe
                                    {
                                        // Get the width and height of the depth image
                                        int depth_width  = device.GetCalibration().DepthCameraCalibration.ResolutionWidth;
                                        int depth_height = device.GetCalibration().DepthCameraCalibration.ResolutionHeight;
                                        // Bitmap depthBitmap = new Bitmap(depth_width, depth_height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
                                        Bitmap colorBitmap = new Bitmap(depth_width, depth_height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);

                                        // Image depthImage = sensorCapture.Depth;
                                        Image colorImage = transformation.ColorImageToDepthCamera(sensorCapture);
                                        // ushort[] depthArray = depthImage.GetPixels<ushort>().ToArray();
                                        BGRA[] colorArray = colorImage.GetPixels<BGRA>().ToArray();
                                        // BitmapData bitmapData = depthBitmap.LockBits(new Rectangle(0, 0, depthBitmap.Width, depthBitmap.Height), System.Drawing.Imaging.ImageLockMode.WriteOnly, PixelFormat.Format32bppArgb);
                                        BitmapData bitmapData = colorBitmap.LockBits(new Rectangle(0, 0, colorBitmap.Width, colorBitmap.Height), System.Drawing.Imaging.ImageLockMode.WriteOnly, PixelFormat.Format32bppArgb);
                                        // Pointer to the pixel values
                                        byte* pixels = (byte*)bitmapData.Scan0;
                                        int   index  = 0;
                                        // Process one pixel at a time
                                        for (int i = 0; i < colorArray.Length; i++)
                                        {
                                            pixels[index++] = colorArray[i].B;
                                            pixels[index++] = colorArray[i].G;
                                            pixels[index++] = colorArray[i].R;
                                            pixels[index++] = 255; // Fix the alpha value so every pixel is fully opaque
                                        }
                                        // Finished writing; unlock the bitmap and save it
                                        colorBitmap.UnlockBits(bitmapData);
                                        string string_now = renderer.now.ToString("HHmmssfff");
                                        colorBitmap.Save($@"{PosSaver.path}\{renderer.day}\{renderer.scene}\depth\{string_now}.png", System.Drawing.Imaging.ImageFormat.Png);
                                        // Dispose the per-frame bitmap and transformed image to avoid leaking native memory
                                        colorBitmap.Dispose();
                                        colorImage.Dispose();
                                    }
                                }
                            }

                            // Try getting latest tracker frame.
                            using (Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                            {
                                if (frame != null)
                                {
                                    // Save this frame for visualization in Renderer.

                                    // One can access frame data here and extract e.g. tracked bodies from it for the needed purpose.
                                    // Instead, for simplicity, we transfer the frame object to the rendering background thread.
                                    // This example shows that frame popped from tracker should be disposed. Since here it is used
                                    // in a different thread, we use Reference method to prolong the lifetime of the frame object.
                                    // For reference on how to read frame data, please take a look at Renderer.NativeWindow_Render().
                                    visualizerData.Frame = frame.Reference();
                                }
                            }
                        }
                    }
                }
            }
        }
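The comment above defers frame reading to Renderer.NativeWindow_Render(); as a rough sketch, the tracked bodies of a popped frame can be read with standard Microsoft.Azure.Kinect.BodyTracking calls like these (the console output is illustrative only):

// Sketch: reading tracked bodies from a popped Frame before disposing it.
using (Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
{
    if (frame != null)
    {
        for (uint i = 0; i < frame.NumberOfBodies; i++)
        {
            Skeleton skeleton = frame.GetBodySkeleton(i);
            Joint pelvis = skeleton.GetJoint(JointId.Pelvis);
            Console.WriteLine($"Body {frame.GetBodyId(i)}: pelvis at {pelvis.Position} (mm)");
        }
    }
}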
Example #6
        // Show the initial image
        private async Task<BitmapSource> ShowInitImage(Capture capture, Transformation transform)
        {
            BitmapSource initImageBitmap = null;
            // Create a BitmapSource for the unmodified color image.
            // Creating the BitmapSource is slow, so do it asynchronously on another thread
            Task<BitmapSource> createColorBitmapTask = new Task<BitmapSource>(() =>
            {
                BitmapSource source = capture.Color.CreateBitmapSource();

                // Allow the bitmap to move threads
                source.Freeze();
                return source;
            });

            // Compute the colorized output bitmap on a thread pool thread
            Task<BitmapSource> createDepthBitmapTask = new Task<BitmapSource>(() =>
            {
                int colorWidth  = this.KinectDevice.GetCalibration().ColorCameraCalibration.ResolutionWidth;
                int colorHeight = this.KinectDevice.GetCalibration().ColorCameraCalibration.ResolutionHeight;
                // Allocate image buffers for us to manipulate
                var transformedDepth = new Microsoft.Azure.Kinect.Sensor.Image(ImageFormat.Depth16, colorWidth, colorHeight);
                var outputColorImage = new Microsoft.Azure.Kinect.Sensor.Image(ImageFormat.ColorBGRA32, colorWidth, colorHeight);
                // Transform the depth image to the perspective of the color camera
                transform.DepthImageToColorCamera(capture, transformedDepth);

                // Get Span<T> references to the pixel buffers for fast pixel access.
                Span<ushort> depthBuffer      = transformedDepth.GetPixels<ushort>().Span;
                Span<BGRA> colorBuffer        = capture.Color.GetPixels<BGRA>().Span;
                Span<BGRA> outputColorBuffer  = outputColorImage.GetPixels<BGRA>().Span;

                // Create an output color image with data from the depth image
                for (int i = 0; i < colorBuffer.Length; i++)
                {
                    // The output image will be the same as the input color image,
                    // but colorized with Red where there is no depth data, and Green
                    // where there is depth data at more than 1.5 meters
                    outputColorBuffer[i] = colorBuffer[i];

                    if (depthBuffer[i] == 0)
                    {
                        outputColorBuffer[i].R = 255;
                    }
                    else if (depthBuffer[i] > 1500)
                    {
                        outputColorBuffer[i].G = 255;
                    }
                }

                BitmapSource source = outputColorImage.CreateBitmapSource();

                // Allow the bitmap to move threads
                source.Freeze();

                // Dispose the intermediate images; CreateBitmapSource has already copied the pixels
                transformedDepth.Dispose();
                outputColorImage.Dispose();

                return source;
            });

            if (this.sysState.DeviceDepthMode)
            {
                createDepthBitmapTask.Start();
                initImageBitmap = await createDepthBitmapTask.ConfigureAwait(false);
            }
            else
            {
                createColorBitmapTask.Start();
                initImageBitmap = await createColorBitmapTask.ConfigureAwait(false);
            }

            return initImageBitmap;
        }
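        // A hypothetical call site for ShowInitImage (not in the original source):
        // grab a capture and pass it, together with the device transformation, to the helper.
        //
        //     using (Capture capture = await Task.Run(() => this.KinectDevice.GetCapture()))
        //     using (Transformation transform = this.KinectDevice.GetCalibration().CreateTransformation())
        //     {
        //         this.displayImg.Source = await ShowInitImage(capture, transform);
        //     }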
        private async void CameraImage()
        {
            try {
                device = Device.Open();

                DeviceConfiguration config = new DeviceConfiguration();

                config.ColorFormat            = ImageFormat.ColorBGRA32;
                config.ColorResolution        = ColorResolution.R1080p;
                config.DepthMode              = DepthMode.WFOV_2x2Binned;
                config.SynchronizedImagesOnly = true;
                config.CameraFPS              = FPS.FPS30;

                device.StartCameras(config);

                int colorWidth  = device.GetCalibration().ColorCameraCalibration.ResolutionWidth;
                int colorHeight = device.GetCalibration().ColorCameraCalibration.ResolutionHeight;

                using (Microsoft.Azure.Kinect.Sensor.Image transformedDepth = new Microsoft.Azure.Kinect.Sensor.Image(ImageFormat.Depth16, colorWidth, colorHeight))
                    using (Microsoft.Azure.Kinect.Sensor.Image outputColorImage = new Microsoft.Azure.Kinect.Sensor.Image(ImageFormat.ColorBGRA32, colorWidth, colorHeight))
                        using (Transformation transform = device.GetCalibration().CreateTransformation())

                            while (running)
                            {
                                using (Capture capture = await Task.Run(() => this.device.GetCapture()).ConfigureAwait(true))
                                {
                                    Task<BitmapSource> createInputColorBitmapTask = Task.Run(() =>
                                    {
                                        Microsoft.Azure.Kinect.Sensor.Image color = capture.Color;
                                        BitmapSource source = BitmapSource.Create(color.WidthPixels, color.HeightPixels, 96, 96, PixelFormats.Bgra32, null, color.Memory.ToArray(), color.StrideBytes);

                                        source.Freeze();
                                        return source;
                                    });
                                    Task<BitmapSource> createOutputColorBitmapTask = Task.Run(() =>
                                    {
                                        transform.DepthImageToColorCamera(capture, transformedDepth);

                                        Span<ushort> depthBuffer      = transformedDepth.GetPixels<ushort>().Span;
                                        Span<BGRA> colorBuffer        = capture.Color.GetPixels<BGRA>().Span;
                                        Span<BGRA> outputColorBuffer  = outputColorImage.GetPixels<BGRA>().Span;

                                        for (int i = 0; i < colorBuffer.Length; i++)
                                        {
                                            outputColorBuffer[i] = colorBuffer[i];

                                            if (depthBuffer[i] == 0)
                                            {
                                                outputColorBuffer[i].R = 200;
                                                outputColorBuffer[i].G = 200;
                                                outputColorBuffer[i].B = 200;
                                            }
                                            else if (depthBuffer[i] > 10 && depthBuffer[i] <= 500)
                                            {
                                                outputColorBuffer[i].R = 100;
                                                outputColorBuffer[i].G = 100;
                                                outputColorBuffer[i].B = 100;
                                            }
                                            else if (depthBuffer[i] > 500)
                                            {
                                                outputColorBuffer[i].R = 10;
                                                outputColorBuffer[i].G = 10;
                                                outputColorBuffer[i].B = 10;
                                            }
                                        }

                                        BitmapSource source = BitmapSource.Create(outputColorImage.WidthPixels, outputColorImage.HeightPixels, 96, 96, PixelFormats.Bgra32, null, outputColorImage.Memory.ToArray(), outputColorImage.StrideBytes);

                                        source.Freeze();
                                        return source;
                                    });

                                    BitmapSource inputColorBitmap = await createInputColorBitmapTask.ConfigureAwait(true);

                                    BitmapSource outputColorBitmap = await createOutputColorBitmapTask.ConfigureAwait(true);

                                    if (!depthmapActivated)
                                    {
                                        this.displayImg.Source = inputColorBitmap;
                                    }
                                    else
                                    {
                                        this.displayImg.Source = outputColorBitmap;
                                    }
                                }
                            }

                device.Dispose();
            } catch (Exception ex)
            {
                Console.WriteLine(ex);
            }
        }
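The loop above runs until the `running` field goes false and switches its output on `depthmapActivated`; a minimal sketch of the UI hooks that could drive those fields (handler names are hypothetical):

// Hypothetical UI plumbing for the fields CameraImage reads; names are illustrative.
private void Window_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
    running = false; // let the capture loop exit, after which CameraImage disposes the device
}

private void DepthToggleButton_Click(object sender, RoutedEventArgs e)
{
    depthmapActivated = !depthmapActivated; // switch between raw color and the depth overlay
}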