private async void Window_Loaded(object sender, RoutedEventArgs e)
        {
            while (true)
            {
                // Get Capture Frame
                using (K4A.Capture capture = await Task.Run(() => { return(this.device.GetCapture()); }))
                {
                    // Enqueue Capture
                    tracker.EnqueueCapture(capture);

                    // Pop Result
                    using (K4ABT.Frame frame = tracker.PopResult())
                        // Get Color Image
                        using (K4A.Image color_image = frame.Capture.Color)
                        {
                            // Get Color Buffer and Write Bitmap
                            byte[] color_buffer = color_image.Memory.ToArray();
                            color_bitmap.WritePixels(color_rect, color_buffer, color_stride, 0, 0);

                            // Clear All Ellipses from the Canvas
                            Canvas_Body.Children.Clear();

                            // Draw Skeleton
                            for (uint body_index = 0; body_index < frame.NumberOfBodies; body_index++)
                            {
                                // Get Skeleton and ID
                                K4ABT.Skeleton skeleton = frame.GetBodySkeleton(body_index);
                                uint           id       = frame.GetBodyId(body_index);

                                // Draw Joints
                                for (int joint_index = 0; joint_index < (int)K4ABT.JointId.Count; joint_index++)
                                {
                                    // Get Joint and Position
                                    K4ABT.Joint joint    = skeleton.GetJoint(joint_index);
                                    Vector2?    position = calibration.TransformTo2D(joint.Position, K4A.CalibrationDeviceType.Depth, K4A.CalibrationDeviceType.Color);

                                    if (!position.HasValue)
                                    {
                                        continue;
                                    }

                                    // Create Ellipse
                                    const int       radius       = 10;
                                    SolidColorBrush stroke_color = new SolidColorBrush(colors[id % colors.Length]);
                                    SolidColorBrush fill_color   = new SolidColorBrush((joint.ConfidenceLevel >= K4ABT.JointConfidenceLevel.Medium) ? colors[id % colors.Length] : Colors.Transparent);
                                    Ellipse         ellipse      = new Ellipse()
                                    {
                                        Width = radius, Height = radius, StrokeThickness = 1, Stroke = stroke_color, Fill = fill_color
                                    };

                                    // Add Ellipse to Canvas
                                    Canvas.SetLeft(ellipse, position.Value.X - (radius / 2));
                                    Canvas.SetTop(ellipse, position.Value.Y - (radius / 2));
                                    Canvas_Body.Children.Add(ellipse);
                                }
                            }
                        }
                }
            }
        }
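These Window_Loaded handlers rely on fields initialized elsewhere in the window class (this.device, tracker, calibration, transformation, color_bitmap, color_rect, color_stride, and the colors palette). A minimal sketch of that setup, assuming the same K4A/K4ABT namespace aliases and field names used above; the configuration values are illustrative, not taken from the original:

        private void InitializeKinect()
        {
            // Open the device and start the cameras (illustrative settings)
            this.device = K4A.Device.Open(0);
            this.device.StartCameras(new K4A.DeviceConfiguration
            {
                ColorFormat            = K4A.ImageFormat.ColorBGRA32,
                ColorResolution        = K4A.ColorResolution.R720p,
                DepthMode              = K4A.DepthMode.NFOV_Unbinned,
                SynchronizedImagesOnly = true,
            });

            // Calibration, transformation, and body tracker
            this.calibration    = this.device.GetCalibration();
            this.transformation = this.calibration.CreateTransformation();
            this.tracker        = K4ABT.Tracker.Create(this.calibration, new K4ABT.TrackerConfiguration()
            {
                ProcessingMode    = K4ABT.TrackerProcessingMode.Gpu,
                SensorOrientation = K4ABT.SensorOrientation.Default
            });

            // WriteableBitmap, rect, and stride backing the color view
            int width  = this.calibration.ColorCameraCalibration.ResolutionWidth;
            int height = this.calibration.ColorCameraCalibration.ResolutionHeight;
            this.color_bitmap = new WriteableBitmap(width, height, 96, 96, PixelFormats.Bgra32, null);
            this.color_rect   = new Int32Rect(0, 0, width, height);
            this.color_stride = width * 4;
        }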
Example #2
        private WriteableBitmap CreateNewBitmap(
            Microsoft.Azure.Kinect.Sensor.Image image,
            int xLeft, int yLeft,
            int xRight, int yRight)
        {
            WriteableBitmap wb = new WriteableBitmap(
                image.WidthPixels + 30,
                image.HeightPixels + 30,
                96,
                96,
                PixelFormats.Bgra32, null);

            var region = new Int32Rect(0, 0, image.WidthPixels, image.HeightPixels);

            unsafe
            {
                using (var pin = image.Memory.Pin())
                {
                    wb.WritePixels(region, (IntPtr)pin.Pointer, (int)image.Size, image.StrideBytes);
                }
            }
            using (wb.GetBitmapContext())
            {
                wb.FillEllipse(xLeft, yLeft, xLeft + 40, yLeft + 40, Colors.Red);
                wb.FillEllipse(xRight, yRight, xRight + 40, yRight + 40, Colors.Blue);
            }

            return(wb);
        }
Example #3
        private async void Window_Loaded(object sender, RoutedEventArgs e)
        {
            while (true)
            {
                // Get Capture Frame
                using (K4A.Capture capture = await Task.Run(() => { return(this.device.GetCapture()); }))
                // Get Capture Image and Transformed Image
                #if TO_COLOR
                    using (K4A.Image color_image = capture.Color)
                        using (K4A.Image depth_image = transformation.DepthImageToColorCamera(capture))
                #else
                    using (K4A.Image color_image = transformation.ColorImageToDepthCamera(capture))
                        using (K4A.Image depth_image = capture.Depth)
                #endif
                        {
                            // Get Color Buffer and Write Bitmap
                            byte[] color_buffer = color_image.Memory.ToArray();
                            color_bitmap.WritePixels(color_rect, color_buffer, color_stride, 0, 0);

                            // Get Depth Buffer, and Write Bitmap
                            ushort[] depth_buffer = depth_image.GetPixels <ushort>().ToArray();
                            depth_bitmap.WritePixels(depth_rect, Array.ConvertAll(depth_buffer, i => (byte)(i * (255.0 / 5000.0))), depth_stride, 0, 0);
                        }
            }
        }
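A note on the TO_COLOR symbol (inferred from the code, not from any accompanying documentation): when TO_COLOR is defined, the depth image is reprojected into the color camera's geometry; otherwise the color image is reprojected into the depth camera's geometry. Either way, the two images written to the bitmaps end up pixel-aligned.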
Example #4
    // Method that keeps fetching data from the Kinect and rendering it
    private async Task KinectLoop()
    {
        // Keep pulling data from the Kinect in a while loop
        while (true)
        {
            // Get a capture from the Kinect with GetCapture
            using (Capture capture = await Task.Run(() => kinect.GetCapture()).ConfigureAwait(true))
            {
                // (Alternative) get a color image aligned in position and size with the depth image
                // Image colorImage = transformation.ColorImageToDepthCamera(capture);
                Image colorImage = capture.Color;

                // (Alternative) get an array of just the color values
                // BGRA[] colorArray = colorImage.GetPixels<BGRA>().ToArray();
                using (MemoryHandle pin = colorImage.Memory.Pin())
                {
                    // Create the Bitmap
                    colorBitmap = new Bitmap(
                        colorImage.WidthPixels,      // width of the color image
                        colorImage.HeightPixels,     // height of the color image
                        colorImage.StrideBytes,      // bytes per row (width * 4)
                        PixelFormat.Format32bppArgb, // color format (32-bit BGRA in memory)
                        (IntPtr)pin.Pointer);        // pointer to the pixel data
                }
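                // NOTE (added): the Bitmap above wraps the pinned buffer directly, and the pin is
                // released as soon as this using block exits; keep colorImage alive (and pinned)
                // for as long as colorBitmap is used, or copy the pixels out instead.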
            }
        }
    }
Example #5
        private async void Window_Loaded(object sender, RoutedEventArgs e)
        {
            while (true)
            {
                // Get Capture Frame
                using (K4A.Capture capture = await Task.Run(() => { return(this.device.GetCapture()); }))
                {
                    // Enqueue Capture to Tracker
                    tracker.EnqueueCapture(capture);

                    // Pop Result
                    using (K4ABT.Frame frame = tracker.PopResult())
                        // Get Color Image
                        using (K4A.Image color_image = frame.Capture.Color)
                        {
                            // Get Color Buffer and Write Bitmap
                            byte[] color_buffer = color_image.Memory.ToArray();
                            color_bitmap.WritePixels(color_rect, color_buffer, color_stride, 0, 0);

                            // Transform Body Index Map to Color Camera
                            (K4A.Image _, K4A.Image body_index_map) result =
                                transformation.DepthImageToColorCameraCustom(
                                    frame.Capture.Depth,
                                    frame.BodyIndexMap,
                                    K4A.TransformationInterpolationType.Nearest,
                                    K4ABT.Frame.BodyIndexMapBackground
                                    );

                            // Get Body Index Map Buffer
                            byte[] body_index_map_buffer = result.body_index_map.Memory.ToArray();

                            // Draw Body Index Map and Write Bitmap
                            Parallel.For(0, body_index_map_buffer.Length, i =>
                            {
                                uint index = (uint)body_index_map_buffer[i];
                                if (index != K4ABT.Frame.BodyIndexMapBackground)
                                {
                                    Color color = colors[index % colors.Length];
                                    colorized_body_index_map_buffer[i * 4 + 0] = color.B;
                                    colorized_body_index_map_buffer[i * 4 + 1] = color.G;
                                    colorized_body_index_map_buffer[i * 4 + 2] = color.R;
                                    colorized_body_index_map_buffer[i * 4 + 3] = color.A;
                                }
                                else
                                {
                                    colorized_body_index_map_buffer[i * 4 + 0] = 0;
                                    colorized_body_index_map_buffer[i * 4 + 1] = 0;
                                    colorized_body_index_map_buffer[i * 4 + 2] = 0;
                                    colorized_body_index_map_buffer[i * 4 + 3] = 255;
                                }
                            });
                            body_index_map_bitmap.WritePixels(body_index_map_rect, colorized_body_index_map_buffer, body_index_map_stride, 0, 0);

                            // Dispose Body Index Map
                            result.body_index_map.Dispose();
                        }
                }
            }
        }
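The example above also assumes the body-index buffers are pre-allocated. Since the body index map is transformed into the color camera's geometry, they would be sized to the color image; a sketch of that allocation (field names match the code above, the allocation itself is an assumption):

            int width  = calibration.ColorCameraCalibration.ResolutionWidth;
            int height = calibration.ColorCameraCalibration.ResolutionHeight;
            colorized_body_index_map_buffer = new byte[width * height * 4];
            body_index_map_stride           = width * 4;
            body_index_map_rect             = new Int32Rect(0, 0, width, height);
            body_index_map_bitmap           = new WriteableBitmap(width, height, 96, 96, PixelFormats.Bgra32, null);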
Example #6
 private async void Window_Loaded(object sender, RoutedEventArgs e)
 {
     while (true)
     {
         // Get Capture Frame
         using (K4A.Capture capture = await Task.Run(() => { return(this.device.GetCapture()); }))
             // Get Capture Image
             using (K4A.Image color_image = capture.Color)
             {
                 // Get Color Buffer and Write Bitmap
                 byte[] color_buffer = color_image.Memory.ToArray();
                 color_bitmap.WritePixels(color_rect, color_buffer, color_stride, 0, 0);
             }
     }
 }
Example #7
 private async void Window_Loaded(object sender, RoutedEventArgs e)
 {
     while (true)
     {
         // Get Capture Frame
         using (K4A.Capture capture = await Task.Run(() => { return(this.device.GetCapture()); }))
             // Get Capture Image
             using (K4A.Image infrared_image = capture.IR)
             {
                 // Get Infrared Buffer, and Write Bitmap
                 ushort[] infrared_buffer = infrared_image.GetPixels <ushort>().ToArray();
                 infrared_bitmap.WritePixels(infrared_rect, Array.ConvertAll(infrared_buffer, i => (byte)(i * 0.5)), infrared_stride, 0, 0);
             }
     }
 }
Example #8
 private async void Window_Loaded(object sender, RoutedEventArgs e)
 {
     while (true)
     {
         // Get Capture Frame
         using (K4A.Capture capture = await Task.Run(() => { return(this.device.GetCapture()); }))
             // Get Capture Image
             using (K4A.Image depth_image = capture.Depth)
             {
                 // Get Depth Buffer, and Write Bitmap
                 ushort[] depth_buffer = depth_image.GetPixels <ushort>().ToArray();
                 depth_bitmap.WritePixels(depth_rect, Array.ConvertAll(depth_buffer, i => (byte)(i * (255.0 / 5000.0))), depth_stride, 0, 0);
             }
     }
 }
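The byte conversions above ((byte)(i * (255.0 / 5000.0)) for depth, and (byte)(i * 0.5) for infrared in Example #7) assume the raw values stay within range; the unchecked byte cast misbehaves for anything larger. A minimal clamped variant of the depth conversion, assuming the same depth_buffer, depth_bitmap, depth_rect, and depth_stride as above:

     byte[] depth_8bit = Array.ConvertAll(depth_buffer,
         d => (byte)Math.Min(255.0, d * (255.0 / 5000.0)));
     depth_bitmap.WritePixels(depth_rect, depth_8bit, depth_stride, 0, 0);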
 private void CreateColorImage(Capture capture)
 {
     unsafe
     {
          // Get the color image from the capture.
         Image colorImage = capture.Color;
          // Get a pointer to the color image data
         using (MemoryHandle pin = colorImage.Memory.Pin())
         {
              // Create the bitmap
              colorBitmap = new Bitmap(
                  colorImage.WidthPixels,      // width of the color image
                  colorImage.HeightPixels,     // height of the color image
                  colorImage.StrideBytes,      // bytes per stride (width * 4)
                  PixelFormat.Format32bppArgb, // color format (32-bit BGRA in memory)
                  (IntPtr)pin.Pointer);        // pointer to each pixel
         }
     }
 }
        public static Bitmap CreateBitmap(this Microsoft.Azure.Kinect.Sensor.Image image)
        {
            if (image is null)
            {
                throw new ArgumentNullException(nameof(image));
            }

            System.Drawing.Imaging.PixelFormat pixelFormat;

            using (Microsoft.Azure.Kinect.Sensor.Image reference = image.Reference())
            {
                unsafe
                {
                    switch (reference.Format)
                    {
                    case ImageFormat.ColorBGRA32:
                        pixelFormat = System.Drawing.Imaging.PixelFormat.Format32bppArgb;
                        break;

                    case ImageFormat.Depth16:
                    case ImageFormat.IR16:
                        pixelFormat = System.Drawing.Imaging.PixelFormat.Format16bppGrayScale;
                        break;

                    default:
                        throw new AzureKinectException($"Pixel format {reference.Format} cannot be converted to a Bitmap");
                    }

                    using (MemoryHandle pin = image.Memory.Pin())
                    {
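                        // NOTE (added): Bitmap does not copy the buffer; the caller must keep the
                        // source image alive (and its memory valid) for the Bitmap's lifetime.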
                        return(new Bitmap(
                                   image.WidthPixels,
                                   image.HeightPixels,
                                   image.StrideBytes,
                                   pixelFormat,
                                   (IntPtr)pin.Pointer));
                    }
                }
            }
        }
        private async void WindowLoaded(object sender, RoutedEventArgs e)
        {
            Device device;

            try
            {
                using (device = Device.Open(0))
                {
                    device.StartCameras(new DeviceConfiguration
                    {
                        ColorFormat            = ImageFormat.ColorBGRA32,
                        ColorResolution        = ColorResolution.R720p,
                        DepthMode              = DepthMode.NFOV_Unbinned,
                        SynchronizedImagesOnly = true,
                        CameraFPS              = FPS.FPS30,
                    });
                    int colorWidth    = device.GetCalibration().ColorCameraCalibration.ResolutionWidth;
                    int colorHeight   = device.GetCalibration().ColorCameraCalibration.ResolutionHeight;
                    var calibration   = device.GetCalibration(DepthMode.NFOV_Unbinned, ColorResolution.R720p);
                    var trackerConfig = new TrackerConfiguration();
                    trackerConfig.ProcessingMode    = TrackerProcessingMode.Gpu;
                    trackerConfig.SensorOrientation = SensorOrientation.Default;
                    using (var tracker = Tracker.Create(calibration, trackerConfig))
                    {
                        using (Transformation transform = device.GetCalibration().CreateTransformation())
                        {
                            while (true)
                            {
                                using (Capture capture = await Task.Run(() => { return(device.GetCapture()); }).ConfigureAwait(true))
                                {
                                    Task <BitmapSource> createInputColorBitmapTask = Task.Run(() =>
                                    {
                                        Image color         = capture.Color;
                                        BitmapSource source = BitmapSource.Create(color.WidthPixels, color.HeightPixels, 96, 96, PixelFormats.Bgra32, null, color.Memory.ToArray(), color.StrideBytes);
                                        source.Freeze();
                                        return(source);
                                    });
                                    this.inputColorBitmap = await createInputColorBitmapTask.ConfigureAwait(true);

                                    this.InputColorImageViewPane.Source = inputColorBitmap;
                                    tracker.EnqueueCapture(capture);
                                    using (Microsoft.Azure.Kinect.BodyTracking.Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                                    {
                                        if (frame != null)
                                        {
                                            Console.WriteLine("Number Body: " + frame.NumberOfBodies);
                                            if (frame.NumberOfBodies > 0)
                                            {
                                                await SaveFile(this.inputColorBitmap);

                                                if (await callDetectDarknessServices().ConfigureAwait(true))
                                                {
                                                    speech("The room is too dark please turn on the light.");
                                                    this.DarknessAlertCount = this.DarknessAlertCount + 1;
                                                }
                                                else if (await callDetectionObjectServices().ConfigureAwait(true))
                                                {
                                                    speech("Please Beware of Object On the Floor. Please Beware of Object On the Floor.");
                                                    this.ObjectAlertCount = this.ObjectAlertCount + 1;
                                                }
                                            }
                                        }
                                    }
                                }
                                switch ((DateTime.Now.ToString("HH:mm", System.Globalization.DateTimeFormatInfo.InvariantInfo)))
                                {
                                case "00:00":
                                case "00:01":
                                case "00:02": this.ObjectAlertCount = 0; this.DarknessAlertCount = 0; break;

                                default: break;
                                }
                                await CallLineApiService().ConfigureAwait(true);
                            }
                        }
                    }
                }
            }
            catch (Exception err)
            {
                Console.WriteLine(err);
            }
        }
        private async void StartCamera()
        {
            //body track
            using (Tracker tracker = Tracker.Create(this.kinect.GetCalibration(), new TrackerConfiguration()
            {
                ProcessingMode = TrackerProcessingMode.Gpu, SensorOrientation = SensorOrientation.Default
            }))
                // sensor camera
                using (Microsoft.Azure.Kinect.Sensor.Image transformedDepth = new Microsoft.Azure.Kinect.Sensor.Image(
                           Microsoft.Azure.Kinect.Sensor.ImageFormat.Depth16, colorWidth, colorHeight, colorWidth * sizeof(UInt16)))
                {
                    //local variables


                    // photo for prediction
                    float[,] lastImage;
                    // for displaying lastImage
                    Bitmap bm = null;

                    // Image camera
                    uint trackedBodyId        = 0;
                    JointConfidenceLevel conf = JointConfidenceLevel.Low;

                    while (running)
                    {
                        using (Capture capture = await Task.Run(() => { return(this.kinect.GetCapture()); }))
                        {
                            bm = new Bitmap(30, 30);
                            // for final image
                            uint[,] foto = new uint[300, 300];
                            lastImage    = new float[30, 30];
                            for (int i = 0; i < 30; i++)
                            {
                                for (int j = 0; j < 30; j++)
                                {
                                    lastImage[i, j] = 1;
                                    bm.SetPixel(i, j, System.Drawing.Color.Red);
                                }
                            }



                            // get depth
                            this.transform.DepthImageToColorCamera(capture, transformedDepth);

                            //this.StatusText = "Received Capture: " + capture.Depth.DeviceTimestamp;

                            this.bitmapColorCamera.Lock();


                            // show normal camera
                            var color = capture.Color;

                            var region  = new Int32Rect(0, 0, color.WidthPixels, color.HeightPixels);
                            var region1 = new Int32Rect(0, 0, 30, 30);

                            // Queue latest frame from the sensor.
                            tracker.EnqueueCapture(capture);


                            //get position of the hand
                            using (Microsoft.Azure.Kinect.BodyTracking.Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                            {
                                //get hand position
                                if (frame != null && frame.NumberOfBodies >= 1)
                                {
                                    if (trackedBodyId <= 0)
                                    {
                                        trackedBodyId = frame.GetBody(0).Id;
                                    }
                                    var numOfSkeletons = frame.NumberOfBodies;

                                    var body = frame.GetBodySkeleton(0);
                                    var pos  = body.GetJoint(JointId.HandRight).Position;
                                    conf = body.GetJoint(JointId.HandRight).ConfidenceLevel;
                                    var handposition = kinect.GetCalibration().TransformTo2D(pos, CalibrationDeviceType.Depth, CalibrationDeviceType.Color);
                                    handpositionGlobal.X = handposition.Value.X;
                                    handpositionGlobal.Y = handposition.Value.Y;
                                    // z is the same
                                    handpositionGlobal.Z = pos.Z;
                                }
                            }

                            unsafe
                            {
                                using (var pin = color.Memory.Pin())
                                {
                                    // put color camera in bitmap
                                    this.bitmapColorCamera.WritePixels(region, (IntPtr)pin.Pointer, (int)color.Size, color.StrideBytes);

                                    //get pixel from color camera
                                    uint *  colorPixels = (uint *)this.bitmapColorCamera.BackBuffer;
                                    ushort *depthPixels = (ushort *)transformedDepth.Memory.Pin().Pointer;


                                    int closest = 501;
                                    int x       = 0;
                                    int y       = 0;
                                    int cx      = 0;
                                    int cy      = 0;
                                    // Debug.WriteLine(pos.X);
                                    bool isFirst = true;
                                    int  xref = 0, yref = 0;
                                    int  finalcount = 0;
                                    for (int i = 0; i < this.colorHeight * this.colorWidth; i++)
                                    {
                                        //if (i > 512000)
                                        //{
                                        //    colorPixels[i] = 0;
                                        //    continue;
                                        //}
                                        x++;
                                        if (i % 1280 == 0)
                                        {
                                            x = 0;
                                            y++;
                                        }


                                        // draw a square around the hand
                                        int color_data = 255 << 16; // R
                                        color_data |= 128 << 8;     // G
                                        color_data |= 255 << 0;     // B
                                        int ss = 150;

                                        if (x - ss == ((int)handpositionGlobal.X) && (y - ss) < ((int)handpositionGlobal.Y) && (y + ss) > ((int)handpositionGlobal.Y))
                                        {
                                            colorPixels[i] = (uint)color_data;
                                        }

                                        if (x + ss == ((int)handpositionGlobal.X) && (y - ss) < ((int)handpositionGlobal.Y) && (y + ss) > ((int)handpositionGlobal.Y))
                                        {
                                            colorPixels[i] = (uint)color_data;
                                        }
                                        if (y + ss == ((int)handpositionGlobal.Y) && (x - ss) < ((int)handpositionGlobal.X) && (x + ss) > ((int)handpositionGlobal.X))
                                        {
                                            colorPixels[i] = (uint)color_data;
                                        }

                                        if (y - ss == ((int)handpositionGlobal.Y) && (x - ss) < ((int)handpositionGlobal.X) && (x + ss) > ((int)handpositionGlobal.X))
                                        {
                                            colorPixels[i] = (uint)color_data;
                                        }

                                        // inside the square
                                        if ((y - ss) < ((int)handpositionGlobal.Y) && (y + ss) > ((int)handpositionGlobal.Y) && (x - ss) < ((int)handpositionGlobal.X) && (x + ss) > ((int)handpositionGlobal.X))
                                        {
                                            if (isFirst)
                                            {
                                                xref    = x;
                                                yref    = y;
                                                isFirst = false;
                                            }

                                            // select pixels inside the selected depth range
                                            if (depthPixels[i] < handpositionGlobal.Z + 50 && depthPixels[i] > handpositionGlobal.Z - 150 && depthPixels[i] != 0)
                                            {
                                                //scale actual depth
                                                float deep  = (((int)depthPixels[i] - ((int)handpositionGlobal.Z - 150)) * 250) / 200;
                                                float deepF = deep / 255.0f;

                                                foto[y - yref, x - xref] = (uint)deep;

                                                uint xs = (uint)((x - xref) / 10);
                                                uint ys = (uint)((y - yref) / 10);

                                                if (deepF > lastImage[ys, xs] || lastImage[ys, xs] == 1)
                                                {
                                                    lastImage[ys, xs] = deepF;
                                                    var temp = System.Drawing.Color.FromArgb((int)deep, (int)deep, (int)deep);
                                                    bm.SetPixel((int)ys, (int)xs, temp);
                                                }
                                            }
                                            else
                                            {
                                                foto[y - yref, x - xref] = 1;

                                                uint xs = (uint)((x - xref) / 10);
                                                uint ys = (uint)((y - yref) / 10);



                                                // bm.SetPixel(x - xref, y - yref, System.Drawing.Color.FromArgb((int)(depthPixels[i] / 100), (int)(depthPixels[i] / 100), (int)(depthPixels[i]) / 100));
                                            }
                                        }
                                    }
                                }
                            }

                            this.bitmapColorCamera.AddDirtyRect(region);
                            this.bitmapColorCamera.Unlock();

                            //this.bitmapFinal = System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(bm.GetHbitmap(), IntPtr.Zero, Int32Rect.Empty, BitmapSizeOptions.FromEmptyOptions());

                            if (saveNext && actual > 0 && conf != JointConfidenceLevel.Low)
                            {
                                WriteToFile(lastImage, status);
                                if (actual == 1)
                                {
                                    saveNext = false;
                                    _main.RecordFedbackFill = new SolidColorBrush(Colors.Green);
                                }
                                actual--;
                            }
                        }
                    }
                }
        }
Example #13
        // Show the initial image
        private async Task <BitmapSource> ShowInitImage(Capture capture, Transformation transform)
        {
            BitmapSource initImageBitmap = null;
            // Create a BitmapSource for the unmodified color image.
            // Creating the BitmapSource is slow, so do it asynchronously on another thread
            Task <BitmapSource> createColorBitmapTask = new Task <BitmapSource>(() =>
            {
                BitmapSource source = capture.Color.CreateBitmapSource();

                // Allow the bitmap to move threads
                source.Freeze();
                return(source);
            });

            // Compute the colorized output bitmap on a thread pool thread
            Task <BitmapSource> createDepthBitmapTask = new Task <BitmapSource>(() =>
            {
                int colorWidth  = this.KinectDevice.GetCalibration().ColorCameraCalibration.ResolutionWidth;
                int colorHeight = this.KinectDevice.GetCalibration().ColorCameraCalibration.ResolutionHeight;
                // Allocate image buffers for us to manipulate
                var transformedDepth = new Microsoft.Azure.Kinect.Sensor.Image(ImageFormat.Depth16, colorWidth, colorHeight);
                var outputColorImage = new Microsoft.Azure.Kinect.Sensor.Image(ImageFormat.ColorBGRA32, colorWidth, colorHeight);
                // Transform the depth image to the perspective of the color camera
                transform.DepthImageToColorCamera(capture, transformedDepth);

                // Get Span<T> references to the pixel buffers for fast pixel access.
                Span <ushort> depthBuffer     = transformedDepth.GetPixels <ushort>().Span;
                Span <BGRA> colorBuffer       = capture.Color.GetPixels <BGRA>().Span;
                Span <BGRA> outputColorBuffer = outputColorImage.GetPixels <BGRA>().Span;

                // Create an output color image with data from the depth image
                for (int i = 0; i < colorBuffer.Length; i++)
                {
                    // The output image will be the same as the input color image,
                    // but colorized with Red where there is no depth data, and Green
                    // where there is depth data at more than 1.5 meters
                    outputColorBuffer[i] = colorBuffer[i];

                    if (depthBuffer[i] == 0)
                    {
                        outputColorBuffer[i].R = 255;
                    }
                    else if (depthBuffer[i] > 1500)
                    {
                        outputColorBuffer[i].G = 255;
                    }
                }

                BitmapSource source = outputColorImage.CreateBitmapSource();

                // Allow the bitmap to move threads
                source.Freeze();

                return(source);
            });

            if (this.sysState.DeviceDepthMode)
            {
                createDepthBitmapTask.Start();
                initImageBitmap = await createDepthBitmapTask.ConfigureAwait(false);
            }
            else
            {
                createColorBitmapTask.Start();
                initImageBitmap = await createColorBitmapTask.ConfigureAwait(false);
            }

            return(initImageBitmap);
        }
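One caution about the depth path above: transformedDepth and outputColorImage are IDisposable images backed by native buffers, and here they are allocated on every call without using blocks. In a long-running capture loop they should be disposed (or pooled) once the BitmapSource has been created, as the other examples in this collection do with their images.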
Example #14
        private async void  PictureRecorder_Loaded(object sender, RoutedEventArgs e)
        {
            using (Tracker tracker = Tracker.Create(this.kinect.GetCalibration(), new TrackerConfiguration()
            {
                ProcessingMode = TrackerProcessingMode.Gpu, SensorOrientation = SensorOrientation.Default
            }))
            {
                while (true)
                {
                    uint[,] foto = new uint[300, 300];

                    bm = new Bitmap(300, 300);

                    using (Microsoft.Azure.Kinect.Sensor.Image transformedDepth = new Microsoft.Azure.Kinect.Sensor.Image(
                               Microsoft.Azure.Kinect.Sensor.ImageFormat.Depth16, colorWidth, colorHeight, colorWidth * sizeof(UInt16)))
                        using (Capture capture = await Task.Run(() => { return(this.kinect.GetCapture()); }))
                        {
                            Text.Content = this.StatusText;

                            this.transform.DepthImageToColorCamera(capture, transformedDepth);

                            this.bitmap.Lock();

                            var color = capture.Color;

                            var region = new Int32Rect(0, 0, color.WidthPixels, color.HeightPixels);



                            // Queue latest frame from the sensor.
                            tracker.EnqueueCapture(capture);


                            unsafe
                            {
                                using (Microsoft.Azure.Kinect.BodyTracking.Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                                    using (var pin = color.Memory.Pin())
                                    {
                                        //get hand position
                                        if (frame != null && frame.NumberOfBodies >= 1)
                                        {
                                            var body = frame.GetBodySkeleton(0);
                                            var pos  = body.GetJoint(JointId.HandRight).Position;


                                            //  var temp = kinect.GetCalibration().TransformTo2D(new Vector2(pos.X,pos.Y),-pos.Z ,CalibrationDeviceType.,CalibrationDeviceType.Color);
                                            var temp = kinect.GetCalibration().TransformTo2D(pos, CalibrationDeviceType.Depth, CalibrationDeviceType.Color);
                                            if (temp != null)
                                            {
                                                //Debug.WriteLine(temp.Value.X);

                                                this.bitmap.WritePixels(region, (IntPtr)pin.Pointer, (int)color.Size, color.StrideBytes);
                                            }

                                            uint *  colorPixels = (uint *)this.bitmap.BackBuffer;
                                            ushort *depthPixels = (ushort *)transformedDepth.Memory.Pin().Pointer;

                                            int closest = 501;
                                            int x       = 0;
                                            int y       = 0;
                                            int cx      = 0;
                                            int cy      = 0;
                                            // Debug.WriteLine(pos.X);
                                            bool isFirst = true;
                                            int  xref = 0, yref = 0;
                                            for (int i = 0; i < this.colorHeight * this.colorWidth; i++)
                                            {
                                                //if (i > 512000)
                                                //{
                                                //    colorPixels[i] = 0;
                                                //    continue;
                                                //}
                                                x++;
                                                if (i % 1280 == 0)
                                                {
                                                    x = 0;
                                                    y++;
                                                }


                                                //colorPixels[i] = 0;
                                                //// Compute the pixel's color.
                                                int color_data = 255 << 16; // R
                                                color_data |= 128 << 8;     // G
                                                color_data |= 255 << 0;     // B
                                                int ss = 150;

                                                if (x - ss == ((int)temp.Value.X) && (y - ss) < ((int)temp.Value.Y) && (y + ss) > ((int)temp.Value.Y))
                                                {
                                                    colorPixels[i] = (uint)color_data;
                                                }

                                                if (x + ss == ((int)temp.Value.X) && (y - ss) < ((int)temp.Value.Y) && (y + ss) > ((int)temp.Value.Y))
                                                {
                                                    colorPixels[i] = (uint)color_data;
                                                }
                                                if (y + ss == ((int)temp.Value.Y) && (x - ss) < ((int)temp.Value.X) && (x + ss) > ((int)temp.Value.X))
                                                {
                                                    colorPixels[i] = (uint)color_data;
                                                }

                                                if (y - ss == ((int)temp.Value.Y) && (x - ss) < ((int)temp.Value.X) && (x + ss) > ((int)temp.Value.X))
                                                {
                                                    colorPixels[i] = (uint)color_data;
                                                }

                                                // inside the square
                                                if ((y - ss) < ((int)temp.Value.Y) && (y + ss) > ((int)temp.Value.Y) && (x - ss) < ((int)temp.Value.X) && (x + ss) > ((int)temp.Value.X))
                                                {
                                                    if (isFirst)
                                                    {
                                                        xref    = x;
                                                        yref    = y;
                                                        isFirst = false;
                                                    }

                                                    if (depthPixels[i] < pos.Z + 100 && depthPixels[i] > pos.Z - 100 && depthPixels[i] != 0)
                                                    {
                                                        foto[y - yref, x - xref] = colorPixels[i];
                                                        int deep = (((int)depthPixels[i] - ((int)pos.Z - 100)) * 250) / 200;
                                                        //Debug.WriteLine(deep);
                                                        bm.SetPixel(x - xref, y - yref, System.Drawing.Color.FromArgb(deep, deep, deep));
                                                    }
                                                    else
                                                    {
                                                        foto[y - yref, x - xref] = 0;
                                                        // bm.SetPixel(x - xref, y - yref, System.Drawing.Color.FromArgb((int)(depthPixels[i] / 100), (int)(depthPixels[i] / 100), (int)(depthPixels[i]) / 100));
                                                        bm.SetPixel(x - xref, y - yref, System.Drawing.Color.White);
                                                    }
                                                }

                                                if (depthPixels[i] < pos.Z + 100 && depthPixels[i] > pos.Z - 100 && depthPixels[i] != 0)
                                                {
                                                    if (depthPixels[i] < closest)
                                                    {
                                                        if (ox == 0)
                                                        {
                                                            ox = cx;
                                                            oy = cy;
                                                        }
                                                        closest = depthPixels[i];

                                                        cx = x;
                                                        cy = y;
                                                    }


                                                    continue;
                                                }
                                                //colorPixels[i] = 0;
                                            }
                                            // Debug.WriteLine("x: " + cx);
                                            // Debug.WriteLine("y: " + cy);
                                            MoveEllipse(cx, cy);
                                            // Debug.WriteLine(closest);
                                            if (closest == 701)
                                            {
                                                //mouse_event((uint)MouseEventFlags.LEFTDOWN, 0, 0, 0, 0);
                                                // mouse_event((uint)MouseEventFlags.LEFTUP, 0, 0, 0, 0);
                                            }
                                        }
                                    }
                            }
                            //Bitmap btm = new Bitmap(this.colorWidth, this.colorHeight);

                            this.bitmap.AddDirtyRect(region);

                            this.bitmap.Unlock();
                            Bitmap bitmap1;
                            unsafe
                            {
                                fixed(uint *intPtr = &foto[0, 0])
                                {
                                    bitmap1 = new Bitmap(300, 300, 300 * 4, System.Drawing.Imaging.PixelFormat.Format32bppRgb, new IntPtr(intPtr));
                                }

                                MyIm.Source = this.bitmap;
                                var bitmapSource = System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(bitmap1.GetHbitmap(), IntPtr.Zero, Int32Rect.Empty, BitmapSizeOptions.FromEmptyOptions());

                                MyIm1.Source = bitmapSource;
                                var bitmapSource1 = System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(bm.GetHbitmap(), IntPtr.Zero, Int32Rect.Empty, BitmapSizeOptions.FromEmptyOptions());

                                MyIm2.Source = bitmapSource1;
                                // CreateThumbnail("file.png", this.bitmap.Clone());
                                // System.Windows.Application.Current.Shutdown();
                            }
                        }
                }
            }
        }
Example #15
        public PoseData GetNextPose()
        {
            switch (CurrentPoseInputSource)
            {
            case PoseInputSource.WEBSOCKET:
#if !UNITY_WEBGL || UNITY_EDITOR
                websocket.DispatchMessageQueue();
#endif
                // poseLiveWS is non-null if an alternative source is sending pose data over the websocket
                if (poseLiveWS != null)
                {
                    // Assign last pose from websocket
                    CurrentPose = poseLiveWS;
                }
                else
                {
                    Debug.Log("No pose recieved from WebSocket!");
                }
                break;

            case PoseInputSource.FILE:

                if (SequenceEnum != null && SequenceEnum.MoveNext())
                {
                    _CurrentFilePoseNumber++;
                }
                else
                {
                    // Quick and dirty way to loop (by reloading file)
                    if (SequenceEnum != null && !loop)
                    {
                        break;
                    }
                    LoadData();
                    SequenceEnum.MoveNext();
                    _CurrentFilePoseNumber = 1;
                }


                string   frame_json     = SequenceEnum.Current;
                PoseData fake_live_data = PoseDataUtils.JSONstring2PoseData(frame_json);
                CurrentPose = fake_live_data;

                if (recording)     // recording
                {
                    File.AppendAllText(WriteDataPath, frame_json + Environment.NewLine);
                }
                break;

            case PoseInputSource.KINECT:
                if (device != null)
                {
                    using (Capture capture = device.GetCapture())
                    {
                        // Make tracker estimate body
                        tracker.EnqueueCapture(capture);

                        // Code for getting RGB image from camera

                        Microsoft.Azure.Kinect.Sensor.Image color = capture.Color;
                        if (color != null && color.WidthPixels > 0 && (streamCanvas != null || videoRenderer != null))
                        {
                            UnityEngine.Object.Destroy(tex);    // required to not keep old images in memory
                            tex = new Texture2D(color.WidthPixels, color.HeightPixels, TextureFormat.BGRA32, false);
                            tex.LoadRawTextureData(color.Memory.ToArray());
                            tex.Apply();

                            //Fetch the RawImage component from the GameObject
                            if (tex != null)
                            {
                                if (streamCanvas != null)
                                {
                                    m_RawImage         = streamCanvas.GetComponent <RawImage>();
                                    m_RawImage.texture = tex;
                                }
                                if (videoRenderer != null)
                                {
                                    videoRenderer.material.mainTexture = tex;
                                }
                            }
                        }
                    }

                    // Get pose estimate from tracker
                    using (Frame frame = tracker.PopResult())
                    {
                        //Debug.LogFormat("{0} bodies found.", frame.NumberOfBodies);

                        //  At least one body found by Body Tracking
                        if (frame.NumberOfBodies > 0)
                        {
                            // Use the first estimated person if multiple are in the image
                            // !!! There are (probably) no guarantees on consistent ordering between estimates
                            //var bodies = frame.Bodies;
                            var body = frame.GetBody(0);

                            // Apply pose to user avatar(s)
                            PoseData live_data = PoseDataUtils.Body2PoseData(body);

                            if (recording)     // recording
                            {
                                PoseDataJSON jdl = PoseDataUtils.Body2PoseDataJSON(body);
                                AppendRecordedFrame(jdl);
                            }
                            CurrentPose = live_data;
                        }
                    }
                }
                else
                {
                    Debug.Log("device is null!");
                }
                break;
            }
            return(CurrentPose);
        }
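A note on the two PopResult patterns in this example: GetNextPose above calls tracker.PopResult() with no arguments, which waits for a tracking result, while Recorder_Loaded below polls with PopResult(TimeSpan.Zero, throwOnTimeout: false), which returns null immediately when no body-tracking frame is ready.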
        private async void Recorder_Loaded()
        {
            //body track
            using (Tracker tracker = Tracker.Create(this.kinect.GetCalibration(), new TrackerConfiguration()
            {
                ProcessingMode = TrackerProcessingMode.Gpu, SensorOrientation = SensorOrientation.Default
            }))
                // sensor camera
                using (Microsoft.Azure.Kinect.Sensor.Image transformedDepth = new Microsoft.Azure.Kinect.Sensor.Image(Microsoft.Azure.Kinect.Sensor.ImageFormat.Depth16, colorWidth, colorHeight, colorWidth * sizeof(UInt16)))
                {
                    uint trackedBodyId        = 0;
                    JointConfidenceLevel conf = JointConfidenceLevel.Low;

                    while (running)
                    {
                        //get capture
                        using (Capture capture = await Task.Run(() => { return(this.kinect.GetCapture()); }))
                        {
                            // create the output array that represents the image
                            float[,] actualPhoto = new float[30, 30];
                            for (int i = 0; i < 30; i++)
                            {
                                for (int j = 0; j < 30; j++)
                                {
                                    actualPhoto[i, j] = 255;
                                }
                            }

                            this.transform.DepthImageToColorCamera(capture, transformedDepth);

                            // Queue latest frame from the sensor.
                            tracker.EnqueueCapture(capture);

                            //get position of the hand
                            using (Microsoft.Azure.Kinect.BodyTracking.Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                            {
                                //get hand position
                                if (frame != null && frame.NumberOfBodies >= 1)
                                {
                                    if (trackedBodyId <= 0)
                                    {
                                        trackedBodyId = frame.GetBody(0).Id;
                                    }
                                    var numOfSkeletons = frame.NumberOfBodies;
                                    // Debug.WriteLine("num of bodies: " + numOfSkeletos);

                                    var body = frame.GetBodySkeleton(0);
                                    var pos  = body.GetJoint(JointId.HandRight).Position;

                                    var fpos  = body.GetJoint(JointId.HandTipRight).Position;
                                    var rpost = body.GetJoint(JointId.WristRight).Position;
                                    conf = body.GetJoint(JointId.HandRight).ConfidenceLevel;
                                    Quaternion orin = body.GetJoint(JointId.HandTipRight).Quaternion;
                                    //Debug.WriteLine(orin);
                                    //   Debug.WriteLine(pos);

                                    /*
                                     * if (conf == JointConfidenceLevel.Low)
                                     * {
                                     *  this.bitmapMain.AddDirtyRect(region);
                                     *  this.bitmapMain.Unlock();
                                     *
                                     *  this.Crop.Source = System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(bm.GetHbitmap(), IntPtr.Zero, Int32Rect.Empty, BitmapSizeOptions.FromEmptyOptions());
                                     *  continue;
                                     * }*/

                                    //  Debug.WriteLine("confidende level   : " + conf);
                                    // transform the position to the color camera
                                    var oldHand = HandPositionColor;
                                    // set the hand position for the prediction square
                                    var handposition = kinect.GetCalibration().TransformTo2D(pos, CalibrationDeviceType.Depth, CalibrationDeviceType.Color);
                                    HandPositionColor.X = handposition.Value.X;
                                    HandPositionColor.Y = handposition.Value.Y;
                                    // z is the same
                                    HandPositionColor.Z = pos.Z;


                                    if (conf == JointConfidenceLevel.Medium || conf == JointConfidenceLevel.High)
                                    {
                                        manager.SetPosition(pos);
                                    }
                                    //  }
                                }
                            }


                            // show normal camera
                            var color = capture.Color;
                            unsafe
                            {
                                using (var pin = color.Memory.Pin())
                                {
                                    // we only use the depth data now
                                    ushort *depthPixels = (ushort *)transformedDepth.Memory.Pin().Pointer;

                                    int closest = 501;
                                    int x       = 0;
                                    int y       = 0;
                                    int cx      = 0;
                                    int cy      = 0;
                                    // Debug.WriteLine(pos.X);
                                    bool isFirst = true;
                                    int  xref = 0, yref = 0;
                                    int  squareRadius = 150;

                                    for (int i = 0; i < this.colorHeight * this.colorWidth; i++)
                                    {
                                        x++;
                                        if (i % 1280 == 0)
                                        {
                                            x = 0;
                                            y++;
                                        }
                                        // inside the square
                                        if ((y - squareRadius) < ((int)HandPositionColor.Y) && (y + squareRadius) > ((int)HandPositionColor.Y) && (x - squareRadius) < ((int)HandPositionColor.X) && (x + squareRadius) > ((int)HandPositionColor.X))
                                        {
                                            if (isFirst)
                                            {
                                                xref    = x;
                                                yref    = y;
                                                isFirst = false;
                                            }


                                            if (depthPixels[i] < closest)
                                            {
                                                if (ox == 0)
                                                {
                                                    ox = cx;
                                                    oy = cy;
                                                }
                                                closest = depthPixels[i];

                                                cx = x;
                                                cy = y;
                                            }


                                            // select pixels inside the selected depth window
                                            if (depthPixels[i] < HandPositionColor.Z + 50 && depthPixels[i] > HandPositionColor.Z - 150 && depthPixels[i] != 0)
                                            {
                                                // map the [Z - 150, Z + 50] mm window onto a 0-250 gray value
                                                float deep = (((int)depthPixels[i] - ((int)HandPositionColor.Z - 150)) * 250) / 200;
                                                //  Debug.WriteLine("deep" + (uint)deep);
                                                uint xs = (uint)((x - xref) / 10);
                                                uint ys = (uint)((y - yref) / 10);

                                                if ((uint)deep > actualPhoto[ys, xs] || actualPhoto[ys, xs] == 255)
                                                {
                                                    actualPhoto[ys, xs] = deep;
                                                }
                                            }
                                        }
                                    }

                                    // manager.SetPosition(new Vector3(cx, cy, 0));
                                }
                            }

                            if (conf != JointConfidenceLevel.Low)
                            {
                                images.Add(actualPhoto);
                                // Debug.WriteLine("add from queue");
                                if (images.Count > 10)
                                {
                                    // empty: this sample never trims the queue
                                }
                            }


                            //  Debug.WriteLine(fotoS);
                        }
                    }
                }
        }
Example #17
        /// <summary>
        /// Captures a depth-enabled color image and returns it as a <see cref="Bitmap"/>.
        /// The picture is colorized red where there is no depth data, and green
        /// where there is depth data at more than 1.5 meters.
        /// </summary>
        /// <param name="kinectSensor">The initialized Kinect Sensor object</param>
        /// <returns>Returns the Picture from the Color Camera as <see cref="Bitmap"/></returns>
        public static async Task <Bitmap> CreateDepthColorBitmapAsync(this Device kinectSensor)
        {
            try
            {
                //Declare calibration settings
                int colorWidth;
                int colorHeight;

                //Check for initialized Camera
                try
                {
                    colorWidth  = kinectSensor.GetCalibration().ColorCameraCalibration.ResolutionWidth;
                    colorHeight = kinectSensor.GetCalibration().ColorCameraCalibration.ResolutionHeight;
                }
                catch (Exception e)
                {
                    //Camera not initialized
                    throw new CameraNotInitializedException("The Camera was not initialized correctly.", e);
                }

                // Configure transformation module in order to transform depth enabled picture as bitmap
                using (Microsoft.Azure.Kinect.Sensor.Image transformedDepth = new Microsoft.Azure.Kinect.Sensor.Image(ImageFormat.Depth16, colorWidth, colorHeight))
                    using (Microsoft.Azure.Kinect.Sensor.Image outputColorImage = new Microsoft.Azure.Kinect.Sensor.Image(ImageFormat.ColorBGRA32, colorWidth, colorHeight))
                        using (Transformation transform = kinectSensor.GetCalibration().CreateTransformation())
                        {
                            // Wait for a capture on a thread pool thread
                            using (Capture capture = await Task.Run(() => { return(kinectSensor.GetCapture()); }).ConfigureAwait(true))
                            {
                                // Compute the colorized output bitmap on a thread pool thread
                                Task <System.Drawing.Bitmap> createDepthColorBitmapTask = Task.Run(() =>
                                {
                                    // Transform the depth image to the perspective of the color camera
                                    transform.DepthImageToColorCamera(capture, transformedDepth);

                                    // Get Span<T> references to the pixel buffers for fast pixel access.
                                    Span <ushort> depthBuffer     = transformedDepth.GetPixels <ushort>().Span;
                                    Span <BGRA> colorBuffer       = capture.Color.GetPixels <BGRA>().Span;
                                    Span <BGRA> outputColorBuffer = outputColorImage.GetPixels <BGRA>().Span;

                                    // Create an output color image with data from the depth image
                                    for (int i = 0; i < colorBuffer.Length; i++)
                                    {
                                        // The output image will be the same as the input color image,
                                        // but colorized with Red where there is no depth data, and Green
                                        // where there is depth data at more than 1.5 meters
                                        outputColorBuffer[i] = colorBuffer[i];
                                        if (depthBuffer[i] == 0)
                                        {
                                            outputColorBuffer[i].R = 255;
                                        }
                                        else if (depthBuffer[i] > 1500)
                                        {
                                            outputColorBuffer[i].G = 255;
                                        }
                                    }

                                    return(outputColorImage.CreateBitmap());
                                });

                                // Wait for the bitmap and return it
                                var depthColorBitmap = await createDepthColorBitmapTask.ConfigureAwait(true);

                                return(depthColorBitmap);
                            }
                        }
            }
            catch
            {
                // rethrow, preserving the original stack trace
                throw;
            }
        }
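
A minimal usage sketch for the extension method above, assuming it is called from an async method on an attached device; the configuration values are illustrative and mirror those used elsewhere in these examples:

        using (Device device = Device.Open())
        {
            device.StartCameras(new DeviceConfiguration()
            {
                ColorFormat            = ImageFormat.ColorBGRA32,
                ColorResolution        = ColorResolution.R720p,
                DepthMode              = DepthMode.NFOV_Unbinned,
                SynchronizedImagesOnly = true,
                CameraFPS              = FPS.FPS15
            });

            // Capture one frame, colorize it by depth availability, and save it
            Bitmap depthColorBitmap = await device.CreateDepthColorBitmapAsync();
            depthColorBitmap.Save("depth_color.png", System.Drawing.Imaging.ImageFormat.Png);
        }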
Example #18
        static async Task Main()
        {
            using (var visualizerData = new VisualizerData())
            {
                var renderer = new PosSaver(visualizerData);

                renderer.StartVisualizationThread();

                // Open device.
                using (Device device = Device.Open())
                {
                    device.StartCameras(new DeviceConfiguration()
                    {
                        ColorFormat            = ImageFormat.ColorBGRA32,
                        ColorResolution        = ColorResolution.R720p,
                        DepthMode              = DepthMode.NFOV_Unbinned,
                        SynchronizedImagesOnly = true,
                        WiredSyncMode          = WiredSyncMode.Standalone,
                        CameraFPS              = FPS.FPS15
                    });

                    var deviceCalibration = device.GetCalibration();
                    var transformation    = deviceCalibration.CreateTransformation();
                    PointCloud.ComputePointCloudCache(deviceCalibration);

                    using (Tracker tracker = Tracker.Create(deviceCalibration, new TrackerConfiguration()
                    {
                        ProcessingMode = TrackerProcessingMode.Gpu, SensorOrientation = SensorOrientation.Default
                    }))
                    {
                        while (renderer.IsActive)
                        {
                            using (Capture sensorCapture = await Task.Run(() => device.GetCapture()).ConfigureAwait(true))
                            {
                                // Queue latest frame from the sensor.
                                tracker.EnqueueCapture(sensorCapture);
                                if (renderer.IsHuman)
                                {
                                    unsafe
                                    {
                                        // get the width and height of the depth image
                                        int depth_width  = device.GetCalibration().DepthCameraCalibration.ResolutionWidth;
                                        int depth_height = device.GetCalibration().DepthCameraCalibration.ResolutionHeight;
                                        // Bitmap depthBitmap = new Bitmap(depth_width, depth_height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
                                        Bitmap colorBitmap = new Bitmap(depth_width, depth_height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);

                                        // Image depthImage = sensorCapture.Depth;
                                        Image colorImage = transformation.ColorImageToDepthCamera(sensorCapture);
                                        // ushort[] depthArray = depthImage.GetPixels<ushort>().ToArray();
                                        BGRA[] colorArray = colorImage.GetPixels <BGRA>().ToArray();
                                        // BitmapData bitmapData = depthBitmap.LockBits(new Rectangle(0, 0, depthBitmap.Width, depthBitmap.Height), System.Drawing.Imaging.ImageLockMode.WriteOnly, PixelFormat.Format32bppArgb);
                                        BitmapData bitmapData = colorBitmap.LockBits(new Rectangle(0, 0, colorBitmap.Width, colorBitmap.Height), System.Drawing.Imaging.ImageLockMode.WriteOnly, PixelFormat.Format32bppArgb);
                                        // pointer to each pixel's value
                                        byte *pixels = (byte *)bitmapData.Scan0;
                                        int   index  = 0;
                                        // process one pixel at a time
                                        for (int i = 0; i < colorArray.Length; i++)
                                        {
                                            pixels[index++] = colorArray[i].B;
                                            pixels[index++] = colorArray[i].G;
                                            pixels[index++] = colorArray[i].R;
                                            pixels[index++] = 255; // fix the alpha value so the image is opaque
                                        }
                                        // done writing
                                        colorBitmap.UnlockBits(bitmapData);
                                        string string_now = renderer.now.ToString("HHmmssfff");
                                        colorBitmap.Save($@"{PosSaver.path}\{renderer.day}\{renderer.scene}\depth\{string_now}.png", System.Drawing.Imaging.ImageFormat.Png);
                                    }
                                }
                            }

                            // Try getting latest tracker frame.
                            using (Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                            {
                                if (frame != null)
                                {
                                    // Save this frame for visualization in Renderer.

                                    // One can access frame data here and extract e.g. tracked bodies from it for the needed purpose.
                                    // Instead, for simplicity, we transfer the frame object to the rendering background thread.
                                    // This example shows that frame popped from tracker should be disposed. Since here it is used
                                    // in a different thread, we use Reference method to prolong the lifetime of the frame object.
                                    // For reference on how to read frame data, please take a look at Renderer.NativeWindow_Render().
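                                    // A hypothetical sketch of reading bodies directly at this point instead:
                                    //   for (uint i = 0; i < frame.NumberOfBodies; i++)
                                    //   {
                                    //       Skeleton skeleton = frame.GetBodySkeleton(i);
                                    //       uint bodyId = frame.GetBodyId(i);
                                    //   }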
                                    visualizerData.Frame = frame.Reference();
                                }
                            }
                        }
                    }
                }
            }
        }
        private async void StartCamera()
        {
            // Start the body tracker and the sensor camera
            using (Tracker tracker = Tracker.Create(this.kinect.GetCalibration(), new TrackerConfiguration()
            {
                ProcessingMode = TrackerProcessingMode.Gpu, SensorOrientation = SensorOrientation.Default
            }))
                using (Microsoft.Azure.Kinect.Sensor.Image transformedDepth = new Microsoft.Azure.Kinect.Sensor.Image(Microsoft.Azure.Kinect.Sensor.ImageFormat.Depth16, colorWidth, colorHeight, colorWidth * sizeof(UInt16)))
                {
                    // Local variables
                    //
                    // photo for prediction
                    float[,] actualPhoto;

                    // hand position in color camera
                    Vector3 handPositionColor = new Vector3();

                    // hand position in the depth camera
                    Vector3 HandPositionDepth = Vector3.Zero;

                    //Elbow depth position
                    Vector3 ElbowPositionDepth = Vector3.Zero;

                    //tracked body id
                    uint trackedBodyId = 0;

                    // hand reading confidence level
                    JointConfidenceLevel handConf = JointConfidenceLevel.Low;

                    // elbow reading confidence level
                    JointConfidenceLevel elbowConf = JointConfidenceLevel.Low;

                    int x, y;

                    //body skeleton
                    Skeleton body;

                    while (running)
                    {
                        // create and reset the prediction photo
                        actualPhoto = new float[actualPhotoWidht, actualPhotoHeight];
                        for (int i = 0; i < actualPhotoWidht; i++)
                        {
                            for (int j = 0; j < actualPhotoHeight; j++)
                            {
                                actualPhoto[i, j] = 255; // 255 marks an unset cell
                            }
                        }

                        //get capture
                        using (Capture capture = await Task.Run(() => { return(this.kinect.GetCapture()); }))
                        {
                            //get depth transform
                            this.transform.DepthImageToColorCamera(capture, transformedDepth);

                            // Queue latest frame from the sensor.
                            tracker.EnqueueCapture(capture);

                            //get position of the hand
                            using (Microsoft.Azure.Kinect.BodyTracking.Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                            {
                                //check if we have bodies
                                if (frame != null && frame.NumberOfBodies >= 1)
                                {
                                    if (trackedBodyId == 0) // not tracking anyone yet
                                    {
                                        trackedBodyId = frame.GetBody(0).Id;
                                    }

                                    //get body
                                    body = frame.GetBodySkeleton(0);

                                    // check confidence:
                                    // we will use the data only if confidence is medium or high
                                    handConf = body.GetJoint(JointId.HandRight).ConfidenceLevel;

                                    // skip if confidence is not high or medium
                                    if (handConf == JointConfidenceLevel.Low || handConf == JointConfidenceLevel.None)
                                    {
                                        continue;
                                    }

                                    // get hand position
                                    HandPositionDepth = body.GetJoint(JointId.HandRight).Position;

                                    // elbow confidence and position
                                    ElbowPositionDepth = body.GetJoint(JointId.ElbowRight).Position;
                                    elbowConf          = body.GetJoint(JointId.ElbowRight).ConfidenceLevel;

                                    // get the hand position in the color camera
                                    var handPositionColorQ = kinect.GetCalibration().TransformTo2D(HandPositionDepth, CalibrationDeviceType.Depth, CalibrationDeviceType.Color);

                                    // TransformTo2D returns null when the joint does not project into the color image
                                    if (!handPositionColorQ.HasValue)
                                    {
                                        continue;
                                    }

                                    handPositionColor.X = handPositionColorQ.Value.X;
                                    handPositionColor.Y = handPositionColorQ.Value.Y;
                                    handPositionColor.Z = HandPositionDepth.Z;

                                    // loop through all color pixels and select the depths inside the required square

                                    // show normal camera
                                    var color = capture.Color;

                                    // we need unsafe to use a raw pointer into the array
                                    unsafe
                                    {
                                        // get memory handler
                                        using (var pin = color.Memory.Pin())
                                        {
                                            // get a pointer to the depth capture; this is basically an array
                                            ushort *depthPixels = (ushort *)transformedDepth.Memory.Pin().Pointer;

                                            // starting point of the hand square
                                            bool isFirst = true;
                                            int  xref = 0, yref = 0;
                                            x = 0; y = 0;
                                            // loop through the depth pixel array
                                            for (int i = 0; i < this.colorHeight * this.colorWidth; i++)
                                            {
                                                // keep count of x and y for the 2D array
                                                x++;
                                                if (i % this.colorWidth == 0) // a new row starts every colorWidth pixels
                                                {
                                                    x = 0;
                                                    y++;
                                                }
                                                // inside the square centered on the hand
                                                if ((y - squareRadious) < ((int)handPositionColor.Y) && (y + squareRadious) > ((int)handPositionColor.Y) && (x - squareRadious) < ((int)handPositionColor.X) && (x + squareRadious) > ((int)handPositionColor.X))
                                                {
                                                    // record the square's corner on the first pixel inside it
                                                    if (isFirst)
                                                    {
                                                        xref    = x;
                                                        yref    = y;
                                                        isFirst = false;
                                                    }

                                                    // select pixels inside the selected depth window
                                                    if (depthPixels[i] < handPositionColor.Z + d2 && depthPixels[i] > handPositionColor.Z - d1 && depthPixels[i] != 0)
                                                    {
                                                        // scale the [Z - d1, Z + d2] depth window onto a 0-250 gray value (assumes d1 + d2 == 200 mm)
                                                        float deep = (((int)depthPixels[i] - ((int)handPositionColor.Z - d1)) * 250) / 200;


                                                        uint xs = (uint)((x - xref) / 10);
                                                        uint ys = (uint)((y - yref) / 10);

                                                        // keep only the maximum value, or the first one (a cell still at 255 is unset)
                                                        if ((uint)deep > actualPhoto[ys, xs] || actualPhoto[ys, xs] == 255)
                                                        {
                                                            actualPhoto[ys, xs] = deep;
                                                        }
                                                    }
                                                }
                                            }
                                        }
                                    }//unsafe
                                }
                            }
                        }

                        //add to queues
                        images.Add(actualPhoto);
                        bodyData.Add(new BodyData()
                        {
                            HandConfidence = handConf,
                            HandPosition   = HandPositionDepth,
                            ElvowPosition  = ElbowPositionDepth
                        });
                    }//while (running)
                }
        }
        private async void CameraImage()
        {
            try {
                device = Device.Open();

                DeviceConfiguration config = new DeviceConfiguration();

                config.ColorFormat            = ImageFormat.ColorBGRA32;
                config.ColorResolution        = ColorResolution.R1080p;
                config.DepthMode              = DepthMode.WFOV_2x2Binned;
                config.SynchronizedImagesOnly = true;
                config.CameraFPS              = FPS.FPS30;

                device.StartCameras(config);

                int colorWidth  = device.GetCalibration().ColorCameraCalibration.ResolutionWidth;
                int colorHeight = device.GetCalibration().ColorCameraCalibration.ResolutionHeight;

                using (Microsoft.Azure.Kinect.Sensor.Image transformedDepth = new Microsoft.Azure.Kinect.Sensor.Image(ImageFormat.Depth16, colorWidth, colorHeight))
                    using (Microsoft.Azure.Kinect.Sensor.Image outputColorImage = new Microsoft.Azure.Kinect.Sensor.Image(ImageFormat.ColorBGRA32, colorWidth, colorHeight))
                        using (Transformation transform = device.GetCalibration().CreateTransformation())

                            while (running)
                            {
                                using (Capture capture = await Task.Run(() => { return(this.device.GetCapture()); }).ConfigureAwait(true))
                                {
                                    Task <BitmapSource> createInputColorBitmapTask = Task.Run(() =>
                                    {
                                        Microsoft.Azure.Kinect.Sensor.Image color = capture.Color;
                                        BitmapSource source = BitmapSource.Create(color.WidthPixels, color.HeightPixels, 96, 96, PixelFormats.Bgra32, null, color.Memory.ToArray(), color.StrideBytes);

                                        source.Freeze();
                                        return(source);
                                    });
                                    Task <BitmapSource> createOutputColorBitmapTask = Task.Run(() =>
                                    {
                                        transform.DepthImageToColorCamera(capture, transformedDepth);

                                        Span <ushort> depthBuffer     = transformedDepth.GetPixels <ushort>().Span;
                                        Span <BGRA> colorBuffer       = capture.Color.GetPixels <BGRA>().Span;
                                        Span <BGRA> outputColorBuffer = outputColorImage.GetPixels <BGRA>().Span;

                                        // band the output by depth: no data -> light gray, 10-500 mm -> mid gray, beyond 500 mm -> near black
                                        for (int i = 0; i < colorBuffer.Length; i++)
                                        {
                                            outputColorBuffer[i] = colorBuffer[i];

                                            if (depthBuffer[i] == 0)
                                            {
                                                outputColorBuffer[i].R = 200;
                                                outputColorBuffer[i].G = 200;
                                                outputColorBuffer[i].B = 200;
                                            }
                                            else if (depthBuffer[i] > 10 && depthBuffer[i] <= 500)
                                            {
                                                outputColorBuffer[i].R = 100;
                                                outputColorBuffer[i].G = 100;
                                                outputColorBuffer[i].B = 100;
                                            }
                                            else if (depthBuffer[i] > 500)
                                            {
                                                outputColorBuffer[i].R = 10;
                                                outputColorBuffer[i].G = 10;
                                                outputColorBuffer[i].B = 10;
                                            }
                                        }

                                        BitmapSource source = BitmapSource.Create(outputColorImage.WidthPixels, outputColorImage.HeightPixels, 96, 96, PixelFormats.Bgra32, null, outputColorImage.Memory.ToArray(), outputColorImage.StrideBytes);

                                        source.Freeze();
                                        return(source);
                                    });

                                    BitmapSource inputColorBitmap = await createInputColorBitmapTask.ConfigureAwait(true);

                                    BitmapSource outputColorBitmap = await createOutputColorBitmapTask.ConfigureAwait(true);

                                    if (!depthmapActivated)
                                    {
                                        this.displayImg.Source = inputColorBitmap;
                                    }
                                    else
                                    {
                                        this.displayImg.Source = outputColorBitmap;
                                    }
                                }
                            }

                device.Dispose();
            } catch (Exception ex)
            {
                Console.WriteLine(ex);
            }
        }