Example No. 1
        private void ReadingLoop_CaptureReady(object sender, CaptureReadyEventArgs e)
        {
            using (var colorImage = e.Capture.ColorImage)
            {
                var was = colorImageVisualizer?.Update(colorImage);
                UpdateFpsIfNeeded(was, colorFps, nameof(ColorFps));
            }

            using (var depthImage = e.Capture.DepthImage)
            {
                var was = depthImageVisualizer?.Update(depthImage);
                UpdateFpsIfNeeded(was, depthFps, nameof(DepthFps));

                if (depthImage != null && transformation != null && depthOverColorImage != null && depthOverColorImageVisualizer != null)
                {
                    // This object can be disposed from a different thread,
                    // meaning depthOverColorImage may be disposed while we're working with it.
                    // To protect against that scenario, keep our own reference to it.
                    using (var depthOverColorImageRef = depthOverColorImage.DuplicateReference())
                    {
                        transformation.DepthImageToColorCamera(depthImage, depthOverColorImageRef);
                        depthOverColorImageVisualizer?.Update(depthOverColorImageRef);
                    }
                }
            }

            using (var irImage = e.Capture.IRImage)
            {
                var was = irImageVisualizer?.Update(irImage);
                UpdateFpsIfNeeded(was, irFps, nameof(IRFps));
            }
        }
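A note on the DuplicateReference() guard above: it is the only thing preventing a race with whatever thread disposes depthOverColorImage. Below is a minimal sketch of the pattern in isolation, assuming the K4AdotNet-style Image.DuplicateReference() API used in Example No. 1; the field and method names here are illustrative, not from the original.

        private Image sharedDepthOverColorImage; // hypothetical shared field

        private void ProcessShared(Transformation transformation, Image depthImage)
        {
            // Another thread may dispose sharedDepthOverColorImage at any moment.
            // Duplicating the reference keeps the underlying native buffer alive
            // for the duration of this block, even if the shared field is disposed.
            using (var localRef = sharedDepthOverColorImage.DuplicateReference())
            {
                transformation.DepthImageToColorCamera(depthImage, localRef);
            }
        }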
Example No. 2
        public void ProcessCameraFrame()
        {
            if (_IsCameraStarted)
            {
                using (Capture capture = _KinectSensor.GetCapture())
                {
                    if (capture.Color != null)
                    {
                        _RawColorImage = capture.Color.Memory.ToArray();

                        // ColorImageToDepthCamera allocates a new Image; dispose
                        // it once the pixels have been copied out.
                        using (Image transformedColorImage = _Transformation.ColorImageToDepthCamera(capture))
                        {
                            _TransformedColorImage = transformedColorImage.Memory.ToArray();
                        }
                    }

                    if (capture.Depth != null)
                    {
                        Image depthImage = capture.Depth;

                        _RawDepthImage = depthImage.GetPixels<short>().ToArray();

                        using (Image transformedDepthImage = _Transformation.DepthImageToColorCamera(capture))
                        {
                            _TransformedDepthImage = transformedDepthImage.GetPixels<short>().ToArray();
                        }

                        using (Image pointCloudImage = _Transformation.DepthImageToPointCloud(depthImage))
                        {
                            _PointCloud = pointCloudImage.GetPixels<Short3>().ToArray();
                        }
                    }

                    _ImuSample = _KinectSensor.GetImuSample();
                }
            }
        }
Example No. 3
    private async Task KinectLoop()
    {
        while (true)
        {
            using (Capture capture = await Task.Run(() => kinect.GetCapture()).ConfigureAwait(true)) {
                // Get depth and color information. Both transformed images are
                // new Image instances allocated by the SDK; they are disposed at
                // the end of the frame (below) to avoid leaking native buffers.
                Image depthImage = transformation.DepthImageToColorCamera(capture.Depth);
                Image colorImage = capture.Color;

                BGRA[] colorArray = colorImage.GetPixels<BGRA>().ToArray();

                Image    xyzImage = transformation.DepthImageToPointCloud(capture.Depth);
                Short3[] xyzArray = xyzImage.GetPixels<Short3>().ToArray();

                for (int i = 0; i < num; i++)
                {
                    vertices[i].x = xyzArray[i].X * 0.001f;
                    vertices[i].y = -xyzArray[i].Y * 0.001f; // reverse Y: Kinect Y points down, Unity Y points up
                    vertices[i].z = xyzArray[i].Z * 0.001f;

                    float norm = xyzArray[i].Z * 0.001f / 10.0f;

                    byte depth = 0;

                    if (norm >= foregroundBoundry && norm <= backgroundBoundry)
                    {
                        depth = (byte)(255 - (norm * 255));
                    }

                    depthValues[i].g = depth;
                    depthValues[i].r = depth;
                    depthValues[i].b = depth;
                    depthValues[i].a = 255;

                    colors[i].b = colorArray[i].B;
                    colors[i].g = colorArray[i].G;
                    colors[i].r = colorArray[i].R;
                    colors[i].a = 255;
                }

                // set pixels for textures and apply changes
                depthTexture.SetPixels32(depthValues);
                depthTexture.Apply(true, false);

                colorTexture.SetPixels32(colors);
                colorTexture.Apply(true, false);

                // set main textures of materials
                depthMat.mainTexture = depthTexture;
                colorMat.mainTexture = colorTexture;

                //mesh.vertices = vertices;
                //mesh.colors32 = colors;
                //mesh.RecalculateBounds();

                depthImage.Dispose();
                xyzImage.Dispose();
            }
        }
    }
Example No. 4
    private async Task CameraLoop(Device device)
    {
        while (running)
        {
            if (collectCameraData)
            {
                using (Capture capture = await Task.Run(() => device.GetCapture()).ConfigureAwait(true))
                {
                    switch (transformationMode)
                    {
                        case TransformationMode.ColorToDepth:
                            finalColor = transformation.ColorImageToDepthCamera(capture);
                            finalDepth = capture.Depth;
                            break;
                        case TransformationMode.DepthToColor:
                            finalColor = capture.Color;
                            finalDepth = transformation.DepthImageToColorCamera(capture);
                            break;
                        case TransformationMode.None:
                            finalColor = capture.Color;
                            finalDepth = capture.Depth;
                            break;
                    }
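                    // Note: the ColorToDepth/DepthToColor branches above allocate
                    // a new Image from the transformation on every frame; nothing
                    // here disposes those images, so a long-running loop will leak
                    // native memory unless they are released elsewhere.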

                    processor.matrixSize = new Vector3Int(
                        (int)(finalColor.WidthPixels * volumeScale.x),
                        (int)(finalColor.HeightPixels * volumeScale.y),
                        (int)((depthRanges[(int)device.CurrentDepthMode].y
                               - depthRanges[(int)device.CurrentDepthMode].x) / 11 * volumeScale.z));

                    if (processor.colorTexture == null)
                    {
                        processor.colorTexture = new Texture2D(finalColor.WidthPixels, finalColor.HeightPixels, TextureFormat.BGRA32, false);
                        colorData = new byte[finalColor.Memory.Length];
                    }

                    if (processor.depthTexture == null)
                    {
                        processor.depthTexture = new Texture2D(finalDepth.WidthPixels, finalDepth.HeightPixels, TextureFormat.R16, false);
                        processor.oldDepthTexture = new Texture2D(finalDepth.WidthPixels, finalDepth.HeightPixels, TextureFormat.R16, false);
                        depthData = new byte[finalDepth.Memory.Length];
                    }

                    colorData = finalColor.Memory.ToArray();
                    processor.colorTexture.LoadRawTextureData(colorData);
                    processor.colorTexture.Apply();

                    depthData = finalDepth.Memory.ToArray();
                    processor.depthTexture.LoadRawTextureData(depthData);
                    processor.depthTexture.Apply();
                    processor.ProcessKinectData();
                    Graphics.CopyTexture(processor.depthTexture, processor.oldDepthTexture);
                }
            }
            else
            {
                await Task.Run(() => { });
            }
        }
    }
Example No. 5
    private async Task CameraLoop(Device device)
    {
        Material matt = mesh.material;

        while (running)
        {
            if (collectCameraData)
            {
                using (Capture capture = await Task.Run(() => device.GetCapture()).ConfigureAwait(true))
                {
                    switch (transformationMode)
                    {
                    case TransformationMode.ColorToDepth:
                        finalColor = transformation.ColorImageToDepthCamera(capture);
                        finalDepth = capture.Depth;
                        break;

                    case TransformationMode.DepthToColor:
                        finalColor = capture.Color;
                        finalDepth = transformation.DepthImageToColorCamera(capture);
                        break;

                    case TransformationMode.None:
                        finalColor = capture.Color;
                        finalDepth = capture.Depth;
                        break;
                    }


                    if (depthTexture == null)
                    {
                        depthTexture = new Texture2D(finalDepth.WidthPixels, finalDepth.HeightPixels, TextureFormat.R16, false);
                        depthData    = new byte[finalDepth.Memory.Length];
                        print("Made Depth Texture");
                    }

                    depthData = finalDepth.Memory.ToArray();
                    depthTexture.LoadRawTextureData(depthData);
                    depthTexture.Apply();

                    matt.SetTexture("_MainTex", depthTexture);
                }
            }
            else
            {
                await Task.Run(() => { });
            }
        }
    }
Example No. 6
    void BuildDepthImageSource(Capture capture)
    {
        if (capture.Color == null || capture.Depth == null)
        {
            return;
        }

        transformedDepth = new Image(ImageFormat.Depth16, colourWidth, colourHeight, colourWidth * sizeof(ushort));
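        // Depth16 is 2 bytes per pixel, so the stride passed above is
        // colourWidth * sizeof(ushort). (The Image(format, width, height)
        // overload used in Example No. 10 computes this stride automatically.)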

        transformation.DepthImageToColorCamera(capture, transformedDepth);
        depthPixels = transformedDepth.GetPixels<ushort>().ToArray();
        //depthPixels = capture.CreateDepthImage(transformation);
        depthWidth  = transformedDepth.WidthPixels;
        depthHeight = transformedDepth.HeightPixels;

        transformedDepth.Dispose();
    }
Example No. 7
    private unsafe void Capture()
    {
        Capture capture = _device.GetCapture();

        _depthTransformer.DepthImageToColorCamera(capture, _transformedDepth);

        var colorPin = capture.Color.Memory.Pin();
        var depthPin = _transformedDepth.Memory.Pin();

        var colorInput = NativeArrayUnsafeUtility.ConvertExistingDataToNativeArray<BGRA>(colorPin.Pointer, _colorDims.x * _colorDims.y, Allocator.None);
        var depthInput = NativeArrayUnsafeUtility.ConvertExistingDataToNativeArray<ushort>(depthPin.Pointer, _colorDims.x * _colorDims.y, Allocator.None);

#if ENABLE_UNITY_COLLECTIONS_CHECKS
        var colorSafetyHandle = AtomicSafetyHandle.Create();
        var depthSafetyHandle = AtomicSafetyHandle.Create();
        NativeArrayUnsafeUtility.SetAtomicSafetyHandle(ref colorInput, colorSafetyHandle);
        NativeArrayUnsafeUtility.SetAtomicSafetyHandle(ref depthInput, depthSafetyHandle);
#endif

        var colorOutput = _colorTex.GetRawTextureData<Color32>();
        var depthOutput = _depthTex.GetRawTextureData<ushort>();

        var job = new ConvertColorDataJob
        {
            dims     = _colorDims,
            colorIn  = colorInput,
            depthIn  = depthInput,
            depthOut = depthOutput,
            colorOut = colorOutput
        };
        job.Schedule(colorInput.Length, 64).Complete();

        _colorTex.Apply(true);
        _depthTex.Apply(true);

#if ENABLE_UNITY_COLLECTIONS_CHECKS
        AtomicSafetyHandle.Release(colorSafetyHandle);
        AtomicSafetyHandle.Release(depthSafetyHandle);
#endif

        colorPin.Dispose();
        depthPin.Dispose();

        // The capture owns the native color buffer pinned above, so dispose it
        // only after the pins have been released.
        capture.Dispose();
    }
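ConvertColorDataJob itself is not shown in Example No. 7. Below is a plausible minimal sketch, inferred from the field assignments and the parallel Schedule(length, 64) call above; treat the struct layout (and typing dims as Vector2Int) as assumptions, not the original implementation. It assumes the same using directives as the surrounding file (Unity.Collections, UnityEngine, Microsoft.Azure.Kinect.Sensor).

    // Hypothetical reconstruction: swizzles Kinect BGRA pixels into Unity's
    // RGBA Color32 layout and copies the transformed depth straight through.
    [Unity.Burst.BurstCompile]
    struct ConvertColorDataJob : Unity.Jobs.IJobParallelFor
    {
        public Vector2Int dims; // assumption: matches _colorDims
        [Unity.Collections.ReadOnly] public NativeArray<BGRA> colorIn;
        [Unity.Collections.ReadOnly] public NativeArray<ushort> depthIn;
        public NativeArray<Color32> colorOut;
        public NativeArray<ushort> depthOut;

        public void Execute(int i)
        {
            BGRA c = colorIn[i];
            colorOut[i] = new Color32(c.R, c.G, c.B, 255);
            depthOut[i] = depthIn[i];
        }
    }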
Example No. 8
    public static Color[] CreateDepthImage(this Capture capture, Transformation transformation)
    {
        Image img          = capture.Color;
        int   colourWidth  = img.WidthPixels;
        int   colourHeight = img.HeightPixels;

        using (Image transformedDepth = new Image(ImageFormat.Depth16, colourWidth, colourHeight, colourWidth * sizeof(ushort)))
        {
            // Transform the depth image to the colour camera perspective.
            transformation.DepthImageToColorCamera(capture, transformedDepth);

            // Get the transformed pixels (colour camera perspective but depth pixels).
            Span<ushort> depthBuffer = transformedDepth.GetPixels<ushort>().Span;

            Color[] depthPixels = img.CreateColourMap();

            // Create a new image with data from the depth and colour image.
            for (int i = 0; i < depthPixels.Length; i++)
            {
                var depth = depthBuffer[i];

                // Treat a reading of 0 (no data) or 2 metres and beyond as
                // "no depth": paint those pixels black and skip colorization.
                if (depth == 0 || depth >= 2000)
                {
                    depthPixels[i].r = 0;
                    depthPixels[i].g = 0;
                    depthPixels[i].b = 0;
                    continue;
                }

                depthPixels[i].r = GetDepthColor(depth, RED_MAX_VALUE_LOWER, RED_MAX_VALUE_UPPER);
                depthPixels[i].b = GetDepthColor(depth, Blue_MAX_VALUE, Blue_MAX_VALUE);
                depthPixels[i].g = GetDepthColor(depth, GREEN_MAX_VALUE_LOWER, GREEN_MAX_VALUE_UPPER);
            }

            return(depthPixels);
        }
    }
Example No. 9
        public Image ToColor(Image depthMap, Image bodyIndexMap, int bodyCount)
        {
            if (bodyIndexMap == null || depthMap == null)
            {
                return(null);
            }

            // Objects can be disposed from a different thread, so keep our own
            // references to the images while we're working with their buffers.
            using (var maskedDepthMapRef = maskedDepthMap.DuplicateReference())
            using (var transformedMaskedDepthMapRef = transformedMaskedDepthMap.DuplicateReference())
            using (var transformedBodyIndexMapRef = transformedBodyIndexMap.DuplicateReference())
            {
                ClearTransformedBodyIndex(transformedBodyIndexMapRef);

                for (var bodyIndex = 0; bodyIndex < bodyCount; bodyIndex++)
                {
                    ApplyBodyMask(depthMap, bodyIndexMap, bodyIndex, maskedDepthMapRef);
                    transformation.DepthImageToColorCamera(maskedDepthMapRef, transformedMaskedDepthMapRef);
                    FillBodyIndexMapFromMaskedDepth(transformedMaskedDepthMapRef, bodyIndex, transformedBodyIndexMapRef);
                }

                // Return a fresh reference: the local one is disposed when this
                // using block exits, so the caller must own its own.
                return(transformedBodyIndexMapRef.DuplicateReference());
            }
        }
Example No. 10
        private async void Window_Loaded(object sender, RoutedEventArgs e)
        {
            using (Device device = Device.Open(0))
            {
                device.StartCameras(new DeviceConfiguration
                {
                    ColorFormat            = ImageFormat.ColorBGRA32,
                    ColorResolution        = ColorResolution.R1440p,
                    DepthMode              = DepthMode.WFOV_2x2Binned,
                    SynchronizedImagesOnly = true,
                    CameraFPS              = FPS.FPS30,
                });

                int colorWidth  = device.GetCalibration().ColorCameraCalibration.ResolutionWidth;
                int colorHeight = device.GetCalibration().ColorCameraCalibration.ResolutionHeight;

                Stopwatch sw         = new Stopwatch();
                int       frameCount = 0;
                sw.Start();

                // Allocate image buffers for us to manipulate
                using (Image transformedDepth = new Image(ImageFormat.Depth16, colorWidth, colorHeight))
                    using (Image outputColorImage = new Image(ImageFormat.ColorBGRA32, colorWidth, colorHeight))
                        using (Transformation transform = device.GetCalibration().CreateTransformation())
                        {
                            while (this.running)
                            {
                                if (!Environment.Is64BitProcess)
                                {
                                    // In 32-bit the BitmapSource memory runs out quickly and we can hit OutOfMemoryException.
                                    // Force garbage collection in each loop iteration to keep memory in check.
                                    GC.Collect();
                                }

                                // Wait for a capture on a thread pool thread
                                using (Capture capture = await Task.Run(() => { return(device.GetCapture()); }).ConfigureAwait(true))
                                {
                                    // Create a BitmapSource for the unmodified color image.
                                    // Creating the BitmapSource is slow, so do it asynchronously on another thread
                                    Task<BitmapSource> createInputColorBitmapTask = Task.Run(() =>
                                    {
                                        BitmapSource source = capture.Color.CreateBitmapSource();

                                        // Allow the bitmap to move threads
                                        source.Freeze();
                                        return(source);
                                    });

                                    // Compute the colorized output bitmap on a thread pool thread
                                    Task<BitmapSource> createOutputColorBitmapTask = Task.Run(() =>
                                    {
                                        // Transform the depth image to the perspective of the color camera
                                        transform.DepthImageToColorCamera(capture, transformedDepth);

                                        // Get Span<T> references to the pixel buffers for fast pixel access.
                                        Span<ushort> depthBuffer      = transformedDepth.GetPixels<ushort>().Span;
                                        Span<BGRA> colorBuffer        = capture.Color.GetPixels<BGRA>().Span;
                                        Span<BGRA> outputColorBuffer  = outputColorImage.GetPixels<BGRA>().Span;

                                        // Create an output color image with data from the depth image
                                        for (int i = 0; i < colorBuffer.Length; i++)
                                        {
                                            // The output image will be the same as the input color image,
                                            // but colorized with Red where there is no depth data, and Green
                                            // where there is depth data at more than 1.5 meters
                                            outputColorBuffer[i] = colorBuffer[i];

                                            if (depthBuffer[i] == 0)
                                            {
                                                outputColorBuffer[i].R = 255;
                                            }
                                            else if (depthBuffer[i] > 1500)
                                            {
                                                outputColorBuffer[i].G = 255;
                                            }
                                        }

                                        BitmapSource source = outputColorImage.CreateBitmapSource();

                                        // Allow the bitmap to move threads
                                        source.Freeze();

                                        return(source);
                                    });

                                    // Wait for both bitmaps to be ready and assign them.
                                    BitmapSource inputColorBitmap = await createInputColorBitmapTask.ConfigureAwait(true);

                                    BitmapSource outputColorBitmap = await createOutputColorBitmapTask.ConfigureAwait(true);

                                    this.inputColorImageViewPane.Source  = inputColorBitmap;
                                    this.outputColorImageViewPane.Source = outputColorBitmap;

                                    ++frameCount;

                                    if (sw.Elapsed > TimeSpan.FromSeconds(2))
                                    {
                                        double framesPerSecond = (double)frameCount / sw.Elapsed.TotalSeconds;
                                        this.fps.Content = $"{framesPerSecond:F2} FPS";

                                        frameCount = 0;
                                        sw.Restart();
                                    }
                                }
                            }
                        }
            }
        }
Example No. 11
    private unsafe void Capture()
    {
        Capture capture = _device.GetCapture();

        // _tracker.EnqueueCapture(capture);

        // // Todo: oh my god thread this
        // Frame frame = _tracker.PopResult(); // System.TimeSpan.FromMilliseconds(4d)
        // if (frame == null) {
        //     Debug.LogWarningFormat("Unable to get BodyTracking frame");
        //     return;
        // }

        // _numBodies = frame.NumberOfBodies;

        var palette = new NativeArray<Color32>(3, Allocator.TempJob);

        palette[0] = new Color32(255, 0, 0, 255);
        palette[1] = new Color32(0, 255, 0, 255);
        palette[2] = new Color32(0, 0, 255, 255);

        // if (_numBodies > 0) {
        //     uint bodyId = frame.GetBodyId(0);
        //     _skeleton = frame.GetBodySkeleton(0);
        // }

        // _depthTransformer.DepthImageToColorCameraCustom(
        //     capture.Depth,
        //     frame.BodyIndexMap,
        //     _transformedDepth,
        //     _transformedSegment);

        _depthTransformer.DepthImageToColorCamera(
            capture.Depth,
            _transformedDepth);
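        // Note: with the body-tracking DepthImageToColorCameraCustom call above
        // commented out, _transformedSegment is never refreshed this frame; the
        // job below reads whatever that buffer held previously.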

        var colorPin     = capture.Color.Memory.Pin();
        var depthPin     = _transformedDepth.Memory.Pin();
        var bodyMapPin   = _transformedSegment.Memory.Pin();
        var colorInput   = NativeArrayUnsafeUtility.ConvertExistingDataToNativeArray<BGRA>(colorPin.Pointer, _colorDims.x * _colorDims.y, Allocator.None);
        var depthInput   = NativeArrayUnsafeUtility.ConvertExistingDataToNativeArray<ushort>(depthPin.Pointer, _colorDims.x * _colorDims.y, Allocator.None);
        var bodyMapInput = NativeArrayUnsafeUtility.ConvertExistingDataToNativeArray<byte>(bodyMapPin.Pointer, _colorDims.x * _colorDims.y, Allocator.None);

#if ENABLE_UNITY_COLLECTIONS_CHECKS
        var colorSafetyHandle   = AtomicSafetyHandle.Create();
        var depthSafetyHandle   = AtomicSafetyHandle.Create();
        var bodyMapSafetyHandle = AtomicSafetyHandle.Create();
        NativeArrayUnsafeUtility.SetAtomicSafetyHandle(ref bodyMapInput, bodyMapSafetyHandle);
        NativeArrayUnsafeUtility.SetAtomicSafetyHandle(ref depthInput, depthSafetyHandle);
        NativeArrayUnsafeUtility.SetAtomicSafetyHandle(ref colorInput, colorSafetyHandle);
#endif

        var colorOutput   = _colorTex.GetRawTextureData<Color32>();
        var segmentOutput = _segmentTex.GetRawTextureData<Color32>();

        var job = new ConvertSegmentMapToColorPreviewJob
        {
            dims            = _colorDims,
            backgroundIndex = Frame.BodyIndexMapBackground,
            bodyPalette     = palette,
            depthIn         = depthInput,
            colorIn         = colorInput,
            segmentIn       = bodyMapInput,
            depthOut        = _depth,
            segmentOut      = segmentOutput,
            colorOut        = colorOutput
        };
        job.Schedule().Complete();

        _depth.CopyTo(_depthTex.GetRawTextureData<float>());

        _depthTex.Apply(true);
        _segmentTex.Apply(true);
        _colorTex.Apply(true);

#if ENABLE_UNITY_COLLECTIONS_CHECKS
        AtomicSafetyHandle.Release(depthSafetyHandle);
        AtomicSafetyHandle.Release(bodyMapSafetyHandle);
        AtomicSafetyHandle.Release(colorSafetyHandle);
#endif

        depthPin.Dispose();
        colorPin.Dispose();
        bodyMapPin.Dispose();

        // frame.Dispose();
        palette.Dispose();

        // Dispose the capture only after the pins taken on its buffers above
        // have been released.
        capture.Dispose();
    }
Example No. 12
        // Show the initial image
        private async Task<BitmapSource> ShowInitImage(Capture capture, Transformation transform)
        {
            BitmapSource initImageBitmap = null;
            // Create a BitmapSource for the unmodified color image.
            // Creating the BitmapSource is slow, so do it asynchronously on another thread
            Task<BitmapSource> createColorBitmapTask = new Task<BitmapSource>(() =>
            {
                BitmapSource source = capture.Color.CreateBitmapSource();

                // Allow the bitmap to move threads
                source.Freeze();
                return(source);
            });

            // Compute the colorized output bitmap on a thread pool thread
            Task<BitmapSource> createDepthBitmapTask = new Task<BitmapSource>(() =>
            {
                int colorWidth  = this.KinectDevice.GetCalibration().ColorCameraCalibration.ResolutionWidth;
                int colorHeight = this.KinectDevice.GetCalibration().ColorCameraCalibration.ResolutionHeight;
                // Allocate image buffers for us to manipulate
                var transformedDepth = new Microsoft.Azure.Kinect.Sensor.Image(ImageFormat.Depth16, colorWidth, colorHeight);
                var outputColorImage = new Microsoft.Azure.Kinect.Sensor.Image(ImageFormat.ColorBGRA32, colorWidth, colorHeight);
                // Transform the depth image to the perspective of the color camera
                transform.DepthImageToColorCamera(capture, transformedDepth);

                // Get Span<T> references to the pixel buffers for fast pixel access.
                Span<ushort> depthBuffer      = transformedDepth.GetPixels<ushort>().Span;
                Span<BGRA> colorBuffer        = capture.Color.GetPixels<BGRA>().Span;
                Span<BGRA> outputColorBuffer  = outputColorImage.GetPixels<BGRA>().Span;

                // Create an output color image with data from the depth image
                for (int i = 0; i < colorBuffer.Length; i++)
                {
                    // The output image will be the same as the input color image,
                    // but colorized with Red where there is no depth data, and Green
                    // where there is depth data at more than 1.5 meters
                    outputColorBuffer[i] = colorBuffer[i];

                    if (depthBuffer[i] == 0)
                    {
                        outputColorBuffer[i].R = 255;
                    }
                    else if (depthBuffer[i] > 1500)
                    {
                        outputColorBuffer[i].G = 255;
                    }
                }

                BitmapSource source = outputColorImage.CreateBitmapSource();

                // Allow the bitmap to move threads
                source.Freeze();

                // These images are not disposed anywhere else, so release their
                // native buffers before returning.
                transformedDepth.Dispose();
                outputColorImage.Dispose();

                return(source);
            });

            if (this.sysState.DeviceDepthMode)
            {
                createDepthBitmapTask.Start();
                initImageBitmap = await createDepthBitmapTask.ConfigureAwait(false);
            }
            else
            {
                createColorBitmapTask.Start();
                initImageBitmap = await createColorBitmapTask.ConfigureAwait(false);
            }
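            // Note: only one of the two tasks constructed above is ever started;
            // the other is created but never runs.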

            return(initImageBitmap);
        }
Example No. 13
    private async Task CameraLoop(Device device)
    {
        Material matt = mesh.material;

        while (running)
        {
            if (collectCameraData)
            {
                using (Capture capture = await Task.Run(() => device.GetCapture()).ConfigureAwait(true))
                {
                    switch (kinectSettings.transformationMode)
                    {
                    case TransformationMode.ColorToDepth:
                        finalColor = transformation.ColorImageToDepthCamera(capture);
                        finalDepth = capture.Depth;
                        break;

                    case TransformationMode.DepthToColor:
                        finalColor = capture.Color;
                        finalDepth = transformation.DepthImageToColorCamera(capture);
                        break;

                    case TransformationMode.None:
                        finalColor = capture.Color;
                        finalDepth = capture.Depth;
                        break;
                    }

                    if (volumeBuffer == null)
                    {
                        matrixSize = new Vector3Int(
                            (int)(finalColor.WidthPixels * kinectSettings.volumeScale.x),
                            (int)(finalColor.HeightPixels * kinectSettings.volumeScale.y),
                            (int)((KinectUtilities.depthRanges[(int)device.CurrentDepthMode].y
                                   - KinectUtilities.depthRanges[(int)device.CurrentDepthMode].x) / 11 * kinectSettings.volumeScale.z));
                        volumeBuffer = new ComputeBuffer(matrixSize.x * matrixSize.y * matrixSize.z, 4 * sizeof(float), ComputeBufferType.Default);
                        //print("Made Volume Buffer || Matrix Size: " + matrixSize);
                        extractedVolumeBuffer = new float[matrixSize.x * matrixSize.y * matrixSize.z * 4];
                        extractedVolumeBytes  = new byte[matrixSize.x * matrixSize.y * matrixSize.z * 4 * 4];
                    }
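                    // Note: ComputeBuffer wraps a GPU resource; call
                    // volumeBuffer.Release() when the loop stops (e.g. in
                    // OnDestroy) or the buffer will leak.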

                    if (colorTexture == null)
                    {
                        colorTexture = new Texture2D(finalColor.WidthPixels, finalColor.HeightPixels, TextureFormat.BGRA32, false);
                        colorData    = new byte[finalColor.Memory.Length];
                        //print("Made Color Texture");
                    }

                    if (depthTexture == null)
                    {
                        depthTexture    = new Texture2D(finalDepth.WidthPixels, finalDepth.HeightPixels, TextureFormat.R16, false);
                        oldDepthTexture = new Texture2D(finalDepth.WidthPixels, finalDepth.HeightPixels, TextureFormat.R16, false);
                        depthData       = new byte[finalDepth.Memory.Length];
                        //print("Made Depth Texture");
                    }

                    colorData = finalColor.Memory.ToArray();
                    colorTexture.LoadRawTextureData(colorData);
                    colorTexture.Apply();

                    depthData = finalDepth.Memory.ToArray();
                    depthTexture.LoadRawTextureData(depthData);
                    depthTexture.Apply();

                    configureComputeShader();

                    kinectProcessingShader.Dispatch(computeShaderKernelIndex, matrixSize.x / 16, matrixSize.y / 16, 1);

                    // Get the volume buffer data as a byte array
                    volumeBuffer.GetData(extractedVolumeBytes);

                    // TODO: Test which is faster, or if a dedicated thread would be best
                    //Option 1: Use the UserWorkItem Threadpool to manage thread for me
                    ThreadPool.QueueUserWorkItem((state) => Postprocess((Byte[])state), extractedVolumeBytes);

                    //Option 2: Spawn a thread for each frame
                    //new Thread(() => Postprocess(extractedVolumeBytes)).Start();

                    if (compressedBytes == 0)
                    {
                        byte[] compressedArray = CompressData(extractedVolumeBytes);
                        compressedBytes     = compressedArray.Length;
                        maxRecordingSeconds = (maxFileSizeMb * 1000 * 1000) / (compressedBytes * KinectUtilities.FPStoInt(kinectSettings.fps));
                    }

                    matt.SetBuffer("colors", volumeBuffer);
                    matt.SetInt("_MatrixX", matrixSize.x);
                    matt.SetInt("_MatrixY", matrixSize.y);
                    matt.SetInt("_MatrixZ", matrixSize.z);

                    Graphics.CopyTexture(depthTexture, oldDepthTexture);
                }
            }
            else
            {
                await Task.Run(() => { });
            }
        }
    }
Example No. 14
    private async Task CameraLoop(Device device)
    {
        Material matt = mesh.material;

        while (running)
        {
            if (collectCameraData)
            {
                using (Capture capture = await Task.Run(() => device.GetCapture()).ConfigureAwait(true))
                {
                    switch (transformationMode)
                    {
                    case TransformationMode.ColorToDepth:
                        finalColor = transformation.ColorImageToDepthCamera(capture);
                        finalDepth = capture.Depth;
                        break;

                    case TransformationMode.DepthToColor:
                        finalColor = capture.Color;
                        finalDepth = transformation.DepthImageToColorCamera(capture);
                        break;

                    case TransformationMode.None:
                        finalColor = capture.Color;
                        finalDepth = capture.Depth;
                        break;
                    }

                    if (volumeBuffer == null)
                    {
                        matrixSize = new Vector3Int(
                            (int)(finalColor.WidthPixels * volumeScale.x),
                            (int)(finalColor.HeightPixels * volumeScale.y),
                            (int)((depthRanges[(int)device.CurrentDepthMode].y
                                   - depthRanges[(int)device.CurrentDepthMode].x) / 11 * volumeScale.z));
                        volumeBuffer = new ComputeBuffer(matrixSize.x * matrixSize.y * matrixSize.z, 4 * sizeof(float), ComputeBufferType.Default);
                    }

                    if (colorTexture == null)
                    {
                        colorTexture = new Texture2D(finalColor.WidthPixels, finalColor.HeightPixels, TextureFormat.BGRA32, false);
                        colorData    = new byte[finalColor.Memory.Length];
                    }

                    if (depthTexture == null)
                    {
                        depthTexture    = new Texture2D(finalDepth.WidthPixels, finalDepth.HeightPixels, TextureFormat.R16, false);
                        oldDepthTexture = new Texture2D(finalDepth.WidthPixels, finalDepth.HeightPixels, TextureFormat.R16, false);
                        depthData       = new byte[finalDepth.Memory.Length];
                    }

                    colorData = finalColor.Memory.ToArray();
                    colorTexture.LoadRawTextureData(colorData);
                    colorTexture.Apply();

                    depthData = finalDepth.Memory.ToArray();
                    depthTexture.LoadRawTextureData(depthData);
                    depthTexture.Apply();

                    // Apply Buffer Updates
                    int kernelIndex = shader.FindKernel("ToBuffer");
                    shader.SetInt("_MatrixX", matrixSize.x);
                    shader.SetInt("_MatrixY", matrixSize.y);
                    shader.SetInt("_MatrixZ", matrixSize.z);

                    shader.SetTexture(kernelIndex, "ColorTex", colorTexture);
                    shader.SetTexture(kernelIndex, "DepthTex", depthTexture);
                    shader.SetTexture(kernelIndex, "oldDepthTexture", oldDepthTexture);
                    shader.SetBuffer(kernelIndex, "ResultBuffer", volumeBuffer);
                    shader.SetInt("minDepth", minDepthMM);
                    shader.SetInt("maxDepth", maxDepthMM);
                    shader.Dispatch(kernelIndex, matrixSize.x / 8, matrixSize.y / 8, matrixSize.z / 8);

                    matt.SetBuffer("colors", volumeBuffer);
                    matt.SetInt("_MatrixX", matrixSize.x);
                    matt.SetInt("_MatrixY", matrixSize.y);
                    matt.SetInt("_MatrixZ", matrixSize.z);

                    Graphics.CopyTexture(depthTexture, oldDepthTexture);
                }
            }
            else
            {
                await Task.Run(() => { });
            }
        }
    }
Example No. 15
        /// <summary>
        /// Captures a depth-enabled color image and returns it as a <see cref="Bitmap"/>.
        /// The picture will be colorized with red where there is no depth data, and green
        /// where there is depth data at more than 1.5 meters.
        /// </summary>
        /// <param name="kinectSensor">The initialized Kinect Sensor object</param>
        /// <returns>Returns the picture from the color camera as a <see cref="Bitmap"/></returns>
        public static async Task<Bitmap> CreateDepthColorBitmapAsync(this Device kinectSensor)
        {
            try
            {
                //Declare calibration settings
                int colorWidth;
                int colorHeight;

                //Check for initialized Camera
                try
                {
                    colorWidth  = kinectSensor.GetCalibration().ColorCameraCalibration.ResolutionWidth;
                    colorHeight = kinectSensor.GetCalibration().ColorCameraCalibration.ResolutionHeight;
                }
                catch (Exception e)
                {
                    //Camera not initialized
                    throw new CameraNotInitializedException("The Camera was not initialized correctly.", e);
                }

                // Configure transformation module in order to transform depth enabled picture as bitmap
                using (Microsoft.Azure.Kinect.Sensor.Image transformedDepth = new Microsoft.Azure.Kinect.Sensor.Image(ImageFormat.Depth16, colorWidth, colorHeight))
                    using (Microsoft.Azure.Kinect.Sensor.Image outputColorImage = new Microsoft.Azure.Kinect.Sensor.Image(ImageFormat.ColorBGRA32, colorWidth, colorHeight))
                        using (Transformation transform = kinectSensor.GetCalibration().CreateTransformation())
                        {
                            // Wait for a capture on a thread pool thread
                            using (Capture capture = await Task.Run(() => { return(kinectSensor.GetCapture()); }).ConfigureAwait(true))
                            {
                                // Compute the colorized output bitmap on a thread pool thread
                                Task<System.Drawing.Bitmap> createDepthColorBitmapTask = Task.Run(() =>
                                {
                                    // Transform the depth image to the perspective of the color camera
                                    transform.DepthImageToColorCamera(capture, transformedDepth);

                                    // Get Span<T> references to the pixel buffers for fast pixel access.
                                    Span<ushort> depthBuffer      = transformedDepth.GetPixels<ushort>().Span;
                                    Span<BGRA> colorBuffer        = capture.Color.GetPixels<BGRA>().Span;
                                    Span<BGRA> outputColorBuffer  = outputColorImage.GetPixels<BGRA>().Span;

                                    // Create an output color image with data from the depth image
                                    for (int i = 0; i < colorBuffer.Length; i++)
                                    {
                                        // The output image will be the same as the input color image,
                                        // but colorized with Red where there is no depth data, and Green
                                        // where there is depth data at more than 1.5 meters
                                        outputColorBuffer[i] = colorBuffer[i];
                                        if (depthBuffer[i] == 0)
                                        {
                                            outputColorBuffer[i].R = 255;
                                        }
                                        else if (depthBuffer[i] > 1500)
                                        {
                                            outputColorBuffer[i].G = 255;
                                        }
                                    }

                                    return(outputColorImage.CreateBitmap());
                                });

                                // Wait for the bitmap and return it
                                var depthColorBitmap = await createDepthColorBitmapTask.ConfigureAwait(true);

                                return(depthColorBitmap);
                            }
                        }
            }
            catch (Exception)
            {
                // Rethrow without resetting the stack trace (a bare `throw e;`
                // would overwrite it).
                throw;
            }
        }
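A usage sketch for the extension above; the device index, depth mode, and output path are illustrative, and the call must be awaited from an async method:

        using (Device kinectSensor = Device.Open(0))
        {
            kinectSensor.StartCameras(new DeviceConfiguration
            {
                ColorFormat            = ImageFormat.ColorBGRA32,
                ColorResolution        = ColorResolution.R1080p,
                DepthMode              = DepthMode.NFOV_Unbinned,
                SynchronizedImagesOnly = true,
                CameraFPS              = FPS.FPS30,
            });

            using (Bitmap depthColorBitmap = await kinectSensor.CreateDepthColorBitmapAsync())
            {
                depthColorBitmap.Save("depth-color.png", System.Drawing.Imaging.ImageFormat.Png);
            }
        }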
Example No. 16
    private async Task CameraLoop(Device device)
    {
        Material matt = mesh.material;

        while (running)
        {
            if (collectCameraData)
            {
                using (Capture capture = await Task.Run(() => device.GetCapture()).ConfigureAwait(true))
                {
                    switch (transformationMode)
                    {
                    case TransformationMode.ColorToDepth:
                        finalColor = transformation.ColorImageToDepthCamera(capture);
                        finalDepth = capture.Depth;
                        break;

                    case TransformationMode.DepthToColor:
                        finalColor = capture.Color;
                        finalDepth = transformation.DepthImageToColorCamera(capture);
                        break;

                    case TransformationMode.None:
                        finalColor = capture.Color;
                        finalDepth = capture.Depth;
                        break;
                    }

                    if (volumeTexture == null)
                    {
                        matrixSize = new Vector3Int(
                            (int)(finalColor.WidthPixels * volumeScale.x),
                            (int)(finalColor.HeightPixels * volumeScale.y),
                            (int)((depthRanges[(int)device.CurrentDepthMode].y
                                   - depthRanges[(int)device.CurrentDepthMode].x) / 11 * volumeScale.z));
                        volumeTexture = new RenderTexture(matrixSize.x, matrixSize.y, 0, RenderTextureFormat.ARGB32);
                        volumeTexture.enableRandomWrite = true;
                        volumeTexture.dimension         = UnityEngine.Rendering.TextureDimension.Tex3D;
                        volumeTexture.volumeDepth       = matrixSize.z;
                        volumeTexture.Create();
                    }

                    if (colorTexture == null)
                    {
                        colorTexture = new Texture2D(finalColor.WidthPixels, finalColor.HeightPixels, TextureFormat.BGRA32, false);
                        colorData    = new byte[finalColor.Memory.Length];
                    }

                    if (depthTexture == null)
                    {
                        depthTexture    = new Texture2D(finalDepth.WidthPixels, finalDepth.HeightPixels, TextureFormat.R16, false);
                        oldDepthTexture = new Texture2D(finalDepth.WidthPixels, finalDepth.HeightPixels, TextureFormat.R16, false);
                        depthData       = new byte[finalDepth.Memory.Length];
                    }

                    colorData = finalColor.Memory.ToArray();
                    colorTexture.LoadRawTextureData(colorData);
                    colorTexture.Apply();

                    depthData = finalDepth.Memory.ToArray();
                    depthTexture.LoadRawTextureData(depthData);
                    depthTexture.Apply();

                    // Apply Texture Updates
                    int kernelIndex = shader.FindKernel("ToTexture");
                    shader.SetTexture(kernelIndex, "ColorTex", colorTexture);
                    shader.SetTexture(kernelIndex, "DepthTex", depthTexture);
                    shader.SetTexture(kernelIndex, "oldDepthTexture", oldDepthTexture);
                    shader.SetTexture(kernelIndex, "ResultTexture", volumeTexture);
                    shader.SetVector("_Size", new Vector4(matrixSize.x, matrixSize.y, matrixSize.z, 1));
                    shader.SetInt("minDepth", minDepthMM);
                    shader.SetInt("maxDepth", maxDepthMM);
                    shader.Dispatch(kernelIndex, matrixSize.x / 16, matrixSize.y / 16, 1);
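                    // Only one Z group is dispatched here, so the "ToTexture"
                    // kernel presumably iterates the volume's Z slices itself.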

                    matt.SetTexture("_Volume", volumeTexture);
                    Graphics.CopyTexture(depthTexture, oldDepthTexture);
                }
            }
            else
            {
                await Task.Run(() => { });
            }
        }
    }
Example No. 17
        // processes the camera frame
        private void ProcessCameraFrame(KinectInterop.SensorData sensorData, Capture capture)
        {
            // check for color & depth sync
            if (isSyncDepthAndColor && (capture.Color == null || capture.Depth == null))
            {
                return;
            }

            try
            {
                // color frame
                if (capture.Color != null && rawColorImage != null)
                {
                    if (kinectPlayback != null)
                    {
                        WaitForPlaybackTimestamp("color", capture.Color.DeviceTimestamp.Ticks);
                    }

                    lock (colorFrameLock)
                    {
                        capture.Color.CopyBytesTo(rawColorImage, 0, 0, rawColorImage.Length);

                        rawColorTimestamp = (ulong)capture.Color.DeviceTimestamp.Ticks;
                        colorFrameNumber  = currentFrameNumber;
                        //Debug.Log("RawColorTimestamp: " + rawColorTimestamp);
                    }
                }

                // depth frame
                if (capture.Depth != null && rawDepthImage != null)
                {
                    if (kinectPlayback != null)
                    {
                        WaitForPlaybackTimestamp("depth", capture.Depth.DeviceTimestamp.Ticks);
                    }

                    lock (depthFrameLock)
                    {
                        capture.Depth.CopyTo(rawDepthImage, 0, 0, rawDepthImage.Length);

                        rawDepthTimestamp = (ulong)capture.Depth.DeviceTimestamp.Ticks;
                        depthFrameNumber  = currentFrameNumber;
                        //Debug.Log("RawDepthTimestamp: " + rawDepthTimestamp);
                    }
                }

                // infrared frame
                if (capture.IR != null && rawInfraredImage != null)
                {
                    if (kinectPlayback != null)
                    {
                        WaitForPlaybackTimestamp("ir", capture.IR.DeviceTimestamp.Ticks);
                    }

                    lock (infraredFrameLock)
                    {
                        capture.IR.CopyTo(rawInfraredImage, 0, 0, rawInfraredImage.Length);

                        rawInfraredTimestamp = (ulong)capture.IR.DeviceTimestamp.Ticks;
                        infraredFrameNumber  = currentFrameNumber;
                        //Debug.Log("RawInfraredTimestamp: " + rawInfraredTimestamp);
                    }
                }

                // transformation data frames
                if ((depth2ColorDataFrame != null || color2DepthDataFrame != null) && capture.Color != null && capture.Depth != null)
                {
                    if (coordMapperTransform == null)
                    {
                        coordMapperTransform = coordMapper.CreateTransformation();
                    }

                    if (depth2ColorDataFrame != null)
                    {
                        lock (depth2ColorFrameLock)
                        {
                            using (Image d2cColorData = coordMapperTransform.ColorImageToDepthCamera(capture))
                            {
                                d2cColorData.CopyTo<byte>(depth2ColorDataFrame, 0, 0, depth2ColorDataFrame.Length);
                                lastDepth2ColorFrameTime = (ulong)capture.Depth.DeviceTimestamp.Ticks;
                            }
                        }
                    }

                    if (color2DepthDataFrame != null)
                    {
                        lock (color2DepthFrameLock)
                        {
                            using (Image c2dDepthData = coordMapperTransform.DepthImageToColorCamera(capture))
                            {
                                c2dDepthData.CopyTo<ushort>(color2DepthDataFrame, 0, 0, color2DepthDataFrame.Length);
                                lastColor2DepthFrameTime = (ulong)capture.Color.DeviceTimestamp.Ticks;
                            }
                        }
                    }
                }
            }
            catch (System.Exception ex)
            {
                Debug.LogException(ex);
            }
        }
Example No. 18
        private async void CameraImage()
        {
            try {
                device = Device.Open();
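                // Note: the device is disposed only on the success path at the
                // bottom of this method; if an exception is thrown first, the
                // handle is left to the finalizer. A using block would be safer.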

                DeviceConfiguration config = new DeviceConfiguration();

                config.ColorFormat            = ImageFormat.ColorBGRA32;
                config.ColorResolution        = ColorResolution.R1080p;
                config.DepthMode              = DepthMode.WFOV_2x2Binned;
                config.SynchronizedImagesOnly = true;
                config.CameraFPS              = FPS.FPS30;

                device.StartCameras(config);

                int colorWidth  = device.GetCalibration().ColorCameraCalibration.ResolutionWidth;
                int colorHeight = device.GetCalibration().ColorCameraCalibration.ResolutionHeight;

                using (Microsoft.Azure.Kinect.Sensor.Image transformedDepth = new Microsoft.Azure.Kinect.Sensor.Image(ImageFormat.Depth16, colorWidth, colorHeight))
                    using (Microsoft.Azure.Kinect.Sensor.Image outputColorImage = new Microsoft.Azure.Kinect.Sensor.Image(ImageFormat.ColorBGRA32, colorWidth, colorHeight))
                        using (Transformation transform = device.GetCalibration().CreateTransformation())

                            while (running)
                            {
                                using (Capture capture = await Task.Run(() => { return(this.device.GetCapture()); }).ConfigureAwait(true))
                                {
                                    Task<BitmapSource> createInputColorBitmapTask = Task.Run(() =>
                                    {
                                        Microsoft.Azure.Kinect.Sensor.Image color = capture.Color;
                                        BitmapSource source = BitmapSource.Create(color.WidthPixels, color.HeightPixels, 96, 96, PixelFormats.Bgra32, null, color.Memory.ToArray(), color.StrideBytes);

                                        source.Freeze();
                                        return(source);
                                    });
                                    Task<BitmapSource> createOutputColorBitmapTask = Task.Run(() =>
                                    {
                                        transform.DepthImageToColorCamera(capture, transformedDepth);

                                        Span<ushort> depthBuffer      = transformedDepth.GetPixels<ushort>().Span;
                                        Span<BGRA> colorBuffer        = capture.Color.GetPixels<BGRA>().Span;
                                        Span<BGRA> outputColorBuffer  = outputColorImage.GetPixels<BGRA>().Span;

                                        for (int i = 0; i < colorBuffer.Length; i++)
                                        {
                                            outputColorBuffer[i] = colorBuffer[i];

                                            if (depthBuffer[i] == 0)
                                            {
                                                outputColorBuffer[i].R = 200;
                                                outputColorBuffer[i].G = 200;
                                                outputColorBuffer[i].B = 200;
                                            }
                                            else if (depthBuffer[i] > 10 && depthBuffer[i] <= 500)
                                            {
                                                outputColorBuffer[i].R = 100;
                                                outputColorBuffer[i].G = 100;
                                                outputColorBuffer[i].B = 100;
                                            }
                                            else if (depthBuffer[i] > 500)
                                            {
                                                outputColorBuffer[i].R = 10;
                                                outputColorBuffer[i].G = 10;
                                                outputColorBuffer[i].B = 10;
                                            }
                                        }

                                        BitmapSource source = BitmapSource.Create(outputColorImage.WidthPixels, outputColorImage.HeightPixels, 96, 96, PixelFormats.Bgra32, null, outputColorImage.Memory.ToArray(), outputColorImage.StrideBytes);

                                        source.Freeze();
                                        return(source);
                                    });

                                    BitmapSource inputColorBitmap = await createInputColorBitmapTask.ConfigureAwait(true);

                                    BitmapSource outputColorBitmap = await createOutputColorBitmapTask.ConfigureAwait(true);

                                    if (!depthmapActivated)
                                    {
                                        this.displayImg.Source = inputColorBitmap;
                                    }
                                    else
                                    {
                                        this.displayImg.Source = outputColorBitmap;
                                    }
                                }
                            }

                device.Dispose();
            } catch (Exception ex)
            {
                Console.WriteLine(ex);
            }
        }