Ejemplo n.º 1
0
    /// <summary>Maps a color camera resolution to its pixel dimensions.</summary>
    /// <param name="resolution">Color resolution to look up.</param>
    /// <returns>Tuple of (width, height) in pixels.</returns>
    /// <exception cref="ArgumentOutOfRangeException">Unknown resolution value (e.g. <c>Off</c>).</exception>
    public static Tuple <int, int> GetColorDimensions(ColorResolution resolution)
    {
        switch (resolution)
        {
        case ColorResolution.R720p:
            return Tuple.Create(1280, 720);

        case ColorResolution.R1080p:
            return Tuple.Create(1920, 1080);

        case ColorResolution.R1440p:
            return Tuple.Create(2560, 1440);

        case ColorResolution.R1536p:
            return Tuple.Create(2048, 1536);

        case ColorResolution.R2160p:
            return Tuple.Create(3840, 2160);

        case ColorResolution.R3072p:
            return Tuple.Create(4096, 3072);

        default:
            // More specific than the original bare Exception; existing catch (Exception) handlers still catch it.
            throw new ArgumentOutOfRangeException(nameof(resolution), "Invalid color dimensions value!");
        }
    }
Ejemplo n.º 2
0
        /// <summary>
        /// Creates dummy but valid calibration data: no distortions, ideal pin-hole geometry, all sensors aligned.
        /// Useful for testing and stubbing.
        /// </summary>
        /// <param name="depthMode">Depth mode for which dummy calibration should be created. Can be <see cref="DepthMode.Off"/>.</param>
        /// <param name="colorResolution">Color resolution for which dummy calibration should be created. Can be <see cref="ColorResolution.Off"/>.</param>
        /// <param name="calibration">Result: created dummy calibration data for <paramref name="depthMode"/> and <paramref name="colorResolution"/> specified.</param>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="depthMode"/> and <paramref name="colorResolution"/> cannot be equal to <c>Off</c> simultaneously.</exception>
        public static void CreateDummy(DepthMode depthMode, ColorResolution colorResolution, out Calibration calibration)
        {
            if (depthMode == DepthMode.Off && colorResolution == ColorResolution.Off)
            {
                throw new ArgumentOutOfRangeException(nameof(depthMode) + " and " + nameof(colorResolution), $"{nameof(depthMode)} and {nameof(colorResolution)} cannot be equal to Off simultaneously.");
            }

            calibration = default;

            // Depth camera: nominal field of view and pixel dimensions for the requested mode.
            calibration.DepthMode = depthMode;
            depthMode.GetNominalFov(out var hFov, out var vFov);
            InitDummyCameraCalibration(ref calibration.DepthCameraCalibration,
                                       depthMode.WidthPixels(), depthMode.HeightPixels(),
                                       hFov, vFov);

            // Color camera: same ideal pin-hole model at the requested color resolution.
            calibration.ColorResolution = colorResolution;
            colorResolution.GetNominalFov(out hFov, out vFov);
            InitDummyCameraCalibration(ref calibration.ColorCameraCalibration,
                                       colorResolution.WidthPixels(), colorResolution.HeightPixels(),
                                       hFov, vFov);

            // Dummy extrinsics for every (from, to) sensor pair.
            var pairCount = (int)CalibrationGeometry.Count * (int)CalibrationGeometry.Count;
            calibration.Extrinsics = new CalibrationExtrinsics[pairCount];
            for (var index = 0; index < pairCount; index++)
            {
                InitDummyExtrinsics(ref calibration.Extrinsics[index]);
            }
        }
Ejemplo n.º 3
0
 /// <summary>Stores the device and the capture configuration this loop will read with.</summary>
 public DeviceReadingLoop(Device device, DepthMode depthMode, ColorResolution colorResolution, FrameRate frameRate)
 {
     this.device = device;
     FrameRate = frameRate;
     ColorResolution = colorResolution;
     DepthMode = depthMode;
 }
Ejemplo n.º 4
0
        private void TestConvert2DTo2D(DepthMode depthMode, ColorResolution colorResolution)
        {
            Calibration.CreateDummy(depthMode, colorResolution, out var calibration);

            // Same-camera conversion must be the identity transform.
            var depthToDepth = calibration.Convert2DTo2D(new Float2(100f, 10f), 2000f, CalibrationGeometry.Depth, CalibrationGeometry.Depth);
            Assert.IsNotNull(depthToDepth);
            Assert.AreEqual(100f, depthToDepth.Value.X);
            Assert.AreEqual(10f, depthToDepth.Value.Y);

            var colorToColor = calibration.Convert2DTo2D(new Float2(10f, 100f), 3000f, CalibrationGeometry.Color, CalibrationGeometry.Color);
            Assert.IsNotNull(colorToColor);
            Assert.AreEqual(10f, colorToColor.Value.X);
            Assert.AreEqual(100f, colorToColor.Value.Y);

            // In a dummy calibration the cameras are aligned, so each principal point
            // should map onto the other camera's principal point.
            var depthCenter = new Float2(calibration.DepthCameraCalibration.Intrinsics.Parameters.Cx, calibration.DepthCameraCalibration.Intrinsics.Parameters.Cy);
            var colorCenter = new Float2(calibration.ColorCameraCalibration.Intrinsics.Parameters.Cx, calibration.ColorCameraCalibration.Intrinsics.Parameters.Cy);

            var depthToColor = calibration.Convert2DTo2D(depthCenter, 1000f, CalibrationGeometry.Depth, CalibrationGeometry.Color);
            Assert.IsNotNull(depthToColor);
            Assert.AreEqual(colorCenter, depthToColor);

            var colorToDepth = calibration.Convert2DTo2D(colorCenter, 2000f, CalibrationGeometry.Color, CalibrationGeometry.Depth);
            Assert.IsNotNull(colorToDepth);
            Assert.AreEqual(depthCenter, colorToDepth);

            // Non-positive depth values cannot be projected: conversion must report failure via null.
            Assert.IsNull(calibration.Convert2DTo2D(depthCenter, 0f, CalibrationGeometry.Depth, CalibrationGeometry.Color));
            Assert.IsNull(calibration.Convert2DTo2D(colorCenter, -10f, CalibrationGeometry.Color, CalibrationGeometry.Depth));
        }
Ejemplo n.º 5
0
        /// <summary>
        /// Creates dummy (no distortions, ideal pin-hole geometry, all sensors are aligned) but valid calibration data.
        /// This can be useful for testing and stubbing needs.
        /// </summary>
        /// <param name="depthMode">Depth mode for which dummy calibration should be created. Can be <see cref="DepthMode.Off"/>.</param>
        /// <param name="colorResolution">Color resolution for which dummy calibration should be created. Can be <see cref="ColorResolution.Off"/>.</param>
        /// <param name="calibration">Result: created dummy calibration data.</param>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="depthMode"/> and <paramref name="colorResolution"/> cannot be equal to <c>Off</c> simultaneously.</exception>
        public static void CreateDummy(DepthMode depthMode, ColorResolution colorResolution, out Calibration calibration)
        {
            // Both cameras off would yield a meaningless calibration; fail fast like the documented contract requires.
            if (depthMode == DepthMode.Off && colorResolution == ColorResolution.Off)
            {
                throw new ArgumentOutOfRangeException(nameof(depthMode) + " and " + nameof(colorResolution), $"{nameof(depthMode)} and {nameof(colorResolution)} cannot be equal to Off simultaneously.");
            }

            calibration = default(Calibration);

            // depth camera: nominal FOV and pixel dimensions for the requested mode
            calibration.DepthMode = depthMode;
            depthMode.GetNominalFov(out var hFovDegrees, out var vFovDegrees);
            InitDummyCameraCalibration(ref calibration.DepthCameraCalibration,
                                       depthMode.WidthPixels(), depthMode.HeightPixels(),
                                       hFovDegrees, vFovDegrees);

            // color camera: same ideal pin-hole model at the color resolution
            calibration.ColorResolution = colorResolution;
            colorResolution.GetNominalFov(out hFovDegrees, out vFovDegrees);
            InitDummyCameraCalibration(ref calibration.ColorCameraCalibration,
                                       colorResolution.WidthPixels(), colorResolution.HeightPixels(),
                                       hFovDegrees, vFovDegrees);

            // extrinsics: one entry per (from, to) sensor pair
            calibration.Extrinsics = new CalibrationExtrinsics[(int)CalibrationGeometry.Count * (int)CalibrationGeometry.Count];
            for (var i = 0; i < calibration.Extrinsics.Length; i++)
            {
                InitDummyExtrinsics(ref calibration.Extrinsics[i]);
            }
        }
Ejemplo n.º 6
0
        private void TestDummyCalibration(DepthMode depthMode, ColorResolution colorResolution)
        {
            // A dummy calibration must be valid and echo back the requested modes.
            Calibration.CreateDummy(depthMode, colorResolution, out var calibration);

            Assert.IsTrue(calibration.IsValid);
            Assert.AreEqual(colorResolution, calibration.ColorResolution);
            Assert.AreEqual(depthMode, calibration.DepthMode);
        }
Ejemplo n.º 7
0
        /// <summary>Gets the camera calibration for the entire Azure Kinect device.</summary>
        /// <param name="depthMode">Mode in which depth camera is operated.</param>
        /// <param name="colorResolution">Resolution in which color camera is operated.</param>
        /// <param name="calibration">Output: calibration data.</param>
        /// <remarks><para>
        /// The calibration describes how to transform between the camera views and may differ for each
        /// <paramref name="depthMode"/> and <paramref name="colorResolution"/> the device is configured to operate in.
        /// </para><para>
        /// The resulting <paramref name="calibration"/> is used as input to all calibration and transformation functions.
        /// </para></remarks>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="depthMode"/> and <paramref name="colorResolution"/> cannot be equal to <c>Off</c> simultaneously.</exception>
        /// <exception cref="ObjectDisposedException">This method cannot be called for disposed object.</exception>
        /// <exception cref="DeviceConnectionLostException">Connection with Azure Kinect device has been lost.</exception>
        /// <exception cref="InvalidOperationException">Cannot read calibration data for some unexpected reason. See logs for details.</exception>
        public void GetCalibration(DepthMode depthMode, ColorResolution colorResolution, out Calibration calibration)
        {
            var bothCamerasOff = depthMode == DepthMode.Off && colorResolution == ColorResolution.Off;
            if (bothCamerasOff)
            {
                throw new ArgumentOutOfRangeException(nameof(depthMode) + " and " + nameof(colorResolution), $"{nameof(depthMode)} and {nameof(colorResolution)} cannot be equal to Off simultaneously.");
            }

            // ValueNotDisposed presumably throws ObjectDisposedException for a disposed handle;
            // CheckResult maps native failure codes to the exceptions documented above.
            CheckResult(NativeApi.DeviceGetCalibration(handle.ValueNotDisposed, depthMode, colorResolution, out calibration));
        }
Ejemplo n.º 8
0
        private void TestConvert3DTo3D(DepthMode depthMode, ColorResolution colorResolution)
        {
            Calibration.CreateDummy(depthMode, colorResolution, out var calibration);

            // In a dummy calibration all sensors are aligned, so gyro -> accel must be the identity transform.
            var source = new Float3(10f, 10f, 1000f);
            var converted = calibration.Convert3DTo3D(source, CalibrationGeometry.Gyro, CalibrationGeometry.Accel);

            Assert.AreEqual(source, converted);
        }
 void CheckForChanges()
 {
     // Only restart the sensor when at least one capture setting differs from the last applied one.
     var settingsChanged = lastDepthMode != depthMode ||
                           lastColorResolution != colorResolution ||
                           lastFPS != fps ||
                           lastTransformationMode != transformationMode;
     if (!settingsChanged)
     {
         return;
     }

     StartKinect();

     // Remember what was applied so the next call can detect further changes.
     lastDepthMode = depthMode;
     lastColorResolution = colorResolution;
     lastFPS = fps;
     lastTransformationMode = transformationMode;
 }
Ejemplo n.º 10
0
        /// <summary>Gets the camera calibration for a device from a raw calibration blob.</summary>
        /// <param name="raw">Raw calibration blob obtained from a device or recording. Cannot be <see langword="null"/>.</param>
        /// <param name="depthMode">Mode in which depth camera is operated.</param>
        /// <param name="colorResolution">Resolution in which color camera is operated.</param>
        /// <returns>Calibration data parsed from the blob.</returns>
        /// <exception cref="ArgumentNullException"><paramref name="raw"/> is <see langword="null"/>.</exception>
        public static Calibration GetFromRaw(byte[] raw, DepthMode depthMode, ColorResolution colorResolution)
        {
            // Guard: raw.Length below would otherwise fail with NullReferenceException instead of a clear argument error.
            if (raw == null)
            {
                throw new ArgumentNullException(nameof(raw));
            }

            AzureKinectException.ThrowIfNotSuccess(NativeMethods.k4a_calibration_get_from_raw(
                                                       raw,
                                                       (UIntPtr)raw.Length,
                                                       depthMode,
                                                       colorResolution,
                                                       out Calibration calibration));

            return(calibration);
        }
Ejemplo n.º 11
0
        /// <summary>Gets the camera calibration from a raw calibration blob.</summary>
        /// <param name="rawCalibration">Raw calibration blob obtained from a device or recording. Must be <c>0</c>-terminated. Cannot be <see langword="null"/>.</param>
        /// <param name="depthMode">Mode in which depth camera is operated.</param>
        /// <param name="colorResolution">Resolution in which color camera is operated.</param>
        /// <param name="calibration">Result: calibration data.</param>
        /// <exception cref="ArgumentNullException"><paramref name="rawCalibration"/> cannot be <see langword="null"/>.</exception>
        /// <exception cref="ArgumentException"><paramref name="rawCalibration"/> must be 0-terminated.</exception>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="depthMode"/> and <paramref name="colorResolution"/> cannot be equal to <c>Off</c> simultaneously.</exception>
        /// <exception cref="InvalidOperationException">The native API rejected the supplied parameters.</exception>
        public static void CreateFromRaw(byte[] rawCalibration, DepthMode depthMode, ColorResolution colorResolution, out Calibration calibration)
        {
            if (rawCalibration == null)
            {
                throw new ArgumentNullException(nameof(rawCalibration));
            }
            // The native parser requires a 0-terminated blob; fail fast with a clear message instead of a native error.
            if (Array.IndexOf(rawCalibration, (byte)0) < 0)
            {
                throw new ArgumentException($"{nameof(rawCalibration)} must be 0-terminated.", nameof(rawCalibration));
            }
            // Both cameras Off cannot produce a meaningful calibration.
            if (depthMode == DepthMode.Off && colorResolution == ColorResolution.Off)
            {
                throw new ArgumentOutOfRangeException(nameof(depthMode) + " and " + nameof(colorResolution), $"{nameof(depthMode)} and {nameof(colorResolution)} cannot be equal to Off simultaneously.");
            }
            var res = NativeApi.CalibrationGetFromRaw(rawCalibration, Helpers.Int32ToUIntPtr(rawCalibration.Length), depthMode, colorResolution, out calibration);

            if (res == NativeCallResults.Result.Failed)
            {
                throw new InvalidOperationException("Cannot create calibration from parameters specified.");
            }
        }
Ejemplo n.º 12
0
        /// <summary>Gets the camera calibration for this device in the given operating modes.</summary>
        /// <param name="depthMode">Mode in which depth camera is operated.</param>
        /// <param name="colorResolution">Resolution in which color camera is operated.</param>
        /// <returns>Calibration data read from the device.</returns>
        /// <exception cref="ObjectDisposedException">The device has already been disposed.</exception>
        public Calibration GetCalibration(DepthMode depthMode, ColorResolution colorResolution)
        {
            // NOTE(review): lock (this) is an anti-pattern — external code can take the same lock and
            // deadlock us. A private readonly gate field would be safer, but that requires a change
            // outside this method; flagged for a follow-up.
            lock (this)
            {
                // Disposal check and native call happen under the same lock so Dispose cannot
                // invalidate the handle between the check and the use.
                if (disposedValue)
                {
                    throw new ObjectDisposedException(nameof(Device));
                }

                AzureKinectException.ThrowIfNotSuccess(NativeMethods.k4a_device_get_calibration(handle, depthMode, colorResolution, out Calibration calibration));
                return(calibration);
            }
        }
Ejemplo n.º 13
0
        /// <summary>
        /// Starts synchronized audio (NAudio) and video (Azure Kinect) capture for recording and display.
        /// </summary>
        /// <param name="camera">Azure Kinect device used as the video source.</param>
        /// <param name="field">Field descriptor associated with this recording session; incorporated into the recorded file name.</param>
        /// <param name="mic">Microphone used as the audio source.</param>
        public IntegratedRecorder(Device camera, Field field, MMDevice mic)
        {
            this.camera         = camera;
            this.microphone     = mic;
            this.field          = field;
            mutAudioFileProcess = new Mutex();
            if (audioCaptureDevice == null)
            {
                audioCaptureDevice = CreateWaveInDevice();
            }
            bytesPerSample = audioCaptureDevice.WaveFormat.BitsPerSample / 8;

            // Fixed camera configuration: 720p MJPG color at 30 FPS, depth disabled.
            DepthMode       depthMode       = DepthMode.Off;
            ColorResolution colorResolution = ColorResolution.R720p; // renamed: local previously shadowed the ColorResolution type name
            FrameRate       frameRate       = FrameRate.Thirty;

            cameraConfig = new DeviceConfiguration
            {
                CameraFps       = frameRate,
                ColorFormat     = ImageFormat.ColorMjpg,
                ColorResolution = colorResolution,
                DepthMode       = depthMode,
                WiredSyncMode   = WiredSyncMode.Standalone,
            };
            this.camera.StartCameras(cameraConfig);

            // Forcibly turn on the microphone (some programs (Skype) turn it off).
            microphone.AudioEndpointVolume.Mute = false;
            // Not really starting to record; just enables calculating the volume peak value.
            // refer to: https://github.com/naudio/NAudio/blob/master/Docs/RecordingLevelMeter.md
            audioCaptureDevice.StartRecording();

            qVideoBufferToDisplay            = new Queue <Capture>();
            mutVideoRecord                   = new Mutex();
            mutVideoDisplay                  = new Mutex();
            mutAudioDisplay                  = new Mutex();
            // Frame extraction runs at highest priority to avoid dropping captures.
            threadVideoFrameExtract          = new Thread(() => ImageExtractLoop());
            threadVideoFrameExtract.Priority = ThreadPriority.Highest;
            threadVideoFrameExtract.Start();

            threadVideoDisplay          = new Thread(() => VideoDisplayLoop());
            threadVideoDisplay.Priority = ThreadPriority.Lowest;
            threadVideoDisplay.Start();

            qAudioBufferToDisplay       = new Queue <WaveInEventArgs>();
            threadAudioDisplay          = new Thread(() => AudioDisplay());
            threadAudioDisplay.Priority = ThreadPriority.Lowest;
            threadAudioDisplay.Start();

            stopwatchSampleRate.Start();
        }
Ejemplo n.º 14
0
        /// <summary>
        /// Creates dummy (no distortions, ideal pin-hole geometry, all sensors are aligned, there is specified distance between depth and color cameras) but valid calibration data.
        /// This can be useful for testing and stubbing needs.
        /// </summary>
        /// <param name="depthMode">Depth mode for which dummy calibration should be created. Can be <see cref="DepthMode.Off"/>.</param>
        /// <param name="colorResolution">Color resolution for which dummy calibration should be created. Can be <see cref="ColorResolution.Off"/>.</param>
        /// <param name="distanceBetweenDepthAndColorMm">Distance (horizontal) between depth and color cameras.</param>
        /// <param name="calibration">Result: created dummy calibration data for <paramref name="depthMode"/> and <paramref name="colorResolution"/> specified.</param>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="depthMode"/> and <paramref name="colorResolution"/> cannot be equal to <c>Off</c> simultaneously.</exception>
        public static void CreateDummy(DepthMode depthMode, ColorResolution colorResolution, float distanceBetweenDepthAndColorMm,
                                       out Calibration calibration)
        {
            // Start from the aligned dummy calibration, then shift the color camera horizontally.
            CreateDummy(depthMode, colorResolution, out calibration);

            var extr = calibration.GetExtrinsics(CalibrationGeometry.Color, CalibrationGeometry.Depth);

            extr.Translation = new Float3(distanceBetweenDepthAndColorMm, 0, 0);
            calibration.SetExtrinsics(CalibrationGeometry.Color, CalibrationGeometry.Depth, extr);

            // The opposite direction gets the negated translation so the two transforms stay inverses.
            extr             = calibration.GetExtrinsics(CalibrationGeometry.Depth, CalibrationGeometry.Color);
            extr.Translation = new Float3(-distanceBetweenDepthAndColorMm, 0, 0);
            calibration.SetExtrinsics(CalibrationGeometry.Depth, CalibrationGeometry.Color, extr);
        }
Ejemplo n.º 15
0
        /// <summary>
        /// Main entry point.
        /// </summary>
        public static void Main()
        {
            // camera resolution settings
            const ColorResolution resolution   = ColorResolution.R720p;
            const int             widthSource  = 1280;
            const int             heightSource = 720;

            // down sampled resolution
            const int    widthOutput       = 80;
            const int    heightOutput      = 45;
            const double scaleFactorWidth  = (double)widthOutput / widthSource;
            const double scaleFactorHeight = (double)heightOutput / heightSource;

            // background subtraction beyond this depth
            const double maxDepth = 1.0; // meters

            const SensorOrientation initialOrientation = SensorOrientation.Default;

            using (var pipeline = Pipeline.Create("AzureKinectSample", DeliveryPolicy.LatestMessage))
            {
                var azureKinect = new AzureKinectSensor(
                    pipeline,
                    new AzureKinectSensorConfiguration()
                {
                    OutputImu                = true,
                    ColorResolution          = resolution,
                    DepthMode                = DepthMode.WFOV_Unbinned,
                    CameraFPS                = FPS.FPS15,
                    BodyTrackerConfiguration = new AzureKinectBodyTrackerConfiguration()
                    {
                        CpuOnlyMode       = true, // false if CUDA supported GPU available
                        SensorOrientation = initialOrientation,
                    },
                });

                StringBuilder     sb = new StringBuilder();
                SensorOrientation lastOrientation = (SensorOrientation)(-1); // detect orientation changes

                // consuming color, depth, IMU, body tracking, calibration
                azureKinect.ColorImage.Resize(widthOutput, heightOutput)
                .Join(azureKinect.DepthImage)
                .Join(azureKinect.Imu, TimeSpan.FromMilliseconds(10))
                .Pair(azureKinect.Bodies)
                .Pair(azureKinect.DepthDeviceCalibrationInfo)
                .Do(message =>
                {
                    var(color, depth, imu, bodies, calib) = message;
 // Checks that a calibration matches the requested modes, resolutions, and one of the two intrinsic models the SDK produces.
 void ValidateCalibration(Calibration cal,
                          DepthMode depthMode,
                          ColorResolution colorResolution,
                          int depthWidth, int depthHeight,
                          int colorWidth, int colorHeight)
 {
     // The calibration must echo back the requested operating modes.
     Assert.AreEqual(depthMode, cal.DepthMode);
     Assert.AreEqual(colorResolution, cal.ColorResolution);

     // Per-camera pixel dimensions.
     Assert.AreEqual(depthWidth, cal.DepthCameraCalibration.ResolutionWidth);
     Assert.AreEqual(depthHeight, cal.DepthCameraCalibration.ResolutionHeight);
     Assert.AreEqual(colorWidth, cal.ColorCameraCalibration.ResolutionWidth);
     Assert.AreEqual(colorHeight, cal.ColorCameraCalibration.ResolutionHeight);

     // Intrinsic model types.
     var depthType = cal.DepthCameraCalibration.Intrinsics.Type;
     var colorType = cal.ColorCameraCalibration.Intrinsics.Type;
     Assert.IsTrue(depthType == CalibrationModelType.Rational6KT || depthType == CalibrationModelType.BrownConrady);
     Assert.IsTrue(colorType == CalibrationModelType.Rational6KT || colorType == CalibrationModelType.BrownConrady);
 }
Ejemplo n.º 17
0
 /// <summary>Gets the nominal field of view for a color resolution; zero for unknown/Off resolutions.</summary>
 public static void GetNominalFov(this ColorResolution resolution, out float horizontalDegrees, out float verticalDegrees)
 {
     // The 4:3 and 16:9 resolution sets are disjoint, so branch order does not affect the result.
     if (resolution.IsAspectRatio4to3())
     {
         horizontalDegrees = NOMINAL_HFOV_DEGREES;
         verticalDegrees   = NOMINAL_VFOV_4_3_DEGREES;
     }
     else if (resolution.IsAspectRatio16to9())
     {
         horizontalDegrees = NOMINAL_HFOV_DEGREES;
         verticalDegrees   = NOMINAL_VFOV_16_9_DEGREES;
     }
     else
     {
         // Unknown resolution (e.g. Off): no meaningful field of view.
         horizontalDegrees = 0;
         verticalDegrees   = 0;
     }
 }
Ejemplo n.º 18
0
 /// <summary>Loads configuration fields from a JSON string; returns false if deserialization fails.</summary>
 public bool Import(string json)
 {
     try
     {
         // Deserialize and copy every persisted setting onto this instance.
         var parsed = JsonUtility.FromJson <KinectConfiguration>(json);
         this.depthMode          = parsed.depthMode;
         this.colorResolution    = parsed.colorResolution;
         this.transformationMode = parsed.transformationMode;
         this.fps                = parsed.fps;
         this.volumeScale        = parsed.volumeScale;
         this.depthRangeModifier = parsed.depthRangeModifier;
         return true;
     }
     catch (Exception ex)
     {
         // Deliberate best-effort: report the failure instead of propagating it.
         Debug.Log("Kinect Configuration deserialization failed with :" + ex.Message);
         return false;
     }
 }
Ejemplo n.º 19
0
        /// <summary>Gets the camera calibration for a device from a raw calibration blob.</summary>
        /// <param name="rawCalibration">Raw calibration blob obtained from a device or recording. The raw calibration must be <c>0</c>-terminated. Cannot be <see langword="null"/>.</param>
        /// <param name="depthMode">Mode in which depth camera is operated.</param>
        /// <param name="colorResolution">Resolution in which color camera is operated.</param>
        /// <param name="calibration">Result: calibration data.</param>
        /// <exception cref="ArgumentNullException"><paramref name="rawCalibration"/> cannot be <see langword="null"/>.</exception>
        /// <exception cref="ArgumentException"><paramref name="rawCalibration"/> must be 0-terminated.</exception>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="depthMode"/> and <paramref name="colorResolution"/> cannot be equal to <c>Off</c> simultaneously.</exception>
        public static void CreateFromRaw(byte[] rawCalibration, DepthMode depthMode, ColorResolution colorResolution, out Calibration calibration)
        {
            // Argument validation, most basic first: null, missing 0-terminator, unsupported Off/Off combination.
            if (rawCalibration == null)
            {
                throw new ArgumentNullException(nameof(rawCalibration));
            }
            if (rawCalibration.IndexOf(0) < 0)
            {
                throw new ArgumentException($"{nameof(rawCalibration)} must be 0-terminated.", nameof(rawCalibration));
            }
            var bothCamerasOff = depthMode == DepthMode.Off && colorResolution == ColorResolution.Off;
            if (bothCamerasOff)
            {
                throw new ArgumentOutOfRangeException(nameof(depthMode) + " and " + nameof(colorResolution), $"{nameof(depthMode)} and {nameof(colorResolution)} cannot be equal to Off simultaneously.");
            }

            var result = NativeApi.CalibrationGetFromRaw(rawCalibration, Helpers.Int32ToUIntPtr(rawCalibration.Length), depthMode, colorResolution, out calibration);
            if (result == NativeCallResults.Result.Failed)
            {
                throw new InvalidOperationException("Cannot create calibration from parameters specified.");
            }
        }
Ejemplo n.º 20
0
        /// <summary>Round-trips the depth principal point through the color camera and back via a synthetic depth image.</summary>
        private void TestConvertColor2DToDepth2D(DepthMode depthMode, ColorResolution colorResolution)
        {
            Calibration.CreateDummy(depthMode, colorResolution, 30, out var calibration);

            // Project the depth principal point into the color camera at a known constant depth.
            var depth2d = new Float2(calibration.DepthCameraCalibration.Intrinsics.Parameters.Cx, calibration.DepthCameraCalibration.Intrinsics.Parameters.Cy);
            var depthMm = (short)1800;
            var color2d = calibration.Convert2DTo2D(depth2d, depthMm, CalibrationGeometry.Depth, CalibrationGeometry.Color).Value;

            // Build a synthetic depth image where every pixel reads depthMm.
            var depthImageBuffer = new short[depthMode.WidthPixels() * depthMode.HeightPixels()];
            for (var i = 0; i < depthImageBuffer.Length; i++)
            {
                depthImageBuffer[i] = depthMm;
            }

            // using guarantees the image is released even if an assertion throws
            // (the original called Dispose() after the asserts and leaked on failure).
            using (var depthImage = Image.CreateFromArray(depthImageBuffer, ImageFormat.Depth16, depthMode.WidthPixels(), depthMode.HeightPixels()))
            {
                // Round trip: the color-space point should land back near the depth principal point.
                var point2d = calibration.ConvertColor2DToDepth2D(color2d, depthImage);

                Assert.IsNotNull(point2d);
                Assert.IsTrue(Math.Abs(depth2d.X - point2d.Value.X) < 1f);
                Assert.IsTrue(Math.Abs(depth2d.Y - point2d.Value.Y) < 1f);
            }
        }
Ejemplo n.º 21
0
 /// <summary>Returns image height in pixels for a given resolution.</summary>
 public static int HeightPixels(this ColorResolution resolution)
 {
     // Table lookup keyed by the enum's underlying integer value.
     return heights[(int)resolution];
 }
    void Start()
    {
        // Optionally pull capture settings from the shared application config.
        if (useAppConfig)
        {
            AppConfig conf = AppManager.AppConfig;
            colorResolution = conf.ColorResolution;
            imageFormat     = conf.ImageFormat;
            fps             = conf.Fps;
            depthMode       = conf.DepthMode;
            processingMode  = conf.ProcessingMode;
        }

        // Locate the compute-shader kernels that convert raw frames into textures.
        colourKernelId = computeShader.FindKernel("ColourTex");
        irKernelId     = computeShader.FindKernel("IRTex");
        depthKernelId  = computeShader.FindKernel("DepthTex");

        // Only 720p and 1080p are supported here; anything else falls back to 1080p.
        if (colorResolution == ColorResolution.R720p)
        {
            colourWidth  = 1280;
            colourHeight = 720;
        }
        else if (colorResolution == ColorResolution.R1080p)
        {
            colourWidth  = 1920;
            colourHeight = 1080;
        }
        else
        {
            colorResolution = ColorResolution.R1080p;
            colourWidth     = 1920;
            colourHeight    = 1080;
        }

        if (uiEnabled)
        {
            // Local helper: a render texture the compute shader can write into.
            RenderTexture CreateWritableTexture(int width, int height)
            {
                var tex = new RenderTexture(width, height, 24)
                {
                    enableRandomWrite = true
                };
                tex.Create();
                return tex;
            }

            colourTex           = CreateWritableTexture(colourWidth, colourHeight);
            colourImage.texture = colourTex;

            irTex           = CreateWritableTexture(irWidth, irHeight);
            irImage.texture = irTex;

            depthTex           = CreateWritableTexture(colourWidth, colourHeight);
            depthImage.texture = depthTex;
        }

        Init();
    }
Ejemplo n.º 23
0
 // Convenience overload: derives the expected width/height from the color resolution
 // and delegates to the full parameter-checking overload.
 private static void CheckImageParameter(string paramName, Image paramValue, ImageFormat expectedFormat, ColorResolution colorResolution)
 {
     CheckImageParameter(paramName, paramValue, expectedFormat, expectedFormat, colorResolution.WidthPixels(), colorResolution.HeightPixels());
 }
Ejemplo n.º 24
0
 // P/Invoke binding for k4a_calibration_get_from_raw() from the Azure Kinect Sensor SDK:
 // parses a raw calibration blob for the given depth mode and color resolution into a Calibration.
 // NOTE(review): a [DllImport] attribute presumably precedes this declaration outside this snippet — verify.
 public static extern k4a_result_t k4a_calibration_get_from_raw(
     byte[] raw_calibration,
     UIntPtr raw_calibration_size,
     DepthMode depth_mode,
     ColorResolution color_resolution,
     out Calibration calibration);
Ejemplo n.º 25
0
 // P/Invoke binding for k4a_device_get_calibration() from the Azure Kinect Sensor SDK:
 // reads the device calibration for the given depth mode and color resolution.
 // NOTE(review): a [DllImport] attribute presumably precedes this declaration outside this snippet — verify.
 public static extern k4a_result_t k4a_device_get_calibration(
     k4a_device_t device_handle,
     DepthMode depth_mode,
     ColorResolution color_resolution,
     out Calibration calibration);
Ejemplo n.º 26
0
 // https://docs.microsoft.com/en-us/azure/Kinect-dk/hardware-specification
 // Per the code below, the only unsupported combination is R3072p at 30 FPS.
 public static bool IsCompatibleWith(this ColorResolution colorResolution, FrameRate frameRate)
 => colorResolution != ColorResolution.R3072p || frameRate != FrameRate.Thirty;
Ejemplo n.º 27
0
 // Per this predicate: BGRA32 and MJPG work at every resolution; NV12 and YUY2 only at 720p.
 public static bool IsCompatibleWith(this ColorResolution colorResolution, ImageFormat imageFormat)
 {
     if (imageFormat == ImageFormat.ColorBgra32 || imageFormat == ImageFormat.ColorMjpg)
     {
         return true;
     }

     var isYuvFormat = imageFormat == ImageFormat.ColorNV12 || imageFormat == ImageFormat.ColorYUY2;
     return colorResolution == ColorResolution.R720p && isYuvFormat;
 }
Ejemplo n.º 28
0
 // True for the 16:9 color resolutions: 720p, 1080p, 1440p, and 2160p.
 public static bool IsAspectRatio16to9(this ColorResolution resolution)
 {
     switch (resolution)
     {
     case ColorResolution.R720p:
     case ColorResolution.R1080p:
     case ColorResolution.R1440p:
     case ColorResolution.R2160p:
         return true;

     default:
         return false;
     }
 }
Ejemplo n.º 29
0
 // True for the 4:3 color resolutions: 1536p and 3072p.
 public static bool IsAspectRatio4to3(this ColorResolution resolution)
 {
     switch (resolution)
     {
     case ColorResolution.R1536p:
     case ColorResolution.R3072p:
         return true;

     default:
         return false;
     }
 }
Ejemplo n.º 30
0
 /// <summary>Returns image width in pixels for a given resolution.</summary>
 public static int WidthPixels(this ColorResolution resolution)
 {
     // Width is derived from the height via the resolution's aspect ratio.
     var height = resolution.HeightPixels();
     if (resolution.IsAspectRatio4to3())
     {
         return height * 4 / 3;
     }
     return height * 16 / 9;
 }