Code Example #1
 /// <summary>
 /// ValueType copy from another SessionConfig object into this one.
 /// </summary>
 /// <param name="other">The SessionConfig to copy from.</param>
 public void CopyFrom(ARCoreSessionConfig other)
 {
     MatchCameraFramerate   = other.MatchCameraFramerate;
     PlaneFindingMode       = other.PlaneFindingMode;
     LightEstimationMode    = other.LightEstimationMode;
     CloudAnchorMode        = other.CloudAnchorMode;
     AugmentedImageDatabase = other.AugmentedImageDatabase;
     CameraFocusMode        = other.CameraFocusMode;
     AugmentedFaceMode      = other.AugmentedFaceMode;
     DepthMode              = other.DepthMode;
     InstantPlacementMode   = other.InstantPlacementMode;
 }
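A minimal usage sketch; sessionConfig and template are hypothetical ARCoreSessionConfig references (e.g. assigned in a Unity Inspector):
 // Hypothetical usage: value-copy a template configuration into the active one.
 public ARCoreSessionConfig sessionConfig;
 public ARCoreSessionConfig template;

 void ApplyTemplate()
 {
     sessionConfig.CopyFrom(template);
 }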
Code Example #2
        public static ApiDepthMode ToApiDepthMode(this DepthMode depthMode)
        {
            switch (depthMode)
            {
            case DepthMode.Automatic:
                return(ApiDepthMode.Automatic);

            case DepthMode.Disabled:
            default:
                return(ApiDepthMode.Disabled);
            }
        }
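Usage is a one-liner, since this is an extension method on DepthMode; a sketch using the values from the switch above:
        // Maps the Unity-facing enum value to the native API enum.
        ApiDepthMode apiMode = DepthMode.Automatic.ToApiDepthMode(); // ApiDepthMode.Automatic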
Code Example #3
        private IEnumerator Process(DepthMode depthMode, bool cpuOnly)
        {
            var debugDelegate = new AzureKinectBodyTracker.DebugLogDelegate(PluginDebugLogCallBack);
            var debugCallback = Marshal.GetFunctionPointerForDelegate(debugDelegate);

            AzureKinectBodyTracker.SetDebugLogCallback(debugCallback);

            var bodyRecognizedDelegate = new AzureKinectBodyTracker.BodyRecognizedDelegate(this.BodyRecognizedCallback);
            var bodyRecognizedCallback = Marshal.GetFunctionPointerForDelegate(bodyRecognizedDelegate);

            AzureKinectBodyTracker.SetBodyRecognizedCallback(bodyRecognizedCallback);

            var depthTextureId = 1u;
            var depthWidth     = (int)AzureKinectBodyTracker.DepthResolutions[depthMode].x;
            var depthHeight    = (int)AzureKinectBodyTracker.DepthResolutions[depthMode].y;

            this.depthTexture = new Texture2D((depthWidth > 0) ? depthWidth : 1, (depthHeight > 0) ? depthHeight : 1, TextureFormat.R16, false);
            this.depthMaterial.mainTexture = this.depthTexture;
            var colorTextureId = 2u;

            this.colorTexture = new Texture2D(1920, 1080, TextureFormat.BGRA32, false);
            this.colorMaterial.mainTexture = this.colorTexture;
            var transformedDepthTextureId = 3u;

            this.transformedDepthTexture = new Texture2D(1920, 1080, TextureFormat.R16, false);
            this.transformedDepthMaterial.mainTexture = this.transformedDepthTexture;

            var callback      = AzureKinectBodyTracker.GetTextureUpdateCallback();
            var commandBuffer = new CommandBuffer();

            commandBuffer.name = "AzureKinectImagesUpdate";
            commandBuffer.IssuePluginCustomTextureUpdateV2(callback, this.depthTexture, depthTextureId);
            commandBuffer.IssuePluginCustomTextureUpdateV2(callback, this.colorTexture, colorTextureId);
            commandBuffer.IssuePluginCustomTextureUpdateV2(callback, this.transformedDepthTexture, transformedDepthTextureId);
            try
            {
                AzureKinectBodyTracker.Start(depthTextureId, colorTextureId, transformedDepthTextureId, depthMode, cpuOnly);
                this.currentDepthMode = depthMode;
            }
            catch (K4ABTException)
            {
                this.ProcessFinallize(false);
                yield break;
            }
            this.isRunning = true;
            while (this.isRunning)
            {
                Graphics.ExecuteCommandBuffer(commandBuffer);
                yield return(null);
            }
            AzureKinectBodyTracker.End();
            this.ProcessFinallize();
        }
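A hedged sketch of driving this coroutine; it assumes the enclosing class is a Unity MonoBehaviour and that clearing isRunning is the intended shutdown path (both inferred from the code above):
        // Usage sketch: start processing on enable, request shutdown on disable.
        private void OnEnable()
        {
            this.StartCoroutine(this.Process(DepthMode.NFOV_Unbinned, cpuOnly: false));
        }

        private void OnDisable()
        {
            this.isRunning = false; // the render loop exits, then End() and cleanup run
        }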
Code Example #4
File: Calibration.cs Project: Itpyc/k4a.net
        public static void CreateFromRaw(byte[] rawCalibration, DepthMode depthMode, ColorResolution colorResolution, out Calibration calibration)
        {
            if (rawCalibration == null)
            {
                throw new ArgumentNullException(nameof(rawCalibration));
            }
            var res = NativeApi.CalibrationGetFromRaw(rawCalibration, Helpers.Int32ToUIntPtr(rawCalibration.Length), depthMode, colorResolution, out calibration);

            if (res == NativeCallResults.Result.Failed)
            {
                throw new InvalidOperationException("Cannot create calibration from parameters specified.");
            }
        }
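A usage sketch; the file path and mode choices are assumptions, and the blob must be a raw calibration previously obtained from a device or recording:
        // Usage sketch: rebuild a Calibration from a previously saved raw blob (System.IO assumed).
        byte[] rawBlob = File.ReadAllBytes("calibration.raw"); // hypothetical source
        Calibration.CreateFromRaw(rawBlob, DepthMode.NFOV_Unbinned, ColorResolution.R720p, out var calibration);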
Code Example #5
        public Calibration GetCalibration(DepthMode depthMode, ColorResolution colorResolution)
        {
            lock (this)
            {
                if (disposedValue)
                {
                    throw new ObjectDisposedException(nameof(Device));
                }

                AzureKinectException.ThrowIfNotSuccess(NativeMethods.k4a_device_get_calibration(handle, depthMode, colorResolution, out Calibration calibration));
                return(calibration);
            }
        }
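A usage sketch against the same API surface; Device.Open(0) is the usual entry point in the Azure Kinect Sensor SDK for .NET:
        // Usage sketch: query calibration for the modes the cameras will run in.
        using (Device device = Device.Open(0))
        {
            Calibration calibration = device.GetCalibration(DepthMode.NFOV_Unbinned, ColorResolution.R720p);
        }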
Code Example #6
        /// <summary>
        /// Get the camera calibration for a device from a raw calibration blob.
        /// </summary>
        /// <param name="raw">Raw calibration blob obtained from a device or recording.</param>
        /// <param name="depthMode">Mode in which depth camera is operated.</param>
        /// <param name="colorResolution">Resolution in which the color camera is operated.</param>
        /// <returns>Calibration object.</returns>
        public static Calibration GetFromRaw(byte[] raw, DepthMode depthMode, ColorResolution colorResolution)
        {
            Calibration calibration = default;

            AzureKinectException.ThrowIfNotSuccess(() => NativeMethods.k4a_calibration_get_from_raw(
                                                       raw,
                                                       (UIntPtr)raw.Length,
                                                       depthMode,
                                                       colorResolution,
                                                       out calibration));

            return(calibration);
        }
Code Example #7
        /// <summary>
        /// Initializes the recorder: starts the Azure Kinect cameras and enables audio capture.
        /// </summary>
        /// <param name="camera">The Azure Kinect device to record from.</param>
        /// <param name="field">Field descriptor for this recording; incorporated into the recorded file name.</param>
        /// <param name="mic">The microphone endpoint used for audio capture.</param>
        public IntegratedRecorder(Device camera, Field field, MMDevice mic)
        {
            this.camera         = camera;
            this.microphone     = mic;
            this.field          = field;
            mutAudioFileProcess = new Mutex();
            if (audioCaptureDevice == null)
            {
                audioCaptureDevice = CreateWaveInDevice();
            }
            bytesPerSample = audioCaptureDevice.WaveFormat.BitsPerSample / 8;

            DepthMode       depthMode       = DepthMode.Off;
            ColorResolution colorResolution = ColorResolution.R720p;
            FrameRate       frameRate       = FrameRate.Thirty;

            cameraConfig = new DeviceConfiguration
            {
                CameraFps       = frameRate,
                ColorFormat     = ImageFormat.ColorMjpg,
                ColorResolution = colorResolution,
                DepthMode       = depthMode,
                WiredSyncMode   = WiredSyncMode.Standalone,
            };
            this.camera.StartCameras(cameraConfig);

            // Forcibly unmute the microphone (some programs, such as Skype, mute it).
            microphone.AudioEndpointVolume.Mute = false;
            // Not actually recording yet; this only enables computing the volume peak value,
            // refer to: https://github.com/naudio/NAudio/blob/master/Docs/RecordingLevelMeter.md
            audioCaptureDevice.StartRecording();

            qVideoBufferToDisplay            = new Queue <Capture>();
            mutVideoRecord                   = new Mutex();
            mutVideoDisplay                  = new Mutex();
            mutAudioDisplay                  = new Mutex();
            threadVideoFrameExtract          = new Thread(() => ImageExtractLoop());
            threadVideoFrameExtract.Priority = ThreadPriority.Highest;
            threadVideoFrameExtract.Start();

            threadVideoDisplay          = new Thread(() => VideoDisplayLoop());
            threadVideoDisplay.Priority = ThreadPriority.Lowest;
            threadVideoDisplay.Start();

            qAudioBufferToDisplay       = new Queue <WaveInEventArgs>();
            threadAudioDisplay          = new Thread(() => AudioDisplay());
            threadAudioDisplay.Priority = ThreadPriority.Lowest;
            threadAudioDisplay.Start();

            stopwatchSampleRate.Start();
        }
Code Example #8
File: Calibration.cs Project: metamagical/k4a.net
        /// <summary>
        /// Creates dummy but valid calibration data: no distortions, ideal pin-hole geometry, all sensors aligned, and the specified distance between the depth and color cameras.
        /// This can be useful for testing and stubbing needs.
        /// </summary>
        /// <param name="depthMode">Depth mode for which dummy calibration should be created. Can be <see cref="DepthMode.Off"/>.</param>
        /// <param name="distanceBetweenDepthAndColorMm">Distance (horizontal) between depth and color cameras.</param>
        /// <param name="colorResolution">Color resolution for which dummy calibration should be created. Can be <see cref="ColorResolution.Off"/>.</param>
        /// <param name="calibration">Result: created dummy calibration data for <paramref name="depthMode"/> and <paramref name="colorResolution"/> specified.</param>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="depthMode"/> and <paramref name="colorResolution"/> cannot be equal to <c>Off</c> simultaneously.</exception>
        public static void CreateDummy(DepthMode depthMode, ColorResolution colorResolution, float distanceBetweenDepthAndColorMm,
                                       out Calibration calibration)
        {
            CreateDummy(depthMode, colorResolution, out calibration);

            var extr = calibration.GetExtrinsics(CalibrationGeometry.Color, CalibrationGeometry.Depth);

            extr.Translation = new Float3(distanceBetweenDepthAndColorMm, 0, 0);
            calibration.SetExtrinsics(CalibrationGeometry.Color, CalibrationGeometry.Depth, extr);

            extr             = calibration.GetExtrinsics(CalibrationGeometry.Depth, CalibrationGeometry.Color);
            extr.Translation = new Float3(-distanceBetweenDepthAndColorMm, 0, 0);
            calibration.SetExtrinsics(CalibrationGeometry.Depth, CalibrationGeometry.Color, extr);
        }
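A usage sketch for tests, mirroring the call in Code Example #24 below (30 mm baseline between depth and color cameras):
        // Usage sketch: dummy but valid calibration with a 30 mm baseline.
        Calibration.CreateDummy(DepthMode.NFOV_Unbinned, ColorResolution.R720p, 30f, out var calibration);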
Code Example #9
File: Session.cs Project: AtulSingh2000/ARFaceMove
        /// <summary>
        /// Check whether the depth mode is supported on this device. Not all
        /// devices support depth, see the
        /// <a href="https://developers.google.com/ar/discover/supported-devices">
        /// ARCore supported devices</a> page for details.
        /// </summary>
        /// <param name="depthMode">The depth mode.</param>
        /// <returns>true if the depth mode is supported, false if it is not
        /// supported or the session has not yet been initialized.</returns>
        public static bool IsDepthModeSupported(DepthMode depthMode)
        {
            var nativeSession = LifecycleManager.Instance.NativeSession;

            if (nativeSession == null)
            {
                return(false);
            }

            bool result = nativeSession.SessionApi.IsDepthModeSupported(
                depthMode.ToApiDepthMode());

            return(result);
        }
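A hedged configuration sketch: gate the feature on device support before enabling it (sessionConfig is a hypothetical ARCoreSessionConfig reference):
        // Usage sketch: enable automatic depth only where the device supports it.
        sessionConfig.DepthMode = Session.IsDepthModeSupported(DepthMode.Automatic)
            ? DepthMode.Automatic
            : DepthMode.Disabled;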
Code Example #10
    public static Tuple <int, int> GetIrLevels(DepthMode depthMode)
    {
        switch (depthMode)
        {
        case DepthMode.PassiveIR:
            return(Tuple.Create(0, 100));

        case DepthMode.Off:
            throw new Exception("Invalid depth mode!");

        default:
            return(Tuple.Create(0, 1000));
        }
    }
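A usage sketch; the returned Tuple carries the suggested display range for IR values:
    // Usage sketch: pick a visualization range for the IR stream.
    Tuple<int, int> irRange = GetIrLevels(DepthMode.PassiveIR); // (0, 100)
    int irMin = irRange.Item1, irMax = irRange.Item2;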
Code Example #11
        public static ClipDepthMode Convert(this DepthMode mode)
        {
            switch (mode)
            {
            case DepthMode.MinusOneToOne:
                return(ClipDepthMode.NegativeOneToOne);

            case DepthMode.ZeroToOne:
                return(ClipDepthMode.ZeroToOne);
            }

            Logger.Debug?.Print(LogClass.Gpu, $"Invalid {nameof(DepthMode)} enum value: {mode}.");

            return(ClipDepthMode.NegativeOneToOne);
        }
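A usage sketch (Ryujinx context): translate the guest enum before handing it to the host pipeline:
        // Maps the guest depth convention to the host clip-space convention.
        ClipDepthMode clipMode = DepthMode.ZeroToOne.Convert(); // ClipDepthMode.ZeroToOne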
Code Example #12
        private void TestConvert3DTo2D(DepthMode depthMode, ColorResolution colorResolution)
        {
            Calibration.CreateDummy(depthMode, colorResolution, out var calibration);

            var depthCenter = new Float2(calibration.DepthCameraCalibration.Intrinsics.Parameters.Cx, calibration.DepthCameraCalibration.Intrinsics.Parameters.Cy);

            var point2d = calibration.Convert3DTo2D(new Float3(0f, 0f, 1000f), CalibrationGeometry.Depth, CalibrationGeometry.Depth);

            Assert.IsNotNull(point2d);
            Assert.AreEqual(depthCenter, point2d.Value);

            point2d = calibration.Convert3DTo2D(new Float3(0f, 0f, 2000f), CalibrationGeometry.Gyro, CalibrationGeometry.Depth);
            Assert.IsNotNull(point2d);
            Assert.AreEqual(depthCenter, point2d.Value);
        }
Code Example #13
        /// <summary>Adds an Azure Kinect sensor capture to the tracker input queue to generate its body tracking result asynchronously.</summary>
        /// <param name="capture">It should contain the depth data compatible with <see cref="DepthMode"/> for this function to work. Not <see langword="null"/>.</param>
        /// <param name="timeout">
        /// Specifies the time the function should block waiting to add the sensor capture to the tracker process queue.
        /// The default value is <see cref="Timeout.NoWait"/>, which means the status is checked without blocking.
        /// Passing <see cref="Timeout.Infinite"/> will block indefinitely until the capture is added to the process queue.
        /// </param>
        /// <returns>
        /// <see langword="true"/> - if a sensor capture is successfully added to the processing queue.
        /// <see langword="false"/> - if the queue is still full (see <see cref="IsQueueFull"/> property) before the <paramref name="timeout"/> elapses.
        /// </returns>
        /// <exception cref="ArgumentNullException"><paramref name="capture"/> cannot be <see langword="null"/>.</exception>
        /// <exception cref="ArgumentException"><paramref name="capture"/> doesn't contain depth data compatible with <see cref="DepthMode"/>.</exception>
        /// <exception cref="ObjectDisposedException">Object was disposed before this call or has been disposed during this call.</exception>
        /// <exception cref="BodyTrackingException">Cannot add capture to the tracker for some unknown reason. See logs for details.</exception>
        public bool TryEnqueueCapture(Capture capture, Timeout timeout = default(Timeout))
        {
            if (capture == null)
            {
                throw new ArgumentNullException(nameof(capture));
            }

            var res = NativeApi.TrackerEnqueueCapture(handle.ValueNotDisposed, Capture.ToHandle(capture), timeout);

            if (res == NativeCallResults.WaitResult.Timeout)
            {
                return(false);
            }
            if (res == NativeCallResults.WaitResult.Failed)
            {
                handle.CheckNotDisposed();      // to throw ObjectDisposedException() if failure is a result of disposing

                using (var depthImage = capture.DepthImage)
                {
                    if (depthImage == null)
                    {
                        throw new ArgumentException(
                                  "Capture should contain the depth data.",
                                  nameof(capture));
                    }
                    if (depthImage.Format != ImageFormat.Depth16)
                    {
                        throw new ArgumentException(
                                  $"Invalid format of depth data in capture: expected {ImageFormat.Depth16} but was {depthImage.Format}.",
                                  nameof(capture));
                    }
                    if (depthImage.WidthPixels != DepthMode.WidthPixels() || depthImage.HeightPixels != DepthMode.HeightPixels())
                    {
                        throw new ArgumentException(
                                  $"Invalid resolution of depth data in capture: expected {DepthMode.WidthPixels()}x{DepthMode.HeightPixels()} pixels but was {depthImage.WidthPixels}x{depthImage.HeightPixels} pixels.",
                                  nameof(capture));
                    }
                }

                throw new BodyTrackingException("Cannot add new capture to body tracking pipeline. See logs for details.");
            }

            Interlocked.Increment(ref queueSize);
            QueueSizeIncreased?.Invoke(this, EventArgs.Empty);

            return(true);
        }
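A hedged sketch of a typical producer loop; only TryEnqueueCapture is shown above, so the capture-acquisition helper below is a hypothetical stand-in for whatever the surrounding wrapper provides:
            // Usage sketch: feed sensor captures into the body tracking pipeline.
            while (running)
            {
                using (var capture = AcquireNextCapture()) // hypothetical helper
                {
                    if (capture != null)
                    {
                        tracker.TryEnqueueCapture(capture, Timeout.Infinite); // block until queued
                    }
                }
            }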
Code Example #14
 void ValidateCalibration(Calibration cal,
                          DepthMode depthMode,
                          ColorResolution colorResolution,
                          int depthWidth, int depthHeight,
                          int colorWidth, int colorHeight)
 {
     Assert.AreEqual(depthMode, cal.DepthMode);
     Assert.AreEqual(colorResolution, cal.ColorResolution);
     Assert.AreEqual(depthWidth, cal.DepthCameraCalibration.ResolutionWidth);
     Assert.AreEqual(depthHeight, cal.DepthCameraCalibration.ResolutionHeight);
     Assert.AreEqual(colorWidth, cal.ColorCameraCalibration.ResolutionWidth);
     Assert.AreEqual(colorHeight, cal.ColorCameraCalibration.ResolutionHeight);
     Assert.IsTrue(cal.DepthCameraCalibration.Intrinsics.Type == CalibrationModelType.Rational6KT ||
                   cal.DepthCameraCalibration.Intrinsics.Type == CalibrationModelType.BrownConrady);
     Assert.IsTrue(cal.ColorCameraCalibration.Intrinsics.Type == CalibrationModelType.Rational6KT ||
                   cal.ColorCameraCalibration.Intrinsics.Type == CalibrationModelType.BrownConrady);
 }
Code Example #15
        private void TestConvert2DTo3D(DepthMode depthMode, ColorResolution colorResolution)
        {
            Calibration.CreateDummy(depthMode, colorResolution, out var calibration);

            var depthCenter = new Float2(calibration.DepthCameraCalibration.Intrinsics.Parameters.Cx, calibration.DepthCameraCalibration.Intrinsics.Parameters.Cy);
            var colorCenter = new Float2(calibration.ColorCameraCalibration.Intrinsics.Parameters.Cx, calibration.ColorCameraCalibration.Intrinsics.Parameters.Cy);

            var point3d = calibration.Convert2DTo3D(depthCenter, 1000f, CalibrationGeometry.Depth, CalibrationGeometry.Depth);

            Assert.IsNotNull(point3d);
            Assert.AreEqual(0f, point3d.Value.X);
            Assert.AreEqual(0f, point3d.Value.Y);
            Assert.AreEqual(1000f, point3d.Value.Z);

            point3d = calibration.Convert2DTo3D(colorCenter, 2000f, CalibrationGeometry.Color, CalibrationGeometry.Color);
            Assert.IsNotNull(point3d);
            Assert.AreEqual(0f, point3d.Value.X);
            Assert.AreEqual(0f, point3d.Value.Y);
            Assert.AreEqual(2000f, point3d.Value.Z);

            point3d = calibration.Convert2DTo3D(colorCenter, 3000f, CalibrationGeometry.Color, CalibrationGeometry.Depth);
            Assert.IsNotNull(point3d);
            Assert.AreEqual(0f, point3d.Value.X);
            Assert.AreEqual(0f, point3d.Value.Y);
            Assert.AreEqual(3000f, point3d.Value.Z);

            point3d = calibration.Convert2DTo3D(depthCenter, 4000f, CalibrationGeometry.Depth, CalibrationGeometry.Color);
            Assert.IsNotNull(point3d);
            Assert.AreEqual(0f, point3d.Value.X);
            Assert.AreEqual(0f, point3d.Value.Y);
            Assert.AreEqual(4000f, point3d.Value.Z);

            point3d = calibration.Convert2DTo3D(depthCenter, 500f, CalibrationGeometry.Depth, CalibrationGeometry.Accel);
            Assert.IsNotNull(point3d);
            Assert.AreEqual(0f, point3d.Value.X);
            Assert.AreEqual(0f, point3d.Value.Y);
            Assert.AreEqual(500f, point3d.Value.Z);

            point3d = calibration.Convert2DTo3D(depthCenter, -500f, CalibrationGeometry.Depth, CalibrationGeometry.Color);
            Assert.IsNotNull(point3d);
            Assert.AreEqual(-500f, point3d.Value.Z);

            point3d = calibration.Convert2DTo3D(new Float2(50f, 100f), 0f, CalibrationGeometry.Color, CalibrationGeometry.Depth);
            Assert.IsNull(point3d);
        }
Code Example #16
File: DepthModes.cs Project: Itpyc/k4a.net
 public static void GetNominalFov(this DepthMode depthMode, out float horizontalDegrees, out float verticalDegrees)
 {
     if (depthMode == DepthMode.Off || depthMode == DepthMode.PassiveIR)
     {
         horizontalDegrees = 0;
         verticalDegrees   = 0;
     }
     else if (depthMode.IsWideView())
     {
         horizontalDegrees = NOMINAL_HFOV_WIDE_DEGREES;
         verticalDegrees   = NOMINAL_VFOV_WIDE_DEGREES;
     }
     else
     {
         horizontalDegrees = NOMINAL_HFOV_NARROW_DEGREES;
         verticalDegrees   = NOMINAL_VFOV_NARROW_DEGREES;
     }
 }
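Usage is straightforward, since this is an extension method on DepthMode:
     // Usage sketch: nominal field of view for the wide unbinned mode.
     DepthMode.WFOV_Unbinned.GetNominalFov(out float hFovDegrees, out float vFovDegrees);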
Code Example #17
 public bool Import(string json)
 {
     try
     {
         KinectConfiguration fromJson = JsonUtility.FromJson <KinectConfiguration>(json);
         this.transformationMode = fromJson.transformationMode;
         this.colorResolution    = fromJson.colorResolution;
         this.depthMode          = fromJson.depthMode;
         this.fps                = fromJson.fps;
         this.volumeScale        = fromJson.volumeScale;
         this.depthRangeModifier = fromJson.depthRangeModifier;
         return(true);
     }
     catch (Exception ex)
     {
         Debug.Log("Kinect Configuration deserialization failed with :" + ex.Message);
         return(false);
     }
 }
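A round-trip sketch; it assumes the same KinectConfiguration object serializes with Unity's JsonUtility, consistent with the FromJson call above:
     // Usage sketch: serialize, then restore via Import.
     string json = JsonUtility.ToJson(config); // config: a KinectConfiguration instance
     bool restored = config.Import(json);      // false (with a log entry) on bad JSON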
Code Example #18
    public static Tuple <int, int> GetDepthModeRange(DepthMode depthMode)
    {
        switch (depthMode)
        {
        case DepthMode.NFOV_2x2Binned:
            return(Tuple.Create(500, 5800));

        case DepthMode.NFOV_Unbinned:
            return(Tuple.Create(500, 4000));

        case DepthMode.WFOV_2x2Binned:
            return(Tuple.Create(250, 3000));

        case DepthMode.WFOV_Unbinned:
            return(Tuple.Create(250, 2500));

        case DepthMode.PassiveIR:
        default:
            throw new Exception("Invalid depth mode!");
        }
    }
Code Example #19
        private static (uint, uint) GetDepthModeRange(DepthMode depthMode)
        {
            switch (depthMode)
            {
            case DepthMode.NFOV_2x2Binned:
                return(500, 6800);

            case DepthMode.NFOV_Unbinned:
                return(500, 4000);

            case DepthMode.WFOV_2x2Binned:
                return(250, 3000);

            case DepthMode.WFOV_Unbinned:
                return(250, 2500);

            case DepthMode.PassiveIR:
            default:
                return(0, 0);
            }
        }
Code Example #20
File: Calibration.cs Project: metamagical/k4a.net
        /// <summary>Gets the camera calibration for a device from a raw calibration blob.</summary>
        /// <param name="rawCalibration">Raw calibration blob obtained from a device or recording. The raw calibration must be <c>0</c>-terminated. Cannot be <see langword="null"/>.</param>
        /// <param name="depthMode">Mode in which depth camera is operated.</param>
        /// <param name="colorResolution">Resolution in which color camera is operated.</param>
        /// <param name="calibration">Result: calibration data.</param>
        /// <exception cref="ArgumentNullException"><paramref name="rawCalibration"/> cannot be <see langword="null"/>.</exception>
        /// <exception cref="ArgumentException"><paramref name="rawCalibration"/> must be 0-terminated.</exception>
        /// <exception cref="ArgumentOutOfRangeException"><paramref name="depthMode"/> and <paramref name="colorResolution"/> cannot be equal to <c>Off</c> simultaneously.</exception>
        public static void CreateFromRaw(byte[] rawCalibration, DepthMode depthMode, ColorResolution colorResolution, out Calibration calibration)
        {
            if (rawCalibration == null)
            {
                throw new ArgumentNullException(nameof(rawCalibration));
            }
            if (rawCalibration.IndexOf(0) < 0)
            {
                throw new ArgumentException($"{nameof(rawCalibration)} must be 0-terminated.", nameof(rawCalibration));
            }
            if (depthMode == DepthMode.Off && colorResolution == ColorResolution.Off)
            {
                throw new ArgumentOutOfRangeException(nameof(depthMode) + " and " + nameof(colorResolution), $"{nameof(depthMode)} and {nameof(colorResolution)} cannot be equal to Off simultaneously.");
            }
            var res = NativeApi.CalibrationGetFromRaw(rawCalibration, Helpers.Int32ToUIntPtr(rawCalibration.Length), depthMode, colorResolution, out calibration);

            if (res == NativeCallResults.Result.Failed)
            {
                throw new InvalidOperationException("Cannot create calibration from parameters specified.");
            }
        }
Code Example #21
    public static Tuple <int, int> GetDepthDimensions(DepthMode depthMode)
    {
        switch (depthMode)
        {
        case DepthMode.NFOV_2x2Binned:
            return(Tuple.Create(320, 288));

        case DepthMode.NFOV_Unbinned:
            return(Tuple.Create(640, 576));

        case DepthMode.WFOV_2x2Binned:
            return(Tuple.Create(512, 512));

        case DepthMode.WFOV_Unbinned:
            return(Tuple.Create(1024, 1024));

        case DepthMode.PassiveIR:
            return(Tuple.Create(1024, 1024));

        default:
            throw new Exception("Invalid depth dimensions value!");
        }
    }
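A usage sketch: these dimensions size one Depth16 frame at two bytes per pixel (format per Code Example #13):
    // Usage sketch: allocate a buffer for one NFOV unbinned depth frame.
    Tuple<int, int> dims = GetDepthDimensions(DepthMode.NFOV_Unbinned); // (640, 576)
    int frameBytes = dims.Item1 * dims.Item2 * sizeof(short);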
Code Example #22
File: Methods.cs Project: gamedropswithpops/Ryujinx
        /// <summary>
        /// Updates host viewport transform and clipping state based on current GPU state.
        /// </summary>
        /// <param name="state">Current GPU state</param>
        private void UpdateViewportTransform(GpuState state)
        {
            DepthMode depthMode = state.Get <DepthMode>(MethodOffset.DepthMode);

            _context.Renderer.Pipeline.SetDepthMode(depthMode);

            bool flipY = (state.Get <int>(MethodOffset.YControl) & 1) != 0;

            float yFlip = flipY ? -1 : 1;

            Viewport[] viewports = new Viewport[Constants.TotalViewports];

            for (int index = 0; index < Constants.TotalViewports; index++)
            {
                var transform = state.Get <ViewportTransform>(MethodOffset.ViewportTransform, index);
                var extents   = state.Get <ViewportExtents>  (MethodOffset.ViewportExtents, index);

                float x = transform.TranslateX - MathF.Abs(transform.ScaleX);
                float y = transform.TranslateY - MathF.Abs(transform.ScaleY);

                float width  = transform.ScaleX * 2;
                float height = transform.ScaleY * 2 * yFlip;

                RectangleF region = new RectangleF(x, y, width, height);

                viewports[index] = new Viewport(
                    region,
                    transform.UnpackSwizzleX(),
                    transform.UnpackSwizzleY(),
                    transform.UnpackSwizzleZ(),
                    transform.UnpackSwizzleW(),
                    extents.DepthNear,
                    extents.DepthFar);
            }

            _context.Renderer.Pipeline.SetViewports(0, viewports);
        }
Code Example #23
        private IEnumerator Process(DepthMode depthMode, bool cpuOnly)
        {
            var debugDelegate = new AzureKinectBodyTracker.DebugLogDelegate(PluginDebugLogCallBack);
            var debugCallback = Marshal.GetFunctionPointerForDelegate(debugDelegate);

            AzureKinectBodyTracker.SetDebugLogCallback(debugCallback);

            try
            {
                AzureKinectBodyTracker.Start(0, 0, 0, depthMode, cpuOnly);
                this.currentDepthMode = depthMode;
            }
            catch (K4ABTException)
            {
                this.ProcessFinallize(false);
                yield break;
            }

            var depthImageToPointCloudDelegate = new AzureKinectBodyTracker.DepthImageToPointCloudDelegate(this.DepthImageToPointCloudCallback);
            var depthImageToPointCloudCallback = Marshal.GetFunctionPointerForDelegate(depthImageToPointCloudDelegate);

            AzureKinectBodyTracker.SetDepthImageToPointCloudCallback(depthImageToPointCloudCallback);

            var colorImageToDepthSpaceDelegate = new AzureKinectBodyTracker.ColorImageToDepthSpaceDelegate(this.ColorImageToDepthSpaceCallback);
            var colorImageToDepthSpaceCallback = Marshal.GetFunctionPointerForDelegate(colorImageToDepthSpaceDelegate);

            AzureKinectBodyTracker.SetColorImageToDepthSpaceCallback(colorImageToDepthSpaceCallback);

            this.isRunning = true;
            while (this.isRunning)
            {
                yield return(null);
            }
            AzureKinectBodyTracker.End();
            this.ProcessFinallize();
        }
Code Example #24
        private void TestConvertColor2DToDepth2D(DepthMode depthMode, ColorResolution colorResolution)
        {
            Calibration.CreateDummy(depthMode, colorResolution, 30, out var calibration);

            var depth2d = new Float2(calibration.DepthCameraCalibration.Intrinsics.Parameters.Cx, calibration.DepthCameraCalibration.Intrinsics.Parameters.Cy);
            var depthMm = (short)1800;
            var color2d = calibration.Convert2DTo2D(depth2d, depthMm, CalibrationGeometry.Depth, CalibrationGeometry.Color).Value;

            var depthImageBuffer = new short[depthMode.WidthPixels() * depthMode.HeightPixels()];

            for (var i = 0; i < depthImageBuffer.Length; i++)
            {
                depthImageBuffer[i] = depthMm;
            }
            var depthImage = Image.CreateFromArray(depthImageBuffer, ImageFormat.Depth16, depthMode.WidthPixels(), depthMode.HeightPixels());

            var point2d = calibration.ConvertColor2DToDepth2D(color2d, depthImage);

            Assert.IsNotNull(point2d);
            Assert.IsTrue(Math.Abs(depth2d.X - point2d.Value.X) < 1f);
            Assert.IsTrue(Math.Abs(depth2d.Y - point2d.Value.Y) < 1f);

            depthImage.Dispose();
        }
Code Example #25
        /// <summary>
        /// Updates host viewport transform and clipping state based on current GPU state.
        /// </summary>
        /// <param name="state">Current GPU state</param>
        private void UpdateViewportTransform(GpuState state)
        {
            DepthMode depthMode = state.Get <DepthMode>(MethodOffset.DepthMode);

            _context.Renderer.Pipeline.SetDepthMode(depthMode);

            YControl yControl = state.Get <YControl>(MethodOffset.YControl);

            bool   flipY  = yControl.HasFlag(YControl.NegateY);
            Origin origin = yControl.HasFlag(YControl.TriangleRastFlip) ? Origin.LowerLeft : Origin.UpperLeft;

            _context.Renderer.Pipeline.SetOrigin(origin);

            // The triangle rast flip flag only affects rasterization, the viewport is not flipped.
            // Setting the origin mode to upper left on the host, however, not only affects rasterization,
            // but also flips the viewport.
            // We negate the effects of flipping the viewport by flipping it again using the viewport swizzle.
            if (origin == Origin.UpperLeft)
            {
                flipY = !flipY;
            }

            Span <Viewport> viewports = stackalloc Viewport[Constants.TotalViewports];

            for (int index = 0; index < Constants.TotalViewports; index++)
            {
                var transform = state.Get <ViewportTransform>(MethodOffset.ViewportTransform, index);
                var extents   = state.Get <ViewportExtents>  (MethodOffset.ViewportExtents, index);

                float x = transform.TranslateX - MathF.Abs(transform.ScaleX);
                float y = transform.TranslateY - MathF.Abs(transform.ScaleY);

                float width  = MathF.Abs(transform.ScaleX) * 2;
                float height = MathF.Abs(transform.ScaleY) * 2;

                float scale = TextureManager.RenderTargetScale;
                if (scale != 1f)
                {
                    x      *= scale;
                    y      *= scale;
                    width  *= scale;
                    height *= scale;
                }

                RectangleF region = new RectangleF(x, y, width, height);

                ViewportSwizzle swizzleX = transform.UnpackSwizzleX();
                ViewportSwizzle swizzleY = transform.UnpackSwizzleY();
                ViewportSwizzle swizzleZ = transform.UnpackSwizzleZ();
                ViewportSwizzle swizzleW = transform.UnpackSwizzleW();

                if (transform.ScaleX < 0)
                {
                    swizzleX ^= ViewportSwizzle.NegativeFlag;
                }

                if (flipY)
                {
                    swizzleY ^= ViewportSwizzle.NegativeFlag;
                }

                if (transform.ScaleY < 0)
                {
                    swizzleY ^= ViewportSwizzle.NegativeFlag;
                }

                if (transform.ScaleZ < 0)
                {
                    swizzleZ ^= ViewportSwizzle.NegativeFlag;
                }

                viewports[index] = new Viewport(
                    region,
                    swizzleX,
                    swizzleY,
                    swizzleZ,
                    swizzleW,
                    extents.DepthNear,
                    extents.DepthFar);
            }

            _context.Renderer.Pipeline.SetViewports(0, viewports);
        }
Code Example #26
 public void SetDepthMode(DepthMode mode)
 {
     _renderer.New <SetDepthModeCommand>().Set(mode);
     _renderer.QueueCommand();
 }
Code Example #27
 public static extern k4a_result_t k4a_device_get_calibration(
     k4a_device_t device_handle,
     DepthMode depth_mode,
     ColorResolution color_resolution,
     out Calibration calibration);
Code Example #28
 public static void Start(uint depthTextureId, uint colorTextureId, uint transformedDepthTextureId, DepthMode depthMode, bool cpuOnly)
 {
     if (IsValidPlatform())
     {
         if (!K4ABT_Start(depthTextureId, colorTextureId, transformedDepthTextureId, (int)depthMode, cpuOnly))
         {
             throw new K4ABTException(GetLastErrorMessage());
         }
     }
 }
Code Example #29
        /// <summary>
        /// Updates host viewport transform and clipping state based on current GPU state.
        /// </summary>
        /// <param name="state">Current GPU state</param>
        private void UpdateViewportTransform(GpuState state)
        {
            var yControl = state.Get <YControl> (MethodOffset.YControl);
            var face     = state.Get <FaceState>(MethodOffset.FaceState);

            UpdateFrontFace(yControl, face.FrontFace);

            bool flipY = yControl.HasFlag(YControl.NegateY);

            Span <Viewport> viewports = stackalloc Viewport[Constants.TotalViewports];

            for (int index = 0; index < Constants.TotalViewports; index++)
            {
                var transform = state.Get <ViewportTransform>(MethodOffset.ViewportTransform, index);
                var extents   = state.Get <ViewportExtents>  (MethodOffset.ViewportExtents, index);

                float scaleX = MathF.Abs(transform.ScaleX);
                float scaleY = transform.ScaleY;

                if (flipY)
                {
                    scaleY = -scaleY;
                }

                if (!_context.Capabilities.SupportsViewportSwizzle && transform.UnpackSwizzleY() == ViewportSwizzle.NegativeY)
                {
                    scaleY = -scaleY;
                }

                if (index == 0)
                {
                    // Try to guess the depth mode being used on the high level API
                    // based on current transform.
                    // It is set up as follows by those APIs:
                    // If depth mode is ZeroToOne:
                    //  TranslateZ = Near
                    //  ScaleZ = Far - Near
                    // If depth mode is MinusOneToOne:
                    //  TranslateZ = (Near + Far) / 2
                    //  ScaleZ = (Far - Near) / 2
                    // DepthNear/Far are sorted such that Near is always less than Far.
                    DepthMode depthMode = extents.DepthNear != transform.TranslateZ &&
                                          extents.DepthFar != transform.TranslateZ ? DepthMode.MinusOneToOne : DepthMode.ZeroToOne;

                    _context.Renderer.Pipeline.SetDepthMode(depthMode);
                }

                float x = transform.TranslateX - scaleX;
                float y = transform.TranslateY - scaleY;

                float width  = scaleX * 2;
                float height = scaleY * 2;

                float scale = TextureManager.RenderTargetScale;
                if (scale != 1f)
                {
                    x      *= scale;
                    y      *= scale;
                    width  *= scale;
                    height *= scale;
                }

                RectangleF region = new RectangleF(x, y, width, height);

                ViewportSwizzle swizzleX = transform.UnpackSwizzleX();
                ViewportSwizzle swizzleY = transform.UnpackSwizzleY();
                ViewportSwizzle swizzleZ = transform.UnpackSwizzleZ();
                ViewportSwizzle swizzleW = transform.UnpackSwizzleW();

                float depthNear = extents.DepthNear;
                float depthFar  = extents.DepthFar;

                if (transform.ScaleZ < 0)
                {
                    float temp = depthNear;
                    depthNear = depthFar;
                    depthFar  = temp;
                }

                viewports[index] = new Viewport(region, swizzleX, swizzleY, swizzleZ, swizzleW, depthNear, depthFar);
            }

            _context.Renderer.Pipeline.SetViewports(0, viewports);
        }
Code Example #30
 public static extern k4a_result_t k4a_calibration_get_from_raw(
     byte[] raw_calibration,
     UIntPtr raw_calibration_size,
     DepthMode depth_mode,
     ColorResolution color_resolution,
     out Calibration calibration);
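These extern signatures are shown without their attributes; a hedged sketch of how such a declaration is typically completed (the library name and calling convention are assumptions):
 // Assumption: the native entry point lives in the k4a runtime library and uses cdecl.
 [DllImport("k4a", CallingConvention = CallingConvention.Cdecl)]
 public static extern k4a_result_t k4a_calibration_get_from_raw(
     byte[] raw_calibration,
     UIntPtr raw_calibration_size,
     DepthMode depth_mode,
     ColorResolution color_resolution,
     out Calibration calibration);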