public async Task HandleData(SensorOrientation expected, byte[] response)
            {
                // Latest orientation observed on the accelerometer's data stream.
                SensorOrientation? observed = null;

                // Subscribe before injecting the response so no value is missed.
                await accelerometer.Orientation.AddRouteAsync(source =>
                    source.Stream(data => observed = data.Value<SensorOrientation>()));

                // Push the canned byte payload through the mocked platform layer.
                platform.sendMockResponse(response);

                Assert.That(observed, Is.EqualTo(expected));
            }
Example #2
0
        /// <summary>
        /// Main entry point: configures an Azure Kinect sensor in a Psi pipeline and
        /// fuses its color, depth, IMU, body-tracking, and calibration streams.
        /// </summary>
        public static void Main()
        {
            // camera resolution settings (720p color: 1280x720)
            const ColorResolution resolution   = ColorResolution.R720p;
            const int             widthSource  = 1280;
            const int             heightSource = 720;

            // down sampled resolution (80x45 keeps the source 16:9 aspect ratio)
            const int    widthOutput       = 80;
            const int    heightOutput      = 45;
            const double scaleFactorWidth  = (double)widthOutput / widthSource;
            const double scaleFactorHeight = (double)heightOutput / heightSource;

            // background subtraction beyond this depth
            const double maxDepth = 1.0; // meters

            const SensorOrientation initialOrientation = SensorOrientation.Default;

            // LatestMessage delivery drops stale frames instead of queuing them.
            using (var pipeline = Pipeline.Create("AzureKinectSample", DeliveryPolicy.LatestMessage))
            {
                var azureKinect = new AzureKinectSensor(
                    pipeline,
                    new AzureKinectSensorConfiguration()
                {
                    OutputImu                = true,
                    ColorResolution          = resolution,
                    DepthMode                = DepthMode.WFOV_Unbinned,
                    CameraFPS                = FPS.FPS15,
                    BodyTrackerConfiguration = new AzureKinectBodyTrackerConfiguration()
                    {
                        CpuOnlyMode       = true, // false if CUDA supported GPU available
                        SensorOrientation = initialOrientation,
                    },
                });

                StringBuilder     sb = new StringBuilder();
                SensorOrientation lastOrientation = (SensorOrientation)(-1); // detect orientation changes

                // consuming color, depth, IMU, body tracking, calibration;
                // Join = synchronized fusion (IMU matched within 10 ms), Pair = best-effort
                // pairing with the latest available message on the secondary stream.
                azureKinect.ColorImage.Resize(widthOutput, heightOutput)
                .Join(azureKinect.DepthImage)
                .Join(azureKinect.Imu, TimeSpan.FromMilliseconds(10))
                .Pair(azureKinect.Bodies)
                .Pair(azureKinect.DepthDeviceCalibrationInfo)
                .Do(message =>
                {
                    var(color, depth, imu, bodies, calib) = message;
                    // NOTE(review): the snippet is truncated here by the example
                    // scraper; the rest of the Do(...) body and of Main is not shown.
Example #3
0
        /// <summary>
        /// Creates the tracker from the device calibration and starts the
        /// background thread that drives the tracking loop.
        /// </summary>
        /// <param name="calibration">Sensor calibration used to construct the tracker.</param>
        /// <param name="sensorOrientation">Physical orientation of the sensor.</param>
        /// <param name="smoothingFactor">Temporal smoothing factor applied to body frames.</param>
        public BackgroundTrackingLoop(ref Calibration calibration, SensorOrientation sensorOrientation, float smoothingFactor)
        {
            var trackerConfig = new TrackerConfiguration { SensorOrientation = sensorOrientation };

            tracker = new Tracker(ref calibration, trackerConfig);
            tracker.TemporalSmoothingFactor = smoothingFactor;

            isRunning = true;

            // Background thread so the loop never blocks process shutdown.
            backgroundThread = new Thread(BackgroundLoop) { IsBackground = true };
            backgroundThread.Start();
        }
Example #4
0
        /// <summary>
        /// Wires up a body-tracking session: builds the tracking loop from the
        /// reading loop's calibration, subscribes to frame/failure events, and
        /// creates depth/color visualizers sized to the configured sensor modes.
        /// </summary>
        /// <param name="app">Owning application, forwarded to the base model.</param>
        /// <param name="readingLoop">Source of captures and device calibration.</param>
        /// <param name="processingMode">Tracker processing mode (e.g. CPU or GPU).</param>
        /// <param name="dnnModel">DNN model used by the body tracker.</param>
        /// <param name="sensorOrientation">Physical orientation of the sensor.</param>
        /// <param name="smoothingFactor">Temporal smoothing factor for the tracker.</param>
        public TrackerModel(IApp app, BackgroundReadingLoop readingLoop,
                            TrackerProcessingMode processingMode, DnnModel dnnModel, SensorOrientation sensorOrientation, float smoothingFactor)
            : base(app)
        {
            // try to create tracking loop first (may fail, so do it before wiring anything else)
            readingLoop.GetCalibration(out calibration);
            trackingLoop = new BackgroundTrackingLoop(in calibration, processingMode, dnnModel, sensorOrientation, smoothingFactor);
            trackingLoop.BodyFrameReady += TrackingLoop_BodyFrameReady;
            trackingLoop.Failed         += BackgroundLoop_Failed;

            this.readingLoop          = readingLoop;
            readingLoop.CaptureReady += ReadingLoop_CaptureReady;
            readingLoop.Failed       += BackgroundLoop_Failed;

            Title = readingLoop.ToString();

            // Image and skeleton visualizers for depth
            var depthMode = readingLoop.DepthMode;

            depthImageVisualizer    = ImageVisualizer.CreateForDepth(dispatcher, depthMode.WidthPixels(), depthMode.HeightPixels());
            depthSkeletonVisualizer = new SkeletonVisualizer(dispatcher, depthMode.WidthPixels(), depthMode.HeightPixels(), ProjectJointToDepthMap);

            // Image and skeleton visualizers for color (only when the color stream is enabled)
            var colorRes = readingLoop.ColorResolution;

            if (colorRes != ColorResolution.Off)
            {
                colorImageVisualizer       = ImageVisualizer.CreateForColorBgra(dispatcher, colorRes.WidthPixels(), colorRes.HeightPixels());
                colorSkeletonVisualizer    = new SkeletonVisualizer(dispatcher, colorRes.WidthPixels(), colorRes.HeightPixels(), ProjectJointToColorImage);
                bodyIndexMapTransformation = new BodyIndexMapTransformation(in calibration);
            }

            // Proportions between columns: color column is scaled so both images
            // render at the same on-screen height (depthH * colorW / colorH).
            if (colorRes != ColorResolution.Off)
            {
                DepthColumnWidth = new GridLength(depthImageVisualizer.WidthPixels, GridUnitType.Star);
                ColorColumnWidth = new GridLength(
                    depthImageVisualizer.HeightPixels * colorImageVisualizer.WidthPixels / colorImageVisualizer.HeightPixels,
                    GridUnitType.Star);
            }
            else
            {
                // No color stream: depth takes the full width, color column collapses.
                DepthColumnWidth = new GridLength(1, GridUnitType.Star);
                ColorColumnWidth = new GridLength(0, GridUnitType.Pixel);
            }
        }
Example #5
0
        /// <summary>
        /// Creates the tracker from the device calibration and starts the
        /// background thread that drives the tracking loop.
        /// </summary>
        /// <param name="calibration">Sensor calibration used to construct the tracker.</param>
        /// <param name="cpuOnlyMode">True to force CPU processing; otherwise GPU.</param>
        /// <param name="sensorOrientation">Physical orientation of the sensor.</param>
        /// <param name="smoothingFactor">Temporal smoothing factor applied to body frames.</param>
        public BackgroundTrackingLoop(ref Calibration calibration, bool cpuOnlyMode, SensorOrientation sensorOrientation, float smoothingFactor)
        {
            var processingMode = cpuOnlyMode
                ? TrackerProcessingMode.Cpu
                : TrackerProcessingMode.Gpu;

            var trackerConfig = new TrackerConfiguration
            {
                SensorOrientation = sensorOrientation,
                ProcessingMode    = processingMode,
            };

            tracker = new Tracker(ref calibration, trackerConfig);
            tracker.TemporalSmoothingFactor = smoothingFactor;

            isRunning = true;

            // Background thread so the loop never blocks process shutdown.
            backgroundThread = new Thread(BackgroundLoop) { IsBackground = true };
            backgroundThread.Start();
        }
Example #6
0
 // Enumerate image coordinates while correcting for sensor orientation.
 // Returns (first-axis range, second-axis range, transpose flag): for the 90°
 // rotations the axes are swapped, so the flag tells the caller to transpose x/y.
 static (IEnumerable<int>, IEnumerable<int>, bool) EnumerateCoordinates(SensorOrientation orientation)
 {
     var w = Enumerable.Range(0, widthOutput);
     var h = Enumerable.Range(0, heightOutput);

     return orientation switch
     {
         SensorOrientation.Clockwise90        => (h.Reverse(), w, true),
         SensorOrientation.Flip180            => (w.Reverse(), h.Reverse(), false),
         SensorOrientation.CounterClockwise90 => (h, w.Reverse(), true),
         _                                    => (w, h, false), // normal (no rotation)
     };
 } // NOTE(review): closing brace restored — the scraped snippet was truncated here.
Example #7
0
 /// <summary>Creates a transform for the given sensor orientation.</summary>
 /// <param name="orientation">Orientation this transform represents.</param>
 public LeapTransform(SensorOrientation orientation) => Orientation = orientation;