Example #1
        private static Task RunVideo()
        {
            return Task.Run(() =>
            {
                using (var visualizerData = new VisualizerData())
                {
                    renderer = new Renderer(visualizerData);

                    renderer.StartVisualizationThread();

                    // Open device.
                    using (Device device = Device.Open())
                    {
                        device.StartCameras(new DeviceConfiguration()
                        {
                            CameraFPS = FPS.FPS30,
                            ColorResolution = ColorResolution.Off,
                            DepthMode = DepthMode.NFOV_Unbinned,
                            WiredSyncMode = WiredSyncMode.Standalone,
                        });

                        var deviceCalibration = device.GetCalibration();
                        PointCloud.ComputePointCloudCache(deviceCalibration);

                        using (Tracker tracker = Tracker.Create(deviceCalibration, new TrackerConfiguration()
                        {
                            ProcessingMode = TrackerProcessingMode.Gpu, SensorOrientation = SensorOrientation.Default
                        }))
                        {
                            while (renderer.IsActive)
                            {
                                using (Capture sensorCapture = device.GetCapture())
                                {
                                    // Queue latest frame from the sensor.
                                    tracker.EnqueueCapture(sensorCapture);
                                }

                                // Try getting latest tracker frame.
                                using (Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                                {
                                    if (frame != null)
                                    {
                                        // Save this frame for visualization in Renderer.

                                        // Frame data could be accessed here to extract, e.g., the tracked bodies.
                                        // Instead, for simplicity, we transfer the frame object to the rendering background thread.
                                        // A frame popped from the tracker must be disposed; since it is used here on a
                                        // different thread, we call Reference() to prolong the lifetime of the frame object.
                                        // For details on how to read frame data, see Renderer.NativeWindow_Render().

                                        visualizerData.Frame = frame.Reference();
                                    }
                                }
                            }
                        }
                    }
                }
            });
        }
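
The snippet assigns to a renderer field it never declares, so it is meant to live in a class that holds the Renderer. A minimal calling sketch under that assumption; the field declaration and Main method are hypothetical:

        private static Renderer renderer;

        static async Task Main()
        {
            // RunVideo returns the Task created by Task.Run, so awaiting it keeps the
            // process alive until the visualization window closes and the loop exits.
            await RunVideo();
        }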
Example #2
        /// <summary>
        /// Constructor.
        /// </summary>
        /// <param name="visualizerData">Shared container used to pass tracker frames to the visualization thread.</param>
        public Renderer(VisualizerData visualizerData)
        {
            this.visualizerData = visualizerData;

            LeftHand_Kinect_PA  = new PointArray(MAX_NUM_OF_POINTS);
            RightHand_Kinect_PA = new PointArray(MAX_NUM_OF_POINTS);

            LeftHand_Hololens_PA  = new PointArray(MAX_NUM_OF_POINTS);
            RightHand_Hololens_PA = new PointArray(MAX_NUM_OF_POINTS);

            LeftHand_Hololens_list  = new List<Point3D>();
            RightHand_Hololens_list = new List<Point3D>();
            LeftHand_Kinect_list    = new List<Point3D>();
            RightHand_Kinect_list   = new List<Point3D>();

            LH_Hololens_pos = new Point3D(0, 0, 0);
            RH_Hololens_pos = new Point3D(0, 0, 0);

            MT = new MatrixTransformation();
            ReadHololensIP();

            udpClient = new UdpClient(20201);
        }
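
The constructor binds a UdpClient to local port 20201 and reads the HoloLens IP, which suggests hand positions are exchanged over UDP. A minimal receive sketch under that assumption; the datagram layout (three little-endian floats) is hypothetical, and using directives for System.Net and System.Net.Sockets are assumed:

        private void ReceiveHololensData()
        {
            var remote = new IPEndPoint(IPAddress.Any, 0);
            // Receive blocks until a datagram arrives on port 20201.
            byte[] datagram = udpClient.Receive(ref remote);
            // Hypothetical layout: x, y, z of the left hand as three 32-bit floats.
            float x = BitConverter.ToSingle(datagram, 0);
            float y = BitConverter.ToSingle(datagram, 4);
            float z = BitConverter.ToSingle(datagram, 8);
            LH_Hololens_pos = new Point3D(x, y, z);
        }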
Example #3
        static void Main()
        {
            using (var visualizerData = new VisualizerData())
            {
                var renderer = new Renderer(visualizerData);

                renderer.StartVisualizationThread();

                // Open device.
                using (Device device = Device.Open())
                {
                    var fileName = $"bt-dump-{DateTime.Now:yyyy-MM-dd_hh-mm-ss}.btd";
                    using (BinaryWriter file = new BinaryWriter(new FileStream(fileName, FileMode.Create, FileAccess.Write)))
                    {
                        file.Write(Encoding.ASCII.GetBytes("BT-DUMP\nV: 1\n"));
                        WriteVersion(file, device.Version);
                        file.Write(Encoding.ASCII.GetBytes($"device-sn: {device.SerialNum}\n"));

                        device.StartCameras(new DeviceConfiguration()
                        {
                            CameraFPS       = FPS.FPS30,
                            ColorResolution = ColorResolution.Off,
                            DepthMode       = DepthMode.NFOV_Unbinned,
                            WiredSyncMode   = WiredSyncMode.Standalone,
                        });

                        file.Write(Encoding.ASCII.GetBytes($"device-color-resolution: {device.CurrentColorResolution}\n"));
                        file.Write(Encoding.ASCII.GetBytes($"device-depth-mode: {device.CurrentDepthMode}\n"));

                        var deviceCalibration = device.GetCalibration();
                        PointCloud.ComputePointCloudCache(deviceCalibration);

                        WriteCalibration(file, "depth", deviceCalibration.DepthCameraCalibration);
                        WriteCalibration(file, "color", deviceCalibration.ColorCameraCalibration);

                        using (Tracker tracker = Tracker.Create(deviceCalibration, new TrackerConfiguration()
                        {
                            ProcessingMode = TrackerProcessingMode.Gpu, SensorOrientation = SensorOrientation.Default
                        }))
                        {
                            file.Write(Encoding.ASCII.GetBytes($"joint-count: {Skeleton.JointCount}\n"));
                            file.Write(Encoding.ASCII.GetBytes("data:\n"));

                            while (renderer.IsActive)
                            {
                                using (Capture sensorCapture = device.GetCapture())
                                {
                                    // Queue latest frame from the sensor.
                                    tracker.EnqueueCapture(sensorCapture);
                                }

                                // Try getting latest tracker frame.
                                using (Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                                {
                                    if (frame != null)
                                    {
                                        // Save to recording file.
                                        file.Write(frame.DeviceTimestamp.Ticks); // long = Int64
                                        file.Write(frame.NumberOfBodies);        // uint = UInt32
                                        for (uint i = 0; i < frame.NumberOfBodies; i++)
                                        {
                                            var person = frame.GetBodyId(i);
                                            file.Write(person); // uint = UInt32
                                            var s = frame.GetBodySkeleton(i);
                                            for (uint j = 0; j < Skeleton.JointCount; j++)
                                            {
                                                var joint = s.GetJoint((JointId)j);
                                                var c     = joint.ConfidenceLevel;
                                                var p     = joint.Position;
                                                var r     = joint.Quaternion;
                                                file.Write((byte)c);                               // byte = 8-bit unsigned
                                                file.Write(p.X); file.Write(p.Y); file.Write(p.Z); // float = 32-bit floating point.
                                                file.Write(r.X); file.Write(r.Y); file.Write(r.Z); file.Write(r.W);
                                            }
                                        }

                                        // Save this frame for visualization in Renderer.

                                        // Frame data could be accessed here to extract, e.g., the tracked bodies.
                                        // Instead, for simplicity, we transfer the frame object to the rendering background thread.
                                        // A frame popped from the tracker must be disposed; since it is used here on a
                                        // different thread, we call Reference() to prolong the lifetime of the frame object.
                                        // For details on how to read frame data, see Renderer.NativeWindow_Render().
                                        visualizerData.Frame = frame.Reference();
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
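
The recorder writes an ASCII header terminated by the "data:" line, followed by fixed-size binary records in the exact order shown above. A minimal reader sketch for that layout; ReadDump is a hypothetical helper, and a full implementation would parse jointCount from the "joint-count:" header line instead of taking it as a parameter:

        static void ReadDump(string fileName, int jointCount)
        {
            using (var reader = new BinaryReader(new FileStream(fileName, FileMode.Open, FileAccess.Read)))
            {
                // Skip the ASCII header by scanning for the "data:\n" marker line.
                var header = new StringBuilder();
                while (!header.ToString().EndsWith("data:\n"))
                {
                    header.Append((char)reader.ReadByte());
                }

                // Each record mirrors the writes in the capture loop above.
                while (reader.BaseStream.Position < reader.BaseStream.Length)
                {
                    long ticks  = reader.ReadInt64();   // frame.DeviceTimestamp.Ticks
                    uint bodies = reader.ReadUInt32();  // frame.NumberOfBodies
                    for (uint i = 0; i < bodies; i++)
                    {
                        uint bodyId = reader.ReadUInt32();
                        for (int j = 0; j < jointCount; j++)
                        {
                            byte  confidence = reader.ReadByte();
                            float px = reader.ReadSingle(), py = reader.ReadSingle(), pz = reader.ReadSingle();
                            float qx = reader.ReadSingle(), qy = reader.ReadSingle(), qz = reader.ReadSingle(), qw = reader.ReadSingle();
                            // Consume or store the joint data here.
                        }
                    }
                }
            }
        }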
Example #4
        static async Task Main()
        {
            using (var visualizerData = new VisualizerData())
            {
                var renderer = new PosSaver(visualizerData);

                renderer.StartVisualizationThread();

                // Open device.
                using (Device device = Device.Open())
                {
                    device.StartCameras(new DeviceConfiguration()
                    {
                        ColorFormat            = ImageFormat.ColorBGRA32,
                        ColorResolution        = ColorResolution.R720p,
                        DepthMode              = DepthMode.NFOV_Unbinned,
                        SynchronizedImagesOnly = true,
                        WiredSyncMode          = WiredSyncMode.Standalone,
                        CameraFPS              = FPS.FPS15
                    });

                    var deviceCalibration = device.GetCalibration();
                    var transformation    = deviceCalibration.CreateTransformation();
                    PointCloud.ComputePointCloudCache(deviceCalibration);

                    using (Tracker tracker = Tracker.Create(deviceCalibration, new TrackerConfiguration()
                    {
                        ProcessingMode = TrackerProcessingMode.Gpu, SensorOrientation = SensorOrientation.Default
                    }))
                    {
                        while (renderer.IsActive)
                        {
                            using (Capture sensorCapture = await Task.Run(() => device.GetCapture()).ConfigureAwait(true))
                            {
                                // Queue latest frame from the sensor.
                                tracker.EnqueueCapture(sensorCapture);
                                if (renderer.IsHuman)
                                {
                                    unsafe
                                    {
                                        // Get the width and height of the depth image.
                                        int depth_width  = deviceCalibration.DepthCameraCalibration.ResolutionWidth;
                                        int depth_height = deviceCalibration.DepthCameraCalibration.ResolutionHeight;
                                        // Bitmap depthBitmap = new Bitmap(depth_width, depth_height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
                                        Bitmap colorBitmap = new Bitmap(depth_width, depth_height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);

                                        // Image depthImage = sensorCapture.Depth;
                                        Image colorImage = transformation.ColorImageToDepthCamera(sensorCapture);
                                        // ushort[] depthArray = depthImage.GetPixels<ushort>().ToArray();
                                        BGRA[] colorArray = colorImage.GetPixels<BGRA>().ToArray();
                                        // BitmapData bitmapData = depthBitmap.LockBits(new Rectangle(0, 0, depthBitmap.Width, depthBitmap.Height), System.Drawing.Imaging.ImageLockMode.WriteOnly, PixelFormat.Format32bppArgb);
                                        BitmapData bitmapData = colorBitmap.LockBits(new Rectangle(0, 0, colorBitmap.Width, colorBitmap.Height), System.Drawing.Imaging.ImageLockMode.WriteOnly, PixelFormat.Format32bppArgb);
                                        // Pointer to the pixel data of the locked bitmap.
                                        byte* pixels = (byte*)bitmapData.Scan0;
                                        int   index  = 0;
                                        // Copy one pixel at a time.
                                        for (int i = 0; i < colorArray.Length; i++)
                                        {
                                            pixels[index++] = colorArray[i].B;
                                            pixels[index++] = colorArray[i].G;
                                            pixels[index++] = colorArray[i].R;
                                            pixels[index++] = 255; // Fixed alpha value makes the pixel opaque.
                                        }
                                        // Writing is finished; unlock the bitmap and save it.
                                        colorBitmap.UnlockBits(bitmapData);
                                        string string_now = renderer.now.ToString("HHmmssfff");
                                        colorBitmap.Save($@"{PosSaver.path}\{renderer.day}\{renderer.scene}\depth\{string_now}.png", System.Drawing.Imaging.ImageFormat.Png);
                                        // Dispose per-frame resources to avoid leaking native memory in the capture loop.
                                        colorImage.Dispose();
                                        colorBitmap.Dispose();
                                    }
                                }
                            }

                            // Try getting latest tracker frame.
                            using (Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                            {
                                if (frame != null)
                                {
                                    // Save this frame for visualization in Renderer.

                                    // Frame data could be accessed here to extract, e.g., the tracked bodies.
                                    // Instead, for simplicity, we transfer the frame object to the rendering background thread.
                                    // A frame popped from the tracker must be disposed; since it is used here on a
                                    // different thread, we call Reference() to prolong the lifetime of the frame object.
                                    // For details on how to read frame data, see Renderer.NativeWindow_Render().
                                    visualizerData.Frame = frame.Reference();
                                }
                            }
                        }
                    }
                }
            }
        }
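
The unsafe pointer loop above can also be written with a managed buffer and Marshal.Copy, avoiding the unsafe context entirely. A sketch of the same copy, assuming the bitmap stride equals width * 4 (which holds for the 32bppArgb format used here); CopyPixels is a hypothetical helper:

        static void CopyPixels(BGRA[] colorArray, Bitmap colorBitmap)
        {
            BitmapData bitmapData = colorBitmap.LockBits(
                new Rectangle(0, 0, colorBitmap.Width, colorBitmap.Height),
                System.Drawing.Imaging.ImageLockMode.WriteOnly,
                System.Drawing.Imaging.PixelFormat.Format32bppArgb);
            byte[] buffer = new byte[colorArray.Length * 4];
            for (int i = 0; i < colorArray.Length; i++)
            {
                buffer[i * 4]     = colorArray[i].B;
                buffer[i * 4 + 1] = colorArray[i].G;
                buffer[i * 4 + 2] = colorArray[i].R;
                buffer[i * 4 + 3] = 255; // opaque alpha
            }
            // Copy the managed buffer into the locked bitmap memory in one call.
            System.Runtime.InteropServices.Marshal.Copy(buffer, 0, bitmapData.Scan0, buffer.Length);
            colorBitmap.UnlockBits(bitmapData);
        }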
Example #5
        public Renderer(VisualizerData visualizerData)
        {
            this.visualizerData = visualizerData;
        }