Code example #1
        static async Task Main()
        {
            using (var visualizerData = new VisualizerData())
            {
                var renderer = new PosSaver(visualizerData);

                renderer.StartVisualizationThread();

                // Open device.
                using (Device device = Device.Open())
                {
                    device.StartCameras(new DeviceConfiguration()
                    {
                        ColorFormat            = ImageFormat.ColorBGRA32,
                        ColorResolution        = ColorResolution.R720p,
                        DepthMode              = DepthMode.NFOV_Unbinned,
                        SynchronizedImagesOnly = true,
                        WiredSyncMode          = WiredSyncMode.Standalone,
                        CameraFPS              = FPS.FPS15
                    });

                    var deviceCalibration = device.GetCalibration();
                    var transformation    = deviceCalibration.CreateTransformation();
                    PointCloud.ComputePointCloudCache(deviceCalibration);

                    using (Tracker tracker = Tracker.Create(deviceCalibration, new TrackerConfiguration()
                    {
                        ProcessingMode = TrackerProcessingMode.Gpu, SensorOrientation = SensorOrientation.Default
                    }))
                    {
                        while (renderer.IsActive)
                        {
                            using (Capture sensorCapture = await Task.Run(() => device.GetCapture()).ConfigureAwait(true))
                            {
                                // Queue latest frame from the sensor.
                                tracker.EnqueueCapture(sensorCapture);
                                if (renderer.IsHuman)
                                {
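                                    // Note: this pointer-based copy requires the project to allow unsafe code (<AllowUnsafeBlocks>true</AllowUnsafeBlocks>).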
                                    unsafe
                                    {
                                        // Get the width and height of the depth image (reuse the calibration obtained above).
                                        int depth_width  = deviceCalibration.DepthCameraCalibration.ResolutionWidth;
                                        int depth_height = deviceCalibration.DepthCameraCalibration.ResolutionHeight;
                                        // Bitmap depthBitmap = new Bitmap(depth_width, depth_height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
                                        Bitmap colorBitmap = new Bitmap(depth_width, depth_height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);

                                        // Image depthImage = sensorCapture.Depth;
                                        Image colorImage = transformation.ColorImageToDepthCamera(sensorCapture);
                                        // ushort[] depthArray = depthImage.GetPixels<ushort>().ToArray();
                                        BGRA[] colorArray = colorImage.GetPixels<BGRA>().ToArray();
                                        // BitmapData bitmapData = depthBitmap.LockBits(new Rectangle(0, 0, depthBitmap.Width, depthBitmap.Height), System.Drawing.Imaging.ImageLockMode.WriteOnly, PixelFormat.Format32bppArgb);
                                        BitmapData bitmapData = colorBitmap.LockBits(new Rectangle(0, 0, colorBitmap.Width, colorBitmap.Height), System.Drawing.Imaging.ImageLockMode.WriteOnly, PixelFormat.Format32bppArgb);
                                        // Pointer to the bitmap's pixel data.
                                        byte* pixels = (byte*)bitmapData.Scan0;
                                        int   index  = 0;
                                        // Copy one pixel at a time in B, G, R, A order.
                                        for (int i = 0; i < colorArray.Length; i++)
                                        {
                                            pixels[index++] = colorArray[i].B;
                                            pixels[index++] = colorArray[i].G;
                                            pixels[index++] = colorArray[i].R;
                                            pixels[index++] = 255; // Fix the alpha channel so the image is fully opaque.
                                        }
                                        // Finished writing; unlock the bitmap.
                                        colorBitmap.UnlockBits(bitmapData);
                                        string string_now = renderer.now.ToString("HHmmssfff");
                                        colorBitmap.Save($@"{PosSaver.path}\{renderer.day}\{renderer.scene}\depth\{string_now}.png", System.Drawing.Imaging.ImageFormat.Png);
                                        // Dispose the transformed image and the bitmap so they are not leaked on every loop iteration.
                                        colorImage.Dispose();
                                        colorBitmap.Dispose();
                                    }
                                }
                            }

                            // Try getting latest tracker frame.
                            using (Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                            {
                                if (frame != null)
                                {
                                    // Save this frame for visualization in Renderer.

                                    // One can access frame data here and extract e.g. tracked bodies from it for the needed purpose.
                                    // Instead, for simplicity, we transfer the frame object to the rendering background thread.
                                    // This example shows that frame popped from tracker should be disposed. Since here it is used
                                    // in a different thread, we use Reference method to prolong the lifetime of the frame object.
                                    // For reference on how to read frame data, please take a look at Renderer.NativeWindow_Render().
                                    visualizerData.Frame = frame.Reference();
                                }
                            }
                        }
                    }
                }
            }
        }
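
If the project cannot enable unsafe code, the same pixel copy can be done with a managed buffer and Marshal.Copy. Below is a minimal sketch under that assumption; it takes the BGRA[] array obtained from Image.GetPixels&lt;BGRA&gt;() as in example #1, and the class/method names (BitmapCopy, CopyBgraToBitmap) are hypothetical.

using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;
using Microsoft.Azure.Kinect.Sensor;

static class BitmapCopy
{
    // Copies a BGRA pixel array into a 32bpp Bitmap without an unsafe block,
    // forcing the alpha channel to 255 as in the example above.
    public static void CopyBgraToBitmap(BGRA[] colorArray, Bitmap bitmap)
    {
        var rect = new Rectangle(0, 0, bitmap.Width, bitmap.Height);
        BitmapData data = bitmap.LockBits(rect, ImageLockMode.WriteOnly, PixelFormat.Format32bppArgb);
        try
        {
            // Build one managed buffer (B, G, R, A per pixel)...
            byte[] buffer = new byte[colorArray.Length * 4];
            for (int i = 0; i < colorArray.Length; i++)
            {
                buffer[i * 4 + 0] = colorArray[i].B;
                buffer[i * 4 + 1] = colorArray[i].G;
                buffer[i * 4 + 2] = colorArray[i].R;
                buffer[i * 4 + 3] = 255; // opaque
            }

            // ...then copy it row by row, respecting the bitmap stride.
            int rowBytes = bitmap.Width * 4;
            for (int y = 0; y < bitmap.Height; y++)
            {
                Marshal.Copy(buffer, y * rowBytes, data.Scan0 + y * data.Stride, rowBytes);
            }
        }
        finally
        {
            bitmap.UnlockBits(data);
        }
    }
}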
Code example #2
        static void Main()
        {
            using (var visualizerData = new VisualizerData())
            {
                var renderer = new Renderer(visualizerData);

                renderer.StartVisualizationThread();

                // Open device.
                using (Device device = Device.Open())
                {
                    var fileName = $"bt-dump-{DateTime.Now:yyyy-MM-dd_HH-mm-ss}.btd"; // HH (24-hour) avoids AM/PM filename collisions
                    using (BinaryWriter file = new BinaryWriter(new FileStream(fileName, FileMode.Create, FileAccess.Write)))
                    {
                        file.Write(Encoding.ASCII.GetBytes("BT-DUMP\nV: 1\n"));
                        WriteVersion(file, device.Version);
                        file.Write(Encoding.ASCII.GetBytes($"device-sn: {device.SerialNum}\n"));

                        device.StartCameras(new DeviceConfiguration()
                        {
                            CameraFPS       = FPS.FPS30,
                            ColorResolution = ColorResolution.Off,
                            DepthMode       = DepthMode.NFOV_Unbinned,
                            WiredSyncMode   = WiredSyncMode.Standalone,
                        });

                        file.Write(Encoding.ASCII.GetBytes($"device-color-resolution: {device.CurrentColorResolution}\n"));
                        file.Write(Encoding.ASCII.GetBytes($"device-depth-mode: {device.CurrentDepthMode}\n"));

                        var deviceCalibration = device.GetCalibration();
                        PointCloud.ComputePointCloudCache(deviceCalibration);

                        WriteCalibration(file, "depth", deviceCalibration.DepthCameraCalibration);
                        WriteCalibration(file, "color", deviceCalibration.ColorCameraCalibration);

                        using (Tracker tracker = Tracker.Create(deviceCalibration, new TrackerConfiguration()
                        {
                            ProcessingMode = TrackerProcessingMode.Gpu, SensorOrientation = SensorOrientation.Default
                        }))
                        {
                            file.Write(Encoding.ASCII.GetBytes($"joint-count: {Skeleton.JointCount}\n"));
                            file.Write(Encoding.ASCII.GetBytes("data:\n"));

                            while (renderer.IsActive)
                            {
                                using (Capture sensorCapture = device.GetCapture())
                                {
                                    // Queue latest frame from the sensor.
                                    tracker.EnqueueCapture(sensorCapture);
                                }

                                // Try getting latest tracker frame.
                                using (Frame frame = tracker.PopResult(TimeSpan.Zero, throwOnTimeout: false))
                                {
                                    if (frame != null)
                                    {
                                        // Save to recording file.
                                        file.Write(frame.DeviceTimestamp.Ticks); // long = Int64
                                        file.Write(frame.NumberOfBodies);        // uint = UInt32 (32-bit unsigned)
                                        for (uint i = 0; i < frame.NumberOfBodies; i++)
                                        {
                                            var person = frame.GetBodyId(i);
                                            file.Write(person); // uint = UInt32 (32-bit unsigned)
                                            var s = frame.GetBodySkeleton(i);
                                            for (uint j = 0; j < Skeleton.JointCount; j++)
                                            {
                                                var joint = s.GetJoint((JointId)j);
                                                var c     = joint.ConfidenceLevel;
                                                var p     = joint.Position;
                                                var r     = joint.Quaternion;
                                                file.Write((byte)c);                               // byte = 8-bit unsigned
                                                file.Write(p.X); file.Write(p.Y); file.Write(p.Z); // float = 32-bit floating point.
                                                file.Write(r.X); file.Write(r.Y); file.Write(r.Z); file.Write(r.W);
                                            }
                                        }

                                        // Save this frame for visualization in Renderer.

                                        // One can access frame data here and extract e.g. tracked bodies from it for the needed purpose.
                                        // Instead, for simplicity, we transfer the frame object to the rendering background thread.
                                        // This example shows that frame popped from tracker should be disposed. Since here it is used
                                        // in a different thread, we use Reference method to prolong the lifetime of the frame object.
                                        // For reference on how to read frame data, please take a look at Renderer.NativeWindow_Render().
                                        visualizerData.Frame = frame.Reference();
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
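
The file written by example #2 is a set of ASCII header lines followed by fixed-size binary records, so reading it back is straightforward. The following is a minimal sketch, assuming every header line (including whatever WriteVersion and WriteCalibration emit) ends with a single '\n' and that the binary records start right after the "data:" line; the names DumpReader, ReadDump and the local ReadLine helper are hypothetical.

using System;
using System.IO;
using System.Text;

static class DumpReader
{
    public static void ReadDump(string fileName)
    {
        using (var reader = new BinaryReader(new FileStream(fileName, FileMode.Open, FileAccess.Read)))
        {
            // Read ASCII header lines byte-by-byte so the stream position stays exact.
            string ReadLine()
            {
                var sb = new StringBuilder();
                for (int b = reader.ReadByte(); b != '\n'; b = reader.ReadByte())
                {
                    sb.Append((char)b);
                }
                return sb.ToString();
            }

            int jointCount = 0;
            for (string line = ReadLine(); line != "data:"; line = ReadLine())
            {
                if (line.StartsWith("joint-count: "))
                {
                    jointCount = int.Parse(line.Substring("joint-count: ".Length));
                }
            }

            // Binary records: one per popped tracker frame, exactly as written above.
            while (reader.BaseStream.Position < reader.BaseStream.Length)
            {
                long ticks = reader.ReadInt64();        // frame.DeviceTimestamp.Ticks
                uint numberOfBodies = reader.ReadUInt32();
                for (uint i = 0; i < numberOfBodies; i++)
                {
                    uint bodyId = reader.ReadUInt32();
                    for (int j = 0; j < jointCount; j++)
                    {
                        byte confidence = reader.ReadByte();
                        float px = reader.ReadSingle(), py = reader.ReadSingle(), pz = reader.ReadSingle();
                        float qx = reader.ReadSingle(), qy = reader.ReadSingle(), qz = reader.ReadSingle(), qw = reader.ReadSingle();
                        // Consume or convert the joint data here as needed.
                    }
                }
            }
        }
    }
}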
Code example #3
        private void NativeWindow_Render(object sender, NativeWindowEventArgs e)
        {
            using (var lastFrame = visualizerData.TakeFrameWithOwnership())
            {
                if (lastFrame == null)
                {
                    return;
                }

                NativeWindow nativeWindow = (NativeWindow)sender;

                Gl.Viewport(0, 0, (int)nativeWindow.Width, (int)nativeWindow.Height);
                Gl.Clear(ClearBufferMask.ColorBufferBit);

                // Update model/view/projective matrices in shader
                var proj = Matrix4x4.CreatePerspectiveFieldOfView(ToRadians(65.0f), (float)nativeWindow.Width / nativeWindow.Height, 0.1f, 150.0f);
                var view = Matrix4x4.CreateLookAt(Vector3.Zero, Vector3.UnitZ, -Vector3.UnitY);

                SphereRenderer.View       = view;
                SphereRenderer.Projection = proj;

                CylinderRenderer.View       = view;
                CylinderRenderer.Projection = proj;

                PointCloudRenderer.View       = view;
                PointCloudRenderer.Projection = proj;

                PointCloud.ComputePointCloud(lastFrame.Capture.Depth, ref pointCloud);
                PointCloudRenderer.Render(pointCloud, new Vector4(1, 1, 1, 1));
                Vector3 rightHand = Vector3.Zero;
                for (uint i = 0; i < lastFrame.NumberOfBodies; ++i)
                {
                    var skeleton  = lastFrame.GetBodySkeleton(i);
                    var bodyId    = lastFrame.GetBodyId(i);
                    var bodyColor = BodyColors.GetColorAsVector(bodyId);

                    for (int jointId = 0; jointId < (int)JointId.Count; ++jointId)
                    {
                        var joint = skeleton.GetJoint(jointId);

                        if (jointId == (int)JointId.HandRight)
                        {
                            if (pullPoint)
                            {
                                // Keep whichever right-hand position is closest to the camera origin across all tracked bodies.
                                if (rightHand == Vector3.Zero || rightHand.Length() > joint.Position.Length())
                                {
                                    rightHand = joint.Position;
                                }
                            }
                            // Console.WriteLine($"Right Hand Found X:{joint.Position.X} Y:{joint.Position.Y} Z:{joint.Position.Z} ");
                        }

                        // Render the joint as a sphere.
                        const float radius = 0.024f;
                        SphereRenderer.Render(joint.Position / 1000, radius, bodyColor);

                        if (JointConnections.JointParent.TryGetValue((JointId)jointId, out JointId parentId))
                        {
                            // Render a bone connecting this joint and its parent as a cylinder.
                            CylinderRenderer.Render(joint.Position / 1000, skeleton.GetJoint((int)parentId).Position / 1000, bodyColor);
                        }
                    }
                }

                if (rightHand != Vector3.Zero)
                {
                    thePoint = rightHand;
                }
            }
        }
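
ToRadians is defined elsewhere in the Renderer class and is not shown above; a typical definition, assuming a plain degrees-to-radians conversion for the 65° field of view, would be:

        private static float ToRadians(float degrees)
        {
            return degrees * (float)Math.PI / 180.0f;
        }

Joint positions from the body tracker are reported in millimeters, which is why the SphereRenderer/CylinderRenderer calls divide Position by 1000 to render in meters.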