Example #1
        static void Main(string[] args)
        {
            Console.WriteLine("Initializing console Skeleton Tracking sample with RealSense ... ");

            // Initialize logging to output only messages with severity level ERROR or higher to the console
            Cubemos.Api.InitialiseLogging(Cubemos.LogLevel.CM_LL_ERROR, bWriteToConsole: true);
            Cubemos.SkeletonTracking.Api skeletontrackingApi;

            // Create a cubemos Skeleton Tracking API handle and specify the directory containing a cubemos_license.json file
            try
            {
                skeletontrackingApi = new Cubemos.SkeletonTracking.Api(Common.DefaultLicenseDir());
            }
            catch (Cubemos.Exception ex)
            {
                Console.WriteLine("Failed to create the skeleton tracking API handle: " + ex.Message);
                Console.WriteLine("If you haven't activated the SDK yet, please run the post_installation script as described in the Getting Started Guide to activate your license.");
                Console.ReadLine();
                return;
            }

            // Initialise the cubemos DNN framework with the required model
            try
            {
                skeletontrackingApi.LoadModel(Cubemos.TargetComputeDevice.CM_CPU,
                                              Common.DefaultModelDir() + "\\fp32\\skeleton-tracking.cubemos");
            }
            catch (Cubemos.Exception ex)
            {
                Console.WriteLine(String.Format("Error during model loading. " +
                                                "Please verify the model exists at the path {0}. Details: {1}", Common.DefaultModelDir() + "\\fp32\\skeleton-tracking.cubemos", ex));
                Console.ReadLine();
                return;
            }

            Console.Write("Hold the Intel RealSense with person(s) in scene and hit <ENTER>... ");
            Console.ReadLine();

            // Initialise the intel realsense pipeline as an acquisition device
            Pipeline pipeline = new Pipeline();
            Config   cfg      = new Config();

            cfg.EnableStream(Intel.RealSense.Stream.Color, 1280, 720, Format.Bgr8, framerate: 30);
            PipelineProfile pp = pipeline.Start(cfg);

            // Set the network input size to 128 for faster inference
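            // A smaller input size speeds up inference at the cost of keypoint accuracy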
            int networkHeight = 128;

            // Acquire a single color frame and run Skeleton Tracking on it
            using (var frames = pipeline.WaitForFrames())
            {
                var frame = frames.ColorFrame.DisposeWith(frames);
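                // Wrap the raw frame buffer in a Bitmap; a Bgr8 RealSense frame matches the
                // BGR byte order GDI+ uses for Format24bppRgb, so no pixel conversion is needed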
                System.Drawing.Bitmap inputImage =
                    new System.Drawing.Bitmap(frame.Width,
                                              frame.Height,
                                              frame.Stride,
                                              System.Drawing.Imaging.PixelFormat.Format24bppRgb,
                                              frame.Data);

                System.Collections.Generic.List <Cubemos.SkeletonTracking.Api.SkeletonKeypoints> skeletonKeypoints;

                // Send inference request and get the skeletons
                skeletontrackingApi.RunSkeletonTracking(ref inputImage, networkHeight, out skeletonKeypoints);

                // Output detected skeletons
                Console.WriteLine("# Persons detected: " + skeletonKeypoints.Count);
                for (int skeleton_index = 0; skeleton_index < skeletonKeypoints.Count; skeleton_index++)
                {
                    var skeleton = skeletonKeypoints[skeleton_index];
                    Console.WriteLine("Skeleton #" + skeleton_index);
                    for (int joint_index = 0; joint_index < skeleton.listJoints.Count; joint_index++)
                    {
                        Cubemos.SkeletonTracking.Api.Coordinate coordinate = skeleton.listJoints[joint_index];
                        Console.WriteLine("\tJoint coordinate #" + joint_index + ": " + coordinate.x + "; " +
                                          coordinate.y);
                    }
                }
            }
            Console.Write("Press <Enter> to exit... ");
            Console.ReadLine();
        }
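The sample above processes a single frame. A natural extension is to keep tracking until a key is pressed; the sketch below reuses the skeletontrackingApi, pipeline, and networkHeight objects set up in the sample and is a minimal illustration, not part of the original SDK example.

        // Minimal continuous-tracking sketch, assuming the objects created in the sample above
        while (!Console.KeyAvailable)
        {
            using (var frames = pipeline.WaitForFrames())
            {
                var frame = frames.ColorFrame.DisposeWith(frames);
                var inputImage = new System.Drawing.Bitmap(frame.Width,
                                                           frame.Height,
                                                           frame.Stride,
                                                           System.Drawing.Imaging.PixelFormat.Format24bppRgb,
                                                           frame.Data);

                System.Collections.Generic.List<Cubemos.SkeletonTracking.Api.SkeletonKeypoints> skeletons;
                skeletontrackingApi.RunSkeletonTracking(ref inputImage, networkHeight, out skeletons);
                Console.WriteLine("# Persons detected: " + skeletons.Count);
            }
        }
        pipeline.Stop();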
Example #2
        public ProcessingWindow()
        {
            InitializeComponent();

            try
            {
                var cfg = new Config();

                using (var ctx = new Context())
                {
                    var devices = ctx.QueryDevices();
                    if (devices.Count == 0)
                    {
                        throw new Exception("No RealSense device connected");
                    }
                    var dev = devices[0];

                    Console.WriteLine("\nUsing device 0, an {0}", dev.Info[CameraInfo.Name]);
                    Console.WriteLine("    Serial number: {0}", dev.Info[CameraInfo.SerialNumber]);
                    Console.WriteLine("    Firmware version: {0}", dev.Info[CameraInfo.FirmwareVersion]);

                    var sensors     = dev.QuerySensors();
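                    // Assumes the first sensor is the depth (stereo) module and the second
                    // the RGB sensor, which holds for typical RealSense D400-series devices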
                    var depthSensor = sensors[0];
                    var colorSensor = sensors[1];

                    var depthProfile = depthSensor.StreamProfiles
                                       .Where(p => p.Stream == Stream.Depth)
                                       .OrderBy(p => p.Framerate)
                                       .Select(p => p.As <VideoStreamProfile>()).First();

                    var colorProfile = colorSensor.StreamProfiles
                                       .Where(p => p.Stream == Stream.Color)
                                       .OrderBy(p => p.Framerate)
                                       .Select(p => p.As <VideoStreamProfile>()).First();

                    cfg.EnableStream(Stream.Depth, depthProfile.Width, depthProfile.Height, depthProfile.Format, depthProfile.Framerate);
                    cfg.EnableStream(Stream.Color, colorProfile.Width, colorProfile.Height, colorProfile.Format, colorProfile.Framerate);
                }
                var pp = pipeline.Start(cfg);

                // Get the recommended processing blocks for the depth sensor
                var sensor = pp.Device.QuerySensors().First(s => s.Is(Extension.DepthSensor));
                var blocks = sensor.ProcessingBlocks.ToList();

                // Allocate bitmaps for rendering.
                // Since the sample aligns the depth frames to the color frames, both images will have the color resolution
                using (var p = pp.GetStream(Stream.Color).As <VideoStreamProfile>())
                {
                    imgColor.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
                    imgDepth.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
                }
                var updateColor = UpdateImage(imgColor);
                var updateDepth = UpdateImage(imgDepth);

                // Create custom processing block
                // For demonstration purposes it will:
                // a. Get a frameset
                // b. Run post-processing on the depth frame
                // c. Combine the result back into a frameset
                // Processing blocks are inherently thread-safe, play well with
                // other API primitives such as frame queues,
                // and can be used to encapsulate advanced operations.
                // All invocations are, however, synchronous, so the high-level threading model
                // is up to the developer
                block = new CustomProcessingBlock((f, src) =>
                {
                    // We create a FramesReleaser object that tracks all newly allocated
                    // .NET frames and ensures deterministic disposal
                    // at the end of the scope.
                    using (var releaser = new FramesReleaser())
                    {
                        foreach (ProcessingBlock p in blocks)
                        {
                            f = p.Process(f).DisposeWith(releaser);
                        }

                        f = f.ApplyFilter(align).DisposeWith(releaser);
                        f = f.ApplyFilter(colorizer).DisposeWith(releaser);

                        var frames = f.As <FrameSet>().DisposeWith(releaser);

                        var colorFrame     = frames[Stream.Color, Format.Rgb8].DisposeWith(releaser);
                        var colorizedDepth = frames[Stream.Depth, Format.Rgb8].DisposeWith(releaser);

                        // Combine the frames into a single result
                        var res = src.AllocateCompositeFrame(colorizedDepth, colorFrame).DisposeWith(releaser);
                        // Send it to the next processing stage
                        src.FrameReady(res);
                    }
                });

                // Register to results of processing via a callback:
                block.Start(f =>
                {
                    using (var frames = f.As <FrameSet>())
                    {
                        var colorFrame     = frames.ColorFrame.DisposeWith(frames);
                        var colorizedDepth = frames.First <VideoFrame>(Stream.Depth, Format.Rgb8).DisposeWith(frames);

                        Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                        Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                    }
                });

                var token = tokenSource.Token;

                Task.Factory.StartNew(() =>
                {
                    while (!token.IsCancellationRequested)
                    {
                        using (var frames = pipeline.WaitForFrames())
                        {
                            // Invoke custom processing block
                            block.Process(frames);
                        }
                    }
                }, token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }

        }
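The constructor leaves the acquisition task, pipeline, and processing block running for the lifetime of the window. A closing handler along the following lines releases them; this is a minimal sketch assuming the pipeline and tokenSource fields used above, not code from the original sample.

        protected override void OnClosing(System.ComponentModel.CancelEventArgs e)
        {
            // Ask the acquisition loop to exit, then release the camera
            tokenSource.Cancel();
            pipeline.Stop();
            base.OnClosing(e);
        }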