Example #1
        public CaptureWindow()
        {
            InitializeComponent();

            try
            {
                Action <VideoFrame> updateDepth;
                Action <VideoFrame> updateColor;

                // The colorizer processing block will be used to visualize the depth frames.
                colorizer = new Colorizer();

                // Create and configure the pipeline to stream color and depth frames.
                pipeline = new Pipeline();

                var cfg = new Config();
                cfg.EnableStream(Stream.Depth, 640, 480);
                cfg.EnableStream(Stream.Color, Format.Rgb8);

                var pp = pipeline.Start(cfg);

                SetupWindow(pp, out updateDepth, out updateColor);

                Task.Factory.StartNew(() =>
                {
                    while (!tokenSource.Token.IsCancellationRequested)
                    {
                        // We wait for the next available FrameSet and use it as a releaser object that tracks
                        // all newly allocated .NET frames, ensuring deterministic finalization
                        // at the end of the scope.
                        using (var frames = pipeline.WaitForFrames())
                        {
                            var colorFrame = frames.ColorFrame.DisposeWith(frames);
                            var depthFrame = frames.DepthFrame.DisposeWith(frames);

                            // We colorize the depth frame for visualization purposes
                            var colorizedDepth = colorizer.Process <VideoFrame>(depthFrame).DisposeWith(frames);

                            // Render the frames.
                            Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                            Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);

                            Dispatcher.Invoke(new Action(() =>
                            {
                                String depth_dev_sn = new Sensor.CameraInfos(depthFrame.Sensor)[CameraInfo.SerialNumber];
                                txtTimeStamp.Text   = depth_dev_sn + " : " + String.Format("{0,-20:0.00}", depthFrame.Timestamp) + "(" + depthFrame.TimestampDomain.ToString() + ")";
                            }));
                        }
                    }
                }, tokenSource.Token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }
        }
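The render loop above depends on SetupWindow, which is not part of this listing. Below is a minimal sketch of what it plausibly does, assuming WPF Image controls named imgDepth and imgColor; it mirrors the WriteableBitmap setup that Example #6 performs inline, so treat it as an illustration rather than the original helper.

        private void SetupWindow(PipelineProfile pipelineProfile, out Action<VideoFrame> depth, out Action<VideoFrame> color)
        {
            // Allocate one backing bitmap per stream, sized from the active profiles
            using (var p = pipelineProfile.GetStream(Stream.Depth).As<VideoStreamProfile>())
                imgDepth.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            using (var p = pipelineProfile.GetStream(Stream.Color).As<VideoStreamProfile>())
                imgColor.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);

            // Each action copies one frame into its control's bitmap; the loop above
            // invokes these on the UI thread via Dispatcher.Invoke
            depth = f => ((WriteableBitmap)imgDepth.Source).WritePixels(
                new Int32Rect(0, 0, f.Width, f.Height), f.Data, f.Stride * f.Height, f.Stride);
            color = f => ((WriteableBitmap)imgColor.Source).WritePixels(
                new Int32Rect(0, 0, f.Width, f.Height), f.Data, f.Stride * f.Height, f.Stride);
        }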
Example #2
        private void Init()
        {
            using Context ctx = new Context();
            var devices = ctx.QueryDevices();

            Console.WriteLine($"Found {devices.Count} RealSense devices connected.");
            if (devices.Count == 0)
            {
                throw new Exception("No RealSense device detected!");
            }

            Device dev = devices[0];

            Console.WriteLine($"Using device 0: {dev.Info[CameraInfo.Name]}");
            Console.WriteLine("Device Sources:");

            foreach (Sensor sensor in dev.Sensors)
            {
                Console.WriteLine($"Sensor found: {sensor.Info[CameraInfo.Name]}");
            }
            var cfg = new Config();

            cfg.EnableStream(Stream.Depth);
            cfg.EnableStream(Stream.Color, Format.Bgr8);

            intelPipe = new Intel.RealSense.Pipeline();
            PipelineProfile profileIntelPipe = intelPipe.Start(cfg);
            var             streamDepth      = profileIntelPipe.GetStream <VideoStreamProfile>(Stream.Depth);

            sicsDepth = streamDepth.GetIntrinsics();
            Console.WriteLine($"Depth Stream: {sicsDepth.width}X{sicsDepth.height}");

            var streamRBG = profileIntelPipe.GetStream <VideoStreamProfile>(Stream.Color);

            sicsRBG = streamRBG.GetIntrinsics();
            Console.WriteLine($"RBG Stream: {sicsRBG.width}X{sicsRBG.height}");

            Task.Run(() =>
            {
                while (true)
                {
                    try
                    {
                        using FrameSet frames = intelPipe.WaitForFrames();
                        // Do not wrap the queued frames in `using` here: the consumer that
                        // dequeues them owns them and is responsible for disposing them.
                        Frame frDepth = frames.FirstOrDefault(Stream.Depth);
                        qDepth.Enqueue(frDepth);
                        Frame frRBG = frames.FirstOrDefault(Stream.Color);
                        qRBG.Enqueue(frRBG);
                    }
                    catch (Exception e)
                    {
                        Console.WriteLine(e.Message);
                    }
                }
            });
        }
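Frames pushed onto qDepth and qRBG are owned by whichever code dequeues them, and must be disposed there to release the native buffers. A hedged consumer sketch, assuming the queues are ConcurrentQueue<Frame> fields:

        private void DrainDepthQueue()
        {
            while (qDepth.TryDequeue(out Frame frame))
            {
                using (frame)
                using (var depth = frame.As<DepthFrame>())
                {
                    // e.g. sample the range at the image center
                    float d = depth.GetDistance(sicsDepth.width / 2, sicsDepth.height / 2);
                    Console.WriteLine($"Center distance: {d:0.000} m");
                }
            }
        }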
Example #3
        public CaptureWindow()
        {
            try
            {
                pipeline  = new Pipeline();
                colorizer = new Colorizer();

                var cfg = new Config();
                cfg.EnableStream(Stream.Depth, 640, 480);
                cfg.EnableStream(Stream.Color, Format.Rgb8);

                pipeline.Start(cfg);

                var token = tokenSource.Token;

                var t = Task.Factory.StartNew(() =>
                {
                    while (!token.IsCancellationRequested)
                    {
                        var frames = pipeline.WaitForFrames();

                        var depth_frame     = frames.DepthFrame;
                        var color_frame     = frames.ColorFrame;
                        var colorized_depth = colorizer.Colorize(depth_frame);

                        UploadImage(imgDepth, colorized_depth);
                        UploadImage(imgColor, color_frame);

                        // It is important to pre-emptively dispose of native resources
                        // to avoid creating a bottleneck at the finalization stage after GC.
                        // (Also see the FrameReleaser helper object in the next tutorial.)
                        frames.Dispose();
                        depth_frame.Dispose();
                        colorized_depth.Dispose();
                        color_frame.Dispose();
                    }
                }, token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }

            InitializeComponent();
        }
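UploadImage is used by this and several later examples but never shown. A minimal sketch: since the capture loop runs on a worker task, the pixel copy must be marshaled onto the UI thread (the WriteableBitmap sources are assumed to have been allocated as in Example #6):

        private void UploadImage(System.Windows.Controls.Image img, VideoFrame frame)
        {
            Dispatcher.Invoke(new Action(() =>
            {
                var bmp  = (WriteableBitmap)img.Source;
                var rect = new Int32Rect(0, 0, frame.Width, frame.Height);
                bmp.WritePixels(rect, frame.Data, frame.Stride * frame.Height, frame.Stride);
            }));
        }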
Example #4
        static void Main(string[] args)
        {
            using (var ctx = new Context())
            {
                DeviceList devices = ctx.QueryDevices();
                if (devices.Count == 0)
                {
                    Console.WriteLine("RealSense devices are not connected.");
                    return;
                }

                using (var pipeline = new Pipeline(ctx))
                    using (var config = new Config())
                    {
                        // Add pose stream
                        config.EnableStream(Stream.Pose, Format.SixDOF);
                        // Start pipeline with chosen configuration
                        using (var profile = pipeline.Start(config))
                            using (var streamprofile = profile.GetStream(Stream.Pose).As <PoseStreamProfile>())
                            {
                                Console.WriteLine($"\nDevice : {profile.Device.Info[CameraInfo.Name]}");
                                Console.WriteLine($"    Serial number: {profile.Device.Info[CameraInfo.SerialNumber]}");
                                Console.WriteLine($"    Firmware version: {profile.Device.Info[CameraInfo.FirmwareVersion]}");
                                Console.WriteLine($"    Pose stream framerate: {streamprofile.Framerate}\n");
                            }

                        while (true)
                        {
                            // Wait for the next set of frames from the camera
                            using (FrameSet frameset = pipeline.WaitForFrames())
                                // Get a frame from the pose stream
                                using (PoseFrame frame = frameset.PoseFrame)
                                {
                                    // Get pose frame data
                                    Pose data = frame.PoseData;

                                    // Print the x, y, z values of the translation, relative to initial position
                                    Console.Write("\r" + new String(' ', 80));
                                    Console.Write("\rDevice Position: {0} {1} {2} (meters)", data.translation.x.ToString("N3"), data.translation.y.ToString("N3"), data.translation.z.ToString("N3"));
                                }
                        }
                    }
            }
        }
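The Pose struct carries more than the translation printed above. As a hedged extension (field names assumed to follow the wrapper's snake_case C struct layout), the rotation quaternion and tracker confidence could be read inside the same using block:

                                    Console.Write("\rRotation: {0} {1} {2} {3}  Confidence: {4}",
                                                  data.rotation.x.ToString("N3"), data.rotation.y.ToString("N3"),
                                                  data.rotation.z.ToString("N3"), data.rotation.w.ToString("N3"),
                                                  data.tracker_confidence);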
Example #5
        public CaptureWindow()
        {
            try
            {
                pipeline  = new Pipeline();
                colorizer = new Colorizer();

                var cfg = new Config();
                cfg.EnableStream(Stream.Depth, 640, 480);
                cfg.EnableStream(Stream.Color, Format.Rgb8);

                pipeline.Start(cfg);

                var token = tokenSource.Token;

                var t = Task.Factory.StartNew(() =>
                {
                    while (!token.IsCancellationRequested)
                    {
                        var frames = pipeline.WaitForFrames();

                        var colorized_depth = colorizer.Colorize(frames.DepthFrame);
                        UploadImage(imgDepth, colorized_depth);

                        UploadImage(imgColor, frames.ColorFrame);
                    }
                }, token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }

            InitializeComponent();
        }
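Unlike Example #3, this variant never disposes the frameset or the frames it extracts, so native buffers pile up until the garbage collector finalizes them. A hedged rewrite of the loop body with deterministic disposal, using the same names:

                    while (!token.IsCancellationRequested)
                    {
                        using (var frames = pipeline.WaitForFrames())
                        using (var depth = frames.DepthFrame)
                        using (var color = frames.ColorFrame)
                        using (var colorizedDepth = colorizer.Colorize(depth))
                        {
                            UploadImage(imgDepth, colorizedDepth);
                            UploadImage(imgColor, color);
                        }
                    }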
Example #6
        public ProcessingWindow()
        {
            InitializeComponent();

            try
            {
                var cfg = new Config();
                cfg.EnableStream(Stream.Depth, 640, 480);
                cfg.EnableStream(Stream.Color, Format.Rgb8);
                var pp = pipeline.Start(cfg);

                var blocks = new List <ProcessingBlock>();

                foreach (var sensor in pp.Device.Sensors)
                {
                    var list = sensor.ProcessingBlocks;
                    foreach (var block in list)
                    {
                        blocks.Add(block);
                    }
                }

                // Allocate bitmaps for rendering.
                // Since the sample aligns the depth frames to the color frames, both images will have the color resolution.
                using (var p = pp.GetStream(Stream.Color).As <VideoStreamProfile>())
                {
                    imgColor.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
                    imgDepth.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
                }
                var updateColor = UpdateImage(imgColor);
                var updateDepth = UpdateImage(imgDepth);

                // Create custom processing block
                // For demonstration purposes it will:
                // a. Get a frameset
                // b. Run post-processing on the depth frame
                // c. Combine the result back into a frameset
                // Processing blocks are inherently thread-safe and play well with
                // other API primitives such as frame-queues,
                // and can be used to encapsulate advanced operations.
                // All invocations are, however, synchronous, so the high-level threading model
                // is up to the developer
                block = new CustomProcessingBlock((f, src) =>
                {
                    // We create a FrameReleaser object that would track
                    // all newly allocated .NET frames, and ensure deterministic finalization
                    // at the end of scope.
                    using (var releaser = new FramesReleaser())
                    {
                        var frames = FrameSet.FromFrame(f).DisposeWith(releaser);

                        foreach (ProcessingBlock p in blocks)
                        {
                            frames = p.Process(frames).DisposeWith(releaser);
                        }

                        frames = frames.ApplyFilter(align).DisposeWith(releaser);
                        frames = frames.ApplyFilter(colorizer).DisposeWith(releaser);

                        var colorFrame     = frames[Stream.Color, Format.Rgb8].DisposeWith(releaser);
                        var colorizedDepth = frames[Stream.Depth, Format.Rgb8].DisposeWith(releaser);

                        // Combine the frames into a single result
                        var res = src.AllocateCompositeFrame(colorizedDepth, colorFrame).DisposeWith(releaser);
                        // Send it to the next processing stage
                        src.FramesReady(res);
                    }
                });

                // Register to results of processing via a callback:
                block.Start(f =>
                {
                    using (var frames = FrameSet.FromFrame(f))
                    {
                        var colorFrame     = frames.ColorFrame.DisposeWith(frames);
                        var colorizedDepth = frames[Stream.Depth, Format.Rgb8].As <VideoFrame>().DisposeWith(frames);

                        Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                        Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                    }
                });

                var token = tokenSource.Token;

                var t = Task.Factory.StartNew(() =>
                {
                    while (!token.IsCancellationRequested)
                    {
                        using (var frames = pipeline.WaitForFrames())
                        {
                            // Invoke custom processing block
                            block.ProcessFrames(frames);
                        }
                    }
                }, token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }

        }
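The worker loop above only exits when tokenSource is cancelled, but no cancellation path appears in this listing. A hedged sketch of the matching teardown, assuming tokenSource is the CancellationTokenSource field used above:

        protected override void OnClosing(System.ComponentModel.CancelEventArgs e)
        {
            tokenSource.Cancel(); // lets the WaitForFrames loop observe the token and exit
            base.OnClosing(e);
        }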
Example #7
        public MainWindow()
        {
            InitializeComponent();
            pipeline  = new RS.Pipeline();
            colorizer = new RS.Colorizer();


            Action <RS.VideoFrame> mainAction;
            Action <RS.VideoFrame> depthAction;

            var config = new RS.Config();

            config.EnableStream(RS.Stream.Color, 640, 480, RS.Format.Rgb8);
            config.EnableStream(RS.Stream.Depth, 640, 480);

            timer                   = new System.Timers.Timer();
            timer.Elapsed          += Timer_Elapsed;
            timer.Interval          = 1000;
            timer.Enabled           = false;
            timerCountdown          = new System.Timers.Timer();
            timerCountdown.Elapsed += TimerCountdown_Elapsed;
            timerCountdown.Interval = 1000;
            timerCountdown.Enabled  = false;


            try
            {
                var pp = pipeline.Start(config);


                SetupWindow(pp, out mainAction, out depthAction);

                Task.Factory.StartNew(() =>
                {
                    while (!tokenSource.Token.IsCancellationRequested)
                    {
                        using (var frames = pipeline.WaitForFrames())
                        {
                            var mainFrame      = frames.ColorFrame.DisposeWith(frames);
                            var depthFrame     = frames.DepthFrame.DisposeWith(frames);
                            var colorizedDepth = colorizer.Process <VideoFrame>(depthFrame).DisposeWith(frames);
                            Dispatcher.Invoke(DispatcherPriority.Render, mainAction, mainFrame);
                            Dispatcher.Invoke(DispatcherPriority.Render, depthAction, colorizedDepth);
                        }

                        if (isRecording)
                        {
                            imageCount++;
                            Dispatcher.Invoke(new SaveImagesDelegate(SaveImage), new object[] { "image_", imageCount });
                        }
                        else
                        {
                            imageCount = 0;
                        }
                    }
                }, tokenSource.Token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
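SaveImage is invoked through SaveImagesDelegate but is not part of this listing. A minimal sketch with the signature implied by the Invoke call (file-name prefix plus a running index); the imgMain control name is hypothetical, standing in for whichever Image holds the color bitmap:

        delegate void SaveImagesDelegate(string prefix, int index);

        private void SaveImage(string prefix, int index)
        {
            var encoder = new PngBitmapEncoder();
            encoder.Frames.Add(BitmapFrame.Create((BitmapSource)imgMain.Source));
            using (var fs = System.IO.File.Create($"{prefix}{index:D5}.png"))
            {
                encoder.Save(fs);
            }
        }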
Example #8
        public CaptureWindow()
        {
            InitializeComponent();

            try
            {
                Action <VideoFrame> updateDepth;
                Action <VideoFrame> updateColor;

                pipeline  = new Pipeline();
                colorizer = new Colorizer();

                var depthWidth  = 640;
                var depthHeight = 480;
                var depthFrames = 30;
                var depthFormat = Format.Z16;

                var colorWidth  = 640;
                var colorHeight = 480;
                var colorFrames = 30;
                using (var ctx = new Context())
                {
                    var devices = ctx.QueryDevices();
                    var dev     = devices[0];

                    Console.WriteLine("\nUsing device 0, an {0}", dev.Info[CameraInfo.Name]);
                    Console.WriteLine("    Serial number: {0}", dev.Info[CameraInfo.SerialNumber]);
                    Console.WriteLine("    Firmware version: {0}", dev.Info[CameraInfo.FirmwareVersion]);

                    var sensors     = dev.QuerySensors();
                    var depthSensor = sensors[0];
                    var colorSensor = sensors[1];

                    var depthProfiles = depthSensor.StreamProfiles
                                        .Where(p => p.Stream == Stream.Depth)
                                        .OrderBy(p => p.Framerate)
                                        .Select(p => p.As <VideoStreamProfile>());
                    VideoStreamProfile colorProfile = null;

                    // Select a color profile whose framerate matches the depth profile's framerate so the syncer can pair frames smoothly
                    foreach (var depthProfile in depthProfiles)
                    {
                        depthWidth   = depthProfile.Width;
                        depthHeight  = depthProfile.Height;
                        depthFrames  = depthProfile.Framerate;
                        depthFormat  = depthProfile.Format;
                        colorProfile = colorSensor.StreamProfiles
                                       .Where(p => p.Stream == Stream.Color)
                                       .OrderByDescending(p => p.Framerate)
                                       .Select(p => p.As <VideoStreamProfile>())
                                       .FirstOrDefault(p => p.Framerate == depthFrames);
                        if (colorProfile != null)
                        {
                            colorWidth  = colorProfile.Width;
                            colorHeight = colorProfile.Height;
                            colorFrames = colorProfile.Framerate;
                            break;
                        }
                    }
                    if (colorProfile == null)
                    {
                        // If no color profile with the same framerate is found, take the first one
                        colorProfile = colorSensor.StreamProfiles
                                       .Where(p => p.Stream == Stream.Color)
                                       .OrderByDescending(p => p.Framerate)
                                       .Select(p => p.As <VideoStreamProfile>()).FirstOrDefault();
                        if (colorProfile == null)
                        {
                            throw new InvalidOperationException("Error while finding appropriate depth and color profiles");
                        }
                        colorWidth  = colorProfile.Width;
                        colorHeight = colorProfile.Height;
                        colorFrames = colorProfile.Framerate;
                    }
                }

                var cfg = new Config();
                cfg.EnableStream(Stream.Depth, depthWidth, depthHeight, depthFormat, depthFrames);
                cfg.EnableStream(Stream.Color, colorWidth, colorHeight, Format.Rgb8, colorFrames);

                var profile = pipeline.Start(cfg);

                SetupWindow(profile, out updateDepth, out updateColor);

                // Setup the SW device and sensors
                var software_dev  = new SoftwareDevice();
                var depth_sensor  = software_dev.AddSensor("Depth");
                var depth_profile = depth_sensor.AddVideoStream(new SoftwareVideoStream
                {
                    type       = Stream.Depth,
                    index      = 0,
                    uid        = 100,
                    width      = depthWidth,
                    height     = depthHeight,
                    fps        = depthFrames,
                    bpp        = 2,
                    format     = depthFormat,
                    intrinsics = profile.GetStream(Stream.Depth).As <VideoStreamProfile>().GetIntrinsics()
                });
                depth_sensor.AddReadOnlyOption(Option.DepthUnits, 1.0f / 5000);

                var color_sensor  = software_dev.AddSensor("Color");
                var color_profile = color_sensor.AddVideoStream(new SoftwareVideoStream
                {
                    type       = Stream.Color,
                    index      = 0,
                    uid        = 101,
                    width      = colorWidth,
                    height     = colorHeight,
                    fps        = colorFrames,
                    bpp        = 3,
                    format     = Format.Rgb8,
                    intrinsics = profile.GetStream(Stream.Color).As <VideoStreamProfile>().GetIntrinsics()
                });

                // Note about the Syncer: if the actual FPS differs significantly from the FPS reported in AddVideoStream,
                // this can confuse the syncer and prevent it from producing synchronized pairs.
                software_dev.SetMatcher(Matchers.Default);

                var sync = new Syncer();

                // The raw depth->metric units translation scale is required for Colorizer to work
                var realDepthSensor = profile.Device.QuerySensors().First(s => s.Is(Extension.DepthSensor));
                depth_sensor.AddReadOnlyOption(Option.DepthUnits, realDepthSensor.DepthScale);

                depth_sensor.Open(depth_profile);
                color_sensor.Open(color_profile);

                // Push the SW device frames to the syncer
                depth_sensor.Start(sync.SubmitFrame);
                color_sensor.Start(sync.SubmitFrame);

                var token = tokenSource.Token;

                ushort[] depthData = null;
                byte[]   colorData = null;

                var t = Task.Factory.StartNew(() =>
                {
                    while (!token.IsCancellationRequested)
                    {
                        // We use the frames captured from the live camera as the input data for the SW device
                        using (var frames = pipeline.WaitForFrames())
                        {
                            var depthFrame = frames.DepthFrame.DisposeWith(frames);
                            var colorFrame = frames.ColorFrame.DisposeWith(frames);

                            depthData = depthData ?? new ushort[depthFrame.Width * depthFrame.Height];
                            depthFrame.CopyTo(depthData);
                            depth_sensor.AddVideoFrame(depthData, depthFrame.Stride, depthFrame.BitsPerPixel / 8, depthFrame.Timestamp,
                                                       depthFrame.TimestampDomain, (int)depthFrame.Number, depth_profile);

                            colorData = colorData ?? new byte[colorFrame.Stride * colorFrame.Height];
                            colorFrame.CopyTo(colorData);
                            color_sensor.AddVideoFrame(colorData, colorFrame.Stride, colorFrame.BitsPerPixel / 8, colorFrame.Timestamp,
                                                       colorFrame.TimestampDomain, (int)colorFrame.Number, color_profile);
                        }

                        // Display the frames that come from the SW device after synchronization
                        using (var new_frames = sync.WaitForFrames())
                        {
                            if (new_frames.Count == 2)
                            {
                                var colorFrame = new_frames.ColorFrame.DisposeWith(new_frames);
                                var depthFrame = new_frames.DepthFrame.DisposeWith(new_frames);

                                var colorizedDepth = colorizer.Process <VideoFrame>(depthFrame).DisposeWith(new_frames);
                                // Render the frames.
                                Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                                Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                            }
                        }
                    }
                }, token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }
        }
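The DepthUnits value fed to the software sensor above is the raw-to-metric scale of the real device; the same factor converts any Z16 sample to meters. For reference:

        // DepthScale is typically 0.001f (1 mm per raw unit) on D400-series devices
        static float RawDepthToMeters(ushort rawZ16, Sensor depthSensor)
        {
            return rawZ16 * depthSensor.DepthScale;
        }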
Example #9
        /**
         * NOTES
         * Currently it records immediately after linking the program with LabStreamLayer.
         * There might be a better solution, but we don't want to increase the number of button presses
         * for the protocol. It is probably better to record too much than to forget to press
         * the record button before an experiment.
         *
         * **/
        // Code taken directly from the librealsense2 examples -- captures and displays the depth and RGB camera streams.
        private void startRecordingProcess()
        {
            try
            {
                pipeline  = new Pipeline();
                colorizer = new Colorizer();

                var cfg = new Config();
                cfg.EnableStream(Stream.Depth, 640, 480, Format.Z16, 30);
                cfg.EnableStream(Stream.Color, 640, 480, Format.Bgr8, 30);

                //cfg.EnableRecordToFile(fileRecording); // This is now taken care of by FFMPEG
                pipeline.Start(cfg);

                applyRecordingConfig();

                processBlock = new CustomProcessingBlock((f, src) =>
                {
                    using (var releaser = new FramesReleaser())
                    {
                        var frames = FrameSet.FromFrame(f, releaser);

                        VideoFrame depth = FramesReleaser.ScopedReturn(releaser, frames.DepthFrame);
                        VideoFrame color = FramesReleaser.ScopedReturn(releaser, frames.ColorFrame);

                        var res = src.AllocateCompositeFrame(releaser, depth, color);

                        src.FramesReady(res);
                    }
                });

                processBlock.Start(f =>
                {
                    using (var releaser = new FramesReleaser())
                    {
                        var frames = FrameSet.FromFrame(f, releaser);

                        var depth_frame = FramesReleaser.ScopedReturn(releaser, frames.DepthFrame);
                        var color_frame = FramesReleaser.ScopedReturn(releaser, frames.ColorFrame);

                        var colorized_depth = colorizer.Colorize(depth_frame);

                        UploadImage(imgDepth, colorized_depth);
                        UploadImage(imgColor, color_frame);

                        // Record FFMPEG
                        Bitmap bmpColor = new Bitmap(color_frame.Width, color_frame.Height, color_frame.Stride, System.Drawing.Imaging.PixelFormat.Format24bppRgb, color_frame.Data);
                        vidWriter_Color.WriteVideoFrame(bmpColor);

                        Bitmap bmpDepth = new Bitmap(colorized_depth.Width, colorized_depth.Height, colorized_depth.Stride, System.Drawing.Imaging.PixelFormat.Format24bppRgb, colorized_depth.Data);
                        vidWriter_Depth.WriteVideoFrame(bmpDepth);

                        if (lslOutlet != null)
                        {
                            // Do LSL Streaming Here
                            sample[0] = "" + colorized_depth.Number + "_" + colorized_depth.Timestamp;
                            sample[1] = "" + color_frame.Number + "_" + color_frame.Timestamp;
                            lslOutlet.push_sample(sample, liblsl.local_clock());
                        }
                    }
                });


                var token = tokenSource.Token;

                var t = Task.Factory.StartNew(() =>
                {
                    // Main Loop --
                    while (!token.IsCancellationRequested)
                    {
                        using (var frames = pipeline.WaitForFrames())
                        {
                            processBlock.ProcessFrames(frames);
                        }
                    }
                }, token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }
        }
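applyRecordingConfig() is referenced but not shown. A hedged sketch of what it might do, assuming vidWriter_Color and vidWriter_Depth are Accord.Video.FFMPEG VideoFileWriter fields (inferred from the WriteVideoFrame(Bitmap) calls above); the output paths and codec choice are hypothetical:

        private void applyRecordingConfig()
        {
            // 640x480 @ 30 fps matches the streams enabled above
            vidWriter_Color = new Accord.Video.FFMPEG.VideoFileWriter();
            vidWriter_Color.Open("color.avi", 640, 480, 30, Accord.Video.FFMPEG.VideoCodec.MPEG4);

            vidWriter_Depth = new Accord.Video.FFMPEG.VideoFileWriter();
            vidWriter_Depth.Open("depth.avi", 640, 480, 30, Accord.Video.FFMPEG.VideoCodec.MPEG4);
        }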
Example #10
        public CaptureWindow()
        {
            InitializeComponent();

            try
            {
                Action <VideoFrame> updateDepth;
                Action <VideoFrame> updateColor;

                pipeline  = new Pipeline();
                colorizer = new Colorizer();

                var cfg = new Config();
                cfg.EnableStream(Stream.Depth, 640, 480, Format.Z16, 30);
                cfg.EnableStream(Stream.Color, 640, 480, Format.Rgb8, 30);

                var profile = pipeline.Start(cfg);

                SetupWindow(profile, out updateDepth, out updateColor);

                // Setup the SW device and sensors
                var software_dev  = new SoftwareDevice();
                var depth_sensor  = software_dev.AddSensor("Depth");
                var depth_profile = depth_sensor.AddVideoStream(new SoftwareVideoStream
                {
                    type       = Stream.Depth,
                    index      = 0,
                    uid        = 100,
                    width      = 640,
                    height     = 480,
                    fps        = 30,
                    bpp        = 2,
                    format     = Format.Z16,
                    intrinsics = profile.GetStream(Stream.Depth).As <VideoStreamProfile>().GetIntrinsics()
                });
                var color_sensor  = software_dev.AddSensor("Color");
                var color_profile = color_sensor.AddVideoStream(new SoftwareVideoStream
                {
                    type       = Stream.Color,
                    index      = 0,
                    uid        = 101,
                    width      = 640,
                    height     = 480,
                    fps        = 30,
                    bpp        = 3,
                    format     = Format.Rgb8,
                    intrinsics = profile.GetStream(Stream.Color).As <VideoStreamProfile>().GetIntrinsics()
                });

                // Note about the Syncer: if the actual FPS differs significantly from the FPS reported in AddVideoStream,
                // this can confuse the syncer and prevent it from producing synchronized pairs.
                software_dev.SetMatcher(Matchers.Default);

                var sync = new Syncer();

                depth_sensor.Open(depth_profile);
                color_sensor.Open(color_profile);

                // Push the SW device frames to the syncer
                depth_sensor.Start(sync.SubmitFrame);
                color_sensor.Start(sync.SubmitFrame);

                var token = tokenSource.Token;

                ushort[] depthData = null;
                byte[]   colorData = null;

                var t = Task.Factory.StartNew(() =>
                {
                    while (!token.IsCancellationRequested)
                    {
                        // We use the frames captured from the live camera as the input data for the SW device
                        using (var frames = pipeline.WaitForFrames())
                        {
                            var depthFrame = frames.DepthFrame.DisposeWith(frames);
                            var colorFrame = frames.ColorFrame.DisposeWith(frames);

                            depthData = depthData ?? new ushort[depthFrame.Width * depthFrame.Height];
                            depthFrame.CopyTo(depthData);
                            depth_sensor.AddVideoFrame(depthData, depthFrame.Stride, depthFrame.BitsPerPixel / 8, depthFrame.Timestamp,
                                                       depthFrame.TimestampDomain, (int)depthFrame.Number, depth_profile);

                            colorData = colorData ?? new byte[colorFrame.Stride * colorFrame.Height];
                            colorFrame.CopyTo(colorData);
                            color_sensor.AddVideoFrame(colorData, colorFrame.Stride, colorFrame.BitsPerPixel / 8, colorFrame.Timestamp,
                                                       colorFrame.TimestampDomain, (int)colorFrame.Number, color_profile);
                        }

                        // Display the frames that come from the SW device after synchronization
                        using (var new_frames = sync.WaitForFrames())
                        {
                            if (new_frames.Count == 2)
                            {
                                var depthFrame = new_frames.DepthFrame.DisposeWith(new_frames);
                                var colorFrame = new_frames.ColorFrame.DisposeWith(new_frames);

                                VideoFrame colorizedDepth = colorizer.Process(depthFrame).As <VideoFrame>().DisposeWith(new_frames);

                                // Render the frames.
                                Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                                Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                            }
                        }
                    }
                }, token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }
        }
Example #11
        public CaptureWindow()
        {
            InitializeComponent();

            try
            {
                Action <VideoFrame> updateDepth;
                Action <VideoFrame> updateColor;

                // The colorizer processing block will be used to visualize the depth frames.
                colorizer = new Colorizer();

                // Create and configure the pipeline to stream color and depth frames.
                pipeline = new Pipeline();

                using (var ctx = new Context())
                {
                    var devices = ctx.QueryDevices();
                    var dev     = devices[0];

                    Console.WriteLine("\nUsing device 0, an {0}", dev.Info[CameraInfo.Name]);
                    Console.WriteLine("    Serial number: {0}", dev.Info[CameraInfo.SerialNumber]);
                    Console.WriteLine("    Firmware version: {0}", dev.Info[CameraInfo.FirmwareVersion]);

                    var sensors     = dev.QuerySensors();
                    var depthSensor = sensors[0];
                    var colorSensor = sensors[1];

                    var depthProfile = depthSensor.StreamProfiles
                                       .Where(p => p.Stream == Stream.Depth)
                                       .OrderBy(p => p.Framerate)
                                       .Select(p => p.As <VideoStreamProfile>()).First();

                    var colorProfile = colorSensor.StreamProfiles
                                       .Where(p => p.Stream == Stream.Color)
                                       .OrderBy(p => p.Framerate)
                                       .Select(p => p.As <VideoStreamProfile>()).First();

                    if (!testLoadSettingsJson.LoadSettingsJson(dev))
                    {
                        return;
                    }

                    var cfg = new Config();
                    cfg.EnableDevice(dev.Info.GetInfo(CameraInfo.SerialNumber));
                    cfg.EnableStream(Stream.Depth, depthProfile.Width, depthProfile.Height, depthProfile.Format, depthProfile.Framerate);
                    cfg.EnableStream(Stream.Color, colorProfile.Width, colorProfile.Height, colorProfile.Format, colorProfile.Framerate);

                    var pp = pipeline.Start(cfg);
                    SetupWindow(pp, out updateDepth, out updateColor);

                    // more device info
                    Console.WriteLine($"--------------------------");
                    foreach (var item in pp.Device.Info.ToArray())
                    {
                        Console.WriteLine($"{item.Key} - {item.Value}");
                    }
                    Console.WriteLine($"--------------------------");
                }

                Task.Factory.StartNew(() =>
                {
                    while (!tokenSource.Token.IsCancellationRequested)
                    {
                        // We wait for the next available FrameSet and use it as a releaser object that tracks
                        // all newly allocated .NET frames, ensuring deterministic finalization
                        // at the end of the scope.
                        using (var frames = pipeline.WaitForFrames())
                        {
                            var colorFrame = frames.ColorFrame.DisposeWith(frames);
                            var depthFrame = frames.DepthFrame.DisposeWith(frames);

                            // We colorize the depth frame for visualization purposes
                            var colorizedDepth = colorizer.Process <VideoFrame>(depthFrame).DisposeWith(frames);

                            // Render the frames.
                            Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                            Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);

                            Dispatcher.Invoke(new Action(() =>
                            {
                                String depth_dev_sn = depthFrame.Sensor.Info[CameraInfo.SerialNumber];
                                txtTimeStamp.Text   = depth_dev_sn + " : " + String.Format("{0,-20:0.00}", depthFrame.Timestamp) + "(" + depthFrame.TimestampDomain.ToString() + ")";
                            }));
                        }
                    }
                }, tokenSource.Token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }
        }
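testLoadSettingsJson.LoadSettingsJson(dev) is external to this listing. One plausible implementation pushes a JSON preset through the SDK's advanced-mode API; a hedged sketch (the settings path is hypothetical, and the AdvancedDevice usage is assumed from the .NET wrapper):

        public static bool LoadSettingsJson(Device dev)
        {
            try
            {
                var adv = AdvancedDevice.FromDevice(dev);
                adv.JsonConfiguration = System.IO.File.ReadAllText("settings.json");
                return true;
            }
            catch (Exception e)
            {
                Console.WriteLine("Failed to load settings: " + e.Message);
                return false;
            }
        }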
Example #12
        public void timer1_cycle(object sender, EventArgs e)
        {
            try
            {
                double[] ptx   = new double[W_H];  //640*480
                double[] pty   = new double[W_H];  //640*480
                double[] ptz   = new double[W_H];  //640*480
                ushort[] depth = new ushort[W_H];  //640*480


                // Get the camera intrinsics
                var stream          = (VideoStreamProfile)pProfile.GetStream(Stream.Color);
                var colorIntrinsics = stream.GetIntrinsics();

                // Get the depth frame
                var frames = pipeline.WaitForFrames();
                var dframe = frames.DepthFrame;
                dframe.CopyTo(depth);

                //Write_Flag = true;

                // Has recognition been triggered?
                if (Write_Flag == true)
                {
                    Write_Flag = false;

                    // Collect depth within the valid range and convert it to a point cloud
                    int pc_cnt = 0;
                    for (int i = 0; i < W_H; i++)
                    {
                        if (depth[i] > 400 && depth[i] < 6000)
                        {
                            ptx[pc_cnt] = (((i % WIDTH) - colorIntrinsics.ppx) / colorIntrinsics.fx) * depth[i];
                            pty[pc_cnt] = (((i / WIDTH) - colorIntrinsics.ppy) / colorIntrinsics.fy) * depth[i];
                            ptz[pc_cnt] = depth[i];
                            pc_cnt++;
                        }
                    }
                    double[] pcx = new double[pc_cnt];
                    double[] pcy = new double[pc_cnt];
                    double[] pcz = new double[pc_cnt];
                    Array.Copy(ptx, pcx, pc_cnt);
                    Array.Copy(pty, pcy, pc_cnt);
                    Array.Copy(ptz, pcz, pc_cnt);

                    //********************************************************
                    // RANSAC ground-plane extraction
                    int    ITER       = 200;
                    double bpa        = 0;
                    double bpb        = 0;
                    double bpc        = 0;
                    int    best_count = 0;
                    Random rd         = new Random(Guid.NewGuid().GetHashCode());
                    var    rd1        = rd.Next(1, pcx.Length);
                    var    rd2        = rd.Next(1, pcx.Length);
                    var    rd3        = rd.Next(1, pcx.Length);
                    for (int i = 0; i < ITER; i++)
                    {
                        rd1 = rd.Next(1, pcx.Length);
                        rd2 = rd.Next(1, pcx.Length);
                        rd3 = rd.Next(1, pcx.Length);
                        // Fit the plane equation z = a*x + b*y + c
                        double[] plane = new double[3];
                        double   a     = ((pcz[rd1] - pcz[rd2]) * (pcy[rd2] - pcy[rd3]) - (pcz[rd2] - pcz[rd3]) * (pcy[rd1] - pcy[rd2])) / ((pcx[rd1] - pcx[rd2]) * (pcy[rd2] - pcy[rd3]) - (pcx[rd2] - pcx[rd3]) * (pcy[rd1] - pcy[rd2]));
                        double   b     = ((pcz[rd1] - pcz[rd2]) - a * (pcx[rd1] - pcx[rd2])) / (pcy[rd1] - pcy[rd2]);
                        double   c     = pcz[rd1] - a * pcx[rd1] - b * pcy[rd1];

                        double[] dist = new double[pcx.Length];
                        int      m    = 0;
                        for (int k = 0; k < pcx.Length; k++)
                        {
                            dist[k] = Math.Abs(a * pcx[k] + b * pcy[k] + c - pcz[k]);
                            if (dist[k] < 1.0)
                            {
                                m++;
                            }
                        }
                        if (m > best_count)
                        {
                            best_count = m;
                            bpa        = a;
                            bpb        = b;
                            bpc        = c;
                        }
                    }

                    // Split the point cloud into ground points and other points
                    double[] o_pcx    = new double[pcx.Length];
                    double[] o_pcy    = new double[pcx.Length];
                    double[] o_pcz    = new double[pcx.Length];
                    int      o_pc_cnt = 0;
                    for (int i = 0; i < pcx.Length; i++)
                    {
                        double dist = bpa * pcx[i] + bpb * pcy[i] + bpc - pcz[i];
                        if (dist < -20 || dist > 20)
                        {
                            o_pcx[o_pc_cnt] = pcx[i];
                            o_pcy[o_pc_cnt] = pcy[i];
                            o_pcz[o_pc_cnt] = pcz[i];
                            o_pc_cnt++;
                        }
                    }

                    double[] on_pcx = new double[o_pc_cnt];
                    double[] on_pcy = new double[o_pc_cnt];
                    double[] on_pcz = new double[o_pc_cnt];
                    Array.Copy(o_pcx, on_pcx, o_pc_cnt);
                    Array.Copy(o_pcy, on_pcy, o_pc_cnt);
                    Array.Copy(o_pcz, on_pcz, o_pc_cnt);

                    // Build a new depth map that excludes the ground
                    int[,] new_depth      = new int[WIDTH, HEIGHT];
                    double[,] depth_label = new double[WIDTH, HEIGHT];
                    for (int i = 0; i < on_pcx.Length; i++)
                    {
                        var x = Math.Round((on_pcx[i] / on_pcz[i]) * colorIntrinsics.fx + colorIntrinsics.ppx);
                        var y = Math.Round((on_pcy[i] / on_pcz[i]) * colorIntrinsics.fy + colorIntrinsics.ppy);
                        if (x < WIDTH && y < HEIGHT)
                        {
                            new_depth[(int)x, (int)y]   = 1;
                            depth_label[(int)x, (int)y] = on_pcz[i];
                        }
                    }

                    // Connected-component labeling
                    var cc = CalConnections(new_depth);
                    // Find the ground object closest to the person's center
                    double[] dist1    = new double[cc.Count];
                    int[]    dist_key = new int[cc.Count];
                    int      dist_cnt = 0;
                    foreach (int key in cc.Keys)
                    {
                        var cc_values = cc[key];
                        foreach (var val in cc_values)
                        {
                            var d = (val.X - 480) * (val.X - 480) + (val.Y - 320) * (val.Y - 320);
                            //var d = val.Y * val.Y + (val.X - 320) * (val.X - 320);
                            if (dist1[dist_cnt] == 0)
                            {
                                dist1[dist_cnt] = d;
                            }
                            if (d < dist1[dist_cnt])
                            {
                                dist1[dist_cnt] = d;
                            }
                        }
                        dist_key[dist_cnt] = key;
                        dist_cnt++;
                    }

                    // Get the nearest region
                    int ii    = 0;
                    int index = 0;
                    while (true)
                    {
                        if (dist1[ii] == dist1.Min())
                        {
                            index = ii;
                            break;
                        }
                        ii++;
                    }

                    // Convert the obstacle region to a point cloud
                    var obs_depth = cc[dist_key[index]];
                    //obsize_cnt.Content = obs_depth.Count.ToString();
                    double[] obs_pcx = new double[obs_depth.Count];
                    double[] obs_pcy = new double[obs_depth.Count];
                    double[] obs_pcz = new double[obs_depth.Count];
                    byte[,] todisp = new byte[WIDTH, HEIGHT];

                    for (int i = 0; i < obs_depth.Count; i++)
                    {
                        todisp[obs_depth[i].Y, obs_depth[i].X] = 255;

                        obs_pcx[i] = ((obs_depth[i].Y - colorIntrinsics.ppx) / colorIntrinsics.fx) * depth_label[obs_depth[i].Y, obs_depth[i].X];
                        obs_pcy[i] = ((obs_depth[i].X - colorIntrinsics.ppy) / colorIntrinsics.fy) * depth_label[obs_depth[i].Y, obs_depth[i].X];
                        obs_pcz[i] = depth_label[obs_depth[i].Y, obs_depth[i].X];
                    }

                    // Compute the obstacle size and distance
                    double[] size_zs = new double[obs_pcz.Length];
                    for (int i = 0; i < size_zs.Length; i++)
                    {
                        size_zs[i] = Math.Abs((bpa * obs_pcx[i] + bpb * obs_pcy[i] - obs_pcz[i] + bpc) / (Math.Sqrt(bpa * bpa + bpb * bpb + 1)));
                    }
                    double size_z = size_zs.Max();

                    var theta = Math.Acos(1 / Math.Sqrt(bpa * bpa + bpb * bpb + 1)); // the angle should use the absolute value

                    double[] obs_pcx_pj = new double[obs_pcx.Length];
                    double[] obs_pcy_pj = new double[obs_pcx.Length];
                    double[] obs_pcz_pj = new double[obs_pcx.Length];
                    double[] dist_ys    = new double[obs_pcx.Length];
                    for (int i = 0; i < obs_pcx_pj.Length; i++)
                    {
                        var T = (bpa * obs_pcx[i] + bpb * obs_pcy[i] - obs_pcz[i] + bpc) / (bpa * bpa + bpb * bpb + 1);
                        obs_pcx_pj[i] = obs_pcx[i] - bpa * T;
                        obs_pcy_pj[i] = obs_pcy[i] - bpb * T;
                        obs_pcz_pj[i] = obs_pcz[i] + T;
                        dist_ys[i]    = Math.Abs(obs_pcz_pj[i] * Math.Sin(theta) - obs_pcy_pj[i] * Math.Cos(theta));
                    }

                    var size_x = Math.Abs(obs_pcx_pj.Max() - obs_pcx_pj.Min());
                    var size_y = Math.Abs((obs_pcy_pj.Max() - obs_pcy_pj.Min()) * (-1 / Math.Sqrt(bpa * bpa + bpb * bpb + 1)));
                    var dist_y = dist_ys.Min();
                    //********************************************************

                    obsize_x.Content = size_x.ToString("f2");
                    obsize_y.Content = size_y.ToString("f2");
                    obsize_z.Content = size_z.ToString("f2");
                    obdist_y.Content = dist_y.ToString("f2");

                    byte[] ethbuf = new byte[11];
                    // New: convert the obstacle height, length, and distance into the next step's length and height
                    int Dmin    = 80;       // Minimum reserved distance between the foot and the obstacle
                    int Lmax    = 800;      // Maximum step length the robot allows
                    int Hmax    = 500;      // Maximum step height the robot allows
                    int DeltaL  = 30;       // Maximum measurement error (length)
                    int DeltaH  = 30;       // Maximum measurement error (height)
                    int Lnormal = 400;      // Normal walking step length
                    int Hnormal = 100;      // Normal walking step height

                    int nStep          = 0; // Number of normal-length steps to take next
                    int lastStepLength = 0; // Length of the final short step after nStep steps
                    int lastStepHeight = 0; // Height of the final short step after nStep steps
                    int overStepLength = 0; // Step length for stepping over the obstacle
                    int overStepHeight = 0; // Step height for stepping over the obstacle

                    nStep = ((int)dist_y - Dmin) / Lnormal;
                    if (((int)dist_y - Dmin) % Lnormal <= 80)
                    {
                        // If the last step would be shorter than 80, clamp its length to 80
                        lastStepLength = 80;
                    }
                    else
                    {
                        lastStepLength = ((int)dist_y - Dmin) % Lnormal;
                    }
                    lastStepHeight = Hnormal;

                    if ((int)size_z + DeltaH < 200)
                    {
                        // If the step-over height is less than 200, set it to 200
                        overStepHeight = 200;
                    }
                    else
                    {
                        //overStepHeight = (int)size_z + DeltaH;
                        overStepHeight = 400;
                    }
                    overStepLength = 2 * Dmin + (int)size_y + DeltaL;

                    if (overStepLength > Lmax || overStepHeight > Hmax)  // If either step length or step height exceeds its limit, zero both (i.e. do not step over)
                    {
                        overStepLength = 0;
                        overStepHeight = 0;
                    }

                    ComWinTextBox.AppendText("Send to Exoskeleton:" + lastStepLength.ToString() + lastStepHeight.ToString() + overStepLength.ToString() + overStepHeight.ToString() + "\n");

                    //jiang
                    ethbuf[0]  = 0xAA;                                       // Start marker
                    ethbuf[1]  = (byte)nStep;
                    ethbuf[2]  = (byte)((lastStepLength >> 8) & 0x000000FF); // High 8 bits of the last step length
                    ethbuf[3]  = (byte)(lastStepLength & 0x000000FF);        // Low 8 bits of the last step length
                    ethbuf[4]  = (byte)((lastStepHeight >> 8) & 0x000000FF); // High 8 bits of the last step height
                    ethbuf[5]  = (byte)(lastStepHeight & 0x000000FF);        // Low 8 bits of the last step height
                    ethbuf[6]  = (byte)((overStepLength >> 8) & 0x000000FF); // High 8 bits of the step-over length
                    ethbuf[7]  = (byte)(overStepLength & 0x000000FF);        // Low 8 bits of the step-over length
                    ethbuf[8]  = (byte)((overStepHeight >> 8) & 0x000000FF); // High 8 bits of the step-over height
                    ethbuf[9]  = (byte)(overStepHeight & 0x000000FF);        // Low 8 bits of the step-over height
                    ethbuf[10] = 0xEE;                                       // End marker

                    // Send ethbuf over the network
                    NetworkStream sendStream = client.GetStream(); // Get the stream used for data transfer
                    sendStream.Write(ethbuf, 0, ethbuf.Length);    // Write the buffer to the stream
                    //string showmsg = Encoding.Default.GetString(ethbuf, 0, ethbuf.Length);
                    //ComWinTextBox.AppendText("Data sent to client: " + showmsg + "\n");

                    ////write_point_to_txt(pcx, pcy, pcz, "valid_pc");
                    ////write_point_to_txt_1(bpa, bpb, bpc, "best_plane");

                    var bytes = new byte[HEIGHT * WIDTH];
                    for (int i = 0; i < HEIGHT; i++)
                    {
                        for (int j = 0; j < WIDTH; j++)
                        {
                            bytes[i * WIDTH + j] = todisp[j, i];
                        }
                    }
                    var bs     = BitmapSource.Create(WIDTH, HEIGHT, 300, 300, PixelFormats.Gray8, null, bytes, WIDTH);
                    var imgSrc = bs as ImageSource;
                    imgObs.Source = imgSrc;

                    //write_point_to_txt_2(bytes, "obstacle");
                }

                var colorized_depth = colorizer.Colorize(frames.DepthFrame);
                UploadImage(imgDepth, colorized_depth);
                UploadImage(imgColor, frames.ColorFrame);
            }
            catch (Exception)
            {
                // Swallow per-frame errors so the timer loop keeps running
            }
        }
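For reference, the perpendicular distance from a point (x, y, z) to the fitted plane z = a*x + b*y + c is |a*x + b*y + c - z| / sqrt(a*a + b*b + 1). The size_zs computation above applies this normalization, while the RANSAC inlier test uses the unnormalized residual with a fixed 1.0 threshold, which makes that threshold depend on the plane's slope. A small helper capturing the normalized form:

        static double PointPlaneDistance(double a, double b, double c, double x, double y, double z)
        {
            // Distance from (x, y, z) to the plane z = a*x + b*y + c
            return Math.Abs(a * x + b * y + c - z) / Math.Sqrt(a * a + b * b + 1);
        }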
Example #13
        public CaptureWindow()
        {
            //Log.ToFile(LogSeverity.Debug, "1.log");

            try
            {
                pipeline  = new Pipeline();
                colorizer = new Colorizer();

                var cfg = new Config();
                cfg.EnableStream(Stream.Depth, 640, 480, Format.Z16, 30);
                cfg.EnableStream(Stream.Color, 640, 480, Format.Rgb8, 30);

                var profile = pipeline.Start(cfg);

                var software_dev  = new SoftwareDevice();
                var depth_sensor  = software_dev.AddSensor("Depth");
                var depth_profile = depth_sensor.AddVideoStream(new VideoStream
                {
                    type       = Stream.Depth,
                    index      = 0,
                    uid        = 100,
                    width      = 640,
                    height     = 480,
                    fps        = 30,
                    bpp        = 2,
                    fmt        = Format.Z16,
                    intrinsics = (profile.GetStream(Stream.Depth) as VideoStreamProfile).GetIntrinsics()
                });
                var color_sensor  = software_dev.AddSensor("Color");
                var color_profile = color_sensor.AddVideoStream(new VideoStream
                {
                    type       = Stream.Color,
                    index      = 0,
                    uid        = 101,
                    width      = 640,
                    height     = 480,
                    fps        = 30,
                    bpp        = 3,
                    fmt        = Format.Rgb8,
                    intrinsics = (profile.GetStream(Stream.Color) as VideoStreamProfile).GetIntrinsics()
                });
                // Note about the Syncer: if the actual FPS differs significantly from the FPS reported in AddVideoStream,
                // this can confuse the syncer and prevent it from producing synchronized pairs.
                software_dev.SetMatcher(Matchers.Default);

                var sync = new Syncer();

                depth_sensor.Open(depth_profile);
                color_sensor.Open(color_profile);

                depth_sensor.Start(f =>
                {
                    sync.SubmitFrame(f);
                    //Debug.WriteLine("D");
                });
                color_sensor.Start(f => {
                    sync.SubmitFrame(f);
                    //Debug.WriteLine("C");
                });

                var token = tokenSource.Token;

                var t = Task.Factory.StartNew(() =>
                {
                    while (!token.IsCancellationRequested)
                    {
                        var frames = pipeline.WaitForFrames();

                        var depth_frame = frames.DepthFrame;
                        var color_frame = frames.ColorFrame;

                        var bytes = new byte[depth_frame.Stride * depth_frame.Height];
                        depth_frame.CopyTo(bytes);
                        depth_sensor.AddVideoFrame(bytes, depth_frame.Stride, 2, depth_frame.Timestamp,
                                                   depth_frame.TimestampDomain, (int)depth_frame.Number,
                                                   depth_profile);

                        bytes = new byte[color_frame.Stride * color_frame.Height];
                        color_frame.CopyTo(bytes);
                        // Rgb8 is 3 bytes per pixel; use the color frame's own timestamp and
                        // frame number so the syncer can match it against the depth frame.
                        color_sensor.AddVideoFrame(bytes, color_frame.Stride, 3, color_frame.Timestamp,
                                                   color_frame.TimestampDomain, (int)color_frame.Number,
                                                   color_profile);

                        depth_frame.Dispose();
                        color_frame.Dispose();
                        frames.Dispose();

                        var new_frames = sync.WaitForFrames();
                        if (new_frames.Count == 2)
                        {
                            depth_frame = new_frames.DepthFrame;
                            color_frame = new_frames.ColorFrame;

                            var colorized_depth = colorizer.Colorize(depth_frame);

                            UploadImage(imgDepth, colorized_depth);
                            UploadImage(imgColor, color_frame);

                            depth_frame.Dispose();
                            colorized_depth.Dispose();
                            color_frame.Dispose();
                        }
                        new_frames.Dispose();
                    }
                }, token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }

            InitializeComponent();
        }
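One pitfall in the software-device flow above: the VideoStream descriptor passed to AddVideoStream must agree with the buffers later handed to AddVideoFrame (Z16 depth is 2 bytes per pixel, Rgb8 color is 3). A hypothetical helper, not part of the Intel.RealSense API, could centralize that mapping:

        // Hypothetical helper (not part of the SDK): map a stream format to its
        // bytes-per-pixel so AddVideoStream and AddVideoFrame always agree.
        static int BytesPerPixel(Format format)
        {
            switch (format)
            {
                case Format.Z16:  return 2; // 16-bit depth
                case Format.Rgb8: return 3; // three 8-bit channels
                case Format.Bgr8: return 3;
                case Format.Y8:   return 1; // 8-bit grayscale
                default:
                    throw new ArgumentException($"Unhandled format: {format}", nameof(format));
            }
        }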
Example #14
        public ProcessingWindow()
        {
            try
            {
                var cfg = new Config();
                cfg.EnableStream(Stream.Depth, 640, 480);
                cfg.EnableStream(Stream.Color, Format.Rgb8);
                pipeline.Start(cfg);

                // Create custom processing block
                // For demonstration purposes it will:
                // a. Get a frameset
                // b. Break it down to frames
                // c. Run post-processing on the depth frame
                // d. Combine the result back into a frameset
                // Processing blocks are inherently thread-safe and play well with
                // other API primitives such as frame-queues,
                // and can be used to encapsulate advanced operations.
                // All invocations are, however, synchronous, so the high-level threading model
                // is up to the developer.
                block = new CustomProcessingBlock((f, src) =>
                {
                    // We create a FramesReleaser object that will track all newly allocated
                    // .NET frames and ensure deterministic finalization at the end of the scope.
                    using (var releaser = new FramesReleaser())
                    {
                        var frames = FrameSet.FromFrame(f, releaser);

                        VideoFrame depth = FramesReleaser.ScopedReturn(releaser, frames.DepthFrame);
                        VideoFrame color = FramesReleaser.ScopedReturn(releaser, frames.ColorFrame);

                        // Apply depth post-processing
                        depth = decimate.ApplyFilter(depth, releaser);
                        depth = spatial.ApplyFilter(depth, releaser);
                        depth = temp.ApplyFilter(depth, releaser);

                        // Combine the frames into a single result
                        var res = src.AllocateCompositeFrame(releaser, depth, color);
                        // Send it to the next processing stage
                        src.FramesReady(res);
                    }
                });

                // Register for the results of processing via a callback:
                block.Start(f =>
                {
                    using (var releaser = new FramesReleaser())
                    {
                        // Align, colorize and upload frames for rendering
                        var frames = FrameSet.FromFrame(f, releaser);

                        // Align both frames to the viewport of color camera
                        frames = align.Process(frames, releaser);

                        var depth_frame = FramesReleaser.ScopedReturn(releaser, frames.DepthFrame);
                        var color_frame = FramesReleaser.ScopedReturn(releaser, frames.ColorFrame);

                        UploadImage(imgDepth, colorizer.Colorize(depth_frame, releaser));
                        UploadImage(imgColor, color_frame);
                    }
                });

                var token = tokenSource.Token;

                var t = Task.Factory.StartNew(() =>
                {
                    while (!token.IsCancellationRequested)
                    {
                        using (var frames = pipeline.WaitForFrames())
                        {
                            // Invoke custom processing block
                            block.ProcessFrames(frames);
                        }
                    }
                }, token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }

            InitializeComponent();
        }
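This example uses filter fields (decimate, spatial, temp, align, colorizer) whose declarations are not shown. A plausible set, assuming the standard Intel.RealSense processing blocks and alignment to the color viewport:

        // Assumed declarations for the fields the example references; the names follow
        // the code above, the types are the standard Intel.RealSense processing blocks.
        private readonly Pipeline  pipeline  = new Pipeline();
        private readonly Colorizer colorizer = new Colorizer();
        private readonly DecimationFilter decimate = new DecimationFilter();
        private readonly SpatialFilter    spatial  = new SpatialFilter();
        private readonly TemporalFilter   temp     = new TemporalFilter();
        private readonly Align align = new Align(Stream.Color); // align depth to the color viewport
        private readonly CancellationTokenSource tokenSource = new CancellationTokenSource();
        private CustomProcessingBlock block;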
Example #15
        public CaptureWindow()
        {
            InitializeComponent();

            try
            {
                Action <VideoFrame> updateDepth;
                Action <VideoFrame> updateColor;

                // The colorizer processing block will be used to visualize the depth frames.
                colorizer = new Colorizer();

                // Create and configure the pipeline to stream color and depth frames.
                pipeline = new Pipeline();
                var cfg = new Config();

                using (var ctx = new Context())
                {
                    var devices = ctx.QueryDevices();

                    if ((devices.Count != 1) || (!ExampleAutocalibrateDevice.IsTheDeviceD400Series(devices[0])))
                    {
                        Console.WriteLine("The tutorial {0} requires a single Realsense D400 device to run.\nFix the setup and rerun",
                                          System.Diagnostics.Process.GetCurrentProcess().ProcessName);
                        Environment.Exit(1);
                    }

                    var dev = devices[0];
                    Console.WriteLine("Using device 0, an {0}", dev.Info[CameraInfo.Name]);
                    Console.WriteLine("    Serial number: {0}", dev.Info[CameraInfo.SerialNumber]);
                    Console.WriteLine("    Firmware version: {0}", dev.Info[CameraInfo.FirmwareVersion]);

                    var sensors = dev.QuerySensors();

                    var depthProfile = sensors
                                       .SelectMany(s => s.StreamProfiles)
                                       .Where(sp => sp.Stream == Stream.Depth)
                                       .Select(sp => sp.As <VideoStreamProfile>())
                                       .OrderBy(p => p.Framerate)
                                       .First();

                    var colorProfile = sensors
                                       .SelectMany(s => s.StreamProfiles)
                                       .Where(sp => sp.Stream == Stream.Color)
                                       .Select(sp => sp.As <VideoStreamProfile>())
                                       .OrderBy(p => p.Framerate)
                                       .First();


                    cfg.EnableDevice(dev.Info[CameraInfo.SerialNumber]);
                    cfg.EnableStream(Stream.Depth, depthProfile.Width, depthProfile.Height, depthProfile.Format, depthProfile.Framerate);
                    cfg.EnableStream(Stream.Color, colorProfile.Width, colorProfile.Height, colorProfile.Format, colorProfile.Framerate);


                    var pp = pipeline.Start(cfg);

                    SetupWindow(pp, out updateDepth, out updateColor);
                }

                // Rendering task
                var renderingPause = false;
                var rendering      = Task.Factory.StartNew(() =>
                {
                    while (!tokenSource.Token.IsCancellationRequested)
                    {
                        if (renderingPause)
                        {
                            // Rendering is paused during auto-calibration; sleep briefly so the loop doesn't spin hot.
                            System.Threading.Thread.Sleep(50);
                            continue;
                        }
                        // We wait for the next available FrameSet and use it as a releaser object
                        // that tracks all newly allocated .NET frames and ensures deterministic
                        // finalization at the end of the scope.
                        using (var frames = pipeline.WaitForFrames())
                        {
                            var colorFrame = frames.ColorFrame.DisposeWith(frames);
                            var depthFrame = frames.DepthFrame.DisposeWith(frames);

                            // Render the frames.
                            if (depthFrame != null)
                            {
                                // We colorize the depth frame for visualization purposes
                                var colorizedDepth = colorizer.Process <VideoFrame>(depthFrame).DisposeWith(frames);
                                Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                            }
                            if (colorFrame != null)
                            {
                                Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                            }

                            if (depthFrame != null)
                            {
                                Dispatcher.Invoke(new Action(() =>
                                {
                                    String depth_dev_sn = depthFrame.Sensor.Info[CameraInfo.SerialNumber];
                                    txtTimeStamp.Text   = $"{depth_dev_sn} : {depthFrame.Timestamp,-20:0.00}({depthFrame.TimestampDomain})" +
                                                          $"{Environment.NewLine}To start Auto-Calibration flow, switch focus to the application console and press C";
                                }));
                            }
                        }
                    }
                }, tokenSource.Token);

                // Input to calibration mode task
                Task.Factory.StartNew(() =>
                {
                    while (!tokenSource.Token.IsCancellationRequested)
                    {
                        if (ConsoleKey.C == ExampleAutocalibrateDevice.ConsoleGetKey(new[] { ConsoleKey.C },
                                                                                     "To start Auto-Calibration flow, switch focus to the application console and press C"))
                        {
                            renderingPause = true;
                            Console.WriteLine($"{Environment.NewLine}Stopping rendering pipeline...");
                            pipeline.Stop();

                            new ExampleAutocalibrateDevice().Start();

                            Console.WriteLine($"{Environment.NewLine}Starting rendering pipeline...");
                            pipeline.Start(cfg);
                            renderingPause = false;
                        }
                    }
                }, tokenSource.Token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }
        }
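ExampleAutocalibrateDevice.ConsoleGetKey is referenced above but not shown. A hypothetical sketch of such a helper: print the prompt, then block until the user presses one of the allowed keys.

        // Hypothetical sketch of the ConsoleGetKey helper used above: blocks until
        // one of the allowed keys is pressed, ignoring any other input.
        public static ConsoleKey ConsoleGetKey(ConsoleKey[] allowedKeys, string prompt)
        {
            Console.WriteLine(prompt);
            while (true)
            {
                ConsoleKey key = Console.ReadKey(intercept: true).Key;
                if (Array.IndexOf(allowedKeys, key) >= 0)
                {
                    return key;
                }
            }
        }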
Example #16
        public ProcessingWindow()
        {
            InitializeComponent();

            try
            {
                var cfg = new Config();

                using (var ctx = new Context())
                {
                    var devices = ctx.QueryDevices();
                    var dev     = devices[0];

                    Console.WriteLine("\nUsing device 0, an {0}", dev.Info[CameraInfo.Name]);
                    Console.WriteLine("    Serial number: {0}", dev.Info[CameraInfo.SerialNumber]);
                    Console.WriteLine("    Firmware version: {0}", dev.Info[CameraInfo.FirmwareVersion]);

                    var sensors = dev.QuerySensors();
                    // Assumes sensor 0 is the stereo/depth module and sensor 1 the RGB camera,
                    // which holds for D400 series devices.
                    var depthSensor = sensors[0];
                    var colorSensor = sensors[1];

                    var depthProfile = depthSensor.StreamProfiles
                                       .Where(p => p.Stream == Stream.Depth)
                                       .OrderBy(p => p.Framerate)
                                       .Select(p => p.As <VideoStreamProfile>()).First();

                    var colorProfile = colorSensor.StreamProfiles
                                       .Where(p => p.Stream == Stream.Color)
                                       .OrderBy(p => p.Framerate)
                                       .Select(p => p.As <VideoStreamProfile>()).First();

                    cfg.EnableStream(Stream.Depth, depthProfile.Width, depthProfile.Height, depthProfile.Format, depthProfile.Framerate);
                    cfg.EnableStream(Stream.Color, colorProfile.Width, colorProfile.Height, colorProfile.Format, colorProfile.Framerate);
                }
                var pp = pipeline.Start(cfg);

                // Get the recommended processing blocks for the depth sensor
                var sensor = pp.Device.QuerySensors().First(s => s.Is(Extension.DepthSensor));
                var blocks = sensor.ProcessingBlocks.ToList();

                // Allocate bitmaps for rendering.
                // Since the sample aligns the depth frames to the color frames, both images will have the color resolution.
                using (var p = pp.GetStream(Stream.Color).As <VideoStreamProfile>())
                {
                    imgColor.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
                    imgDepth.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
                }
                var updateColor = UpdateImage(imgColor);
                var updateDepth = UpdateImage(imgDepth);

                // Create custom processing block
                // For demonstration purposes it will:
                // a. Get a frameset
                // b. Run post-processing on the depth frame
                // c. Combine the result back into a frameset
                // Processing blocks are inherently thread-safe and play well with
                // other API primitives such as frame-queues,
                // and can be used to encapsulate advanced operations.
                // All invocations are, however, synchronous, so the high-level threading model
                // is up to the developer.
                block = new CustomProcessingBlock((f, src) =>
                {
                    // We create a FramesReleaser object that will track all newly allocated
                    // .NET frames and ensure deterministic finalization at the end of the scope.
                    using (var releaser = new FramesReleaser())
                    {
                        foreach (ProcessingBlock p in blocks)
                        {
                            f = p.Process(f).DisposeWith(releaser);
                        }

                        f = f.ApplyFilter(align).DisposeWith(releaser);
                        f = f.ApplyFilter(colorizer).DisposeWith(releaser);

                        var frames = f.As <FrameSet>().DisposeWith(releaser);

                        var colorFrame     = frames[Stream.Color, Format.Rgb8].DisposeWith(releaser);
                        var colorizedDepth = frames[Stream.Depth, Format.Rgb8].DisposeWith(releaser);

                        // Combine the frames into a single result
                        var res = src.AllocateCompositeFrame(colorizedDepth, colorFrame).DisposeWith(releaser);
                        // Send it to the next processing stage
                        src.FrameReady(res);
                    }
                });

                // Register for the results of processing via a callback:
                block.Start(f =>
                {
                    using (var frames = f.As <FrameSet>())
                    {
                        var colorFrame     = frames.ColorFrame.DisposeWith(frames);
                        var colorizedDepth = frames.First <VideoFrame>(Stream.Depth, Format.Rgb8).DisposeWith(frames);

                        Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                        Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                    }
                });

                var token = tokenSource.Token;

                var t = Task.Factory.StartNew(() =>
                {
                    while (!token.IsCancellationRequested)
                    {
                        using (var frames = pipeline.WaitForFrames())
                        {
                            // Invoke custom processing block
                            block.Process(frames);
                        }
                    }
                }, token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }

        }
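Several examples above call an UpdateImage(image) helper that returns an Action<VideoFrame>. A minimal sketch, assuming each Image control's Source is a WriteableBitmap whose pixel format matches the incoming frames (Rgb24 for the Rgb8 streams used here):

        // Minimal sketch of the UpdateImage helper these examples assume: returns a
        // delegate that copies a VideoFrame's pixel data into the Image's bitmap.
        static Action<VideoFrame> UpdateImage(System.Windows.Controls.Image img)
        {
            var wbmp = img.Source as WriteableBitmap;
            return new Action<VideoFrame>(frame =>
            {
                var rect = new Int32Rect(0, 0, frame.Width, frame.Height);
                wbmp.WritePixels(rect, frame.Data, frame.Stride * frame.Height, frame.Stride);
            });
        }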