Example #1
        public FrameSet WaitForFrames(uint timeout_ms = 5000, FramesReleaser releaser = null)
        {
            object error;
            var    ptr = NativeMethods.rs2_pipeline_wait_for_frames(m_instance.Handle, timeout_ms, out error);

            return(FramesReleaser.ScopedReturn(releaser, new FrameSet(ptr)));
        }
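In the wrapper, native failures reported through the out error parameter are surfaced as .NET exceptions by the error marshaler, so a timeout from WaitForFrames has to be handled by the caller. A minimal usage sketch, assuming a started Pipeline named pipe (the setup is not part of the snippet above):

    using System;
    using Intel.RealSense;

    static class WaitSketch
    {
        // Sketch only: "pipe" is a Pipeline that has already been started.
        public static void GrabOnce(Pipeline pipe)
        {
            using (var releaser = new FramesReleaser())
            {
                try
                {
                    // The FrameSet is registered with the releaser and is
                    // disposed deterministically when the releaser is.
                    var frames = pipe.WaitForFrames(1000, releaser);
                    var depth  = frames.DepthFrame.DisposeWith(releaser);
                    Console.WriteLine("Depth frame #{0}", depth.Number);
                }
                catch (Exception e)
                {
                    // No frames within 1000 ms (or another native error) lands here.
                    Console.WriteLine("WaitForFrames failed: {0}", e.Message);
                }
            }
        }
    }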
Example #2
        public void SubmitFrame(Frame f, FramesReleaser releaser = null)
        {
            object error;

            NativeMethods.rs2_frame_add_ref(f.m_instance.Handle, out error);
            NativeMethods.rs2_process_frame(m_instance.Handle, f.m_instance.Handle, out error);
        }
Example #3
        public FrameSet WaitForFrames(FramesReleaser releaser = null)
        {
            object error;
            var    ptr = NativeMethods.rs2_wait_for_frame(m_instance.Handle, 5000, out error);

            return(FramesReleaser.ScopedReturn(releaser, new FrameSet(ptr)));
        }
Example #4
        public FrameSet Process(FrameSet original, FramesReleaser releaser = null)
        {
            object error;

            NativeMethods.rs2_frame_add_ref(original.m_instance.Handle, out error);
            NativeMethods.rs2_process_frame(m_instance.Handle, original.m_instance.Handle, out error);
            return(FramesReleaser.ScopedReturn(releaser, queue.WaitForFrames() as FrameSet));
        }
Example #5
 // Add an object to a releaser (if one is provided) and return the object
 public static T ScopedReturn <T>(FramesReleaser releaser, T obj) where T : IDisposable
 {
     if (releaser != null)
     {
         releaser.AddFrameToRelease(obj);
     }
     return(obj);
 }
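The helper's contract, spelled out: with a releaser, the returned object's lifetime is tied to the releaser's scope; with null, the object passes through unchanged and the caller owns disposal. A sketch of both call shapes, assuming a started Pipeline as in the earlier examples:

    using Intel.RealSense;

    static class ScopedReturnSketch
    {
        public static void Contract(Pipeline pipeline)
        {
            using (var releaser = new FramesReleaser())
            {
                // Tracked: disposed when the releaser is disposed.
                var tracked = FramesReleaser.ScopedReturn(releaser, pipeline.WaitForFrames());

                // Pass-through: a null releaser returns the object untouched,
                // so the caller must dispose it explicitly.
                using (var owned = FramesReleaser.ScopedReturn(null, pipeline.WaitForFrames()))
                {
                }
            }
        }
    }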
Example #6
        public VideoFrame Colorize(VideoFrame original, FramesReleaser releaser = null)
        {
            object error;

            NativeMethods.rs2_frame_add_ref(original.m_instance.Handle, out error);
            NativeMethods.rs2_process_frame(m_instance.Handle, original.m_instance.Handle, out error);
            return(FramesReleaser.ScopedReturn(releaser, queue.WaitForFrame() as VideoFrame));
        }
Example #7
        public Points Calculate(Frame original, FramesReleaser releaser = null)
        {
            object error;

            NativeMethods.rs2_frame_add_ref(original.m_instance.Handle, out error);
            NativeMethods.rs2_process_frame(m_instance.Handle, original.m_instance.Handle, out error);
            return(FramesReleaser.ScopedReturn(releaser, queue.WaitForFrame() as Points));
        }
Example #8
        public bool PollForFrame(out Frame frame, FramesReleaser releaser = null)
        {
            object error;

            if (NativeMethods.rs2_poll_for_frame(m_instance.Handle, out frame, out error) > 0)
            {
                frame = FramesReleaser.ScopedReturn(releaser, FrameSet.CreateFrame(frame.m_instance.Handle));
                return(true);
            }
            return(false);
        }
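Unlike the blocking wait, the poll variant returns immediately, so it is typically driven from a render or timer loop. A sketch under the assumption that the host type of PollForFrame above is the wrapper's FrameQueue:

    using System;
    using Intel.RealSense;

    static class PollSketch
    {
        public static void Tick(FrameQueue source)
        {
            using (var releaser = new FramesReleaser())
            {
                Frame frame;
                if (source.PollForFrame(out frame, releaser))
                {
                    // A frame was ready; the releaser now tracks it.
                    Console.WriteLine("Frame #{0} ready", frame.Number);
                }
                // else: nothing arrived yet; try again on the next tick.
            }
        }
    }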
Example #9
        public static FrameSet FromFrame(Frame composite, FramesReleaser releaser = null)
        {
            object error;

            if (NativeMethods.rs2_is_frame_extendable_to(composite.m_instance.Handle,
                                                         Extension.CompositeFrame, out error) > 0)
            {
                NativeMethods.rs2_frame_add_ref(composite.m_instance.Handle, out error);
                return(FramesReleaser.ScopedReturn(releaser, new FrameSet(composite.m_instance.Handle)));
            }
            throw new Exception("The frame is not a composite frame");
        }
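Because FromFrame throws on non-composite input, callers inside a processing-block callback usually guard first. A sketch; Frame.IsComposite is assumed here to mirror the native rs2_is_frame_extendable_to check above:

    using Intel.RealSense;

    static class CompositeGuardSketch
    {
        public static void OnFrame(Frame f)
        {
            // Guard before converting: FromFrame throws for non-composite frames.
            if (f.IsComposite)
            {
                using (var frames = FrameSet.FromFrame(f))
                {
                    // ... operate on the frameset ...
                }
            }
        }
    }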
Example #10
        public FrameSet AllocateCompositeFrame(FramesReleaser releaser, params Frame[] frames)
        {
            object error;
            var    frame_refs = frames.Select(x => x.m_instance.Handle).ToArray();

            foreach (var fref in frame_refs)
            {
                NativeMethods.rs2_frame_add_ref(fref, out error);
            }
            var frame_ref = NativeMethods.rs2_allocate_composite_frame(m_instance.Handle, frame_refs, frames.Count(), out error);

            return(FramesReleaser.ScopedReturn(releaser, new FrameSet(frame_ref)));
        }
Example #11
        public bool PollForFrames(out FrameSet result, FramesReleaser releaser = null)
        {
            object   error;
            FrameSet fs;

            if (NativeMethods.rs2_pipeline_poll_for_frames(m_instance.Handle, out fs, out error) > 0)
            {
                result = FramesReleaser.ScopedReturn(releaser, fs);
                return(true);
            }
            result = null;
            return(false);
        }
Example #12
        public bool PollForFrames(out FrameSet result, FramesReleaser releaser = null)
        {
            object error;
            Frame  f;

            if (NativeMethods.rs2_poll_for_frame(queue.m_instance.Handle, out f, out error) > 0)
            {
                result = FramesReleaser.ScopedReturn(releaser, new FrameSet(f.m_instance.Handle));
                f.Dispose();
                return(true);
            }
            result = null;
            return(false);
        }
Example #13
        public ProcessingWindow()
        {
            InitializeComponent();

            try
            {
                var cfg = new Config();

                using (var ctx = new Context())
                {
                    var devices = ctx.QueryDevices();
                    var dev     = devices[0];

                    Console.WriteLine("\nUsing device 0, an {0}", dev.Info[CameraInfo.Name]);
                    Console.WriteLine("    Serial number: {0}", dev.Info[CameraInfo.SerialNumber]);
                    Console.WriteLine("    Firmware version: {0}", dev.Info[CameraInfo.FirmwareVersion]);

                    var sensors     = dev.QuerySensors();
                    var depthSensor = sensors[0];
                    var colorSensor = sensors[1];

                    var depthProfile = depthSensor.StreamProfiles
                                       .Where(p => p.Stream == Stream.Depth)
                                       .OrderBy(p => p.Framerate)
                                       .Select(p => p.As <VideoStreamProfile>()).First();

                    var colorProfile = colorSensor.StreamProfiles
                                       .Where(p => p.Stream == Stream.Color)
                                       .OrderBy(p => p.Framerate)
                                       .Select(p => p.As <VideoStreamProfile>()).First();

                    cfg.EnableStream(Stream.Depth, depthProfile.Width, depthProfile.Height, depthProfile.Format, depthProfile.Framerate);
                    cfg.EnableStream(Stream.Color, colorProfile.Width, colorProfile.Height, colorProfile.Format, colorProfile.Framerate);
                }
                var pp = pipeline.Start(cfg);

                // Get the recommended processing blocks for the depth sensor
                var sensor = pp.Device.QuerySensors().First(s => s.Is(Extension.DepthSensor));
                var blocks = sensor.ProcessingBlocks.ToList();

                // Allocate bitmaps for rendering.
                // Since the sample aligns the depth frames to the color frames, both images will use the color resolution
                using (var p = pp.GetStream(Stream.Color).As <VideoStreamProfile>())
                {
                    imgColor.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
                    imgDepth.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
                }
                var updateColor = UpdateImage(imgColor);
                var updateDepth = UpdateImage(imgDepth);

                // Create custom processing block
                // For demonstration purposes it will:
                // a. Get a frameset
                // b. Run post-processing on the depth frame
                // c. Combine the result back into a frameset
                // Processing blocks are inherently thread-safe and play well with
                // other API primitives such as frame-queues,
                // and can be used to encapsulate advanced operations.
                // All invocations are, however, synchronous, so the high-level threading model
                // is up to the developer.
                block = new CustomProcessingBlock((f, src) =>
                {
                    // We create a FramesReleaser object that will track
                    // all newly allocated .NET frames and ensure deterministic finalization
                    // at the end of the scope.
                    using (var releaser = new FramesReleaser())
                    {
                        foreach (ProcessingBlock p in blocks)
                        {
                            f = p.Process(f).DisposeWith(releaser);
                        }

                        f = f.ApplyFilter(align).DisposeWith(releaser);
                        f = f.ApplyFilter(colorizer).DisposeWith(releaser);

                        var frames = f.As <FrameSet>().DisposeWith(releaser);

                        var colorFrame     = frames[Stream.Color, Format.Rgb8].DisposeWith(releaser);
                        var colorizedDepth = frames[Stream.Depth, Format.Rgb8].DisposeWith(releaser);

                        // Combine the frames into a single result
                        var res = src.AllocateCompositeFrame(colorizedDepth, colorFrame).DisposeWith(releaser);
                        // Send it to the next processing stage
                        src.FrameReady(res);
                    }
                });

                // Register for results of processing via a callback:
                block.Start(f =>
                {
                    using (var frames = f.As <FrameSet>())
                    {
                        var colorFrame     = frames.ColorFrame.DisposeWith(frames);
                        var colorizedDepth = frames.First <VideoFrame>(Stream.Depth, Format.Rgb8).DisposeWith(frames);

                        Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                        Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                    }
                });

                var token = tokenSource.Token;

                var t = Task.Factory.StartNew(() =>
                {
                    while (!token.IsCancellationRequested)
                    {
                        using (var frames = pipeline.WaitForFrames())
                        {
                            // Invoke custom processing block
                            block.Process(frames);
                        }
                    }
                }, token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }

        }
Example #14
 public FrameSet Process(FrameSet original, FramesReleaser releaser)
 {
     return(Process(original).DisposeWith(releaser));
 }
Example #15
 public VideoFrame ApplyFilter(Frame original, FramesReleaser releaser = null)
 {
     return(Process(original).DisposeWith(releaser) as VideoFrame);
 }
Example #16
 public FrameSet AllocateCompositeFrame(FramesReleaser releaser, params Frame[] frames)
 {
     return(AllocateCompositeFrame((IList <Frame>)frames).DisposeWith(releaser));
 }
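Examples #14-#16 show the newer pattern: rather than threading a releaser through every native call, each overload forwards to the plain method and tags the result with DisposeWith. The two call shapes are therefore equivalent in effect; a sketch, assuming src is the FrameSource handed to a CustomProcessingBlock callback:

    using Intel.RealSense;

    static class CompositeSketch
    {
        public static void Combine(FrameSource src, Frame depth, Frame color)
        {
            using (var releaser = new FramesReleaser())
            {
                // Older shape: the releaser is passed explicitly.
                var a = src.AllocateCompositeFrame(releaser, depth, color);

                // Newer shape: allocate, then register via DisposeWith.
                var b = src.AllocateCompositeFrame(depth, color).DisposeWith(releaser);
            }
        }
    }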
Example #17
        public ProcessingWindow()
        {
            InitializeComponent();

            try
            {
                var cfg = new Config();
                cfg.EnableStream(Stream.Depth, 640, 480);
                cfg.EnableStream(Stream.Color, Format.Rgb8);
                var pp = pipeline.Start(cfg);
                var s  = pp.Device.Sensors;

                var blocks = new List <ProcessingBlock>();

                foreach (var sensor in pp.Device.Sensors)
                {
                    var list = sensor.ProcessingBlocks;
                    foreach (var block in list)
                    {
                        blocks.Add(block);
                    }
                }

                // Allocate bitmaps for rendering.
                // Since the sample aligns the depth frames to the color frames, both images will use the color resolution
                using (var p = pp.GetStream(Stream.Color).As <VideoStreamProfile>())
                {
                    imgColor.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
                    imgDepth.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
                }
                var updateColor = UpdateImage(imgColor);
                var updateDepth = UpdateImage(imgDepth);

                // Create custom processing block
                // For demonstration purposes it will:
                // a. Get a frameset
                // b. Run post-processing on the depth frame
                // c. Combine the result back into a frameset
                // Processing blocks are inherently thread-safe and play well with
                // other API primitives such as frame-queues,
                // and can be used to encapsulate advanced operations.
                // All invocations are, however, synchronous, so the high-level threading model
                // is up to the developer.
                block = new CustomProcessingBlock((f, src) =>
                {
                    // We create a FramesReleaser object that will track
                    // all newly allocated .NET frames and ensure deterministic finalization
                    // at the end of the scope.
                    using (var releaser = new FramesReleaser())
                    {
                        var frames = FrameSet.FromFrame(f).DisposeWith(releaser);

                        foreach (ProcessingBlock p in blocks)
                        {
                            frames = p.Process(frames).DisposeWith(releaser);
                        }

                        frames = frames.ApplyFilter(align).DisposeWith(releaser);
                        frames = frames.ApplyFilter(colorizer).DisposeWith(releaser);

                        var colorFrame     = frames[Stream.Color, Format.Rgb8].DisposeWith(releaser);
                        var colorizedDepth = frames[Stream.Depth, Format.Rgb8].DisposeWith(releaser);

                        // Combine the frames into a single result
                        var res = src.AllocateCompositeFrame(colorizedDepth, colorFrame).DisposeWith(releaser);
                        // Send it to the next processing stage
                        src.FramesReady(res);
                    }
                });

                // Register for results of processing via a callback:
                block.Start(f =>
                {
                    using (var frames = FrameSet.FromFrame(f))
                    {
                        var colorFrame     = frames.ColorFrame.DisposeWith(frames);
                        var colorizedDepth = frames[Stream.Depth, Format.Rgb8].As <VideoFrame>().DisposeWith(frames);

                        Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                        Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                    }
                });

                var token = tokenSource.Token;

                var t = Task.Factory.StartNew(() =>
                {
                    while (!token.IsCancellationRequested)
                    {
                        using (var frames = pipeline.WaitForFrames())
                        {
                            // Invoke custom processing block
                            block.ProcessFrames(frames);
                        }
                    }
                }, token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }

        }
Example #18
        /**
         * NOTES
         * Currently it records immediately after linking the program with LabStreamLayer.
         * There might be a better solution, but we don't want to increase the number of button presses for the protocol. It is probably better to record too much than to forget
         * to press the record button before an experiment.
         *
         * **/
        // Code taken directly from the LibRealSense 2 examples -- captures and displays the depth and RGB camera streams.
        private void startRecordingProcess()
        {
            try
            {
                pipeline  = new Pipeline();
                colorizer = new Colorizer();

                var cfg = new Config();
                cfg.EnableStream(Stream.Depth, 640, 480, Format.Z16, 30);
                cfg.EnableStream(Stream.Color, 640, 480, Format.Bgr8, 30);

                //cfg.EnableRecordToFile(fileRecording); // This is now taken care of by FFMPEG
                pipeline.Start(cfg);

                applyRecordingConfig();

                processBlock = new CustomProcessingBlock((f, src) =>
                {
                    using (var releaser = new FramesReleaser())
                    {
                        var frames = FrameSet.FromFrame(f, releaser);

                        VideoFrame depth = FramesReleaser.ScopedReturn(releaser, frames.DepthFrame);
                        VideoFrame color = FramesReleaser.ScopedReturn(releaser, frames.ColorFrame);

                        var res = src.AllocateCompositeFrame(releaser, depth, color);

                        src.FramesReady(res);
                    }
                });

                processBlock.Start(f =>
                {
                    using (var releaser = new FramesReleaser())
                    {
                        var frames = FrameSet.FromFrame(f, releaser);

                        var depth_frame = FramesReleaser.ScopedReturn(releaser, frames.DepthFrame);
                        var color_frame = FramesReleaser.ScopedReturn(releaser, frames.ColorFrame);

                        var colorized_depth = colorizer.Colorize(depth_frame);

                        UploadImage(imgDepth, colorized_depth);
                        UploadImage(imgColor, color_frame);

                        // Record FFMPEG
                        Bitmap bmpColor = new Bitmap(color_frame.Width, color_frame.Height, color_frame.Stride, System.Drawing.Imaging.PixelFormat.Format24bppRgb, color_frame.Data);
                        vidWriter_Color.WriteVideoFrame(bmpColor);

                        Bitmap bmpDepth = new Bitmap(colorized_depth.Width, colorized_depth.Height, colorized_depth.Stride, System.Drawing.Imaging.PixelFormat.Format24bppRgb, colorized_depth.Data);
                        vidWriter_Depth.WriteVideoFrame(bmpDepth);

                        if (lslOutlet != null)
                        {
                            // Do LSL Streaming Here
                            sample[0] = "" + colorized_depth.Number + "_" + colorized_depth.Timestamp;
                            sample[1] = "" + color_frame.Number + "_" + color_frame.Timestamp;
                            lslOutlet.push_sample(sample, liblsl.local_clock());
                        }
                    }
                });


                var token = tokenSource.Token;

                var t = Task.Factory.StartNew(() =>
                {
                    // Main Loop --
                    while (!token.IsCancellationRequested)
                    {
                        using (var frames = pipeline.WaitForFrames())
                        {
                            processBlock.ProcessFrames(frames);
                        }
                    }
                }, token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }
        }
Example #19
 public static FrameSet FromFrame(Frame composite, FramesReleaser releaser)
 {
     return(FromFrame(composite).DisposeWith(releaser));
 }
Example #20
 public VideoFrame ApplyFilter(Frame original, FramesReleaser releaser)
 {
     return(Process(original).As <VideoFrame>().DisposeWith(releaser));
 }
Example #21
        public ProcessingWindow()
        {
            try
            {
                var cfg = new Config();
                cfg.EnableStream(Stream.Depth, 640, 480);
                cfg.EnableStream(Stream.Color, Format.Rgb8);
                pipeline.Start(cfg);

                // Create custom processing block
                // For demonstration purposes it will:
                // a. Get a frameset
                // b. Break it down to frames
                // c. Run post-processing on the depth frame
                // d. Combine the result back into a frameset
                // Processing blocks are inherently thread-safe and play well with
                // other API primitives such as frame-queues,
                // and can be used to encapsulate advanced operations.
                // All invocations are, however, synchronous, so the high-level threading model
                // is up to the developer.
                block = new CustomProcessingBlock((f, src) =>
                {
                    // We create a FramesReleaser object that will track
                    // all newly allocated .NET frames and ensure deterministic finalization
                    // at the end of the scope.
                    using (var releaser = new FramesReleaser())
                    {
                        var frames = FrameSet.FromFrame(f, releaser);

                        VideoFrame depth = FramesReleaser.ScopedReturn(releaser, frames.DepthFrame);
                        VideoFrame color = FramesReleaser.ScopedReturn(releaser, frames.ColorFrame);

                        // Apply depth post-processing
                        depth = decimate.ApplyFilter(depth, releaser);
                        depth = spatial.ApplyFilter(depth, releaser);
                        depth = temp.ApplyFilter(depth, releaser);

                        // Combine the frames into a single result
                        var res = src.AllocateCompositeFrame(releaser, depth, color);
                        // Send it to the next processing stage
                        src.FramesReady(res);
                    }
                });

                // Register for results of processing via a callback:
                block.Start(f =>
                {
                    using (var releaser = new FramesReleaser())
                    {
                        // Align, colorize and upload frames for rendering
                        var frames = FrameSet.FromFrame(f, releaser);

                        // Align both frames to the viewport of color camera
                        frames = align.Process(frames, releaser);

                        var depth_frame = FramesReleaser.ScopedReturn(releaser, frames.DepthFrame);
                        var color_frame = FramesReleaser.ScopedReturn(releaser, frames.ColorFrame);

                        UploadImage(imgDepth, colorizer.Colorize(depth_frame, releaser));
                        UploadImage(imgColor, color_frame);
                    }
                });

                var token = tokenSource.Token;

                var t = Task.Factory.StartNew(() =>
                {
                    while (!token.IsCancellationRequested)
                    {
                        using (var frames = pipeline.WaitForFrames())
                        {
                            // Invoke custom processing block
                            block.ProcessFrames(frames);
                        }
                    }
                }, token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }

            InitializeComponent();
        }
Example #22
 public Points Calculate(Frame original, FramesReleaser releaser = null)
 {
     return(Process(original).DisposeWith(releaser).As <Points>());
 }
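Calculate belongs to the wrapper's point-cloud block; a usage sketch tying it to a depth/color pair. PointCloud and MapTexture are taken from the wrapper's point-cloud API and are assumptions relative to the snippet itself:

    using Intel.RealSense;

    static class PointCloudSketch
    {
        public static void Deproject(DepthFrame depth, VideoFrame color)
        {
            using (var pc = new PointCloud())
            using (var releaser = new FramesReleaser())
            {
                // Texture-map against the color stream, then deproject the
                // depth frame into a Points object tracked by the releaser.
                pc.MapTexture(color);
                var points = pc.Calculate(depth, releaser);
            }
        }
    }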
Example #23
 public VideoFrame Colorize(Frame original, FramesReleaser releaser = null)
 {
     return(Process(original).As <VideoFrame>().DisposeWith(releaser));
 }