public void Release(FrameSet t) { lock (locker) { stack.Push(t); } }
public FrameSet Process(FrameSet original)
{
    object error;
    // rs2_process_frame takes ownership of the frame handle it is given and
    // delivers the processed result to the block's internal queue.
    NativeMethods.rs2_process_frame(m_instance.Handle, original.m_instance.Handle, out error);
    return queue.WaitForFrames();
}
public FrameSet WaitForFrames(uint timeout_ms = 5000u)
{
    object error;
    var ptr = NativeMethods.rs2_pipeline_wait_for_frames(Handle, timeout_ms, out error);
    return FrameSet.Create(ptr);
}
public FrameSet WaitForFrames(uint timeout_ms = 5000)
{
    object error;
    var ptr = NativeMethods.rs2_wait_for_frame(queue.m_instance.Handle, timeout_ms, out error);
    return FrameSet.Create(ptr);
}
public FrameSet AllocateCompositeFrame(IList<Frame> frames)
{
    if (frames == null)
    {
        throw new ArgumentNullException(nameof(frames));
    }

    IntPtr frame_refs = IntPtr.Zero;
    try
    {
        object error;
        int fl = frames.Count;
        // Marshal the frame handles into a native array; each frame gets an
        // extra reference because the composite frame claims ownership of them.
        frame_refs = Marshal.AllocHGlobal(fl * IntPtr.Size);
        for (int i = 0; i < fl; i++)
        {
            var fr = frames[i].m_instance.Handle;
            Marshal.WriteIntPtr(frame_refs, i * IntPtr.Size, fr);
            NativeMethods.rs2_frame_add_ref(fr, out error);
        }

        var frame_ref = NativeMethods.rs2_allocate_composite_frame(m_instance.Handle, frame_refs, fl, out error);
        return FrameSet.Create(frame_ref);
    }
    finally
    {
        if (frame_refs != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(frame_refs);
        }
    }
}
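// Illustrative usage sketch for AllocateCompositeFrame (not part of the wrapper
// itself): inside a CustomProcessingBlock callback, individual frames are
// recombined into a single frameset before being handed to the next stage,
// mirroring the ProcessingWindow examples further down. `CreateRecombineBlock`
// is a hypothetical name; the two-argument AllocateCompositeFrame overload used
// by those examples is assumed.
static CustomProcessingBlock CreateRecombineBlock()
{
    return new CustomProcessingBlock((f, src) =>
    {
        using (var frames = FrameSet.FromFrame(f))
        using (var depth = frames.DepthFrame)
        using (var color = frames.ColorFrame)
        using (var composite = src.AllocateCompositeFrame(depth, color))
        {
            src.FramesReady(composite); // pass the recombined frameset downstream
        }
    });
}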
public Frame WaitForFrame(FramesReleaser releaser = null)
{
    object error;
    var ptr = NativeMethods.rs2_wait_for_frame(m_instance.Handle, 5000, out error);
    return FramesReleaser.ScopedReturn(releaser, FrameSet.CreateFrame(ptr));
}
public FrameSet Process(FrameSet original, FramesReleaser releaser = null)
{
    object error;
    // Add a reference so the caller's frameset survives rs2_process_frame,
    // which takes ownership of the handle it is given.
    NativeMethods.rs2_frame_add_ref(original.m_instance.Handle, out error);
    NativeMethods.rs2_process_frame(m_instance.Handle, original.m_instance.Handle, out error);
    return FramesReleaser.ScopedReturn(releaser, queue.WaitForFrames() as FrameSet);
}
public FrameSet WaitForFrames()
{
    object error;
    var ptr = NativeMethods.rs2_wait_for_frame(m_instance.Handle, 5000, out error);
    return new FrameSet(ptr);
}
public bool TryWaitForFrames(out FrameSet frames, uint timeout_ms = 5000)
{
    object error;
    IntPtr ptr;
    bool res = NativeMethods.rs2_pipeline_try_wait_for_frames(m_instance.Handle, out ptr, timeout_ms, out error) > 0;
    frames = res ? FrameSet.Create(ptr) : null;
    return res;
}
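// Illustrative usage sketch (not from the wrapper): unlike WaitForFrames, which
// surfaces a timeout as an exception, TryWaitForFrames reports it through the
// return value, so a capture loop can keep running. `pipeline` is assumed to be
// a started Pipeline; `CaptureLoop` is a hypothetical name.
static void CaptureLoop(Pipeline pipeline, CancellationToken token)
{
    while (!token.IsCancellationRequested)
    {
        FrameSet frames;
        if (!pipeline.TryWaitForFrames(out frames, 1000u))
        {
            continue; // no frameset arrived within 1000 ms; check the token and retry
        }

        using (frames)
        {
            // consume the frameset here
        }
    }
}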
public bool PollForFrame(out Frame frame)
{
    object error;
    if (NativeMethods.rs2_poll_for_frame(m_instance.Handle, out frame, out error) > 0)
    {
        frame = FrameSet.CreateFrame(frame.m_instance.Handle);
        return true;
    }

    return false;
}
public bool PollForFrame(out Frame frame, FramesReleaser releaser = null)
{
    object error;
    if (NativeMethods.rs2_poll_for_frame(m_instance.Handle, out frame, out error) > 0)
    {
        frame = FramesReleaser.ScopedReturn(releaser, FrameSet.CreateFrame(frame.m_instance.Handle));
        return true;
    }

    return false;
}
public bool PollForFrames(out FrameSet result)
{
    object error;
    IntPtr ptr;
    if (NativeMethods.rs2_poll_for_frame(queue.m_instance.Handle, out ptr, out error) > 0)
    {
        result = FrameSet.Create(ptr);
        return true;
    }

    result = null;
    return false;
}
public bool PollForFrames(out FrameSet result, FramesReleaser releaser = null)
{
    object error;
    FrameSet fs;
    if (NativeMethods.rs2_pipeline_poll_for_frames(m_instance.Handle, out fs, out error) > 0)
    {
        result = FramesReleaser.ScopedReturn(releaser, fs);
        return true;
    }

    result = null;
    return false;
}
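// Illustrative sketch (not from the wrapper): the polling variants above never
// block, which suits UI or render loops that must keep ticking even when no new
// frameset has arrived. `pipeline` is assumed to be a started Pipeline and
// `draw` is a hypothetical render callback.
static void RenderTick(Pipeline pipeline, Action<FrameSet> draw)
{
    FrameSet frames;
    if (pipeline.PollForFrames(out frames))
    {
        using (frames)
        {
            draw(frames);
        }
    }
    // when PollForFrames returns false, skip this tick and try again later
}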
public FrameSet Process(FrameSet original)
{
    FrameSet rv;
    using (var singleOriginal = original.AsFrame())
    using (var processed = Process(singleOriginal))
    {
        rv = FrameSet.FromFrame(processed);
    }

    return rv;
}
public bool PollForFrames(out FrameSet result)
{
    object error;
    IntPtr fs;
    if (NativeMethods.rs2_pipeline_poll_for_frames(m_instance.Handle, out fs, out error) > 0)
    {
        result = FrameSet.Pool.Get(fs);
        return true;
    }

    result = null;
    return false;
}
public bool PollForFrames(out FrameSet result, FramesReleaser releaser = null)
{
    object error;
    Frame f;
    if (NativeMethods.rs2_poll_for_frame(queue.m_instance.Handle, out f, out error) > 0)
    {
        result = FramesReleaser.ScopedReturn(releaser, new FrameSet(f.m_instance.Handle));
        f.Dispose();
        return true;
    }

    result = null;
    return false;
}
static void Main(string[] args)
{
    using (var ctx = new Context())
    {
        DeviceList devices = ctx.QueryDevices();
        if (devices.Count == 0)
        {
            Console.WriteLine("No RealSense devices are connected.");
            return;
        }

        using (var pipeline = new Pipeline(ctx))
        using (var config = new Config())
        {
            // Add pose stream
            config.EnableStream(Stream.Pose, Format.SixDOF);

            // Start pipeline with chosen configuration
            using (var profile = pipeline.Start(config))
            using (var streamprofile = profile.GetStream(Stream.Pose).As<PoseStreamProfile>())
            {
                Console.WriteLine($"\nDevice : {profile.Device.Info[CameraInfo.Name]}");
                Console.WriteLine($"    Serial number: {profile.Device.Info[CameraInfo.SerialNumber]}");
                Console.WriteLine($"    Firmware version: {profile.Device.Info[CameraInfo.FirmwareVersion]}");
                Console.WriteLine($"    Pose stream framerate: {streamprofile.Framerate}\n");
            }

            while (true)
            {
                // Wait for the next set of frames from the camera
                using (FrameSet frameset = pipeline.WaitForFrames())
                // Get a frame from the pose stream
                using (PoseFrame frame = frameset.PoseFrame)
                {
                    // Get pose frame data
                    Pose data = frame.PoseData;

                    // Print the x, y, z values of the translation, relative to initial position
                    Console.Write("\r" + new String(' ', 80));
                    Console.Write("\rDevice Position: {0} {1} {2} (meters)",
                        data.translation.x.ToString("N3"),
                        data.translation.y.ToString("N3"),
                        data.translation.z.ToString("N3"));
                }
            }
        }
    }
}
public FrameSet Get(IntPtr ptr)
{
    lock (locker)
    {
        if (stack.Count != 0)
        {
            // Reuse a pooled FrameSet wrapper instead of allocating a new one
            FrameSet f = stack.Pop();
            f.m_instance = new HandleRef(f, ptr);
            f.disposedValue = false;
            object error;
            f.m_count = NativeMethods.rs2_embedded_frames_count(f.m_instance.Handle, out error);
            f.m_enum.Reset();
            //f.m_disposable = new EmptyDisposable();
            f.disposables.Clear();
            return f;
        }
        else
        {
            return new FrameSet(ptr);
        }
    }
}
public static FrameSet ApplyFilter(this FrameSet frames, IProcessingBlock block) { return(block.Process(frames)); }
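// Illustrative usage sketch for the ApplyFilter extension (not from the
// wrapper): because it returns the processed FrameSet, post-processing stages
// can be applied one after another. The align and colorizer blocks mirror those
// used in the ProcessingWindow examples below; `AlignAndColorize` is a
// hypothetical name.
static void AlignAndColorize(Pipeline pipeline, Align align, Colorizer colorizer)
{
    using (var frames = pipeline.WaitForFrames())
    using (var aligned = frames.ApplyFilter(align))
    using (var colorized = aligned.ApplyFilter(colorizer))
    {
        // `colorized` now holds the aligned frameset with a colorized depth frame
    }
}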
public void FramesReady(FrameSet fs) { using (fs) FrameReady(fs.m_instance.Handle); }
/**
 * NOTES
 * Currently it records immediately after linking the program with LabStreamLayer.
 * There might be a better solution, but we don't want to increase the number of
 * button presses for the protocol. It is probably better to record more than to
 * forget pressing the record button before an experiment.
 **/
// Code taken directly from the LibRealSense 2 examples -- captures and displays depth and RGB camera.
private void startRecordingProcess()
{
    try
    {
        pipeline = new Pipeline();
        colorizer = new Colorizer();

        var cfg = new Config();
        cfg.EnableStream(Stream.Depth, 640, 480, Format.Z16, 30);
        cfg.EnableStream(Stream.Color, 640, 480, Format.Bgr8, 30);
        //cfg.EnableRecordToFile(fileRecording); // This is now taken care of by FFMPEG

        pipeline.Start(cfg);
        applyRecordingConfig();

        processBlock = new CustomProcessingBlock((f, src) =>
        {
            using (var releaser = new FramesReleaser())
            {
                var frames = FrameSet.FromFrame(f, releaser);
                VideoFrame depth = FramesReleaser.ScopedReturn(releaser, frames.DepthFrame);
                VideoFrame color = FramesReleaser.ScopedReturn(releaser, frames.ColorFrame);
                var res = src.AllocateCompositeFrame(releaser, depth, color);
                src.FramesReady(res);
            }
        });

        processBlock.Start(f =>
        {
            using (var releaser = new FramesReleaser())
            {
                var frames = FrameSet.FromFrame(f, releaser);
                var depth_frame = FramesReleaser.ScopedReturn(releaser, frames.DepthFrame);
                var color_frame = FramesReleaser.ScopedReturn(releaser, frames.ColorFrame);
                var colorized_depth = colorizer.Colorize(depth_frame);

                UploadImage(imgDepth, colorized_depth);
                UploadImage(imgColor, color_frame);

                // Record FFMPEG
                Bitmap bmpColor = new Bitmap(color_frame.Width, color_frame.Height, color_frame.Stride,
                    System.Drawing.Imaging.PixelFormat.Format24bppRgb, color_frame.Data);
                vidWriter_Color.WriteVideoFrame(bmpColor);

                Bitmap bmpDepth = new Bitmap(colorized_depth.Width, colorized_depth.Height, colorized_depth.Stride,
                    System.Drawing.Imaging.PixelFormat.Format24bppRgb, colorized_depth.Data);
                vidWriter_Depth.WriteVideoFrame(bmpDepth);

                if (lslOutlet != null)
                {
                    // Do LSL streaming here
                    sample[0] = "" + colorized_depth.Number + "_" + colorized_depth.Timestamp;
                    sample[1] = "" + color_frame.Number + "_" + color_frame.Timestamp;
                    lslOutlet.push_sample(sample, liblsl.local_clock());
                }
            }
        });

        var token = tokenSource.Token;
        var t = Task.Factory.StartNew(() =>
        {
            // Main loop -- feed framesets into the custom processing block
            while (!token.IsCancellationRequested)
            {
                using (var frames = pipeline.WaitForFrames())
                {
                    processBlock.ProcessFrames(frames);
                }
            }
        }, token);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Application.Current.Shutdown();
    }
}
public Enumerator(FrameSet fs) { this.fs = fs; index = 0; current = default(Frame); }
public static FrameSet AsFrameSet(this Frame frame) { return(FrameSet.FromFrame(frame)); }
public void ProcessFrames(FrameSet fs) { using (var f = fs.AsFrame()) ProcessFrame(f); }
public ProcessingWindow()
{
    InitializeComponent();

    try
    {
        var cfg = new Config();
        cfg.EnableStream(Stream.Depth, 640, 480);
        cfg.EnableStream(Stream.Color, Format.Rgb8);

        var pp = pipeline.Start(cfg);

        // Collect the recommended processing blocks from every sensor on the device
        var blocks = new List<ProcessingBlock>();
        foreach (var sensor in pp.Device.Sensors)
        {
            foreach (var block in sensor.ProcessingBlocks)
            {
                blocks.Add(block);
            }
        }

        // Allocate bitmaps for rendering.
        // Since the sample aligns the depth frames to the color frames, both images will have the color resolution
        using (var p = pp.GetStream(Stream.Color).As<VideoStreamProfile>())
        {
            imgColor.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            imgDepth.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
        }

        var updateColor = UpdateImage(imgColor);
        var updateDepth = UpdateImage(imgDepth);

        // Create custom processing block
        // For demonstration purposes it will:
        // a. Get a frameset
        // b. Run post-processing on the depth frame
        // c. Combine the result back into a frameset
        // Processing blocks are inherently thread-safe and play well with
        // other API primitives such as frame-queues,
        // and can be used to encapsulate advanced operations.
        // All invocations are, however, synchronous so the high-level threading model
        // is up to the developer
        block = new CustomProcessingBlock((f, src) =>
        {
            // We create a FramesReleaser object that tracks
            // all newly allocated .NET frames, and ensures deterministic finalization
            // at the end of scope.
            using (var releaser = new FramesReleaser())
            {
                var frames = FrameSet.FromFrame(f).DisposeWith(releaser);

                foreach (ProcessingBlock p in blocks)
                {
                    frames = p.Process(frames).DisposeWith(releaser);
                }

                frames = frames.ApplyFilter(align).DisposeWith(releaser);
                frames = frames.ApplyFilter(colorizer).DisposeWith(releaser);

                var colorFrame = frames[Stream.Color, Format.Rgb8].DisposeWith(releaser);
                var colorizedDepth = frames[Stream.Depth, Format.Rgb8].DisposeWith(releaser);

                // Combine the frames into a single result
                var res = src.AllocateCompositeFrame(colorizedDepth, colorFrame).DisposeWith(releaser);
                // Send it to the next processing stage
                src.FramesReady(res);
            }
        });

        // Register to results of processing via a callback:
        block.Start(f =>
        {
            using (var frames = FrameSet.FromFrame(f))
            {
                var colorFrame = frames.ColorFrame.DisposeWith(frames);
                var colorizedDepth = frames[Stream.Depth, Format.Rgb8].As<VideoFrame>().DisposeWith(frames);

                Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
            }
        });

        var token = tokenSource.Token;
        var t = Task.Factory.StartNew(() =>
        {
            while (!token.IsCancellationRequested)
            {
                using (var frames = pipeline.WaitForFrames())
                {
                    // Invoke custom processing block
                    block.ProcessFrames(frames);
                }
            }
        }, token);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Application.Current.Shutdown();
    }
}
public FrameSet Process(FrameSet original, FramesReleaser releaser) { return(Process(original).DisposeWith(releaser)); }
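// Illustrative sketch of the FramesReleaser overload above (not from the
// wrapper): a frameset registered with the releaser via DisposeWith is disposed
// deterministically when the releaser's scope ends, so intermediate results of
// a multi-stage pipeline are not leaked. `AlignOnce` is a hypothetical name;
// the align.Process(frames, releaser) call mirrors the ProcessingWindow example
// at the end of this file.
static void AlignOnce(Pipeline pipeline, Align align)
{
    using (var releaser = new FramesReleaser())
    using (var frames = pipeline.WaitForFrames())
    {
        var aligned = align.Process(frames, releaser);
        // `aligned` is released when `releaser` is disposed, so no explicit
        // Dispose call is needed for the intermediate frameset
    }
}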
[DllImport(dllName, CallingConvention = CallingConvention.Cdecl)]
internal static extern int rs2_pipeline_poll_for_frames(IntPtr pipe,
    [Out, MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(FrameSetMarshaler))] out FrameSet output_frame,
    [MarshalAs(UnmanagedType.CustomMarshaler, MarshalTypeRef = typeof(Helpers.ErrorMarshaler))] out object error);
public void Release(FrameSet t) { stack.Push(t); }
public void FramesReady(FrameSet fs) { using (var f = fs.AsFrame()) FrameReady(f); }
public ProcessingWindow()
{
    try
    {
        var cfg = new Config();
        cfg.EnableStream(Stream.Depth, 640, 480);
        cfg.EnableStream(Stream.Color, Format.Rgb8);

        pipeline.Start(cfg);

        // Create custom processing block
        // For demonstration purposes it will:
        // a. Get a frameset
        // b. Break it down to frames
        // c. Run post-processing on the depth frame
        // d. Combine the result back into a frameset
        // Processing blocks are inherently thread-safe and play well with
        // other API primitives such as frame-queues,
        // and can be used to encapsulate advanced operations.
        // All invocations are, however, synchronous so the high-level threading model
        // is up to the developer
        block = new CustomProcessingBlock((f, src) =>
        {
            // We create a FramesReleaser object that tracks
            // all newly allocated .NET frames, and ensures deterministic finalization
            // at the end of scope.
            using (var releaser = new FramesReleaser())
            {
                var frames = FrameSet.FromFrame(f, releaser);
                VideoFrame depth = FramesReleaser.ScopedReturn(releaser, frames.DepthFrame);
                VideoFrame color = FramesReleaser.ScopedReturn(releaser, frames.ColorFrame);

                // Apply depth post-processing
                depth = decimate.ApplyFilter(depth, releaser);
                depth = spatial.ApplyFilter(depth, releaser);
                depth = temp.ApplyFilter(depth, releaser);

                // Combine the frames into a single result
                var res = src.AllocateCompositeFrame(releaser, depth, color);
                // Send it to the next processing stage
                src.FramesReady(res);
            }
        });

        // Register to results of processing via a callback:
        block.Start(f =>
        {
            using (var releaser = new FramesReleaser())
            {
                // Align, colorize and upload frames for rendering
                var frames = FrameSet.FromFrame(f, releaser);

                // Align both frames to the viewport of the color camera
                frames = align.Process(frames, releaser);

                var depth_frame = FramesReleaser.ScopedReturn(releaser, frames.DepthFrame);
                var color_frame = FramesReleaser.ScopedReturn(releaser, frames.ColorFrame);

                UploadImage(imgDepth, colorizer.Colorize(depth_frame, releaser));
                UploadImage(imgColor, color_frame);
            }
        });

        var token = tokenSource.Token;
        var t = Task.Factory.StartNew(() =>
        {
            while (!token.IsCancellationRequested)
            {
                using (var frames = pipeline.WaitForFrames())
                {
                    // Invoke custom processing block
                    block.ProcessFrames(frames);
                }
            }
        }, token);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Application.Current.Shutdown();
    }

    InitializeComponent();
}