private void Init()
{
    using Context ctx = new Context();
    var devices = ctx.QueryDevices();
    Console.WriteLine($"Found {devices.Count} RealSense devices connected.");
    if (devices.Count == 0)
    {
        throw new Exception("No RealSense device detected!");
    }

    Device dev = devices[0];
    Console.WriteLine($"Using device 0: {dev.Info[CameraInfo.Name]}");
    Console.WriteLine("Device Sources:");
    foreach (Sensor sensor in dev.Sensors)
    {
        Console.WriteLine($"Sensor found: {sensor.Info[CameraInfo.Name]}");
    }

    var cfg = new Config();
    cfg.EnableStream(Stream.Depth);
    cfg.EnableStream(Stream.Color, Format.Bgr8);

    intelPipe = new Intel.RealSense.Pipeline();
    PipelineProfile profileIntelPipe = intelPipe.Start(cfg);

    var streamDepth = profileIntelPipe.GetStream<VideoStreamProfile>(Stream.Depth);
    sicsDepth = streamDepth.GetIntrinsics();
    Console.WriteLine($"Depth Stream: {sicsDepth.width}x{sicsDepth.height}");

    var streamRBG = profileIntelPipe.GetStream<VideoStreamProfile>(Stream.Color);
    sicsRBG = streamRBG.GetIntrinsics();
    Console.WriteLine($"RGB Stream: {sicsRBG.width}x{sicsRBG.height}");

    Task.Run(() =>
    {
        while (true)
        {
            try
            {
                using FrameSet frames = intelPipe.WaitForFrames();
                // Do NOT wrap the enqueued frames in `using`: disposing them here
                // would invalidate them before the consumer ever dequeues them.
                // The consumer is responsible for disposing dequeued frames.
                Frame frDepth = frames.FirstOrDefault(Stream.Depth);
                qDepth.Enqueue(frDepth);
                Frame frRBG = frames.FirstOrDefault(Stream.Color);
                qRBG.Enqueue(frRBG);
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }
        }
    });
}
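Init() relies on several class-level members that the snippet does not show: intelPipe, sicsDepth, sicsRBG, qDepth, and qRBG. Below is a minimal sketch of how those declarations might look, together with an example consumer that disposes dequeued frames (required now that Init() no longer disposes them). The use of ConcurrentQueue and the ConsumeDepthFrame helper are assumptions for illustration, not part of the original code.

using System;
using System.Collections.Concurrent;
using Intel.RealSense;

// Assumed field declarations; the original snippet does not show them.
// ConcurrentQueue is a guess — any thread-safe queue works, since the
// producer task and the consumer run on different threads.
private Pipeline intelPipe;
private Intrinsics sicsDepth;
private Intrinsics sicsRBG;
private readonly ConcurrentQueue<Frame> qDepth = new ConcurrentQueue<Frame>();
private readonly ConcurrentQueue<Frame> qRBG = new ConcurrentQueue<Frame>();

// Example consumer: dequeue a depth frame, read from it, then dispose it.
private void ConsumeDepthFrame()
{
    if (qDepth.TryDequeue(out Frame frame))
    {
        using (frame)
        using (var depth = frame.As<DepthFrame>())
        {
            float distance = depth.GetDistance(depth.Width / 2, depth.Height / 2);
            Console.WriteLine($"Center pixel distance: {distance:F2} m");
        }
    }
}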
public MainWindow()
{
    InitializeComponent();

    pipeline = new RS.Pipeline();
    colorizer = new RS.Colorizer();

    Action<RS.VideoFrame> mainAction;
    Action<RS.VideoFrame> depthAction;

    var config = new RS.Config();
    config.EnableStream(RS.Stream.Color, 640, 480, RS.Format.Rgb8);
    config.EnableStream(RS.Stream.Depth, 640, 480);

    timer = new System.Timers.Timer();
    timer.Elapsed += Timer_Elapsed;
    timer.Interval = 1000;
    timer.Enabled = false;

    timerCountdown = new System.Timers.Timer();
    timerCountdown.Elapsed += TimerCountdown_Elapsed;
    timerCountdown.Interval = 1000;
    timerCountdown.Enabled = false;

    try
    {
        var pp = pipeline.Start(config);
        SetupWindow(pp, out mainAction, out depthAction);

        Task.Factory.StartNew(() =>
        {
            while (!tokenSource.Token.IsCancellationRequested)
            {
                using (var frames = pipeline.WaitForFrames())
                {
                    var mainFrame = frames.ColorFrame.DisposeWith(frames);
                    var depthFrame = frames.DepthFrame.DisposeWith(frames);
                    var colorizedDepth = colorizer.Process<RS.VideoFrame>(depthFrame).DisposeWith(frames);

                    // Dispatcher.Invoke is synchronous, so the frames are still
                    // valid while the UI actions run.
                    Dispatcher.Invoke(DispatcherPriority.Render, mainAction, mainFrame);
                    Dispatcher.Invoke(DispatcherPriority.Render, depthAction, colorizedDepth);
                }

                if (isRecording)
                {
                    imageCount++;
                    Dispatcher.Invoke(new SaveImagesDelegate(SaveImage), new object[] { "image_", imageCount });
                }
                else
                {
                    imageCount = 0;
                }
            }
        }, tokenSource.Token);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
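This constructor also references members declared elsewhere in the class (pipeline, colorizer, the two timers, tokenSource, isRecording, imageCount, and the SaveImagesDelegate used with Dispatcher.Invoke). A hedged sketch of those declarations follows, with the delegate signature inferred from the ("image_", imageCount) argument list. The OnClosed override is an assumption added for illustration: without cancelling the token and stopping the pipeline, the camera keeps streaming after the window closes.

using System;
using System.Threading;
using RS = Intel.RealSense;

// Fields assumed by the constructor above; the original snippet does not show them.
private RS.Pipeline pipeline;
private RS.Colorizer colorizer;
private System.Timers.Timer timer;
private System.Timers.Timer timerCountdown;
private readonly CancellationTokenSource tokenSource = new CancellationTokenSource();
private volatile bool isRecording;
private int imageCount;

// Delegate matching the Dispatcher.Invoke call that saves snapshots.
private delegate void SaveImagesDelegate(string prefix, int count);

// Hypothetical cleanup: cancel the capture loop and release the camera when
// the window closes. In production you would also wait for the background
// task to finish before stopping the pipeline.
protected override void OnClosed(EventArgs e)
{
    base.OnClosed(e);
    tokenSource.Cancel();
    pipeline.Stop();
    pipeline.Dispose();
    colorizer.Dispose();
}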
public CaptureWindow()
{
    InitializeComponent();

    try
    {
        Action<VideoFrame> updateDepth;
        Action<VideoFrame> updateColor;

        pipeline = new Pipeline();
        colorizer = new Colorizer();

        var cfg = new Config();
        cfg.EnableStream(Stream.Depth, 640, 480, Format.Z16, 30);
        cfg.EnableStream(Stream.Color, 640, 480, Format.Rgb8, 30);

        var profile = pipeline.Start(cfg);
        SetupWindow(profile, out updateDepth, out updateColor);

        // Set up the software device and its sensors.
        var software_dev = new SoftwareDevice();

        var depth_sensor = software_dev.AddSensor("Depth");
        var depth_profile = depth_sensor.AddVideoStream(new SoftwareVideoStream
        {
            type = Stream.Depth,
            index = 0,
            uid = 100,
            width = 640,
            height = 480,
            fps = 30,
            bpp = 2,
            format = Format.Z16,
            intrinsics = profile.GetStream(Stream.Depth).As<VideoStreamProfile>().GetIntrinsics()
        });

        var color_sensor = software_dev.AddSensor("Color");
        var color_profile = color_sensor.AddVideoStream(new SoftwareVideoStream
        {
            type = Stream.Color,
            index = 0,
            uid = 101,
            width = 640,
            height = 480,
            fps = 30,
            bpp = 3,
            format = Format.Rgb8,
            intrinsics = profile.GetStream(Stream.Color).As<VideoStreamProfile>().GetIntrinsics()
        });

        // Note about the Syncer: if the actual FPS differs significantly from the
        // FPS reported in AddVideoStream, the syncer can get confused and stop
        // producing synchronized pairs.
        software_dev.SetMatcher(Matchers.Default);
        var sync = new Syncer();

        depth_sensor.Open(depth_profile);
        color_sensor.Open(color_profile);

        // Push the software device's frames into the syncer.
        depth_sensor.Start(sync.SubmitFrame);
        color_sensor.Start(sync.SubmitFrame);

        var token = tokenSource.Token;
        ushort[] depthData = null;
        byte[] colorData = null;

        var t = Task.Factory.StartNew(() =>
        {
            while (!token.IsCancellationRequested)
            {
                // Use the frames captured from the live camera as the input data
                // for the software device.
                using (var frames = pipeline.WaitForFrames())
                {
                    var depthFrame = frames.DepthFrame.DisposeWith(frames);
                    var colorFrame = frames.ColorFrame.DisposeWith(frames);

                    depthData = depthData ?? new ushort[depthFrame.Width * depthFrame.Height];
                    depthFrame.CopyTo(depthData);
                    depth_sensor.AddVideoFrame(depthData, depthFrame.Stride, depthFrame.BitsPerPixel / 8,
                        depthFrame.Timestamp, depthFrame.TimestampDomain, (int)depthFrame.Number, depth_profile);

                    colorData = colorData ?? new byte[colorFrame.Stride * colorFrame.Height];
                    colorFrame.CopyTo(colorData);
                    color_sensor.AddVideoFrame(colorData, colorFrame.Stride, colorFrame.BitsPerPixel / 8,
                        colorFrame.Timestamp, colorFrame.TimestampDomain, (int)colorFrame.Number, color_profile);
                }

                // Display the frames that come from the software device after synchronization.
                using (var new_frames = sync.WaitForFrames())
                {
                    if (new_frames.Count == 2)
                    {
                        var depthFrame = new_frames.DepthFrame.DisposeWith(new_frames);
                        var colorFrame = new_frames.ColorFrame.DisposeWith(new_frames);
                        var colorizedDepth = colorizer.Process<VideoFrame>(depthFrame).DisposeWith(new_frames);

                        // Render the frames.
                        Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                        Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                    }
                }
            }
        }, token);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Application.Current.Shutdown();
    }
}
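Both WPF snippets call a SetupWindow helper that is not shown. One plausible implementation, modeled on the librealsense WPF samples, creates a WriteableBitmap per stream and returns actions that copy frame pixels into it. The Image control names imgDepth and imgColor are assumptions, and the parameter order (depth first, then color) follows CaptureWindow's call site; MainWindow passes its color action first, so its version would swap the parameters.

using System;
using System.Windows;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using Intel.RealSense;

// Hypothetical helper, assuming <Image x:Name="imgDepth"/> and
// <Image x:Name="imgColor"/> in the window's XAML. Both streams are rendered
// as 24-bit RGB, matching the Rgb8 color stream and the colorized depth output.
private void SetupWindow(PipelineProfile profile, out Action<VideoFrame> depth, out Action<VideoFrame> color)
{
    var depthStream = profile.GetStream<VideoStreamProfile>(Stream.Depth);
    var colorStream = profile.GetStream<VideoStreamProfile>(Stream.Color);

    var depthBitmap = new WriteableBitmap(depthStream.Width, depthStream.Height, 96d, 96d, PixelFormats.Rgb24, null);
    imgDepth.Source = depthBitmap;

    var colorBitmap = new WriteableBitmap(colorStream.Width, colorStream.Height, 96d, 96d, PixelFormats.Rgb24, null);
    imgColor.Source = colorBitmap;

    depth = UpdateImage(depthBitmap);
    color = UpdateImage(colorBitmap);
}

// Returns an action that copies a frame's pixel buffer into the given bitmap.
// Must be invoked on the UI thread (the samples use Dispatcher.Invoke for this).
private static Action<VideoFrame> UpdateImage(WriteableBitmap bitmap)
{
    return frame =>
    {
        var rect = new Int32Rect(0, 0, frame.Width, frame.Height);
        bitmap.WritePixels(rect, frame.Data, frame.Stride * frame.Height, frame.Stride);
    };
}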