public CaptureWindow()
{
    InitializeComponent();

    try
    {
        Action<VideoFrame> updateDepth;
        Action<VideoFrame> updateColor;

        // The colorizer processing block will be used to visualize the depth frames.
        colorizer = new Colorizer();

        // Create and configure the pipeline to stream color and depth frames.
        pipeline = new Pipeline();

        var cfg = new Config();
        cfg.EnableStream(Stream.Depth, 640, 480);
        cfg.EnableStream(Stream.Color, Format.Rgb8);

        var pp = pipeline.Start(cfg);

        SetupWindow(pp, out updateDepth, out updateColor);

        Task.Factory.StartNew(() =>
        {
            while (!tokenSource.Token.IsCancellationRequested)
            {
                // Wait for the next available FrameSet and use it as a releaser object that tracks
                // all newly allocated .NET frames, ensuring deterministic finalization at the end of scope.
                using (var frames = pipeline.WaitForFrames())
                {
                    var colorFrame = frames.ColorFrame.DisposeWith(frames);
                    var depthFrame = frames.DepthFrame.DisposeWith(frames);

                    // Colorize the depth frame for visualization purposes.
                    var colorizedDepth = colorizer.Process<VideoFrame>(depthFrame).DisposeWith(frames);

                    // Render the frames.
                    Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                    Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);

                    Dispatcher.Invoke(new Action(() =>
                    {
                        String depth_dev_sn = new Sensor.CameraInfos(depthFrame.Sensor)[CameraInfo.SerialNumber];
                        txtTimeStamp.Text = depth_dev_sn + " : "
                            + String.Format("{0,-20:0.00}", depthFrame.Timestamp)
                            + "(" + depthFrame.TimestampDomain.ToString() + ")";
                    }));
                }
            }
        }, tokenSource.Token);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Application.Current.Shutdown();
    }
}
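// Several of the examples below call a SetupWindow helper that is not shown. The sketch that follows is a
// minimal plausible implementation, assuming two WPF Image controls named imgDepth and imgColor in the
// window's XAML (System.Windows.Media.Imaging is required); the helper in the original samples may differ.
private void SetupWindow(PipelineProfile pipelineProfile, out Action<VideoFrame> depth, out Action<VideoFrame> color)
{
    // Back each Image control with a WriteableBitmap sized to the negotiated stream profile.
    // Rgb24 matches both the Rgb8 color stream and the colorizer's RGB output.
    using (var p = pipelineProfile.GetStream(Stream.Depth).As<VideoStreamProfile>())
        imgDepth.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
    depth = UpdateImage(imgDepth);

    using (var p = pipelineProfile.GetStream(Stream.Color).As<VideoStreamProfile>())
        imgColor.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
    color = UpdateImage(imgColor);
}

// Returns an action that writes an incoming frame's pixels into the bitmap backing the given control.
static Action<VideoFrame> UpdateImage(System.Windows.Controls.Image img)
{
    var wbmp = img.Source as WriteableBitmap;
    return new Action<VideoFrame>(frame =>
    {
        var rect = new Int32Rect(0, 0, frame.Width, frame.Height);
        wbmp.WritePixels(rect, frame.Data, frame.Stride * frame.Height, frame.Stride);
    });
}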
public CaptureWindow()
{
    try
    {
        pipeline = new Pipeline();
        colorizer = new Colorizer();

        var cfg = new Config();
        cfg.EnableStream(Stream.Depth, 640, 480);
        cfg.EnableStream(Stream.Color, Format.Rgb8);

        pipeline.Start(cfg);

        var token = tokenSource.Token;
        var t = Task.Factory.StartNew(() =>
        {
            while (!token.IsCancellationRequested)
            {
                var frames = pipeline.WaitForFrames();

                var depth_frame = frames.DepthFrame;
                var color_frame = frames.ColorFrame;

                var colorized_depth = colorizer.Colorize(depth_frame);

                UploadImage(imgDepth, colorized_depth);
                UploadImage(imgColor, color_frame);

                // It is important to pre-emptively dispose of native resources
                // to avoid creating a bottleneck at the finalization stage after GC.
                // (Also see the FramesReleaser helper object in the next tutorial.)
                frames.Dispose();
                depth_frame.Dispose();
                colorized_depth.Dispose();
                color_frame.Dispose();
            }
        }, token);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Application.Current.Shutdown();
    }

    InitializeComponent();
}
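// This and several later variants call an UploadImage helper that is not shown. A minimal sketch under
// the assumption that it marshals to the UI thread and builds a BitmapSource per frame; it assumes Rgb8
// streams (a Bgr8 stream, as in the FFMPEG recording example below, would need PixelFormats.Bgr24).
private void UploadImage(System.Windows.Controls.Image img, VideoFrame frame)
{
    Dispatcher.Invoke(() =>
    {
        if (frame.Width == 0) return;

        // Copy the native frame buffer into managed memory before handing it to WPF.
        var bytes = new byte[frame.Stride * frame.Height];
        frame.CopyTo(bytes);

        var bs = BitmapSource.Create(frame.Width, frame.Height, 96d, 96d,
            PixelFormats.Rgb24, null, bytes, frame.Stride);
        img.Source = bs;
    });
}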
public CaptureWindow()
{
    try
    {
        // Maximize the window
        //WindowState = WindowState.Maximized;
        //imgColor.Width = WIDTH;
        //imgColor.Height = HEIGHT;
        //imgDepth.Width = WIDTH;
        //imgDepth.Height = HEIGHT;
        //imgObs.Width = WIDTH;
        //imgObs.Height = HEIGHT;

        // Initialize the timer
        timer1.Tick += new EventHandler(timer1_cycle);
        timer1.Interval = new TimeSpan(0, 0, 0, 0, 1000);

        pipeline = new Pipeline();
        colorizer = new Colorizer();

        var cfg = new Config();
        cfg.EnableStream(Stream.Depth, WIDTH, HEIGHT);
        cfg.EnableStream(Stream.Color, WIDTH, HEIGHT, Format.Rgb8);
        //cfg.EnableStream(Stream.Gyro, Format.Z16);

        pProfile = pipeline.Start(cfg);
        //var gyro = pProfile.GetStream(Stream.Gyro);

        imgColor = new Image();
        imgDepth = new Image();

        timer1.Start();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Application.Current.Shutdown();
    }

    InitializeComponent();
}
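// The timer1_cycle handler wired up above is not shown. A minimal sketch, assuming a polling pattern
// with the wrapper's Pipeline.PollForFrames so the UI thread never blocks inside a timer tick:
private void timer1_cycle(object sender, EventArgs e)
{
    // Poll instead of blocking; skip this tick if no new frameset is ready yet.
    FrameSet frames;
    if (!pipeline.PollForFrames(out frames))
        return;

    using (frames)
    using (var depth = frames.DepthFrame)
    using (var color = frames.ColorFrame)
    using (var colorizedDepth = colorizer.Process<VideoFrame>(depth))
    {
        UploadImage(imgDepth, colorizedDepth);
        UploadImage(imgColor, color);
    }
}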
public CaptureWindow()
{
    try
    {
        pipeline = new Pipeline();
        colorizer = new Colorizer();

        var cfg = new Config();
        cfg.EnableStream(Stream.Depth, 640, 480);
        cfg.EnableStream(Stream.Color, Format.Rgb8);

        pipeline.Start(cfg);

        var token = tokenSource.Token;
        var t = Task.Factory.StartNew(() =>
        {
            while (!token.IsCancellationRequested)
            {
                // NOTE: unlike the variant above, this minimal loop never disposes the native frames;
                // it relies on the garbage collector, which can stall the pipeline under load.
                var frames = pipeline.WaitForFrames();
                var colorized_depth = colorizer.Colorize(frames.DepthFrame);

                UploadImage(imgDepth, colorized_depth);
                UploadImage(imgColor, frames.ColorFrame);
            }
        }, token);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Application.Current.Shutdown();
    }

    InitializeComponent();
}
public MainWindow()
{
    InitializeComponent();

    pipeline = new RS.Pipeline();
    colorizer = new RS.Colorizer();

    Action<RS.VideoFrame> mainAction;
    Action<RS.VideoFrame> depthAction;

    var config = new RS.Config();
    config.EnableStream(RS.Stream.Color, 640, 480, RS.Format.Rgb8);
    config.EnableStream(RS.Stream.Depth, 640, 480);

    timer = new System.Timers.Timer();
    timer.Elapsed += Timer_Elapsed;
    timer.Interval = 1000;
    timer.Enabled = false;

    timerCountdown = new System.Timers.Timer();
    timerCountdown.Elapsed += TimerCountdown_Elapsed;
    timerCountdown.Interval = 1000;
    timerCountdown.Enabled = false;

    try
    {
        var pp = pipeline.Start(config);
        SetupWindow(pp, out mainAction, out depthAction);

        Task.Factory.StartNew(() =>
        {
            while (!tokenSource.Token.IsCancellationRequested)
            {
                using (var frames = pipeline.WaitForFrames())
                {
                    var mainFrame = frames.ColorFrame.DisposeWith(frames);
                    var depthFrame = frames.DepthFrame.DisposeWith(frames);

                    var colorizedDepth = colorizer.Process<VideoFrame>(depthFrame).DisposeWith(frames);

                    Dispatcher.Invoke(DispatcherPriority.Render, mainAction, mainFrame);
                    Dispatcher.Invoke(DispatcherPriority.Render, depthAction, colorizedDepth);
                }

                if (isRecording)
                {
                    imageCount++;
                    Dispatcher.Invoke(new SaveImagesDelegate(SaveImage), new object[] { "image_", imageCount });
                }
                else
                {
                    imageCount = 0;
                }
            }
        }, tokenSource.Token);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
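// SaveImage and SaveImagesDelegate are not shown above. A hypothetical sketch that snapshots the
// currently rendered color bitmap to a numbered PNG; the delegate shape, the imgColor control name,
// the file naming, and the encoder choice are all assumptions, not the original project's code.
private delegate void SaveImagesDelegate(string prefix, int index);

private void SaveImage(string prefix, int index)
{
    var bmp = imgColor.Source as BitmapSource;
    if (bmp == null)
        return;

    var encoder = new PngBitmapEncoder();
    encoder.Frames.Add(BitmapFrame.Create(bmp));
    using (var fs = System.IO.File.Create($"{prefix}{index:D5}.png"))
        encoder.Save(fs);
}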
public CaptureWindow()
{
    InitializeComponent();

    try
    {
        Action<VideoFrame> updateDepth;
        Action<VideoFrame> updateColor;

        pipeline = new Pipeline();
        colorizer = new Colorizer();

        var depthWidth = 640;
        var depthHeight = 480;
        var depthFrames = 30;
        var depthFormat = Format.Z16;

        var colorWidth = 640;
        var colorHeight = 480;
        var colorFrames = 30;

        using (var ctx = new Context())
        {
            var devices = ctx.QueryDevices();
            var dev = devices[0];

            Console.WriteLine("\nUsing device 0, an {0}", dev.Info[CameraInfo.Name]);
            Console.WriteLine("    Serial number: {0}", dev.Info[CameraInfo.SerialNumber]);
            Console.WriteLine("    Firmware version: {0}", dev.Info[CameraInfo.FirmwareVersion]);

            var sensors = dev.QuerySensors();
            var depthSensor = sensors[0];
            var colorSensor = sensors[1];

            var depthProfiles = depthSensor.StreamProfiles
                .Where(p => p.Stream == Stream.Depth)
                .OrderBy(p => p.Framerate)
                .Select(p => p.As<VideoStreamProfile>());

            VideoStreamProfile colorProfile = null;

            // Select a color profile whose framerate matches the depth profile, so the syncer works smoothly.
            foreach (var depthProfile in depthProfiles)
            {
                depthWidth = depthProfile.Width;
                depthHeight = depthProfile.Height;
                depthFrames = depthProfile.Framerate;
                depthFormat = depthProfile.Format;

                colorProfile = colorSensor.StreamProfiles
                    .Where(p => p.Stream == Stream.Color)
                    .OrderByDescending(p => p.Framerate)
                    .Select(p => p.As<VideoStreamProfile>())
                    .FirstOrDefault(p => p.Framerate == depthFrames);

                if (colorProfile != null)
                {
                    colorWidth = colorProfile.Width;
                    colorHeight = colorProfile.Height;
                    colorFrames = colorProfile.Framerate;
                    break;
                }
            }

            if (colorProfile == null)
            {
                // If no profile with the same framerate was found, take the first one.
                colorProfile = colorSensor.StreamProfiles
                    .Where(p => p.Stream == Stream.Color)
                    .OrderByDescending(p => p.Framerate)
                    .Select(p => p.As<VideoStreamProfile>())
                    .FirstOrDefault();

                if (colorProfile == null)
                {
                    throw new InvalidOperationException("Error while finding appropriate depth and color profiles");
                }

                colorWidth = colorProfile.Width;
                colorHeight = colorProfile.Height;
                colorFrames = colorProfile.Framerate;
            }
        }

        var cfg = new Config();
        cfg.EnableStream(Stream.Depth, depthWidth, depthHeight, depthFormat, depthFrames);
        cfg.EnableStream(Stream.Color, colorWidth, colorHeight, Format.Rgb8, colorFrames);

        var profile = pipeline.Start(cfg);
        SetupWindow(profile, out updateDepth, out updateColor);

        // Setup the SW device and sensors
        var software_dev = new SoftwareDevice();

        var depth_sensor = software_dev.AddSensor("Depth");
        var depth_profile = depth_sensor.AddVideoStream(new SoftwareVideoStream
        {
            type = Stream.Depth,
            index = 0,
            uid = 100,
            width = depthWidth,
            height = depthHeight,
            fps = depthFrames,
            bpp = 2,
            format = depthFormat,
            intrinsics = profile.GetStream(Stream.Depth).As<VideoStreamProfile>().GetIntrinsics()
        });

        var color_sensor = software_dev.AddSensor("Color");
        var color_profile = color_sensor.AddVideoStream(new SoftwareVideoStream
        {
            type = Stream.Color,
            index = 0,
            uid = 101,
            width = colorWidth,
            height = colorHeight,
            fps = colorFrames,
            bpp = 3,
            format = Format.Rgb8,
            intrinsics = profile.GetStream(Stream.Color).As<VideoStreamProfile>().GetIntrinsics()
        });

        // Note about the Syncer: if the actual FPS is significantly different from the FPS reported
        // in AddVideoStream, this can confuse the syncer and prevent it from producing synchronized pairs.
        software_dev.SetMatcher(Matchers.Default);
        var sync = new Syncer();

        // The raw depth -> metric units translation scale is required for the Colorizer to work,
        // so read it from the real depth sensor rather than hard-coding it.
        var realDepthSensor = profile.Device.QuerySensors().First(s => s.Is(Extension.DepthSensor));
        depth_sensor.AddReadOnlyOption(Option.DepthUnits, realDepthSensor.DepthScale);

        depth_sensor.Open(depth_profile);
        color_sensor.Open(color_profile);

        // Push the SW device frames to the syncer.
        depth_sensor.Start(sync.SubmitFrame);
        color_sensor.Start(sync.SubmitFrame);

        var token = tokenSource.Token;
        ushort[] depthData = null;
        byte[] colorData = null;

        var t = Task.Factory.StartNew(() =>
        {
            while (!token.IsCancellationRequested)
            {
                // Use the frames captured from the live camera as the input data for the SW device.
                using (var frames = pipeline.WaitForFrames())
                {
                    var depthFrame = frames.DepthFrame.DisposeWith(frames);
                    var colorFrame = frames.ColorFrame.DisposeWith(frames);

                    depthData = depthData ?? new ushort[depthFrame.Width * depthFrame.Height];
                    depthFrame.CopyTo(depthData);
                    depth_sensor.AddVideoFrame(depthData, depthFrame.Stride, depthFrame.BitsPerPixel / 8,
                        depthFrame.Timestamp, depthFrame.TimestampDomain, (int)depthFrame.Number, depth_profile);

                    colorData = colorData ?? new byte[colorFrame.Stride * colorFrame.Height];
                    colorFrame.CopyTo(colorData);
                    color_sensor.AddVideoFrame(colorData, colorFrame.Stride, colorFrame.BitsPerPixel / 8,
                        colorFrame.Timestamp, colorFrame.TimestampDomain, (int)colorFrame.Number, color_profile);
                }

                // Display the frames that come from the SW device after synchronization.
                using (var new_frames = sync.WaitForFrames())
                {
                    if (new_frames.Count == 2)
                    {
                        var colorFrame = new_frames.ColorFrame.DisposeWith(new_frames);
                        var depthFrame = new_frames.DepthFrame.DisposeWith(new_frames);

                        var colorizedDepth = colorizer.Process<VideoFrame>(depthFrame).DisposeWith(new_frames);

                        // Render the frames.
                        Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                        Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                    }
                }
            }
        }, token);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Application.Current.Shutdown();
    }
}
/**
 * NOTES
 * Currently it records immediately after linking the program with LabStreamLayer.
 * There might be a better solution, but we don't want to increase the number of button presses
 * for the protocol. It is probably better to record too much than to forget pressing the record
 * button before an experiment.
 **/
// Code taken directly from the LibRealSense 2 examples -- captures and displays depth and RGB camera.
private void startRecordingProcess()
{
    try
    {
        pipeline = new Pipeline();
        colorizer = new Colorizer();

        var cfg = new Config();
        cfg.EnableStream(Stream.Depth, 640, 480, Format.Z16, 30);
        cfg.EnableStream(Stream.Color, 640, 480, Format.Bgr8, 30);
        //cfg.EnableRecordToFile(fileRecording); // This is now taken care of by FFMPEG

        pipeline.Start(cfg);

        applyRecordingConfig();

        processBlock = new CustomProcessingBlock((f, src) =>
        {
            using (var releaser = new FramesReleaser())
            {
                var frames = FrameSet.FromFrame(f, releaser);

                VideoFrame depth = FramesReleaser.ScopedReturn(releaser, frames.DepthFrame);
                VideoFrame color = FramesReleaser.ScopedReturn(releaser, frames.ColorFrame);

                var res = src.AllocateCompositeFrame(releaser, depth, color);
                src.FramesReady(res);
            }
        });

        processBlock.Start(f =>
        {
            using (var releaser = new FramesReleaser())
            {
                var frames = FrameSet.FromFrame(f, releaser);

                var depth_frame = FramesReleaser.ScopedReturn(releaser, frames.DepthFrame);
                var color_frame = FramesReleaser.ScopedReturn(releaser, frames.ColorFrame);

                var colorized_depth = colorizer.Colorize(depth_frame);

                UploadImage(imgDepth, colorized_depth);
                UploadImage(imgColor, color_frame);

                // Record via FFMPEG
                Bitmap bmpColor = new Bitmap(color_frame.Width, color_frame.Height, color_frame.Stride,
                    System.Drawing.Imaging.PixelFormat.Format24bppRgb, color_frame.Data);
                vidWriter_Color.WriteVideoFrame(bmpColor);

                Bitmap bmpDepth = new Bitmap(colorized_depth.Width, colorized_depth.Height, colorized_depth.Stride,
                    System.Drawing.Imaging.PixelFormat.Format24bppRgb, colorized_depth.Data);
                vidWriter_Depth.WriteVideoFrame(bmpDepth);

                if (lslOutlet != null)
                {
                    // Do LSL streaming here
                    sample[0] = "" + colorized_depth.Number + "_" + colorized_depth.Timestamp;
                    sample[1] = "" + color_frame.Number + "_" + color_frame.Timestamp;
                    lslOutlet.push_sample(sample, liblsl.local_clock());
                }
            }
        });

        var token = tokenSource.Token;
        var t = Task.Factory.StartNew(() =>
        {
            // Main loop
            while (!token.IsCancellationRequested)
            {
                using (var frames = pipeline.WaitForFrames())
                {
                    processBlock.ProcessFrames(frames);
                }
            }
        }, token);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Application.Current.Shutdown();
    }
}
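// applyRecordingConfig and the vidWriter_* fields are not shown above. Given the WriteVideoFrame(Bitmap)
// calls, they are most plausibly Accord.Video.FFMPEG VideoFileWriter instances; a minimal sketch under
// that assumption (file names, codec, and the hard-coded 640x480@30 geometry are placeholders):
private void applyRecordingConfig()
{
    vidWriter_Color = new VideoFileWriter();
    vidWriter_Color.Open("color.avi", 640, 480, 30, VideoCodec.MPEG4);

    vidWriter_Depth = new VideoFileWriter();
    vidWriter_Depth.Open("depth.avi", 640, 480, 30, VideoCodec.MPEG4);
}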
public CaptureWindow()
{
    InitializeComponent();

    try
    {
        Action<VideoFrame> updateDepth;
        Action<VideoFrame> updateColor;

        pipeline = new Pipeline();
        colorizer = new Colorizer();

        var cfg = new Config();
        cfg.EnableStream(Stream.Depth, 640, 480, Format.Z16, 30);
        cfg.EnableStream(Stream.Color, 640, 480, Format.Rgb8, 30);

        var profile = pipeline.Start(cfg);
        SetupWindow(profile, out updateDepth, out updateColor);

        // Setup the SW device and sensors
        var software_dev = new SoftwareDevice();

        var depth_sensor = software_dev.AddSensor("Depth");
        var depth_profile = depth_sensor.AddVideoStream(new SoftwareVideoStream
        {
            type = Stream.Depth,
            index = 0,
            uid = 100,
            width = 640,
            height = 480,
            fps = 30,
            bpp = 2,
            format = Format.Z16,
            intrinsics = profile.GetStream(Stream.Depth).As<VideoStreamProfile>().GetIntrinsics()
        });

        var color_sensor = software_dev.AddSensor("Color");
        var color_profile = color_sensor.AddVideoStream(new SoftwareVideoStream
        {
            type = Stream.Color,
            index = 0,
            uid = 101,
            width = 640,
            height = 480,
            fps = 30,
            bpp = 3,
            format = Format.Rgb8,
            intrinsics = profile.GetStream(Stream.Color).As<VideoStreamProfile>().GetIntrinsics()
        });

        // Note about the Syncer: if the actual FPS is significantly different from the FPS reported
        // in AddVideoStream, this can confuse the syncer and prevent it from producing synchronized pairs.
        software_dev.SetMatcher(Matchers.Default);
        var sync = new Syncer();

        depth_sensor.Open(depth_profile);
        color_sensor.Open(color_profile);

        // Push the SW device frames to the syncer.
        depth_sensor.Start(sync.SubmitFrame);
        color_sensor.Start(sync.SubmitFrame);

        var token = tokenSource.Token;
        ushort[] depthData = null;
        byte[] colorData = null;

        var t = Task.Factory.StartNew(() =>
        {
            while (!token.IsCancellationRequested)
            {
                // Use the frames captured from the live camera as the input data for the SW device.
                using (var frames = pipeline.WaitForFrames())
                {
                    var depthFrame = frames.DepthFrame.DisposeWith(frames);
                    var colorFrame = frames.ColorFrame.DisposeWith(frames);

                    depthData = depthData ?? new ushort[depthFrame.Width * depthFrame.Height];
                    depthFrame.CopyTo(depthData);
                    depth_sensor.AddVideoFrame(depthData, depthFrame.Stride, depthFrame.BitsPerPixel / 8,
                        depthFrame.Timestamp, depthFrame.TimestampDomain, (int)depthFrame.Number, depth_profile);

                    colorData = colorData ?? new byte[colorFrame.Stride * colorFrame.Height];
                    colorFrame.CopyTo(colorData);
                    color_sensor.AddVideoFrame(colorData, colorFrame.Stride, colorFrame.BitsPerPixel / 8,
                        colorFrame.Timestamp, colorFrame.TimestampDomain, (int)colorFrame.Number, color_profile);
                }

                // Display the frames that come from the SW device after synchronization.
                using (var new_frames = sync.WaitForFrames())
                {
                    if (new_frames.Count == 2)
                    {
                        var depthFrame = new_frames.DepthFrame.DisposeWith(new_frames);
                        var colorFrame = new_frames.ColorFrame.DisposeWith(new_frames);

                        VideoFrame colorizedDepth = colorizer.Process(depthFrame).As<VideoFrame>().DisposeWith(new_frames);

                        // Render the frames.
                        Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                        Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                    }
                }
            }
        }, token);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Application.Current.Shutdown();
    }
}
public CaptureWindow()
{
    InitializeComponent();

    try
    {
        Action<VideoFrame> updateDepth;
        Action<VideoFrame> updateColor;

        // The colorizer processing block will be used to visualize the depth frames.
        colorizer = new Colorizer();

        // Create and configure the pipeline to stream color and depth frames.
        pipeline = new Pipeline();

        using (var ctx = new Context())
        {
            var devices = ctx.QueryDevices();
            var dev = devices[0];

            Console.WriteLine("\nUsing device 0, an {0}", dev.Info[CameraInfo.Name]);
            Console.WriteLine("    Serial number: {0}", dev.Info[CameraInfo.SerialNumber]);
            Console.WriteLine("    Firmware version: {0}", dev.Info[CameraInfo.FirmwareVersion]);

            var sensors = dev.QuerySensors();
            var depthSensor = sensors[0];
            var colorSensor = sensors[1];

            var depthProfile = depthSensor.StreamProfiles
                .Where(p => p.Stream == Stream.Depth)
                .OrderBy(p => p.Framerate)
                .Select(p => p.As<VideoStreamProfile>())
                .First();

            var colorProfile = colorSensor.StreamProfiles
                .Where(p => p.Stream == Stream.Color)
                .OrderBy(p => p.Framerate)
                .Select(p => p.As<VideoStreamProfile>())
                .First();

            if (!testLoadSettingsJson.LoadSettingsJson(dev))
            {
                return;
            }

            var cfg = new Config();
            cfg.EnableDevice(dev.Info.GetInfo(CameraInfo.SerialNumber));
            cfg.EnableStream(Stream.Depth, depthProfile.Width, depthProfile.Height, depthProfile.Format, depthProfile.Framerate);
            cfg.EnableStream(Stream.Color, colorProfile.Width, colorProfile.Height, colorProfile.Format, colorProfile.Framerate);

            var pp = pipeline.Start(cfg);
            SetupWindow(pp, out updateDepth, out updateColor);

            // More device info
            Console.WriteLine("--------------------------");
            foreach (var item in pp.Device.Info.ToArray())
            {
                Console.WriteLine($"{item.Key} - {item.Value}");
            }
            Console.WriteLine("--------------------------");
        }

        Task.Factory.StartNew(() =>
        {
            while (!tokenSource.Token.IsCancellationRequested)
            {
                // Wait for the next available FrameSet and use it as a releaser object that tracks
                // all newly allocated .NET frames, ensuring deterministic finalization at the end of scope.
                using (var frames = pipeline.WaitForFrames())
                {
                    var colorFrame = frames.ColorFrame.DisposeWith(frames);
                    var depthFrame = frames.DepthFrame.DisposeWith(frames);

                    // Colorize the depth frame for visualization purposes.
                    var colorizedDepth = colorizer.Process<VideoFrame>(depthFrame).DisposeWith(frames);

                    // Render the frames.
                    Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                    Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);

                    Dispatcher.Invoke(new Action(() =>
                    {
                        String depth_dev_sn = depthFrame.Sensor.Info[CameraInfo.SerialNumber];
                        txtTimeStamp.Text = depth_dev_sn + " : "
                            + String.Format("{0,-20:0.00}", depthFrame.Timestamp)
                            + "(" + depthFrame.TimestampDomain.ToString() + ")";
                    }));
                }
            }
        }, tokenSource.Token);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Application.Current.Shutdown();
    }
}
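// testLoadSettingsJson is project-specific and not shown. A plausible sketch of LoadSettingsJson using
// the wrapper's advanced-mode API; the settings.json file name is a placeholder, and the real helper
// may validate the preset differently.
static class testLoadSettingsJson
{
    public static bool LoadSettingsJson(Device dev)
    {
        try
        {
            // Applying a JSON preset requires the device to support advanced mode (D400 series).
            var adv = AdvancedDevice.FromDevice(dev);
            adv.JsonConfiguration = System.IO.File.ReadAllText("settings.json");
            return true;
        }
        catch (Exception e)
        {
            Console.WriteLine("Failed to load settings.json: {0}", e.Message);
            return false;
        }
    }
}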
public CaptureWindow()
{
    //Log.ToFile(LogSeverity.Debug, "1.log");

    try
    {
        pipeline = new Pipeline();
        colorizer = new Colorizer();

        var cfg = new Config();
        cfg.EnableStream(Stream.Depth, 640, 480, Format.Z16, 30);
        cfg.EnableStream(Stream.Color, 640, 480, Format.Rgb8, 30);

        var profile = pipeline.Start(cfg);

        var software_dev = new SoftwareDevice();

        var depth_sensor = software_dev.AddSensor("Depth");
        var depth_profile = depth_sensor.AddVideoStream(new VideoStream
        {
            type = Stream.Depth,
            index = 0,
            uid = 100,
            width = 640,
            height = 480,
            fps = 30,
            bpp = 2,
            fmt = Format.Z16,
            intrinsics = (profile.GetStream(Stream.Depth) as VideoStreamProfile).GetIntrinsics()
        });

        var color_sensor = software_dev.AddSensor("Color");
        // The color stream is Rgb8: 3 bytes per pixel, not the Z16/2-byte layout used for depth.
        var color_profile = color_sensor.AddVideoStream(new VideoStream
        {
            type = Stream.Color,
            index = 0,
            uid = 101,
            width = 640,
            height = 480,
            fps = 30,
            bpp = 3,
            fmt = Format.Rgb8,
            intrinsics = (profile.GetStream(Stream.Color) as VideoStreamProfile).GetIntrinsics()
        });

        // Note about the Syncer: if the actual FPS is significantly different from the FPS reported
        // in AddVideoStream, this can confuse the syncer and prevent it from producing synchronized pairs.
        software_dev.SetMatcher(Matchers.Default);
        var sync = new Syncer();

        depth_sensor.Open(depth_profile);
        color_sensor.Open(color_profile);

        depth_sensor.Start(f =>
        {
            sync.SubmitFrame(f);
            //Debug.WriteLine("D");
        });
        color_sensor.Start(f =>
        {
            sync.SubmitFrame(f);
            //Debug.WriteLine("C");
        });

        var token = tokenSource.Token;
        var t = Task.Factory.StartNew(() =>
        {
            while (!token.IsCancellationRequested)
            {
                var frames = pipeline.WaitForFrames();
                var depth_frame = frames.DepthFrame;
                var color_frame = frames.ColorFrame;

                var bytes = new byte[depth_frame.Stride * depth_frame.Height];
                depth_frame.CopyTo(bytes);
                depth_sensor.AddVideoFrame(bytes, depth_frame.Stride, 2, depth_frame.Timestamp,
                    depth_frame.TimestampDomain, (int)depth_frame.Number, depth_profile);

                // Submit the color frame with its own bpp, timestamp, and frame number
                // (the original passed the depth frame's values here by mistake).
                bytes = new byte[color_frame.Stride * color_frame.Height];
                color_frame.CopyTo(bytes);
                color_sensor.AddVideoFrame(bytes, color_frame.Stride, 3, color_frame.Timestamp,
                    color_frame.TimestampDomain, (int)color_frame.Number, color_profile);

                depth_frame.Dispose();
                color_frame.Dispose();
                frames.Dispose();

                var new_frames = sync.WaitForFrames();
                if (new_frames.Count == 2)
                {
                    depth_frame = new_frames.DepthFrame;
                    color_frame = new_frames.ColorFrame;

                    var colorized_depth = colorizer.Colorize(depth_frame);

                    UploadImage(imgDepth, colorized_depth);
                    UploadImage(imgColor, color_frame);

                    depth_frame.Dispose();
                    colorized_depth.Dispose();
                    color_frame.Dispose();
                }
                new_frames.Dispose();
            }
        }, token);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Application.Current.Shutdown();
    }

    InitializeComponent();
}
public CaptureWindow()
{
    InitializeComponent();

    try
    {
        Action<VideoFrame> updateDepth;
        Action<VideoFrame> updateColor;

        // The colorizer processing block will be used to visualize the depth frames.
        colorizer = new Colorizer();

        // Create and configure the pipeline to stream color and depth frames.
        pipeline = new Pipeline();

        var cfg = new Config();

        using (var ctx = new Context())
        {
            var devices = ctx.QueryDevices();
            if ((devices.Count != 1) || (!ExampleAutocalibrateDevice.IsTheDeviceD400Series(devices[0])))
            {
                Console.WriteLine("The tutorial {0} requires a single RealSense D400 device to run.\nFix the setup and rerun",
                    System.Diagnostics.Process.GetCurrentProcess().ProcessName);
                Environment.Exit(1);
            }

            var dev = devices[0];

            Console.WriteLine("Using device 0, an {0}", dev.Info[CameraInfo.Name]);
            Console.WriteLine("    Serial number: {0}", dev.Info[CameraInfo.SerialNumber]);
            Console.WriteLine("    Firmware version: {0}", dev.Info[CameraInfo.FirmwareVersion]);

            var sensors = dev.QuerySensors();

            var depthProfile = sensors
                .SelectMany(s => s.StreamProfiles)
                .Where(sp => sp.Stream == Stream.Depth)
                .Select(sp => sp.As<VideoStreamProfile>())
                .OrderBy(p => p.Framerate)
                .First();

            var colorProfile = sensors
                .SelectMany(s => s.StreamProfiles)
                .Where(sp => sp.Stream == Stream.Color)
                .Select(sp => sp.As<VideoStreamProfile>())
                .OrderBy(p => p.Framerate)
                .First();

            cfg.EnableDevice(dev.Info[CameraInfo.SerialNumber]);
            cfg.EnableStream(Stream.Depth, depthProfile.Width, depthProfile.Height, depthProfile.Format, depthProfile.Framerate);
            cfg.EnableStream(Stream.Color, colorProfile.Width, colorProfile.Height, colorProfile.Format, colorProfile.Framerate);

            var pp = pipeline.Start(cfg);
            SetupWindow(pp, out updateDepth, out updateColor);
        }

        // Rendering task
        var renderingPause = false;
        var rendering = Task.Factory.StartNew(() =>
        {
            while (!tokenSource.Token.IsCancellationRequested)
            {
                if (renderingPause)
                {
                    continue; // pause the rendering
                }

                // Wait for the next available FrameSet and use it as a releaser object that tracks
                // all newly allocated .NET frames, ensuring deterministic finalization at the end of scope.
                using (var frames = pipeline.WaitForFrames())
                {
                    var colorFrame = frames.ColorFrame.DisposeWith(frames);
                    var depthFrame = frames.DepthFrame.DisposeWith(frames);

                    // Render the frames.
                    if (depthFrame != null)
                    {
                        // Colorize the depth frame for visualization purposes.
                        var colorizedDepth = colorizer.Process<VideoFrame>(depthFrame).DisposeWith(frames);
                        Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                    }

                    if (colorFrame != null)
                    {
                        Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                    }

                    if (depthFrame != null)
                    {
                        Dispatcher.Invoke(new Action(() =>
                        {
                            String depth_dev_sn = depthFrame.Sensor.Info[CameraInfo.SerialNumber];
                            txtTimeStamp.Text = $"{depth_dev_sn} : {depthFrame.Timestamp,-20:0.00}({depthFrame.TimestampDomain})"
                                + $"{Environment.NewLine}To start Auto-Calibration flow, switch focus to the application console and press C";
                        }));
                    }
                }
            }
        }, tokenSource.Token);

        // Input-to-calibration-mode task
        Task.Factory.StartNew(() =>
        {
            while (!tokenSource.Token.IsCancellationRequested)
            {
                if (ConsoleKey.C == ExampleAutocalibrateDevice.ConsoleGetKey(new[] { ConsoleKey.C },
                    "To start Auto-Calibration flow, switch focus to the application console and press C"))
                {
                    renderingPause = true;
                    Console.WriteLine($"{Environment.NewLine}Stopping rendering pipeline...");
                    pipeline.Stop();

                    new ExampleAutocalibrateDevice().Start();

                    Console.WriteLine($"{Environment.NewLine}Starting rendering pipeline...");
                    pipeline.Start(cfg);
                    renderingPause = false;
                }
            }
        }, tokenSource.Token);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Application.Current.Shutdown();
    }
}
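// None of the examples above show teardown. A minimal sketch of the window-closing handler they all
// imply, assuming the tokenSource, pipeline, and colorizer fields used throughout; the ordering below
// is one reasonable convention, not a fixed API requirement.
protected override void OnClosing(System.ComponentModel.CancelEventArgs e)
{
    base.OnClosing(e);

    tokenSource.Cancel(); // let the background loop observe cancellation and exit
    pipeline.Stop();      // stop streaming; a WaitForFrames call blocked at this moment may throw, which the loop's owner should expect
    pipeline.Dispose();
    colorizer.Dispose();
}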