public MainWindow()
{
    InitializeComponent();

    pipeline = new RS.Pipeline();
    colorizer = new RS.Colorizer();

    Action<RS.VideoFrame> mainAction;
    Action<RS.VideoFrame> depthAction;

    var config = new RS.Config();
    config.EnableStream(RS.Stream.Color, 640, 480, RS.Format.Rgb8);
    config.EnableStream(RS.Stream.Depth, 640, 480);

    timer = new System.Timers.Timer();
    timer.Elapsed += Timer_Elapsed;
    timer.Interval = 1000;
    timer.Enabled = false;

    timerCountdown = new System.Timers.Timer();
    timerCountdown.Elapsed += TimerCountdown_Elapsed;
    timerCountdown.Interval = 1000;
    timerCountdown.Enabled = false;

    try
    {
        var pp = pipeline.Start(config);
        SetupWindow(pp, out mainAction, out depthAction);

        Task.Factory.StartNew(() =>
        {
            while (!tokenSource.Token.IsCancellationRequested)
            {
                using (var frames = pipeline.WaitForFrames())
                {
                    var mainFrame = frames.ColorFrame.DisposeWith(frames);
                    var depthFrame = frames.DepthFrame.DisposeWith(frames);
                    var colorizedDepth = colorizer.Process<VideoFrame>(depthFrame).DisposeWith(frames);

                    Dispatcher.Invoke(DispatcherPriority.Render, mainAction, mainFrame);
                    Dispatcher.Invoke(DispatcherPriority.Render, depthAction, colorizedDepth);
                }

                if (isRecording)
                {
                    imageCount++;
                    Dispatcher.Invoke(new SaveImagesDelegate(SaveImage), new object[] { "image_", imageCount });
                }
                else
                {
                    imageCount = 0;
                }
            }
        }, tokenSource.Token);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
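The constructor above references several members that are not shown in this excerpt: the pipeline, colorizer, and timer fields, the cancellation token source, the recording flag and counter, and the SaveImagesDelegate/SaveImage pair used to write frames to disk on the UI thread. A minimal sketch of what those members might look like follows; the names mirror the references above, and the SaveImage body is only a placeholder since the original saving logic is not included here.

// Assumed supporting members for MainWindow (illustrative sketch; only the names
// are taken from the constructor above, the bodies/initializers are placeholders).
private RS.Pipeline pipeline;
private RS.Colorizer colorizer;
private System.Timers.Timer timer;
private System.Timers.Timer timerCountdown;
private readonly System.Threading.CancellationTokenSource tokenSource = new System.Threading.CancellationTokenSource();
private bool isRecording = false;
private int imageCount = 0;

// Delegate matching the Dispatcher.Invoke call in the capture loop.
private delegate void SaveImagesDelegate(string prefix, int index);

private void SaveImage(string prefix, int index)
{
    // Placeholder: encode the currently displayed color/depth bitmaps
    // (e.g. with a PngBitmapEncoder) and write them to disk as
    // prefix + index + ".png". The original implementation is not shown here.
}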
public ProcessingWindow()
{
    InitializeComponent();

    try
    {
        var cfg = new Config();

        using (var ctx = new Context())
        {
            var devices = ctx.QueryDevices();
            var dev = devices[0];

            Console.WriteLine("\nUsing device 0, an {0}", dev.Info[CameraInfo.Name]);
            Console.WriteLine("    Serial number: {0}", dev.Info[CameraInfo.SerialNumber]);
            Console.WriteLine("    Firmware version: {0}", dev.Info[CameraInfo.FirmwareVersion]);

            var sensors = dev.QuerySensors();
            var depthSensor = sensors[0];
            var colorSensor = sensors[1];

            var depthProfile = depthSensor.StreamProfiles
                                .Where(p => p.Stream == Stream.Depth)
                                .OrderBy(p => p.Framerate)
                                .Select(p => p.As<VideoStreamProfile>()).First();

            var colorProfile = colorSensor.StreamProfiles
                                .Where(p => p.Stream == Stream.Color)
                                .OrderBy(p => p.Framerate)
                                .Select(p => p.As<VideoStreamProfile>()).First();

            cfg.EnableStream(Stream.Depth, depthProfile.Width, depthProfile.Height, depthProfile.Format, depthProfile.Framerate);
            cfg.EnableStream(Stream.Color, colorProfile.Width, colorProfile.Height, colorProfile.Format, colorProfile.Framerate);
        }

        var pp = pipeline.Start(cfg);

        // Get the recommended processing blocks for the depth sensor
        var sensor = pp.Device.QuerySensors().First(s => s.Is(Extension.DepthSensor));
        var blocks = sensor.ProcessingBlocks.ToList();

        // Allocate bitmaps for rendering.
        // Since the sample aligns the depth frames to the color frames, both images will have the color resolution.
        using (var p = pp.GetStream(Stream.Color).As<VideoStreamProfile>())
        {
            imgColor.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
            imgDepth.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
        }

        var updateColor = UpdateImage(imgColor);
        var updateDepth = UpdateImage(imgDepth);

        // Create a custom processing block.
        // For demonstration purposes it will:
        // a. Get a frameset
        // b. Run post-processing on the depth frame
        // c. Combine the result back into a frameset
        // Processing blocks are inherently thread-safe, play well with other API primitives
        // such as frame queues, and can be used to encapsulate advanced operations.
        // All invocations are, however, synchronous, so the high-level threading model
        // is up to the developer.
        block = new CustomProcessingBlock((f, src) =>
        {
            // We create a FramesReleaser object that tracks all newly allocated
            // .NET frames and ensures deterministic finalization at the end of scope.
            using (var releaser = new FramesReleaser())
            {
                foreach (ProcessingBlock p in blocks)
                {
                    f = p.Process(f).DisposeWith(releaser);
                }

                f = f.ApplyFilter(align).DisposeWith(releaser);
                f = f.ApplyFilter(colorizer).DisposeWith(releaser);

                var frames = f.As<FrameSet>().DisposeWith(releaser);

                var colorFrame = frames[Stream.Color, Format.Rgb8].DisposeWith(releaser);
                var colorizedDepth = frames[Stream.Depth, Format.Rgb8].DisposeWith(releaser);

                // Combine the frames into a single result
                var res = src.AllocateCompositeFrame(colorizedDepth, colorFrame).DisposeWith(releaser);

                // Send it to the next processing stage
                src.FrameReady(res);
            }
        });

        // Register to results of processing via a callback:
        block.Start(f =>
        {
            using (var frames = f.As<FrameSet>())
            {
                var colorFrame = frames.ColorFrame.DisposeWith(frames);
                var colorizedDepth = frames.First<VideoFrame>(Stream.Depth, Format.Rgb8).DisposeWith(frames);

                Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
            }
        });

        var token = tokenSource.Token;
        var t = Task.Factory.StartNew(() =>
        {
            while (!token.IsCancellationRequested)
            {
                using (var frames = pipeline.WaitForFrames())
                {
                    // Invoke custom processing block
                    block.Process(frames);
                }
            }
        }, token);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Application.Current.Shutdown();
    }
}
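This constructor likewise depends on class-level fields (pipeline, colorizer, align, block, tokenSource) and on the UpdateImage helper that turns a WPF Image control into a render callback. A plausible sketch of those members is below; the field initializers and the exact UpdateImage body are assumptions, written against the Intel.RealSense C# wrapper's VideoFrame properties (Width, Height, Stride, Data) and the WriteableBitmap.WritePixels overload that takes an IntPtr buffer.

// Assumed class-level members for ProcessingWindow (sketch; initializers are assumptions).
// Requires: using System; using System.Threading; using System.Windows;
//           using System.Windows.Media.Imaging; using Intel.RealSense;
private readonly Pipeline pipeline = new Pipeline();
private readonly Colorizer colorizer = new Colorizer();
private readonly Align align = new Align(Stream.Color);
private CustomProcessingBlock block;
private readonly CancellationTokenSource tokenSource = new CancellationTokenSource();

// One plausible UpdateImage implementation: capture the control's WriteableBitmap
// and return an action that copies a VideoFrame's pixels into it.
static Action<VideoFrame> UpdateImage(System.Windows.Controls.Image img)
{
    var wbmp = img.Source as WriteableBitmap;
    return frame =>
    {
        var rect = new Int32Rect(0, 0, frame.Width, frame.Height);
        wbmp.WritePixels(rect, frame.Data, frame.Stride * frame.Height, frame.Stride);
    };
}

On shutdown the window would typically cancel tokenSource and stop the pipeline (for example in an OnClosing override) so the background task and the camera are released cleanly.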