public CaptureWindow()
{
    // InitializeComponent must run before the rendering task starts,
    // so that imgDepth and imgColor exist when UploadImage is called.
    InitializeComponent();

    //Log.ToFile(LogSeverity.Debug, "1.log");
    try
    {
        pipeline = new Pipeline();
        colorizer = new Colorizer();

        var cfg = new Config();
        cfg.EnableStream(Stream.Depth, 640, 480, Format.Z16, 30);
        cfg.EnableStream(Stream.Color, 640, 480, Format.Rgb8, 30);
        var profile = pipeline.Start(cfg);

        var software_dev = new SoftwareDevice();
        var depth_sensor = software_dev.AddSensor("Depth");
        var depth_profile = depth_sensor.AddVideoStream(new VideoStream
        {
            type = Stream.Depth,
            index = 0,
            uid = 100,
            width = 640,
            height = 480,
            fps = 30,
            bpp = 2,
            fmt = Format.Z16,
            intrinsics = (profile.GetStream(Stream.Depth) as VideoStreamProfile).GetIntrinsics()
        });

        var color_sensor = software_dev.AddSensor("Color");
        var color_profile = color_sensor.AddVideoStream(new VideoStream
        {
            type = Stream.Color,
            index = 0,
            uid = 101,
            width = 640,
            height = 480,
            fps = 30,
            bpp = 3,           // Rgb8 is 3 bytes per pixel
            fmt = Format.Rgb8, // must match the format the color stream was enabled with
            intrinsics = (profile.GetStream(Stream.Color) as VideoStreamProfile).GetIntrinsics()
        });

        // Note about the Syncer: if the actual FPS is significantly different from the FPS
        // reported in AddVideoStream, this can confuse the syncer and prevent it from
        // producing synchronized pairs.
        software_dev.SetMatcher(Matchers.Default);
        var sync = new Syncer();

        depth_sensor.Open(depth_profile);
        color_sensor.Open(color_profile);

        // Both sensors feed their frames into the same syncer.
        depth_sensor.Start(f =>
        {
            sync.SubmitFrame(f);
            //Debug.WriteLine("D");
        });
        color_sensor.Start(f =>
        {
            sync.SubmitFrame(f);
            //Debug.WriteLine("C");
        });

        var token = tokenSource.Token;
        var t = Task.Factory.StartNew(() =>
        {
            while (!token.IsCancellationRequested)
            {
                // Frames captured from the live camera are the input data for the software device.
                var frames = pipeline.WaitForFrames();
                var depth_frame = frames.DepthFrame;
                var color_frame = frames.ColorFrame;

                var bytes = new byte[depth_frame.Stride * depth_frame.Height];
                depth_frame.CopyTo(bytes);
                depth_sensor.AddVideoFrame(bytes, depth_frame.Stride, 2, depth_frame.Timestamp,
                    depth_frame.TimestampDomain, (int)depth_frame.Number, depth_profile);

                bytes = new byte[color_frame.Stride * color_frame.Height];
                color_frame.CopyTo(bytes);
                // The color frame must be submitted with its own bpp, timestamp, and frame
                // number; mixing in the depth frame's metadata breaks the syncer's pairing.
                color_sensor.AddVideoFrame(bytes, color_frame.Stride, 3, color_frame.Timestamp,
                    color_frame.TimestampDomain, (int)color_frame.Number, color_profile);

                depth_frame.Dispose();
                color_frame.Dispose();
                frames.Dispose();

                // Display the synchronized pairs that come back out of the software device.
                var new_frames = sync.WaitForFrames();
                if (new_frames.Count == 2)
                {
                    depth_frame = new_frames.DepthFrame;
                    color_frame = new_frames.ColorFrame;

                    var colorized_depth = colorizer.Colorize(depth_frame);

                    UploadImage(imgDepth, colorized_depth);
                    UploadImage(imgColor, color_frame);

                    depth_frame.Dispose();
                    colorized_depth.Dispose();
                    color_frame.Dispose();
                }
                new_frames.Dispose();
            }
        }, token);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Application.Current.Shutdown();
    }
}
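The version above relies on a few members that are not part of this listing: pipeline, colorizer, and tokenSource are assumed to be fields of the window (a Pipeline, a Colorizer, and a CancellationTokenSource), and UploadImage is a rendering helper. A minimal sketch of such a helper follows, assuming imgDepth and imgColor are WPF Image controls from the window's XAML and that both the colorized depth frame and the color frame arrive as 24-bit RGB; the body is illustrative, not the sample's actual implementation.

// Hypothetical helper (requires System.Windows.Controls, System.Windows.Media,
// and System.Windows.Media.Imaging): marshals a frame onto the UI thread and
// displays it in the given Image control.
private void UploadImage(Image img, VideoFrame frame)
{
    // Dispatcher.Invoke is synchronous, so the frame is still valid while it is copied.
    Dispatcher.Invoke(new Action(() =>
    {
        var bytes = new byte[frame.Stride * frame.Height];
        frame.CopyTo(bytes);

        // BitmapSource.Create copies the pixel data, so the caller may dispose
        // the frame as soon as this call returns.
        img.Source = BitmapSource.Create(frame.Width, frame.Height, 96d, 96d,
            PixelFormats.Rgb24, null, bytes, frame.Stride);
    }));
}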
public CaptureWindow()
{
    InitializeComponent();

    try
    {
        Action<VideoFrame> updateDepth;
        Action<VideoFrame> updateColor;

        pipeline = new Pipeline();
        colorizer = new Colorizer();

        var cfg = new Config();
        cfg.EnableStream(Stream.Depth, 640, 480, Format.Z16, 30);
        cfg.EnableStream(Stream.Color, 640, 480, Format.Rgb8, 30);
        var profile = pipeline.Start(cfg);
        SetupWindow(profile, out updateDepth, out updateColor);

        // Set up the SW device and sensors
        var software_dev = new SoftwareDevice();
        var depth_sensor = software_dev.AddSensor("Depth");
        var depth_profile = depth_sensor.AddVideoStream(new VideoStream
        {
            type = Stream.Depth,
            index = 0,
            uid = 100,
            width = 640,
            height = 480,
            fps = 30,
            bpp = 2,
            fmt = Format.Z16,
            intrinsics = (profile.GetStream(Stream.Depth) as VideoStreamProfile).GetIntrinsics()
        });

        var color_sensor = software_dev.AddSensor("Color");
        var color_profile = color_sensor.AddVideoStream(new VideoStream
        {
            type = Stream.Color,
            index = 0,
            uid = 101,
            width = 640,
            height = 480,
            fps = 30,
            bpp = 3,
            fmt = Format.Rgb8,
            intrinsics = (profile.GetStream(Stream.Color) as VideoStreamProfile).GetIntrinsics()
        });

        // Note about the Syncer: if the actual FPS is significantly different from the FPS
        // reported in AddVideoStream, this can confuse the syncer and prevent it from
        // producing synchronized pairs.
        software_dev.SetMatcher(Matchers.Default);
        var sync = new Syncer();

        depth_sensor.Open(depth_profile);
        color_sensor.Open(color_profile);

        // Push the SW device frames to the syncer
        depth_sensor.Start(f => sync.SubmitFrame(f));
        color_sensor.Start(f => sync.SubmitFrame(f));

        var token = tokenSource.Token;
        var t = Task.Factory.StartNew(() =>
        {
            while (!token.IsCancellationRequested)
            {
                // We use the frames that are captured from the live camera as the input data for the SW device
                using (var frames = pipeline.WaitForFrames())
                {
                    var depthFrame = frames.DepthFrame.DisposeWith(frames);
                    var colorFrame = frames.ColorFrame.DisposeWith(frames);

                    var depthBytes = new byte[depthFrame.Stride * depthFrame.Height];
                    depthFrame.CopyTo(depthBytes);
                    depth_sensor.AddVideoFrame(depthBytes, depthFrame.Stride, depthFrame.BitsPerPixel / 8,
                        depthFrame.Timestamp, depthFrame.TimestampDomain, (int)depthFrame.Number, depth_profile);

                    var colorBytes = new byte[colorFrame.Stride * colorFrame.Height];
                    colorFrame.CopyTo(colorBytes);
                    color_sensor.AddVideoFrame(colorBytes, colorFrame.Stride, colorFrame.BitsPerPixel / 8,
                        colorFrame.Timestamp, colorFrame.TimestampDomain, (int)colorFrame.Number, color_profile);
                }

                // Display the frames that come from the SW device after synchronization
                using (var new_frames = sync.WaitForFrames())
                {
                    if (new_frames.Count == 2)
                    {
                        var depthFrame = new_frames.DepthFrame.DisposeWith(new_frames);
                        var colorFrame = new_frames.ColorFrame.DisposeWith(new_frames);

                        var colorizedDepth = colorizer.Process(depthFrame).DisposeWith(new_frames) as VideoFrame;

                        // Render the frames.
                        Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                        Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                    }
                }
            }
        }, token);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Application.Current.Shutdown();
    }
}
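This second version defers rendering to the update actions produced by SetupWindow, which is likewise not shown here, and scopes frame lifetimes with the wrapper's DisposeWith extension, which ties each frame's disposal to the owning FrameSet. A minimal sketch of SetupWindow under the same assumptions (Image controls named imgDepth and imgColor, both streams rendered as 24-bit RGB) might look like this:

// Hypothetical sketch: allocates one WriteableBitmap per stream and hands back
// an update action for each. Requires System, System.Windows,
// System.Windows.Controls, System.Windows.Media, and System.Windows.Media.Imaging.
private void SetupWindow(PipelineProfile profile, out Action<VideoFrame> depth, out Action<VideoFrame> color)
{
    var dp = profile.GetStream(Stream.Depth) as VideoStreamProfile;
    imgDepth.Source = new WriteableBitmap(dp.Width, dp.Height, 96d, 96d, PixelFormats.Rgb24, null);

    var cp = profile.GetStream(Stream.Color) as VideoStreamProfile;
    imgColor.Source = new WriteableBitmap(cp.Width, cp.Height, 96d, 96d, PixelFormats.Rgb24, null);

    depth = UpdateImage(imgDepth);
    color = UpdateImage(imgColor);
}

// Copies a frame's pixels into the WriteableBitmap backing the given Image control.
private static Action<VideoFrame> UpdateImage(Image img)
{
    var wbmp = (WriteableBitmap)img.Source;
    return frame =>
    {
        var rect = new Int32Rect(0, 0, frame.Width, frame.Height);
        // WritePixels reads straight from the frame's native buffer; the synchronous
        // Dispatcher.Invoke in the capture loop keeps the frame alive during the copy.
        wbmp.WritePixels(rect, frame.Data, frame.Stride * frame.Height, frame.Stride);
    };
}

Because both actions close over bitmaps created on the UI thread, the capture loop must invoke them through the dispatcher, which is exactly what the Dispatcher.Invoke(DispatcherPriority.Render, ...) calls in the constructor do.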