/// <summary>
/// Starts a dedicated pipeline streaming only the given depth profile from the given device.
/// </summary>
/// <param name="depthProfile">Depth stream profile whose resolution/format/framerate to enable.</param>
/// <param name="dev">Device to bind the pipeline to (matched by serial number).</param>
/// <returns>The started <see cref="Pipeline"/>; the caller owns it and must stop/dispose it.</returns>
private Pipeline StartPipeline(VideoStreamProfile depthProfile, Device dev)
{
    // Restrict the config to this exact device and enable only its depth stream.
    var config = new Config();
    config.EnableDevice(dev.Info[CameraInfo.SerialNumber]);
    config.EnableStream(Stream.Depth, depthProfile.Width, depthProfile.Height, depthProfile.Format, depthProfile.Framerate);

    var pipeline = new Pipeline();
    pipeline.Start(config);
    return pipeline;
}
/// <summary>
/// Builds one comma-separated row for the calibration report file: profile set/resolution/format,
/// calibration settings (numeric and symbolic), elapsed time, device health and free-form comments.
/// </summary>
/// <returns>A single CSV row (fields joined with commas, no trailing newline).</returns>
private string GetRow4FileReport(
    int depthProfileSet,
    VideoStreamProfile profile,
    int calibrationSet,
    CalibrationSpeed calibrationSpeed,
    CalibrationScanParameter calibrationScanParameter,
    CalibrationDataSampling calibrationDataSampling,
    int timeout,
    TimeSpan calibrationTakes,
    string deviceHealth,
    string deviceHealthDescription,
    string comments = "")
{
    // NOTE(review): the timeout column is commented out below, so the `timeout`
    // argument is accepted but not emitted — confirm the report header matches.
    var fields = new[]
    {
        depthProfileSet.ToString(),
        profile.Width.ToString(),
        profile.Height.ToString(),
        profile.Framerate.ToString(),
        profile.Format.ToString(),
        calibrationSet.ToString(),
        ((int)calibrationSpeed).ToString(),
        ((int)calibrationScanParameter).ToString(),
        ((int)calibrationDataSampling).ToString(),
        calibrationSpeed.ToString(),
        calibrationScanParameter.ToString(),
        calibrationDataSampling.ToString(),
        //$@"{timeout:mm\:ss\.fff}",
        calibrationTakes.ToString(@"mm\:ss\.fff"),
        deviceHealth,
        deviceHealthDescription,
        comments
    };
    return string.Join(",", fields);
}
/// <summary>
/// Pushes a managed pixel buffer into the software sensor as one video frame.
/// The data is copied into unmanaged memory whose ownership passes to the SDK;
/// the deleter frees it once the native side is done with the frame.
/// </summary>
public void AddVideoFrame(byte[] pixels, int stride, int bpp, double timestamp, TimestampDomain domain, int frameNumber, VideoStreamProfile profile)
{
    int byteCount = profile.Height * stride;
    IntPtr unmanagedBuffer = Marshal.AllocHGlobal(byteCount);
    Marshal.Copy(pixels, 0, unmanagedBuffer, byteCount);

    AddVideoFrame(new SoftwareVideoFrame
    {
        pixels = unmanagedBuffer,
        deleter = p => Marshal.FreeHGlobal(p),
        stride = stride,
        bpp = bpp,
        timestamp = timestamp,
        domain = domain,
        frame_number = frameNumber,
        profile = profile.m_instance.Handle
    });
}
/// <summary>
/// Window constructor: picks matching depth/color profiles on the first connected device,
/// starts a live pipeline, mirrors its frames into a SoftwareDevice, synchronizes the
/// software frames with a Syncer and renders the synchronized pairs on a background task.
/// </summary>
public CaptureWindow()
{
    InitializeComponent();
    try
    {
        Action<VideoFrame> updateDepth;
        Action<VideoFrame> updateColor;

        pipeline = new Pipeline();
        colorizer = new Colorizer();

        // Fallback stream parameters, overwritten below once real profiles are found.
        var depthWidth = 640;
        var depthHeight = 480;
        var depthFrames = 30;
        var depthFormat = Format.Z16;
        var colorWidth = 640;
        var colorHeight = 480;
        var colorFrames = 30;

        using (var ctx = new Context())
        {
            var devices = ctx.QueryDevices();
            var dev = devices[0];

            Console.WriteLine("\nUsing device 0, an {0}", dev.Info[CameraInfo.Name]);
            Console.WriteLine(" Serial number: {0}", dev.Info[CameraInfo.SerialNumber]);
            Console.WriteLine(" Firmware version: {0}", dev.Info[CameraInfo.FirmwareVersion]);

            var sensors = dev.QuerySensors();
            // NOTE(review): assumes sensor 0 is depth and sensor 1 is color — TODO confirm for all D400 SKUs.
            var depthSensor = sensors[0];
            var colorSensor = sensors[1];

            var depthProfiles = depthSensor.StreamProfiles
                .Where(p => p.Stream == Stream.Depth)
                .OrderBy(p => p.Framerate)
                .Select(p => p.As<VideoStreamProfile>());

            VideoStreamProfile colorProfile = null;
            // Select a color profile with the same framerate as a depth profile so the
            // syncer can pair frames smoothly.
            foreach (var depthProfile in depthProfiles)
            {
                depthWidth = depthProfile.Width;
                depthHeight = depthProfile.Height;
                depthFrames = depthProfile.Framerate;
                depthFormat = depthProfile.Format;

                colorProfile = colorSensor.StreamProfiles
                    .Where(p => p.Stream == Stream.Color)
                    .OrderByDescending(p => p.Framerate)
                    .Select(p => p.As<VideoStreamProfile>())
                    .FirstOrDefault(p => p.Framerate == depthFrames);

                if (colorProfile != null)
                {
                    colorWidth = colorProfile.Width;
                    colorHeight = colorProfile.Height;
                    colorFrames = colorProfile.Framerate;
                    break;
                }
            }

            if (colorProfile == null)
            {
                // If no profile with the same framerate was found, take the first available one.
                colorProfile = colorSensor.StreamProfiles
                    .Where(p => p.Stream == Stream.Color)
                    .OrderByDescending(p => p.Framerate)
                    .Select(p => p.As<VideoStreamProfile>()).FirstOrDefault();
                if (colorProfile == null)
                {
                    throw new InvalidOperationException($"Error while finding appropriate depth and color profiles");
                }
                colorWidth = colorProfile.Width;
                colorHeight = colorProfile.Height;
                colorFrames = colorProfile.Framerate;
            }
        }

        // Start the live pipeline with the selected depth profile and RGB8 color.
        var cfg = new Config();
        cfg.EnableStream(Stream.Depth, depthWidth, depthHeight, depthFormat, depthFrames);
        cfg.EnableStream(Stream.Color, colorWidth, colorHeight, Format.Rgb8, colorFrames);
        var profile = pipeline.Start(cfg);
        SetupWindow(profile, out updateDepth, out updateColor);

        // Setup the SW device and sensors, mirroring the live streams' geometry/intrinsics.
        var software_dev = new SoftwareDevice();

        var depth_sensor = software_dev.AddSensor("Depth");
        var depth_profile = depth_sensor.AddVideoStream(new SoftwareVideoStream
        {
            type = Stream.Depth,
            index = 0,
            uid = 100,
            width = depthWidth,
            height = depthHeight,
            fps = depthFrames,
            bpp = 2, // Z16 = 2 bytes per pixel
            format = depthFormat,
            intrinsics = profile.GetStream(Stream.Depth).As<VideoStreamProfile>().GetIntrinsics()
        });
        // NOTE(review): DepthUnits is added here and again below with the real sensor's
        // scale — the first value looks redundant; confirm whether it can be removed.
        depth_sensor.AddReadOnlyOption(Option.DepthUnits, 1.0f / 5000);

        var color_sensor = software_dev.AddSensor("Color");
        var color_profile = color_sensor.AddVideoStream(new SoftwareVideoStream
        {
            type = Stream.Color,
            index = 0,
            uid = 101,
            width = colorWidth,
            height = colorHeight,
            fps = colorFrames,
            bpp = 3, // RGB8 = 3 bytes per pixel
            format = Format.Rgb8,
            intrinsics = profile.GetStream(Stream.Color).As<VideoStreamProfile>().GetIntrinsics()
        });

        // Note about the Syncer: If actual FPS is significantly different from reported FPS in AddVideoStream
        // this can confuse the syncer and prevent it from producing synchronized pairs
        software_dev.SetMatcher(Matchers.Default);
        var sync = new Syncer();

        // The raw depth->metric units translation scale is required for Colorizer to work
        var realDepthSensor = profile.Device.QuerySensors().First(s => s.Is(Extension.DepthSensor));
        depth_sensor.AddReadOnlyOption(Option.DepthUnits, realDepthSensor.DepthScale);

        depth_sensor.Open(depth_profile);
        color_sensor.Open(color_profile);

        // Push the SW device frames to the syncer
        depth_sensor.Start(sync.SubmitFrame);
        color_sensor.Start(sync.SubmitFrame);

        var token = tokenSource.Token;
        ushort[] depthData = null;
        byte[] colorData = null;

        var t = Task.Factory.StartNew(() =>
        {
            while (!token.IsCancellationRequested)
            {
                // We use the frames that are captured from live camera as the input data for the SW device
                using (var frames = pipeline.WaitForFrames())
                {
                    var depthFrame = frames.DepthFrame.DisposeWith(frames);
                    var colorFrame = frames.ColorFrame.DisposeWith(frames);

                    // Reusable staging buffers, allocated lazily on the first frame.
                    depthData = depthData ?? new ushort[depthFrame.Width * depthFrame.Height];
                    depthFrame.CopyTo(depthData);
                    depth_sensor.AddVideoFrame(depthData, depthFrame.Stride, depthFrame.BitsPerPixel / 8, depthFrame.Timestamp, depthFrame.TimestampDomain, (int)depthFrame.Number, depth_profile);

                    colorData = colorData ?? new byte[colorFrame.Stride * colorFrame.Height];
                    colorFrame.CopyTo(colorData);
                    color_sensor.AddVideoFrame(colorData, colorFrame.Stride, colorFrame.BitsPerPixel / 8, colorFrame.Timestamp, colorFrame.TimestampDomain, (int)colorFrame.Number, color_profile);
                }

                // Display the frames that come from the SW device after synchronization
                using (var new_frames = sync.WaitForFrames())
                {
                    // Only render when the syncer produced a matched depth+color pair.
                    if (new_frames.Count == 2)
                    {
                        var colorFrame = new_frames.ColorFrame.DisposeWith(new_frames);
                        var depthFrame = new_frames.DepthFrame.DisposeWith(new_frames);

                        var colorizedDepth = colorizer.Process<VideoFrame>(depthFrame).DisposeWith(new_frames);

                        // Render the frames.
                        Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                        Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                    }
                }
            }
        }, token);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
        Application.Current.Shutdown();
    }
}
/// <summary>
/// Runs the interactive on-chip self-calibration flow on the given device (or the first
/// device found when <paramref name="dev"/> is null): reads the current calibration table,
/// runs on-chip calibration (retrying on failure if the user agrees), lets the user accept,
/// retry or abort, applies the new table and optionally burns it to firmware.
/// </summary>
/// <param name="dev">Target device; when null the first enumerated device is used.</param>
public void Calibrate(Device dev = null)
{
    try
    {
        if (dev == null)
        {
            Console.WriteLine($"{Environment.NewLine}Getting context...");
            dev = new Context().QueryDevices().First();
        }

        if (!IsTheDeviceD400Series(dev))
        {
            return;
        }

        Console.WriteLine($"{Environment.NewLine}Calibration device {dev.Info[CameraInfo.Name]}" +
                          $"{Environment.NewLine}\tSerial number: {dev.Info[CameraInfo.SerialNumber]}" +
                          $"{Environment.NewLine}\tFirmware version: {dev.Info[CameraInfo.FirmwareVersion]}");

        // On-chip calibration requires the dedicated 256x144 @ 90fps Z16 depth profile.
        VideoStreamProfile depthProfile = dev.QuerySensors()
            .SelectMany(s => s.StreamProfiles)
            .Where(sp => sp.Stream == Stream.Depth)
            .Select(sp => sp.As<VideoStreamProfile>())
            .Where(p => p.Width == 256 && p.Height == 144 && p.Framerate == 90 && p.Format == Format.Z16)
            .Last();

        Console.WriteLine($"{Environment.NewLine}Starting pipeline for calibration mode");
        using (var pipeline = StartPipeline(depthProfile, dev))
        {
            var aCalibratedDevice = AutoCalibratedDevice.FromDevice(dev);

            // 1. Calibration table before running on-chip calibration.
            Console.WriteLine($"{Environment.NewLine}1. Calibration table before running on-chip calibration.");
            var calTableBefore = aCalibratedDevice.CalibrationTable;
            Console.WriteLine("Step 1 : done");

            // 2. Runs the on-chip self-calibration routine that returns pointer to new calibration table.
            // Calibration config.
            var calibrationSpeed = CalibrationSpeed.Slow;
            var calibrationScanParameter = CalibrationScanParameter.Intrinsic;
            var calibrationDataSampling = CalibrationDataSampling.WindowsAndLinux;
            string calibrationConfig = GetCalibrationConfig(calibrationSpeed, calibrationScanParameter, calibrationDataSampling);
            var timeout = _timeoutForCalibrationSpeed[calibrationSpeed];

            float health = 100;
            var succeedOnChipCalibration = false;
            var abortRequested = false;
            byte[] calTableAfter = null;

            while (!succeedOnChipCalibration && !abortRequested)
            {
                Console.WriteLine($"{Environment.NewLine}2. Runs the on-chip self-calibration routine that returns pointer to new calibration table.");
                Console.WriteLine($"\t Format : {depthProfile.Format}");
                Console.WriteLine($"\t Framerate : {depthProfile.Framerate}");
                Console.WriteLine($"\t Width : {depthProfile.Width}");
                Console.WriteLine($"\t Height : {depthProfile.Height}");
                Console.WriteLine($"\t Calibration Speed : {calibrationSpeed}");
                Console.WriteLine($"\t Calibration Scan Parameter: {calibrationScanParameter}");
                Console.WriteLine($"\t Calibration Data Sampling : {calibrationDataSampling}");
                Console.WriteLine($"\t Calibration Timeout : {TimeSpan.FromMilliseconds(timeout).ToString(@"mm\:ss\.fff")} (min:sec.millisec)");

                var sw = new Stopwatch();
                var thisCalibrationFault = false;
                try
                {
                    sw.Start();
                    ProgressCallback pc = (x) => { Console.WriteLine("Progress: {0} percents", x); };
                    // The following line performs the same calibration flow but does not report progress:
                    //calTableAfter = aCalibratedDevice.RunOnChipCalibration(calibrationConfig, out health, timeout);
                    calTableAfter = aCalibratedDevice.RunOnChipCalibration(calibrationConfig, out health, pc, timeout);
                    sw.Stop();
                }
                catch (Exception ex)
                {
                    sw.Stop();
                    thisCalibrationFault = true;
                    Console.WriteLine($"\n\t Error during calibration: {ex.Message.Replace("\n", "\t")}");
                    Console.WriteLine($"\t Please try to change distance to target or light conditions{Environment.NewLine}");
                    if (ConsoleKey.N == ConsoleGetKey(new[] { ConsoleKey.Y, ConsoleKey.N }, @"Let's try calibrate one more time? (Y\N)"))
                    {
                        Console.WriteLine("");
                        Console.WriteLine($"Calibration failed");
                        Console.WriteLine($"Stopping calibration pipeline...");
                        pipeline.Stop();
                        return;
                    }
                }

                if (!thisCalibrationFault)
                {
                    Console.WriteLine($"\n\t Time spend: {sw.Elapsed.ToString(@"mm\:ss\.fff")} (min:sec.millisec)");
                    Console.WriteLine($"\t Device health: {health} ({_deviceHealthDescription[GetDeviceHealth(health)]})");
                    var res = ConsoleGetKey(new[] { ConsoleKey.Y, ConsoleKey.N, ConsoleKey.A }, @"Accept calibration ? 
Yes/No/Abort");
                    Console.WriteLine("User's selection = {0}", res);
                    if (res == ConsoleKey.A)
                    {
                        abortRequested = true;
                    }
                    else
                    {
                        succeedOnChipCalibration = (res == ConsoleKey.Y);
                    }
                }
            }
            Console.WriteLine("Step 2 : done");

            // Bug fix: previously an abort (or a loop exit without a successful run) still
            // fell through to step 3 and assigned a possibly-null calibration table, and
            // could even burn it to firmware. Bail out cleanly instead.
            if (abortRequested || calTableAfter == null)
            {
                Console.WriteLine("Calibration aborted - keeping the existing calibration table");
                Console.WriteLine($"{Environment.NewLine}Stopping calibration pipeline...");
                pipeline.Stop();
                return;
            }

            // 3. Toggle between calibration tables to assess which is better. This is optional.
            Console.WriteLine($"{Environment.NewLine}3. Toggle between calibration tables to assess which is better.");
            aCalibratedDevice.CalibrationTable = calTableAfter;
            Console.WriteLine("Step 3 : done");

            // 4. Burns the new calibration to FW persistently.
            Console.WriteLine("");
            if (ConsoleKey.Y == ConsoleGetKey(new[] { ConsoleKey.Y, ConsoleKey.N }, @"4. Burns the new calibration to FW persistently. (Y\N)"))
            {
                aCalibratedDevice.WriteCalibration();
                Console.WriteLine("Step 4 : done");
            }
            else
            {
                Console.WriteLine("Step 4 : skipped");
            }

            Console.WriteLine("Calibration complete");
            Console.WriteLine($"{Environment.NewLine}Stopping calibration pipeline...");
            pipeline.Stop();
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine($"{Environment.NewLine} Error during calibration:{Environment.NewLine} {ex.Message}");
    }
}
/// <summary>
/// Submits a managed pixel buffer to the native software sensor as one video frame.
/// The pixels are copied into unmanaged memory; the native side takes ownership and
/// invokes the deleter to free the buffer when the frame is released.
/// </summary>
public void AddVideoFrame(byte[] pixels, int stride, int bpp, double timestamp, TimestampDomain domain, int frameNumber, VideoStreamProfile profile)
{
    object error;
    int byteCount = profile.Height * stride;

    IntPtr nativeBuffer = Marshal.AllocHGlobal(byteCount);
    Marshal.Copy(pixels, 0, nativeBuffer, byteCount);

    var releaseBuffer = new frame_deleter(ptr => Marshal.FreeHGlobal(ptr));
    var frame = new NativeMethods.SoftwareVideoFrame
    {
        pixels = nativeBuffer,
        deleter = releaseBuffer,
        stride = stride,
        bpp = bpp,
        timestamp = timestamp,
        domain = domain,
        frame_number = frameNumber,
        profile = profile.m_instance.Handle
    };
    NativeMethods.rs2_software_sensor_on_video_frame(m_instance, frame, out error);
}
/// <summary>
/// Submits a managed array of arbitrary element type as one video frame: the array is
/// pinned just long enough to memcpy its bytes into an unmanaged buffer, which is then
/// handed to the software sensor (freed by the deleter on the native side).
/// </summary>
public void AddVideoFrame<T>(T[] pixels, int stride, int bpp, double timestamp, TimestampDomain domain, int frameNumber, VideoStreamProfile profile)
{
    //TODO: avoid copy by adding void* user_data to native methods, so we can pass GCHandle.ToIntPtr() and free in deleter
    int byteCount = profile.Height * stride;
    IntPtr nativeBuffer = Marshal.AllocHGlobal(byteCount);

    // Pin the managed array only for the duration of the copy.
    var pin = GCHandle.Alloc(pixels, GCHandleType.Pinned);
    try
    {
        NativeMethods.memcpy(nativeBuffer, pin.AddrOfPinnedObject(), byteCount);
    }
    finally
    {
        pin.Free();
    }

    AddVideoFrame(new SoftwareVideoFrame
    {
        pixels = nativeBuffer,
        deleter = p => Marshal.FreeHGlobal(p),
        stride = stride,
        bpp = bpp,
        timestamp = timestamp,
        domain = domain,
        frame_number = frameNumber,
        profile = profile.m_instance.Handle
    });
}