/// <summary>
/// Starts streaming, delivering frames directly into the supplied external queue.
/// The internal frames-queue size is set to 0 first — presumably so frames are not
/// buffered internally before reaching <paramref name="queue"/> (confirm against SDK docs).
/// </summary>
/// <param name="queue">External queue that will receive the frames.</param>
public void Start(FrameQueue queue)
{
    Options[Option.FramesQueueSize].Value = 0;
    object err;
    NativeMethods.rs2_start_queue(m_instance, queue.m_instance.Handle, out err);
}
/// <summary>
/// Stops streaming from the specified configured device and clears any
/// previously registered frame sink (callback or queue).
/// </summary>
public void Stop()
{
    object err;
    NativeMethods.rs2_stop(Handle, out err);

    // Drop the managed references so the delegate/queue can be collected.
    m_callback = null;
    m_queue = null;
}
/// <summary>
/// Creates a colorizer processing block and routes its output into the
/// internal frame queue for synchronous retrieval.
/// </summary>
public Colorizer()
{
    object err;
    var handle = NativeMethods.rs2_create_colorizer(out err);
    m_instance = new HandleRef(this, handle);

    queue = new FrameQueue();
    NativeMethods.rs2_start_processing_queue(m_instance.Handle, queue.m_instance.Handle, out err);
}
/// <summary>
/// Creates an align processing block targeting the given stream type and
/// routes its output into the internal frame queue.
/// </summary>
/// <param name="align_to">Stream type that frames will be aligned to.</param>
public Align(Stream align_to)
{
    object err;
    var handle = NativeMethods.rs2_create_align(align_to, out err);
    m_instance = new HandleRef(this, handle);

    queue = new FrameQueue();
    NativeMethods.rs2_start_processing_queue(m_instance.Handle, queue.m_instance.Handle, out err);
}
/// <summary>
/// Creates a temporal-filter processing block and routes its output into
/// the internal frame queue.
/// </summary>
public TemporalFilter()
{
    object err;
    var handle = NativeMethods.rs2_create_temporal_filter_block(out err);
    m_instance = new HandleRef(this, handle);

    queue = new FrameQueue();
    NativeMethods.rs2_start_processing_queue(m_instance.Handle, queue.m_instance.Handle, out err);
}
/// <summary>
/// Creates a point-cloud processing block and routes its output into the
/// internal frame queue.
/// </summary>
public PointCloud()
{
    object err;
    var handle = NativeMethods.rs2_create_pointcloud(out err);
    m_instance = new HandleRef(this, handle);

    queue = new FrameQueue();
    NativeMethods.rs2_start_processing_queue(m_instance.Handle, queue.m_instance.Handle, out err);
}
/// <summary>
/// Runs UV-map calibration on the three supplied frame queues, without
/// progress reporting.
/// </summary>
/// <param name="left">Queue providing left-sensor frames.</param>
/// <param name="color">Queue providing color frames.</param>
/// <param name="depth">Queue providing depth frames.</param>
/// <param name="px_py_only">Non-zero to restrict calibration to px/py.</param>
/// <param name="ratio">Receives the resulting ratio.</param>
/// <param name="angle">Receives the resulting angle.</param>
/// <returns>Raw calibration table as a byte array.</returns>
public byte[] RunUVMapCalibration(FrameQueue left, FrameQueue color, FrameQueue depth, int px_py_only, out float ratio, out float angle)
{
    object err;
    IntPtr raw = NativeMethods.rs2_run_uv_map_calibration(
        Handle,
        left.Handle, color.Handle, depth.Handle,
        px_py_only,
        out ratio, out angle,
        null, out err);
    return GetByteArrayFromRawDataObject(raw);
}
/// <summary>
/// Starts streaming, delivering frames into the supplied external queue.
/// Any previously registered per-frame callback is cleared — only one
/// delivery mode (queue or callback) is active at a time.
/// </summary>
/// <param name="queue">Queue that will receive the frames.</param>
public void Start(FrameQueue queue)
{
    object err;
    NativeMethods.rs2_start_queue(m_instance, queue.m_instance.Handle, out err);

    m_queue = queue;
    m_callback = null;
}
/// <summary>
/// Runs focal-length calibration against a target of the given physical
/// size, without progress reporting.
/// </summary>
/// <param name="left">Queue providing left-sensor frames.</param>
/// <param name="right">Queue providing right-sensor frames.</param>
/// <param name="target_width_mm">Target width in millimeters.</param>
/// <param name="target_height_mm">Target height in millimeters.</param>
/// <param name="adjust_both_sides">Non-zero to adjust both sides.</param>
/// <param name="ratio">Receives the resulting ratio.</param>
/// <param name="angle">Receives the resulting angle.</param>
/// <returns>Raw calibration table as a byte array.</returns>
public byte[] RunFocalLengthCalibration(FrameQueue left, FrameQueue right, float target_width_mm, float target_height_mm, int adjust_both_sides, out float ratio, out float angle)
{
    object err;
    IntPtr raw = NativeMethods.rs2_run_focal_length_calibration(
        Handle,
        left.Handle, right.Handle,
        target_width_mm, target_height_mm,
        adjust_both_sides,
        out ratio, out angle,
        null, out err);
    return GetByteArrayFromRawDataObject(raw);
}
/// <summary>
/// Creates a disparity-transform processing block and routes its output
/// into the internal frame queue.
/// </summary>
/// <param name="transform_to_disparity">
/// True (default) to transform depth to disparity; false for the inverse direction.
/// </param>
public DisparityTransform(bool transform_to_disparity = true)
{
    object err;

    // Native API expects the direction as a byte flag.
    byte direction = 0;
    if (transform_to_disparity)
    {
        direction = 1;
    }

    var handle = NativeMethods.rs2_create_disparity_transform_block(direction, out err);
    m_instance = new HandleRef(this, handle);

    queue = new FrameQueue();
    NativeMethods.rs2_start_processing_queue(m_instance.Handle, queue.m_instance.Handle, out err);
}
/// <summary>
/// Calculates the Z distance to a calibration target of the given physical
/// size, reporting progress through the supplied callback.
/// </summary>
/// <param name="frame_queue1">First frame queue.</param>
/// <param name="frame_queue2">Second frame queue.</param>
/// <param name="frame_queue3">Third frame queue.</param>
/// <param name="target_width_mm">Target width in millimeters.</param>
/// <param name="target_height_mm">Target height in millimeters.</param>
/// <param name="cb">Progress callback invoked by the native layer.</param>
/// <returns>Calculated target Z value.</returns>
public float CalculateTargetZ(FrameQueue frame_queue1, FrameQueue frame_queue2, FrameQueue frame_queue3, float target_width_mm, float target_height_mm, ProgressCallback cb)
{
    object err;
    // NOTE(review): the native float progress is forwarded as (IntPtr)progress,
    // matching the ProgressCallback signature used by the sibling calibration
    // methods in this file.
    rs2_update_progress_callback native_cb = (float progress, IntPtr u) => cb((IntPtr)progress);
    return NativeMethods.rs2_calculate_target_z(
        Handle,
        frame_queue1.Handle, frame_queue2.Handle, frame_queue3.Handle,
        target_width_mm, target_height_mm,
        native_cb, out err);
}
/// <summary>
/// Runs UV-map calibration on the three supplied frame queues, reporting
/// progress through the supplied callback.
/// </summary>
/// <param name="left">Queue providing left-sensor frames.</param>
/// <param name="color">Queue providing color frames.</param>
/// <param name="depth">Queue providing depth frames.</param>
/// <param name="px_py_only">Non-zero to restrict calibration to px/py.</param>
/// <param name="ratio">Receives the resulting ratio.</param>
/// <param name="angle">Receives the resulting angle.</param>
/// <param name="cb">Progress callback invoked by the native layer.</param>
/// <returns>Raw calibration table as a byte array.</returns>
public byte[] RunUVMapCalibration(FrameQueue left, FrameQueue color, FrameQueue depth, int px_py_only, out float ratio, out float angle, ProgressCallback cb)
{
    object err;
    // NOTE(review): progress is forwarded as (IntPtr)progress, matching the
    // sibling calibration methods in this file.
    rs2_update_progress_callback native_cb = (float progress, IntPtr u) => cb((IntPtr)progress);
    IntPtr raw = NativeMethods.rs2_run_uv_map_calibration(
        Handle,
        left.Handle, color.Handle, depth.Handle,
        px_py_only,
        out ratio, out angle,
        native_cb, out err);
    return GetByteArrayFromRawDataObject(raw);
}
/// <summary>
/// Runs focal-length calibration against a target of the given physical
/// size, reporting progress through the supplied callback.
/// </summary>
/// <param name="left">Queue providing left-sensor frames.</param>
/// <param name="right">Queue providing right-sensor frames.</param>
/// <param name="target_width_mm">Target width in millimeters.</param>
/// <param name="target_height_mm">Target height in millimeters.</param>
/// <param name="adjust_both_sides">Non-zero to adjust both sides.</param>
/// <param name="ratio">Receives the resulting ratio.</param>
/// <param name="angle">Receives the resulting angle.</param>
/// <param name="cb">Progress callback invoked by the native layer.</param>
/// <returns>Raw calibration table as a byte array.</returns>
public byte[] RunFocalLengthCalibration(FrameQueue left, FrameQueue right, float target_width_mm, float target_height_mm, int adjust_both_sides, out float ratio, out float angle, ProgressCallback cb)
{
    object err;
    // NOTE(review): progress is forwarded as (IntPtr)progress, matching the
    // sibling calibration methods in this file.
    rs2_update_progress_callback native_cb = (float progress, IntPtr u) => cb((IntPtr)progress);
    IntPtr raw = NativeMethods.rs2_run_focal_length_calibration(
        Handle,
        left.Handle, right.Handle,
        target_width_mm, target_height_mm,
        adjust_both_sides,
        out ratio, out angle,
        native_cb, out err);
    return GetByteArrayFromRawDataObject(raw);
}
/// <summary>Starts streaming from the specified configured sensor, invoking a managed callback per frame.</summary>
/// <param name="cb">Delegate invoked once for every arriving frame; the frame is disposed after the call returns.</param>
// TODO: overload with state object and Action<Frame, object> callback to avoid allocations
public void Start(FrameCallback cb)
{
    object err;
    frame_callback native_cb = (f, u) =>
    {
        using (var frame = Frame.Create(f))
        {
            cb(frame);
        }
    };

    // Store the native delegate in a field before starting — presumably to keep
    // it rooted while the native layer holds a pointer to it (confirm); the
    // queue reference is cleared since only one delivery mode is active.
    m_callback = native_cb;
    m_queue = null;
    NativeMethods.rs2_start(Handle, native_cb, IntPtr.Zero, out err);
}
/// <summary>
/// Console sample: streams 640x480 depth from the first connected RealSense
/// device and renders a coarse ASCII depth map (objects closer than one
/// meter) until Ctrl+C is pressed.
/// </summary>
static void Main(string[] args)
{
    // FrameQueue wraps a native resource — dispose it alongside the context
    // (the original leaked the queue by leaving it outside the using block).
    using (var q = new FrameQueue())
    using (var ctx = new Context())
    {
        var devices = ctx.QueryDevices();
        Console.WriteLine("There are " + devices.Count + " connected RealSense devices.");
        if (devices.Count == 0)
        {
            return;
        }

        var dev = devices[0];
        Console.WriteLine("\nUsing device 0, an {0}", dev.Info[CameraInfo.Name]);
        Console.WriteLine(" Serial number: {0}", dev.Info[CameraInfo.SerialNumber]);
        Console.WriteLine(" Firmware version: {0}", dev.Info[CameraInfo.FirmwareVersion]);

        var depthSensor = dev.Sensors[0];

        // Pick the highest-framerate 640x480 depth profile.
        var sp = depthSensor.VideoStreamProfiles
            .Where(p => p.Stream == Stream.Depth)
            .OrderByDescending(p => p.Framerate)
            .Where(p => p.Width == 640 && p.Height == 480)
            .First();

        depthSensor.Open(sp);
        depthSensor.Start(q);

        int one_meter = (int)(1f / depthSensor.DepthScale);

        var run = true;
        Console.CancelKeyPress += (s, e) => { e.Cancel = true; run = false; };

        ushort[] depth = new ushort[sp.Width * sp.Height];

        while (run)
        {
            using (var f = q.WaitForFrame())
            {
                // Guard the downcast: the original dereferenced a failed
                // "as VideoFrame" cast, which would throw NullReferenceException
                // for any non-video frame.
                var vf = f as VideoFrame;
                if (vf != null)
                {
                    vf.CopyTo(depth);
                }
            }

            // One character per 10x20-pixel cell, plus one '\n' per row of cells.
            var buffer = new char[(640 / 10 + 1) * (480 / 20)];
            var coverage = new int[64];
            int b = 0;
            for (int y = 0; y < 480; ++y)
            {
                for (int x = 0; x < 640; ++x)
                {
                    ushort d = depth[x + y * 640];
                    if (d > 0 && d < one_meter)
                    {
                        ++coverage[x / 10];
                    }
                }

                if (y % 20 == 19)
                {
                    for (int i = 0; i < coverage.Length; i++)
                    {
                        int c = coverage[i];
                        // Max coverage per cell is 10 * 20 = 200, so c / 25 is in [0, 8].
                        buffer[b++] = " .:nhBXWW"[c / 25];
                        coverage[i] = 0;
                    }
                    buffer[b++] = '\n';
                }
            }

            Console.SetCursorPosition(0, 0);
            Console.WriteLine();
            Console.Write(buffer);
        }

        depthSensor.Stop();
        depthSensor.Close();
    }
}
/// <summary>
/// Starts this processing block, delivering its output frames into the
/// supplied external queue.
/// </summary>
/// <param name="queue">Queue that receives processed frames.</param>
public void Start(FrameQueue queue)
{
    object err;
    NativeMethods.rs2_start_processing_queue(m_instance.Handle, queue.m_instance.Handle, out err);
}
/// <summary>
/// Calculates the Z distance to a calibration target of the given physical
/// size, without progress reporting.
/// </summary>
/// <param name="frame_queue1">First frame queue.</param>
/// <param name="frame_queue2">Second frame queue.</param>
/// <param name="frame_queue3">Third frame queue.</param>
/// <param name="target_width_mm">Target width in millimeters.</param>
/// <param name="target_height_mm">Target height in millimeters.</param>
/// <returns>Calculated target Z value.</returns>
public float CalculateTargetZ(FrameQueue frame_queue1, FrameQueue frame_queue2, FrameQueue frame_queue3, float target_width_mm, float target_height_mm)
{
    object err;
    return NativeMethods.rs2_calculate_target_z(
        Handle,
        frame_queue1.Handle, frame_queue2.Handle, frame_queue3.Handle,
        target_width_mm, target_height_mm,
        null, out err);
}