Example #1
        public CaptureWindow()
        {
            InitializeComponent();

            try
            {
                Action<VideoFrame> updateDepth;
                Action<VideoFrame> updateColor;

                pipeline  = new Pipeline();
                colorizer = new Colorizer();

                var cfg = new Config();
                cfg.EnableStream(Stream.Depth, 640, 480, Format.Z16, 30);
                cfg.EnableStream(Stream.Color, 640, 480, Format.Rgb8, 30);

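                // Start streaming from the physical camera; its frames will be re-injected into the software device below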
                var profile = pipeline.Start(cfg);

                SetupWindow(profile, out updateDepth, out updateColor);

                // Setup the SW device and sensors
                var software_dev  = new SoftwareDevice();
                var depth_sensor  = software_dev.AddSensor("Depth");
                var depth_profile = depth_sensor.AddVideoStream(new SoftwareVideoStream
                {
                    type       = Stream.Depth,
                    index      = 0,
                    uid        = 100,
                    width      = 640,
                    height     = 480,
                    fps        = 30,
                    bpp        = 2,
                    format     = Format.Z16,
                    intrinsics = profile.GetStream(Stream.Depth).As<VideoStreamProfile>().GetIntrinsics()
                });
                var color_sensor  = software_dev.AddSensor("Color");
                var color_profile = color_sensor.AddVideoStream(new SoftwareVideoStream
                {
                    type       = Stream.Color,
                    index      = 0,
                    uid        = 101,
                    width      = 640,
                    height     = 480,
                    fps        = 30,
                    bpp        = 3,
                    format     = Format.Rgb8,
                    intrinsics = profile.GetStream(Stream.Color).As<VideoStreamProfile>().GetIntrinsics()
                });

                // Note about the Syncer: if the actual FPS differs significantly from the FPS reported in AddVideoStream,
                // it can confuse the syncer and prevent it from producing synchronized pairs
                software_dev.SetMatcher(Matchers.Default);

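                // The Syncer pairs the depth and color frames submitted by the software sensors (matched by timestamp)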
                var sync = new Syncer();

                depth_sensor.Open(depth_profile);
                color_sensor.Open(color_profile);

                // Push the SW device frames to the syncer
                depth_sensor.Start(sync.SubmitFrame);
                color_sensor.Start(sync.SubmitFrame);

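                // tokenSource is presumably a CancellationTokenSource field declared elsewhere in the class (not shown here)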
                var token = tokenSource.Token;

                ushort[] depthData = null;
                byte[]   colorData = null;

                var t = Task.Factory.StartNew(() =>
                {
                    while (!token.IsCancellationRequested)
                    {
                        // Use the frames captured from the live camera as the input data for the SW device
                        using (var frames = pipeline.WaitForFrames())
                        {
                            var depthFrame = frames.DepthFrame.DisposeWith(frames);
                            var colorFrame = frames.ColorFrame.DisposeWith(frames);

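                            // Allocate the managed copy buffers once and reuse them on subsequent iterations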
                            depthData = depthData ?? new ushort[depthFrame.Width * depthFrame.Height];
                            depthFrame.CopyTo(depthData);
                            depth_sensor.AddVideoFrame(depthData, depthFrame.Stride, depthFrame.BitsPerPixel / 8, depthFrame.Timestamp,
                                                       depthFrame.TimestampDomain, (int)depthFrame.Number, depth_profile);

                            colorData = colorData ?? new byte[colorFrame.Stride * colorFrame.Height];
                            colorFrame.CopyTo(colorData);
                            color_sensor.AddVideoFrame(colorData, colorFrame.Stride, colorFrame.BitsPerPixel / 8, colorFrame.Timestamp,
                                                       colorFrame.TimestampDomain, (int)colorFrame.Number, color_profile);
                        }

                        // Display the frames that come from the SW device after synchronization
                        using (var new_frames = sync.WaitForFrames())
                        {
                            if (new_frames.Count == 2)
                            {
                                var depthFrame = new_frames.DepthFrame.DisposeWith(new_frames);
                                var colorFrame = new_frames.ColorFrame.DisposeWith(new_frames);

                                VideoFrame colorizedDepth = colorizer.Process(depthFrame).As<VideoFrame>().DisposeWith(new_frames);

                                // Render the frames.
                                Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                                Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                            }
                        }
                    }
                }, token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }
        }
Example #2
        public CaptureWindow()
        {
            InitializeComponent();

            try
            {
                Action<VideoFrame> updateDepth;
                Action<VideoFrame> updateColor;

                pipeline  = new Pipeline();
                colorizer = new Colorizer();

                var depthWidth  = 640;
                var depthHeight = 480;
                var depthFrames = 30;
                var depthFormat = Format.Z16;

                var colorWidth  = 640;
                var colorHeight = 480;
                var colorFrames = 30;
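
                // Enumerate connected devices and pick depth/color stream profiles with matching framerates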
                using (var ctx = new Context())
                {
                    var devices = ctx.QueryDevices();
                    var dev     = devices[0];

                    Console.WriteLine("\nUsing device 0, an {0}", dev.Info[CameraInfo.Name]);
                    Console.WriteLine("    Serial number: {0}", dev.Info[CameraInfo.SerialNumber]);
                    Console.WriteLine("    Firmware version: {0}", dev.Info[CameraInfo.FirmwareVersion]);

                    var sensors     = dev.QuerySensors();
                    var depthSensor = sensors[0];
                    var colorSensor = sensors[1];

                    var depthProfiles = depthSensor.StreamProfiles
                                        .Where(p => p.Stream == Stream.Depth)
                                        .OrderBy(p => p.Framerate)
                                        .Select(p => p.As<VideoStreamProfile>());
                    VideoStreamProfile colorProfile = null;

                    // Select a color profile whose framerate matches the depth framerate as closely as possible so the syncer can pair frames smoothly
                    foreach (var depthProfile in depthProfiles)
                    {
                        depthWidth   = depthProfile.Width;
                        depthHeight  = depthProfile.Height;
                        depthFrames  = depthProfile.Framerate;
                        depthFormat  = depthProfile.Format;
                        colorProfile = colorSensor.StreamProfiles
                                       .Where(p => p.Stream == Stream.Color)
                                       .OrderByDescending(p => p.Framerate)
                                       .Select(p => p.As<VideoStreamProfile>())
                                       .FirstOrDefault(p => p.Framerate == depthFrames);
                        if (colorProfile != null)
                        {
                            colorWidth  = colorProfile.Width;
                            colorHeight = colorProfile.Height;
                            colorFrames = colorProfile.Framerate;
                            break;
                        }
                    }
                    if (colorProfile == null)
                    {
                        // If no color profile with the same framerate is found, take the first one
                        colorProfile = colorSensor.StreamProfiles
                                       .Where(p => p.Stream == Stream.Color)
                                       .OrderByDescending(p => p.Framerate)
                                       .Select(p => p.As<VideoStreamProfile>()).FirstOrDefault();
                        if (colorProfile == null)
                        {
                            throw new InvalidOperationException("Error while finding appropriate depth and color profiles");
                        }
                        colorWidth  = colorProfile.Width;
                        colorHeight = colorProfile.Height;
                        colorFrames = colorProfile.Framerate;
                    }
                }

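                // Configure the live pipeline with the profiles selected above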
                var cfg = new Config();
                cfg.EnableStream(Stream.Depth, depthWidth, depthHeight, depthFormat, depthFrames);
                cfg.EnableStream(Stream.Color, colorWidth, colorHeight, Format.Rgb8, colorFrames);

                var profile = pipeline.Start(cfg);

                SetupWindow(profile, out updateDepth, out updateColor);

                // Setup the SW device and sensors
                var software_dev  = new SoftwareDevice();
                var depth_sensor  = software_dev.AddSensor("Depth");
                var depth_profile = depth_sensor.AddVideoStream(new SoftwareVideoStream
                {
                    type       = Stream.Depth,
                    index      = 0,
                    uid        = 100,
                    width      = depthWidth,
                    height     = depthHeight,
                    fps        = depthFrames,
                    bpp        = 2,
                    format     = depthFormat,
                    intrinsics = profile.GetStream(Stream.Depth).As<VideoStreamProfile>().GetIntrinsics()
                });

                var color_sensor  = software_dev.AddSensor("Color");
                var color_profile = color_sensor.AddVideoStream(new SoftwareVideoStream
                {
                    type       = Stream.Color,
                    index      = 0,
                    uid        = 101,
                    width      = colorWidth,
                    height     = colorHeight,
                    fps        = colorFrames,
                    bpp        = 3,
                    format     = Format.Rgb8,
                    intrinsics = profile.GetStream(Stream.Color).As<VideoStreamProfile>().GetIntrinsics()
                });

                // Note about the Syncer: if the actual FPS differs significantly from the FPS reported in AddVideoStream,
                // it can confuse the syncer and prevent it from producing synchronized pairs
                software_dev.SetMatcher(Matchers.Default);

                var sync = new Syncer();

                // The raw depth->metric units translation scale is required for Colorizer to work
                var realDepthSensor = profile.Device.QuerySensors().First(s => s.Is(Extension.DepthSensor));
                depth_sensor.AddReadOnlyOption(Option.DepthUnits, realDepthSensor.DepthScale);

                depth_sensor.Open(depth_profile);
                color_sensor.Open(color_profile);

                // Push the SW device frames to the syncer
                depth_sensor.Start(sync.SubmitFrame);
                color_sensor.Start(sync.SubmitFrame);

                var token = tokenSource.Token;

                ushort[] depthData = null;
                byte[]   colorData = null;

                var t = Task.Factory.StartNew(() =>
                {
                    while (!token.IsCancellationRequested)
                    {
                        // Use the frames captured from the live camera as the input data for the SW device
                        using (var frames = pipeline.WaitForFrames())
                        {
                            var depthFrame = frames.DepthFrame.DisposeWith(frames);
                            var colorFrame = frames.ColorFrame.DisposeWith(frames);

                            depthData = depthData ?? new ushort[depthFrame.Width * depthFrame.Height];
                            depthFrame.CopyTo(depthData);
                            depth_sensor.AddVideoFrame(depthData, depthFrame.Stride, depthFrame.BitsPerPixel / 8, depthFrame.Timestamp,
                                                       depthFrame.TimestampDomain, (int)depthFrame.Number, depth_profile);

                            colorData = colorData ?? new byte[colorFrame.Stride * colorFrame.Height];
                            colorFrame.CopyTo(colorData);
                            color_sensor.AddVideoFrame(colorData, colorFrame.Stride, colorFrame.BitsPerPixel / 8, colorFrame.Timestamp,
                                                       colorFrame.TimestampDomain, (int)colorFrame.Number, color_profile);
                        }

                        // Display the frames that come from the SW device after synchronization
                        using (var new_frames = sync.WaitForFrames())
                        {
                            if (new_frames.Count == 2)
                            {
                                var colorFrame = new_frames.ColorFrame.DisposeWith(new_frames);
                                var depthFrame = new_frames.DepthFrame.DisposeWith(new_frames);

                                var colorizedDepth = colorizer.Process<VideoFrame>(depthFrame).DisposeWith(new_frames);
                                // Render the frames.
                                Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                                Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                            }
                        }
                    }
                }, token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }
        }
Example #3
        public CaptureWindow()
        {
            //Log.ToFile(LogSeverity.Debug, "1.log");

            try
            {
                pipeline  = new Pipeline();
                colorizer = new Colorizer();

                var cfg = new Config();
                cfg.EnableStream(Stream.Depth, 640, 480, Format.Z16, 30);
                cfg.EnableStream(Stream.Color, 640, 480, Format.Rgb8, 30);

                var profile = pipeline.Start(cfg);

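                // Set up a software (synthetic) device that mirrors the live depth and color streams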
                var software_dev  = new SoftwareDevice();
                var depth_sensor  = software_dev.AddSensor("Depth");
                var depth_profile = depth_sensor.AddVideoStream(new VideoStream
                {
                    type       = Stream.Depth,
                    index      = 0,
                    uid        = 100,
                    width      = 640,
                    height     = 480,
                    fps        = 30,
                    bpp        = 2,
                    fmt        = Format.Z16,
                    intrinsics = (profile.GetStream(Stream.Depth) as VideoStreamProfile).GetIntrinsics()
                });
                var color_sensor  = software_dev.AddSensor("Color");
                var color_profile = color_sensor.AddVideoStream(new VideoStream
                {
                    type       = Stream.Color,
                    index      = 0,
                    uid        = 101,
                    width      = 640,
                    height     = 480,
                    fps        = 30,
                    bpp        = 3,
                    fmt        = Format.Rgb8,
                    intrinsics = (profile.GetStream(Stream.Color) as VideoStreamProfile).GetIntrinsics()
                });
                // Note about the Syncer: if the actual FPS differs significantly from the FPS reported in AddVideoStream,
                // it can confuse the syncer and prevent it from producing synchronized pairs
                software_dev.SetMatcher(Matchers.Default);

                var sync = new Syncer();

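                // Open the software streams; each sensor callback forwards its frames into the syncer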
                depth_sensor.Open(depth_profile);
                color_sensor.Open(color_profile);

                depth_sensor.Start(f =>
                {
                    sync.SubmitFrame(f);
                    //Debug.WriteLine("D");
                });
                color_sensor.Start(f =>
                {
                    sync.SubmitFrame(f);
                    //Debug.WriteLine("C");
                });

                var token = tokenSource.Token;

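                // Background loop: copy live frames into the software sensors, then wait for synchronized pairs and render them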
                var t = Task.Factory.StartNew(() =>
                {
                    while (!token.IsCancellationRequested)
                    {
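                        // Grab a live frameset from the camera to feed the software device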
                        var frames = pipeline.WaitForFrames();

                        var depth_frame = frames.DepthFrame;
                        var color_frame = frames.ColorFrame;

                        var bytes = new byte[depth_frame.Stride * depth_frame.Height];
                        depth_frame.CopyTo(bytes);
                        depth_sensor.AddVideoFrame(bytes, depth_frame.Stride, 2, depth_frame.Timestamp,
                                                   depth_frame.TimestampDomain, (int)depth_frame.Number,
                                                   depth_profile);

                        bytes = new byte[color_frame.Stride * color_frame.Height];
                        color_frame.CopyTo(bytes);
                        color_sensor.AddVideoFrame(bytes, color_frame.Stride, 3, color_frame.Timestamp,
                                                   color_frame.TimestampDomain, (int)color_frame.Number,
                                                   color_profile);

                        depth_frame.Dispose();
                        color_frame.Dispose();
                        frames.Dispose();

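                        // Pull synchronized depth/color pairs back from the syncer and display them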
                        var new_frames = sync.WaitForFrames();
                        if (new_frames.Count == 2)
                        {
                            depth_frame = new_frames.DepthFrame;
                            color_frame = new_frames.ColorFrame;

                            var colorized_depth = colorizer.Colorize(depth_frame);

                            UploadImage(imgDepth, colorized_depth);
                            UploadImage(imgColor, color_frame);

                            depth_frame.Dispose();
                            colorized_depth.Dispose();
                            color_frame.Dispose();
                        }
                        new_frames.Dispose();
                    }
                }, token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }

            InitializeComponent();
        }