Code example #1
        CameraState CamState = CameraState.Disconnected;  // initial state: disconnected

        /// <summary>
        /// Initialize settings and connect to the RealSense camera
        /// </summary>
        internal void Connect()
        {
            DeviceList deviceList = new Context().QueryDevices(); // Get a snapshot of currently connected devices

            if (deviceList.Count == 0)
            {
                throw new Exception("No device detected. Is it plugged in?");
            }
            else if (CamState == CameraState.Disconnected)
            {
                for (int i = 0; i < deviceList.Count; i++)
                {
                    adev = AdvancedDevice.FromDevice(deviceList[i]);
                    cfg.EnableStream(Intel.RealSense.Stream.Depth, 1280, 720, Format.Z16, 30);
                    cfg.EnableStream(Intel.RealSense.Stream.Color, 1280, 720, Format.Bgr8);
                    Console.WriteLine("Camera connected successfully!\nModel: " + adev.Info[CameraInfo.Name] + "\nCurrent interface: " + adev.Info[CameraInfo.UsbTypeDescriptor]);
                    CamState = CameraState.Connected;
                    // Load the advanced-mode settings exported from the RealSense Viewer.
                    using (StreamReader sr = new StreamReader(@"D:\project\C#\LeaderWithRealSense\LeaderWithRealSense\setting.json"))
                    ////using (StreamReader sr = new StreamReader(@"C:\Users\EL404\Desktop\LabData\Realsense物件3D重建與模型訓練\Pre-Processing.json"))
                    ////using (StreamReader sr = new StreamReader(@"C:\Users\EL404\Desktop\LabData\Realsense物件3D重建與模型訓練\NewPreProcessing.json"))
                    {
                        string line = sr.ReadToEnd();
                        adev.JsonConfiguration = line;
                    }
                }
            }
        }
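
Connect() above references members that are not part of the listing. A minimal sketch of plausible declarations follows; the enum values are implied by the code, while the field initializations are assumptions based on the Intel.RealSense API:

        // Hypothetical supporting members implied by Connect():
        enum CameraState { Disconnected, Connected }

        AdvancedDevice adev;                // advanced-mode handle for the active device
        readonly Config cfg = new Config(); // stream configuration, presumably passed to Pipeline.Start(cfg) later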
Code example #2
        public MainWindow()
        {
            var args = Environment.GetCommandLineArgs();                       // args[1]: output root folder, args[2]: JSON config path
            var dir  = System.IO.Directory.CreateDirectory(args[1] + FolderName);

            InitializeComponent();

            try
            {
                Action<VideoFrame> updateDepth;
                Action<VideoFrame> updateColor;

                // The colorizer processing block will be used to visualize the depth frames.
                colorizer = new Colorizer();

                this.Closing += control_Closing;

                string stringConfig = ConfigLoader.LoadConfig(args[2]);


                #region Record
                pipeline = new Pipeline();

                //var cfg = new Config();
                //cfg.EnableStream(Stream.Depth, 640, 480);
                //cfg.EnableStream(Stream.Color, 640, 480);

                var pp = pipeline.Start();

                SetupWindow(pp, out updateDepth, out updateColor);

                var selected_device = pp.Device;

                var config = AdvancedDevice.FromDevice(selected_device);

                config.JsonConfiguration = stringConfig;
                var depth_sensor = selected_device.Sensors[0]; // on D400-series devices the first sensor is the depth (stereo) module

                if (depth_sensor.Options.Supports(Option.LaserPower))
                {
                    var laserPower = depth_sensor.Options[Option.LaserPower];
                    laserPower.Value = laserPower.Max; // Set max power
                }


                bool disposed = true; // true when no recording task is in flight; gates the recorder below

                Task.Factory.StartNew(() =>
                {
                    while (!tokenSource.Token.IsCancellationRequested)
                    {
                        using (var frames = pipeline.WaitForFrames())
                        {
                            var colorFrame = frames.ColorFrame.DisposeWith(frames);
                            var depthFrame = frames.DepthFrame.DisposeWith(frames);

                            var colorizedDepth = colorizer.Process<VideoFrame>(depthFrame).DisposeWith(frames);

                            Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                            Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                            Dispatcher.Invoke(() => { txtTimeStamp.Text = "Frames: " + tick++; });
                        }

                        if (disposed)
                        {
                            Task.Factory.StartNew(() =>
                            {
                                disposed = false;
                                var path = dir.FullName + "\\" + DateTime.Now.Ticks + ".bag";
                                using (new WaitAndDispose(1000, new RecordDevice(selected_device, path))) //$"log\\ros{tick++}.bag"
                                {
                                    disposed = true;
                                    Console.WriteLine("recorded " + path);
                                }
                            });
                        }
                    }
                }, tokenSource.Token);
                #endregion
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }
        }
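
The constructor references several members the listing omits (tokenSource, tick, FolderName, control_Closing, WaitAndDispose). Below is a hypothetical reconstruction: the FolderName value is a placeholder, and WaitAndDispose is assumed to hold its target for the given interval before disposing it, which is one plausible reading of how the recorder is driven above:

        // Hypothetical supporting members assumed by the constructor:
        readonly CancellationTokenSource tokenSource = new CancellationTokenSource();
        Pipeline pipeline;
        Colorizer colorizer;
        int tick;
        const string FolderName = "\\Recordings"; // placeholder; the real value is not shown in the source

        // Stop the capture loop and release the camera when the window closes.
        private void control_Closing(object sender, System.ComponentModel.CancelEventArgs e)
        {
            tokenSource.Cancel();
            pipeline?.Dispose();
            colorizer?.Dispose();
        }

        // One plausible reading of WaitAndDispose: keep the wrapped RecordDevice alive
        // (i.e. recording) for the given interval, then dispose it to flush the .bag file.
        sealed class WaitAndDispose : IDisposable
        {
            private readonly IDisposable target;

            public WaitAndDispose(int milliseconds, IDisposable target)
            {
                this.target = target;
                System.Threading.Thread.Sleep(milliseconds); // record for the interval
            }

            public void Dispose() => target.Dispose();       // stop recording
        }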
Code example #3
        void Init()
        {
            try
            {
                #region FILTERS

                spatialFilter = new SpatialFilter();
                spatialFilter.Options[Option.FilterMagnitude].Value   = 5.0F;
                spatialFilter.Options[Option.FilterSmoothAlpha].Value = 0.25F;
                spatialFilter.Options[Option.FilterSmoothDelta].Value = 50.0F;

                decimationFilter = new DecimationFilter();
                decimationFilter.Options[Option.FilterMagnitude].Value = 2.0F;

                holeFilter = new HoleFillingFilter();

                thresholdFilter = new ThresholdFilter();
                //thresholdFilter.Options[Option.MinDistance].Value = 0.73F;
                //thresholdFilter.Options[Option.MaxDistance].Value = 0.81F;

                #endregion

                align_to  = new Align(Intel.RealSense.Stream.Depth);
                colorizer = new Colorizer();
                pipeline  = new Pipeline();

                //CONFIG SETTINGS
                var cfg = new Config();
                cfg.EnableStream(Intel.RealSense.Stream.Depth, resolutionW, resolutionH, Format.Z16, FPS); // depth resolution set manually by the user
                cfg.EnableStream(Intel.RealSense.Stream.Color, 640, 480, Format.Rgb8, 30);
                pipelineProfile = pipeline.Start(cfg);                                                     // start streaming with the user config

                var advancedDevice = AdvancedDevice.FromDevice(pipelineProfile.Device);                    // connected device
                // read the device's advanced-mode settings from a JSON file
                advancedDevice.JsonConfiguration = File.ReadAllText(@"CustomConfig.json");
                selectedDevice = pipelineProfile.Device;

                #region Field Of View Info

                float[] dfov, cfov, irfov;

                var        depth_stream = pipelineProfile.GetStream<VideoStreamProfile>(Intel.RealSense.Stream.Depth);
                Intrinsics depthIntr    = depth_stream.GetIntrinsics();
                dfov = depthIntr.FOV; // float[2] - horizontal and vertical field of view in degrees

                var        color_stream = pipelineProfile.GetStream<VideoStreamProfile>(Intel.RealSense.Stream.Color);
                Intrinsics colorIntr    = color_stream.GetIntrinsics();
                cfov = colorIntr.FOV; // float[2] - horizontal and vertical field of view in degrees

                var        ir_stream = pipelineProfile.GetStream<VideoStreamProfile>(Intel.RealSense.Stream.Infrared);
                Intrinsics irIntr    = ir_stream.GetIntrinsics();
                irfov = irIntr.FOV; // float[2] - horizontal and vertical field of view in degrees

                lblDepthFov.Text    = "Depth FOV : H = " + Convert.ToInt32(dfov[0]) + "° , V = " + Convert.ToInt32(dfov[1]) + "°";
                lblColorFov.Text    = "RGB FOV   : H = " + Convert.ToInt32(cfov[0]) + "° , V = " + Convert.ToInt32(cfov[1]) + "°";
                lblInfraredFov.Text = "IR FOV    : H = " + Convert.ToInt32(irfov[0]) + "° , V = " + Convert.ToInt32(irfov[1]) + "°";


                #endregion


                // get primary screen resolution
                screenWidth  = Convert.ToInt32(System.Windows.SystemParameters.PrimaryScreenWidth);
                screenHeight = Convert.ToInt32(System.Windows.SystemParameters.PrimaryScreenHeight);

                // camera is streaming; hand the images off to the UI
                SetupWindow(pipelineProfile, out updateDepth, out updateColor, out updateIR1, out updateIR2);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
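
Init() builds the post-processing blocks but the listing never shows them being applied. The sketch below is an assumption, not taken from the source: it reuses the Process<VideoFrame>/DisposeWith pattern from code example #2 and orders the filters per the librealsense recommendation (decimation, then threshold, then spatial, then hole filling); applying align_to and handing the frames to updateDepth/updateColor are left out.

        // Hypothetical per-frame filter chain using the blocks created in Init():
        void ProcessOneFrame()
        {
            using (var frames = pipeline.WaitForFrames())
            {
                var depth = frames.DepthFrame.DisposeWith(frames);

                // Each Process call yields a new frame; DisposeWith ties its lifetime to the frameset.
                var filtered = decimationFilter.Process<VideoFrame>(depth).DisposeWith(frames);
                filtered = thresholdFilter.Process<VideoFrame>(filtered).DisposeWith(frames);
                filtered = spatialFilter.Process<VideoFrame>(filtered).DisposeWith(frames);
                filtered = holeFilter.Process<VideoFrame>(filtered).DisposeWith(frames);

                // Colorize the filtered depth for display.
                var colorizedDepth = colorizer.Process<VideoFrame>(filtered).DisposeWith(frames);
            }
        }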