private void SetupProcessingBlock(Pipeline pipeline, Colorizer colorizer, DecimationFilter decimate, SpatialFilter spatial, TemporalFilter temp, HoleFillingFilter holeFill, ThresholdFilter threshold)
        {
            // Setup / start frame processing
            processingBlock = new CustomProcessingBlock((f, src) =>
            {
                // We create a FramesReleaser object that will track
                // all newly allocated .NET frames, and ensure deterministic finalization
                // at the end of scope.
                using (var releaser = new FramesReleaser())
                {
                    using (var frames = pipeline.WaitForFrames().DisposeWith(releaser))
                    {
                        var processedFrames = frames
                                              .ApplyFilter(decimate).DisposeWith(releaser)
                                              .ApplyFilter(spatial).DisposeWith(releaser)
                                              .ApplyFilter(temp).DisposeWith(releaser)
                                              .ApplyFilter(holeFill).DisposeWith(releaser)
                                              .ApplyFilter(colorizer).DisposeWith(releaser)
                                              .ApplyFilter(threshold).DisposeWith(releaser);

                        // Send it to the next processing stage
                        src.FramesReady(processedFrames);
                    }
                }
            });
        }
    void Awake()
    {
        Source.OnStart += OnSourceStart;
        Source.OnStop  += OnSourceStop;

        _block = new CustomProcessingBlock(ProcessFrame);
        _block.Start(OnFrame);
    }
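The ProcessFrame and OnFrame handlers wired up in Awake() are not shown in this example. A minimal, hypothetical sketch of what they (and the source event handlers) might look like, assuming the Unity wrapper's RsFrameProvider-style OnStart/OnStop events:

    // Hypothetical handlers (not part of the original example).
    // ProcessFrame runs inside the block; here it simply forwards the frame unchanged.
    void ProcessFrame(Frame f, FrameSource src)
    {
        src.FrameReady(f);
    }

    // OnFrame receives the block's output; consume and dispose the frame here.
    void OnFrame(Frame f)
    {
        using (f)
        {
            // e.g. copy the frame data into a texture
        }
    }

    void OnSourceStart(PipelineProfile profile)
    {
        // inspect the active profile / streams if needed
    }

    void OnSourceStop()
    {
        // release any per-stream resources
    }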
Example #3
 private void OnDestroy()
 {
     if (_block != null)
     {
         _block.Dispose();
         _block = null;
     }
 }
 private void OnDestroy()
 {
     OnSourceStop();
     if (_block != null)
     {
         _block.Dispose();
         _block = null;
     }
 }
Example #5
        private CustomProcessingBlock SetupProcessingBlock(Pipeline pipeline, Colorizer colorizer, DecimationFilter decimate, SpatialFilter spatial, TemporalFilter temp, HoleFillingFilter holeFill, Align align_to)
        {
            CustomProcessingBlock processingBlock = null;

            if (showType == imgType.color)
            {
                processingBlock = new CustomProcessingBlock((f, src) =>
                {
                    using (var releaser = new FramesReleaser())
                    {
                        using (var frames = pipeline.WaitForFrames().DisposeWith(releaser))
                        {
                            var processedFrames = frames
                                                  .ApplyFilter(align_to).DisposeWith(releaser);
                            // Send it to the next processing stage
                            src.FramesReady(processedFrames);
                        }
                    }
                });
            }
            else if (showType == imgType.mix)
            {
                // Setup / start frame processing
                processingBlock = new CustomProcessingBlock((f, src) =>
                {
                    using (var releaser = new FramesReleaser())
                    {
                        using (var frames = pipeline.WaitForFrames().DisposeWith(releaser))
                        {
                            var processedFrames = frames
                                                  .ApplyFilter(align_to).DisposeWith(releaser)
                                                  .ApplyFilter(decimate).DisposeWith(releaser)
                                                  .ApplyFilter(spatial).DisposeWith(releaser)
                                                  .ApplyFilter(temp).DisposeWith(releaser)
                                                  .ApplyFilter(holeFill).DisposeWith(releaser)
                                                  .ApplyFilter(colorizer).DisposeWith(releaser);

                            // Send it to the next processing stage
                            src.FramesReady(processedFrames);
                        }
                    }
                });
            }

            return processingBlock;
        }
Example #6
        private CustomProcessingBlock StartProcessingBlock(Pipeline pipe, PipelineProfile pp)
        {
            SetupFilters(out Colorizer colorizer, out DecimationFilter decimate, out SpatialFilter spatial, out TemporalFilter temp, out HoleFillingFilter holeFill, out Align align_to);

            CustomProcessingBlock processingBlock = SetupProcessingBlock(pipe, colorizer, decimate, spatial, temp, holeFill, align_to);

            processingBlock.Start(f =>
            {
                using (var frames2 = FrameSet.FromFrame(f))
                {
                    var depthintr   = (pp.GetStream(Stream.Depth) as VideoStreamProfile).GetIntrinsics();
                    var depth_frame = frames2.DepthFrame.DisposeWith(frames2);
                    float udist     = depth_frame.GetDistance(ePoint.X, ePoint.Y); //From
                    var point       = DeprojectPixelToPoint(depthintr, new PointF(ePoint.X, ePoint.Y), udist);
                    Console.WriteLine($"({point[0]},{point[1]},{point[2]})");
                }
            });
            return processingBlock;
        }
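DeprojectPixelToPoint itself is not part of this listing. As a rough sketch, for a distortion-free pinhole model it reduces to scaling the normalized pixel coordinates by the measured depth (the SDK's rs2_deproject_pixel_to_point additionally handles lens-distortion models; the field names below follow the Intrinsics struct returned by GetIntrinsics()):

        // Sketch only: deprojection for the no-distortion case. depth is in meters.
        static float[] DeprojectPixelToPoint(Intrinsics intrin, PointF pixel, float depth)
        {
            float x = (pixel.X - intrin.ppx) / intrin.fx;
            float y = (pixel.Y - intrin.ppy) / intrin.fy;
            return new float[] { depth * x, depth * y, depth };
        }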
Example #7
        private void CameraStart()
        {
            // Setup config settings
            var cfg = new Config();

            cfg.EnableStream(Stream.Depth, 1280, 720, Format.Z16, 30);
            cfg.EnableStream(Stream.Color, 1280, 720, Format.Bgr8, 30);

            // Pipeline start
            Pipeline        pipeline = new Pipeline();
            PipelineProfile pp       = pipeline.Start(cfg);

            using (var p = pp.GetStream(Stream.Color) as VideoStreamProfile)
                Img_main.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, System.Windows.Media.PixelFormats.Rgb24, null);
            Action <VideoFrame> updateColor = UpdateImage(Img_main);

            // Setup filter / alignment settings
            SetupFilters(out Colorizer colorizer, out DecimationFilter decimate, out SpatialFilter spatial, out TemporalFilter temp, out HoleFillingFilter holeFill, out Align align_to);
            // Setup frame processing
            CustomProcessingBlock processingBlock = SetupProcessingBlock(pipeline, colorizer, decimate, spatial, temp, holeFill, align_to);

            // Start frame processing
            StartProcessingBlock(processingBlock, pp, updateColor, pipeline);
        }
        public MainWindow()
        {
            InitializeComponent();

            try
            {
                var cfg = new Config();
                cfg.EnableStream(Stream.Depth, 640, 480);
                cfg.EnableStream(Stream.Color, Format.Rgb8);
                var pp = pipeline.Start(cfg);

                // Allocate bitmaps for rendering.
                // Since the sample aligns the depth frames to the color frames, both images will have the color resolution.
                using (var p = pp.GetStream(Stream.Color) as VideoStreamProfile)
                {
                    imgColor.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
                    imgDepth.Source = new WriteableBitmap(p.Width, p.Height, 96d, 96d, PixelFormats.Rgb24, null);
                }
                var updateColor = UpdateImage(imgColor);
                var updateDepth = UpdateImage(imgDepth);

                // Create custom processing block
                // For demonstration purposes it will:
                // a. Get a frameset
                // b. Run post-processing on the depth frame
                // c. Combine the result back into a frameset
                // Processing blocks are inherently thread-safe and play well with
                // other API primitives such as frame-queues,
                // and can be used to encapsulate advanced operations.
                // All invocations are, however, synchronous, so the high-level threading model
                // is up to the developer.
                block = new CustomProcessingBlock((f, src) =>
                {
                    // We create a FramesReleaser object that will track
                    // all newly allocated .NET frames, and ensure deterministic finalization
                    // at the end of scope.
                    using (var releaser = new FramesReleaser())
                    {
                        var frames = FrameSet.FromFrame(f).DisposeWith(releaser);

                        var processedFrames = frames.ApplyFilter(decimate).DisposeWith(releaser)
                                              .ApplyFilter(spatial).DisposeWith(releaser)
                                              .ApplyFilter(temp).DisposeWith(releaser)
                                              .ApplyFilter(align).DisposeWith(releaser)
                                              .ApplyFilter(colorizer).DisposeWith(releaser);

                        var colorFrame     = processedFrames.ColorFrame.DisposeWith(releaser);
                        var colorizedDepth = processedFrames[Stream.Depth, Format.Rgb8].DisposeWith(releaser);

                        // Combine the frames into a single result
                        var res = src.AllocateCompositeFrame(colorizedDepth, colorFrame).DisposeWith(releaser);
                        // Send it to the next processing stage
                        src.FramesReady(res);
                    }
                });

                // Register to results of processing via a callback:
                block.Start(f =>
                {
                    using (var frames = FrameSet.FromFrame(f))
                    {
                        var colorFrame     = frames.ColorFrame.DisposeWith(frames);
                        var colorizedDepth = frames[Stream.Depth, Format.Rgb8].DisposeWith(frames);

                        Dispatcher.Invoke(DispatcherPriority.Render, updateDepth, colorizedDepth);
                        Dispatcher.Invoke(DispatcherPriority.Render, updateColor, colorFrame);
                    }
                });

                var token = tokenSource.Token;

                var t = Task.Factory.StartNew(() =>
                {
                    while (!token.IsCancellationRequested)
                    {
                        using (var frames = pipeline.WaitForFrames())
                        {
                            // Invoke custom processing block
                            block.ProcessFrames(frames);
                        }
                    }
                }, token);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                Application.Current.Shutdown();
            }

        }
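The UpdateImage helper used by the WPF examples above is not shown. A minimal sketch, modeled on the pattern in Intel's C# processing sample: it captures the Image control's WriteableBitmap and returns an Action<VideoFrame> that copies the frame's pixels into it (the returned delegate is expected to run on the UI thread via Dispatcher.Invoke):

        static Action<VideoFrame> UpdateImage(System.Windows.Controls.Image img)
        {
            // Assumes img.Source was already set to a WriteableBitmap of matching size and format.
            var wbmp = img.Source as WriteableBitmap;
            return new Action<VideoFrame>(frame =>
            {
                var rect = new Int32Rect(0, 0, frame.Width, frame.Height);
                wbmp.WritePixels(rect, frame.Data, frame.Stride * frame.Height, frame.Stride);
            });
        }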
Example #9
        private void StartProcessingBlock(CustomProcessingBlock processingBlock, PipelineProfile pp, Action <VideoFrame> updateColor, Pipeline pipeline)
        {
            float[,,] posMap = new float[1280, 720, 3];

            Size RS_depthSize = new Size(1280, 720);
            Mat  processMat   = new Mat(RS_depthSize, DepthType.Cv8U, 3);

            processingBlock.Start(f =>
            {
                using (var frames = FrameSet.FromFrame(f))
                {
                    //var color_frame = frames.ColorFrame.DisposeWith(frames);
                    //color_frame.CopyTo(processMat.DataPointer);

                    var depthintr   = (pp.GetStream(Stream.Depth) as VideoStreamProfile).GetIntrinsics();
                    var depth_frame = frames.DepthFrame.DisposeWith(frames);

                    //float depth = depth_frame.GetDistance((int)thePoint.X,(int)thePoint.Y); //From
                    //thePos = HelperClass.DeprojectPixelToPoint(depthintr, thePoint, depth);

                    unsafe
                    {
                        Int16 *pixelPtr_byte = (Int16 *)depth_frame.Data;
                        for (int i = 0; i < 1280; i++)
                        {
                            for (int j = 0; j < 720; j++)
                            {
                                var tmpF        = HelperClass.DeprojectPixelToPoint(depthintr, new PointF(i, j), (float)pixelPtr_byte[j * 1280 + i] / 1000f);
                                posMap[i, j, 0] = tmpF[0];
                                posMap[i, j, 1] = tmpF[1];
                                posMap[i, j, 2] = tmpF[2];
                            }
                        }
                    }
                    // Dispatcher.Invoke(DispatcherPriority.Render, updateColor, color_frame); // for display
                }
            });


            //start
            var token = _tokenSource.Token;
            Action <VideoFrame> updateOriginColor = UpdateImage(Img_main);
            var t = Task.Factory.StartNew(() =>
            {
                Mat color_orig   = new Mat(RS_depthSize, DepthType.Cv8U, 3);
                Mat color_resize = new Mat(RS_depthSize, DepthType.Cv8U, 3);

                yoloWrapper = new YoloWrapper("modle\\yolov3-tiny-3obj.cfg", "modle\\yolov3-tiny-3obj_3cup.weights", "modle\\obj.names");
                string detectionSystemDetail = string.Empty;
                if (!string.IsNullOrEmpty(yoloWrapper.EnvironmentReport.GraphicDeviceName))
                {
                    detectionSystemDetail = $"({yoloWrapper.EnvironmentReport.GraphicDeviceName})";
                }
                Console.WriteLine($"Detection System:{yoloWrapper.DetectionSystem}{detectionSystemDetail}");

                while (!token.IsCancellationRequested)
                {
                    using (var frames = pipeline.WaitForFrames())
                    {
                        if (showType == imgType.color)
                        {
                            VideoFrame color_frame = frames.ColorFrame.DisposeWith(frames);
                            color_frame.CopyTo(color_orig.DataPointer);

                            CvInvoke.WarpPerspective(color_orig, color_resize, matrix, new Size(1280, 720));


                            //CvInvoke.Imwrite("yolo1.png", color_resize);
                            //try { items = yoloWrapper.Detect(@"yolo1.png"); }
                            //catch { break; }
                            CvInvoke.Imwrite("yolo2.png", color_orig);
                            try { items = yoloWrapper.Detect(@"yolo2.png"); }
                            catch { break; }

                            CvInvoke.CvtColor(color_resize, color_resize, ColorConversion.Bgr2Rgb);
                            processingBlock.ProcessFrames(frames);

                            foreach (YoloItem item in items)
                            {
                                string name = item.Type;
                                //int x = item.X;
                                //int y = item.Y;
                                mapToRsImg(item.X, item.Y, out int x, out int y);
                                mapToRsImg(item.X + item.Width, item.Y + item.Height, out int x2, out int y2);

                                //int H = item.Height;
                                //int W = item.Width;
                                int H = y2 - y;
                                int W = x2 - x;

                                mapToRsImg(item.Center().X, item.Center().Y, out int Cx, out int Cy);
                                //Point center = item.Center();
                                Point center = new Point(Cx, Cy);

                                int evilLine   = 500;
                                int evilLinex1 = 580;
                                int evilLinex2 = 660;
                                if (showEvilLine)
                                {
                                    CvInvoke.Line(color_resize, new Point(0, evilLine), new Point(1280, evilLine), new MCvScalar(100, 100, 250), 2);         // above this line: likely at the coffee machine
                                    CvInvoke.Line(color_resize, new Point(evilLinex1, 0), new Point(evilLinex1, evilLine), new MCvScalar(100, 100, 250), 2); //
                                    CvInvoke.Line(color_resize, new Point(evilLinex2, 0), new Point(evilLinex2, evilLine), new MCvScalar(100, 100, 250), 2); //
                                }

                                if (y > evilLine || x < evilLinex1 || x > evilLinex2) // not near the coffee machine
                                {
                                    if (item.Confidence < 0.5)                        // skip low-confidence detections to avoid picking up other objects
                                    {
                                        continue;
                                    }
                                }


                                float[] objPos = new float[] { posMap[center.X, center.Y, 0], posMap[center.X, center.Y, 1], posMap[center.X, center.Y, 2] };

                                if (objPos[0] == 0 || objPos[1] == 0 || objPos[2] == 0) // a zero position would skew the average
                                {
                                    continue;
                                }

                                if (name == "blue cup")//index 0
                                {
                                    //evil color check
                                    MCvScalar clr = MyInvoke.GetColorM(color_resize, center.Y - 5, center.X);
                                    if (clr.V0 > clr.V2) // R > B: YOLO misclassified
                                    {
                                        continue;
                                    }

                                    //  CvInvoke.PutText(color_resize, "B", new Point(x, y), FontFace.HersheySimplex, 1.2, new MCvScalar(80, 150, 220), 2);
                                    CvInvoke.PutText(color_resize, item.Confidence.ToString("0.0"), new Point(x, y), FontFace.HersheySimplex, 1.2, new MCvScalar(80, 150, 220), 3);
                                    CvInvoke.Rectangle(color_resize, new Rectangle(x, y, W, H), new MCvScalar(80, 150, 220), 3);

                                    process_actionOfCups(cups[0], mat_cup, TB_Bcup_msg, TB_Bcup_state, objPos, 10, 40);
                                    //this.Dispatcher.Invoke((Action)(() => { TB_Bcup.Text = $"({posMap[center.X, center.Y, 0].ToString("0.000")},{posMap[center.X, center.Y, 1].ToString("0.000")},{posMap[center.X, center.Y, 2].ToString("0.000")})"; }));
                                    CvInvoke.Circle(color_resize, center, 10, new MCvScalar(200, 200, 20), -1);
                                }
                                else if (name == "pink cup")//index 1
                                {
                                    //evil color check
                                    MCvScalar clr = MyInvoke.GetColorM(color_resize, center.Y - 5, center.X);
                                    if (clr.V0 < clr.V2) // R < B: YOLO misclassified
                                    {
                                        continue;
                                    }

                                    // CvInvoke.PutText(color_resize, "P", new Point(x, y), FontFace.HersheySimplex, 1.2, new MCvScalar(250, 80, 80), 2);
                                    CvInvoke.PutText(color_resize, item.Confidence.ToString("0.0"), new Point(x, y), FontFace.HersheySimplex, 1.2, new MCvScalar(250, 80, 80), 3);
                                    CvInvoke.Rectangle(color_resize, new Rectangle(x, y, W, H), new MCvScalar(250, 80, 80), 3);

                                    process_actionOfCups(cups[1], mat_cup, TB_Pcup_msg, TB_Pcup_state, objPos, 60, 90);
                                    //this.Dispatcher.Invoke((Action)(() => { TB_Pcup.Text = $"({posMap[center.X, center.Y, 0].ToString("0.000")},{posMap[center.X, center.Y, 1].ToString("0.000")},{posMap[center.X, center.Y, 2].ToString("0.000")})"; }));
                                    CvInvoke.Circle(color_resize, center, 10, new MCvScalar(200, 200, 20), -1);
                                }
                            }//foreach cups
                            timeTick++;
                            this.Dispatcher.Invoke((Action)(() =>
                            {
                                img_cupState.Source = BitmapSourceConvert.ToBitmapSource(mat_cup);
                            }));
                            color_frame.CopyFrom(color_resize.DataPointer);
                            Dispatcher.Invoke(DispatcherPriority.Render, updateOriginColor, color_frame);
                        }
                        else if (showType == imgType.mix) // display the mix image
                        {
                            processingBlock.ProcessFrames(frames);
                        }
                    }
                }
            }, token);
        }
 public static CustomProcessingBlock StartCustomProcessingBlock(CustomProcessingBlock block, Action <Frame> cb)
 {
     block.Start(frame => cb(frame));
     return block;
 }
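A hypothetical usage of this helper (the identifiers below are illustrative, not from the original code): wrap a pass-through block, start it with a simple callback, and feed it frames from an already-started pipeline.

     // Illustrative only; 'pipeline' is assumed to be a started Pipeline instance.
     var block = new CustomProcessingBlock((f, src) => src.FrameReady(f));
     StartCustomProcessingBlock(block, frame =>
     {
         using (frame)
             Console.WriteLine($"Processed frame #{frame.Number}");
     });

     using (var frames = pipeline.WaitForFrames())
         block.ProcessFrames(frames);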