Wrapper for the CvBlob detection functions. The Ptr property points to the label image produced by the cvb::cvLabel function.
The algorithm is based on the paper "A linear-time component-labeling algorithm using contour tracing technique" by Fu Chang, Chun-Jen Chen and Chi-Jen Lu.
Inheritance: Emgu.Util.UnmanagedObject
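For orientation, here is a minimal, self-contained sketch of the detect-then-filter flow that the examples below all build on. It is a sketch under stated assumptions: the input file "mask.png" (a binary image with white blobs) and the 100-pixel area threshold are illustrative choices, not part of the API.

using System;
using System.Drawing;
using Emgu.CV;
using Emgu.CV.Cvb;
using Emgu.CV.Structure;

static class BlobDetectionSketch
{
    static void Main()
    {
        // Assumption: "mask.png" is a binary image in which the foreground blobs are white.
        using (Image<Gray, Byte> mask = new Image<Gray, Byte>("mask.png"))
        using (CvBlobDetector detector = new CvBlobDetector())
        using (CvBlobs blobs = new CvBlobs())
        {
            uint count = detector.Detect(mask, blobs); // label connected components in the mask
            Console.WriteLine("{0} blobs detected", count);

            blobs.FilterByArea(100, int.MaxValue);     // keep only blobs of at least 100 pixels (illustrative threshold)
            foreach (CvBlob blob in blobs.Values)
                Console.WriteLine("Blob at {0}, area = {1}", blob.BoundingBox, blob.Area);
        }
    }
}

Example #1 below embeds the same flow in a live pipeline: a background subtractor produces the mask, and the filtered blobs are handed to CvTracks for frame-to-frame tracking.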
Example #1
      /*
      public void TestCodeBookBGModel()
      {
         using (Capture capture = new Capture())
         using (BGCodeBookModel<Bgr> model = new BGCodeBookModel<Bgr>())
         {
            ImageViewer viewer = new ImageViewer();
            Image<Gray, byte> fgMask = capture.QueryFrame().Convert<Gray, Byte>();

            Application.Idle += delegate(Object sender, EventArgs args)
            {
               Mat frame = capture.QueryFrame();
               model.Apply(frame);
               viewer.Image = model.ForegroundMask; 
            };
            viewer.ShowDialog();
         }
      }

      public void TestBlobTracking()
      {
         MCvFGDStatModelParams fgparam = new MCvFGDStatModelParams();
         fgparam.alpha1 = 0.1f;
         fgparam.alpha2 = 0.005f;
         fgparam.alpha3 = 0.1f;
         fgparam.delta = 2;
         fgparam.is_obj_without_holes = 1;
         fgparam.Lc = 32;
         fgparam.Lcc = 16;
         fgparam.minArea = 15;
         fgparam.N1c = 15;
         fgparam.N1cc = 25;
         fgparam.N2c = 25;
         fgparam.N2cc = 35;
         fgparam.perform_morphing = 0;
         fgparam.T = 0.9f;

         BlobTrackerAutoParam<Bgr> param = new BlobTrackerAutoParam<Bgr>();
         param.BlobDetector = new BlobDetector(Emgu.CV.CvEnum.BlobDetectorType.CC);
         param.FGDetector = new FGDetector<Bgr>(Emgu.CV.CvEnum.ForgroundDetectorType.Fgd, fgparam);
         param.BlobTracker = new BlobTracker(Emgu.CV.CvEnum.BLOBTRACKER_TYPE.MSFG);
         param.FGTrainFrames = 10;
         BlobTrackerAuto<Bgr> tracker = new BlobTrackerAuto<Bgr>(param);

         //MCvFont font = new MCvFont(Emgu.CV.CvEnum.FontFace.HersheySimplex, 1.0, 1.0);

         using(ImageViewer viewer = new ImageViewer())
         using (Capture capture = new Capture())
         {
            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {
               tracker.Process(capture.RetrieveBgrFrame());
               
               //Image<Bgr, Byte> img = capture.RetrieveBgrFrame();

               Image<Bgr, Byte> img = tracker.ForegroundMask.Convert<Bgr, Byte>();
               foreach (MCvBlob blob in tracker)
               {
                  img.Draw((Rectangle)blob, new Bgr(255.0, 255.0, 255.0), 2);
                  img.Draw(blob.ID.ToString(), Point.Round(blob.Center), CvEnum.FontFace.HersheySimplex, 1.0, new Bgr(255.0, 255.0, 255.0));
               }
               viewer.Image = img;
            };
            capture.Start();
            viewer.ShowDialog();
         }
      }*/
      
      public void TestCvBlob()
      {
         //MCvFont font = new MCvFont(Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, 0.5);
         using (CvTracks tracks = new CvTracks())
         using (ImageViewer viewer = new ImageViewer())
         using (Capture capture = new Capture())
         using (Mat fgMask = new Mat())
         {
            //BGStatModel<Bgr> bgModel = new BGStatModel<Bgr>(capture.QueryFrame(), Emgu.CV.CvEnum.BG_STAT_TYPE.GAUSSIAN_BG_MODEL);
            BackgroundSubtractorMOG2 bgModel = new BackgroundSubtractorMOG2(0, 0, true);
            //BackgroundSubstractorMOG bgModel = new BackgroundSubstractorMOG(0, 0, 0, 0);

            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {
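               // For each grabbed frame: update the background model, detect blobs in the foreground mask,
               // drop small blobs, update the tracks, and compose the display image.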
               Mat frame = new Mat();
               capture.Retrieve(frame);
               bgModel.Apply(frame, fgMask);

               using (CvBlobDetector detector = new CvBlobDetector())
               using (CvBlobs blobs = new CvBlobs())
               {
                  detector.Detect(fgMask.ToImage<Gray, Byte>(), blobs);
                  blobs.FilterByArea(100, int.MaxValue);

                  tracks.Update(blobs, 20.0, 10, 0); // (blobs, distance threshold, inactive-frame threshold, active-frame threshold)

                  Image<Bgr, Byte> result = new Image<Bgr, byte>(frame.Size);

                  using (Image<Gray, Byte> blobMask = detector.DrawBlobsMask(blobs))
                  {
                     frame.CopyTo(result, blobMask);
                  }
                  //CvInvoke.cvCopy(frame, result, blobMask);

                  foreach (KeyValuePair<uint, CvTrack> pair in tracks)
                  {
                     if (pair.Value.Inactive == 0) //only draw the active tracks.
                     {
                        CvBlob b = blobs[pair.Value.BlobLabel];
                        Bgr color = detector.MeanColor(b, frame.ToImage<Bgr, Byte>());
                        result.Draw(pair.Key.ToString(), pair.Value.BoundingBox.Location, CvEnum.FontFace.HersheySimplex, 0.5, color);
                        result.Draw(pair.Value.BoundingBox, color, 2);
                        Point[] contour = b.GetContour();
                        result.Draw(contour, new Bgr(0, 0, 255), 1);
                     }
                  }

                  viewer.Image = frame.ToImage<Bgr, Byte>().ConcateVertical(fgMask.ToImage<Bgr, Byte>().ConcateHorizontal(result));
               }
            };
            capture.Start();
            viewer.ShowDialog();
         }
      }
Example #2
        public CameraTracking(int subtractionHistory, int subtractionThreshold, int frameBlurStrength, int largestDetectionHeightSizeDivisor, int largestDetectionWidthSizeDivisor, int smallestDetectionHeightSizeDivisor, int smallestDetectionWidthSizeDivisor)
        {
            FrameBlurStrength = frameBlurStrength;
            LargestDetectionHeightSizeDivisor  = largestDetectionHeightSizeDivisor;
            LargestDetectionWidthSizeDivisor   = largestDetectionWidthSizeDivisor;
            SmallestDetectionHeightSizeDivisor = smallestDetectionHeightSizeDivisor;
            SmallestDetectionWidthSizeDivisor  = smallestDetectionWidthSizeDivisor;

            try
            {
                _cameraCapture = new VideoCapture();

                // I had to set this by hand to match our camera, as OpenCV doesn't always pull these properties correctly
                // and sometimes shows funky frames or nothing at all.
                // _cameraCapture.SetCaptureProperty(CapProp.FrameWidth, 1600);
                // _cameraCapture.SetCaptureProperty(CapProp.FrameHeight, 1200);
                // _cameraCapture.SetCaptureProperty(CapProp.FourCC, Emgu.CV.VideoWriter.Fourcc('Y', 'U', 'Y', '2'));

                _fgDetector   = new Emgu.CV.VideoSurveillance.BackgroundSubtractorMOG2(subtractionHistory, subtractionThreshold);
                _blobDetector = new CvBlobDetector();
                _tracker      = new CvTracks();
                Ready         = true;
            }
            catch (Exception)
            {
                Ready = false;
            }
        }
Example #3
        public mouse()
        {
            InitializeComponent();
            //CvInvoke.UseOpenCL = false;
            // Busy-wait until the shared capture has been created elsewhere.
            while (global.capture == null)
            {
            }
            try
            {
                capture = global.capture;    //new Capture();
                // capture = new Capture();
                //if (capture != null) capture.FlipHorizontal = !capture.FlipHorizontal;
                capture.ImageGrabbed += ProcessFrame;
            }
            catch (NullReferenceException excpt)
            {
                MessageBox.Show(excpt.Message);
            }

            // MessageBox.Show("" + Screen.PrimaryScreen.Bounds + "         __        " + capture.Width + " " + capture.Height);

            fgDetector       = new Emgu.CV.VideoSurveillance.BackgroundSubtractorMOG2();
            blobDetector     = new Emgu.CV.Cvb.CvBlobDetector();
            cursor_history_x = new Queue <int>();
            cursor_history_y = new Queue <int>();

            // initialize the cursor-history queues with the screen-center position
            for (int i = 0; i < queue_cursor_length; i++)
            {
                cursor_history_x.Enqueue(Screen.PrimaryScreen.Bounds.Width / 2);
                cursor_history_y.Enqueue(Screen.PrimaryScreen.Bounds.Height / 2);
            }
        }
Example #4
 public gesture()
 {
     InitializeComponent();
     fow_prop = new gesture_recog();
     fow_prop.Show();
     //CvInvoke.UseOpenCL = false;
     try
     {
         grabber = global.capture;
     }
     catch (NullReferenceException excpt)
     {
         MessageBox.Show(excpt.Message);
     }
     grabber.QueryFrame(); // grab an initial frame so the capture dimensions are valid
     frameWidth  = grabber.Width;
     frameHeight = grabber.Height;
     //   detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
     hsv_min = new Hsv(0, 45, 0);
     hsv_max = new Hsv(20, 254, 254);
     // YCrCb_min = new Ycc(0, 131, 80);
     //YCrCb_max = new Ycc(255, 185, 135);
     YCrCb_min = new Ycc(0, 130, 80);
     YCrCb_max = new Ycc(255, 185, 135);
     index     = 0;
     for (int i = 0; i < 10; i++)
     {
         na[i] = 1;
     }
     fgDetector        = new BackgroundSubtractorMOG2();
     blobDetector      = new Emgu.CV.Cvb.CvBlobDetector();
     Application.Idle += new EventHandler(FrameGrabber);
 }
Example #5
 public ObjectTracker(
     TrackerSettings settings)
 {
     _settings = settings;
     _foregroundDetector = new BackgroundSubtractorMOG2(_settings.BackgroundSubtractorHistory.Value, _settings.BackgroundSubtractorMaxComponents.Value, false); // args: history, varThreshold, shadowDetection
     _blobDetector = new CvBlobDetector();
     _blobs = new CvBlobs();
     _tracks = new CvTracks();
     _trackedObjectIdentities = new Dictionary<uint, TrackedObject>();
 }
Example #6
        void Run()
        {
            try
            {
                _cameraCapture = new VideoCapture(1);


                _fgDetector   = new Emgu.CV.VideoSurveillance.BackgroundSubtractorMOG2();
                _blobDetector = new CvBlobDetector();
                _tracker      = new CvTracks();

                Application.Idle += ProcessFrame;
            }
            catch (Exception)
            {
                // Ignore initialization failures; in that case no frame handler is registered.
            }
        }
Example #7
        public camera()
        {
            InitializeComponent();
            CvInvoke.UseOpenCL = false;
            try
            {
                capture = global.capture;
                capture.ImageGrabbed += ProcessFrame;
            }
            catch (NullReferenceException excpt)
            {
                MessageBox.Show(excpt.Message);
            }

            fgDetector   = new Emgu.CV.VideoSurveillance.BackgroundSubtractorMOG2();
            blobDetector = new Emgu.CV.Cvb.CvBlobDetector();
        }
Example #8
        void Run()
        {
            try
            {
                _cameraCapture = new Capture();
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message);
                return;
            }

            _fgDetector   = new Emgu.CV.VideoSurveillance.BackgroundSubtractorMOG2();
            _blobDetector = new CvBlobDetector();
            //_tracker = new BlobTrackerAuto<Bgr>();

            Application.Idle += ProcessFrame;
        }
Example #9
        void Run()
        {
            try
            {
                _cameraCapture = new VideoCapture();
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message);
                return;
            }

            _fgDetector   = new BackgroundSubtractorMOG2();
            _blobDetector = new CvBlobDetector();
            _tracker      = new CvTracks();

            Application.Idle += ProcessFrame;
        }
Example #10
 public void Dispose()
 {
     if (_foregroundDetector == null) return;
     try
     {
         _blobDetector.Dispose();
         _blobs.Dispose();
         _tracks.Dispose();
         ((IDisposable)_foregroundDetector).Dispose();
     }
     catch (Exception ex)
     {
         Log.Error("Exception disposing foreground detector", ex);
     }
     _blobDetector = null;
     _blobs = null;
     _tracks = null;
     _foregroundDetector = null;
 }
Example #11
        static void Main(string[] args)
        {
            if (args.Length < 2)
            {
                Console.WriteLine("Please pass in camera name and address");
                return;
            }

            var codeFiles = @"C:\Users\jakka\Documents\code.txt";

            _code = File.ReadAllText(codeFiles);


            AppDomain.CurrentDomain.ProcessExit += CurrentDomain_ProcessExit;

            _fgDetector   = new BackgroundSubtractorMOG2();
            _blobDetector = new CvBlobDetector();
            _tracker      = new CvTracks();

            _name = args[0];
            var address = args[1];

            var fn = Path.Combine(Path.GetTempPath(), "survel");

            if (!Directory.Exists(fn))
            {
                Directory.CreateDirectory(fn);
            }
            else
            {
                //foreach (var f in Directory.GetFiles(fn))
                //{
                //   File.Delete(f);
                //}
            }

            Task.Run(async () =>
            {
                await _processor(address, fn);
            });

            _watcher(_name, fn).GetAwaiter().GetResult();
        }
Example #12
        public CameraTracking(int subtractionHistory, int subtractionThreshold, int frameBlurStrength, int largestDetectionHeightSizeDivisor, int largestDetectionWidthSizeDivisor, int smallestDetectionHeightSizeDivisor, int smallestDetectionWidthSizeDivisor)
        {
            Debug.WriteLine("CameraTracking:: Initializing");

            if (largestDetectionHeightSizeDivisor > smallestDetectionHeightSizeDivisor ||
                largestDetectionWidthSizeDivisor > smallestDetectionWidthSizeDivisor)
            {
                throw new Exception("The large detection divisors should be smaller than the smallest detection divisors!");
            }

            this.frameBlurStrength = frameBlurStrength;
            this.largestDetectionHeightSizeDivisor  = largestDetectionHeightSizeDivisor;
            this.largestDetectionWidthSizeDivisor   = largestDetectionWidthSizeDivisor;
            this.smallestDetectionHeightSizeDivisor = smallestDetectionHeightSizeDivisor;
            this.smallestDetectionWidthSizeDivisor  = smallestDetectionWidthSizeDivisor;

            try
            {
                CameraTracking._cameraCapture = new VideoCapture();

                // I had to set this by hand to match our camera as opencv doesn't always pull these properties correctly
                // and sometimes shows funky frames or nothing at all
                // CameraTracking._cameraCapture.SetCaptureProperty(CapProp.FrameWidth, 1600);
                // CameraTracking._cameraCapture.SetCaptureProperty(CapProp.FrameHeight, 1200);
                // CameraTracking._cameraCapture.SetCaptureProperty(CapProp.FourCC, Emgu.CV.VideoWriter.Fourcc('Y', 'U', 'Y', '2'));

                CameraTracking._fgDetector   = new Emgu.CV.BackgroundSubtractorMOG2(subtractionHistory, subtractionThreshold);
                CameraTracking._blobDetector = new CvBlobDetector();
                CameraTracking._tracker      = new CvTracks();
                this.Ready = true;
                Debug.WriteLine("CameraTracking:: Camera Initialized");
            }
            catch (Exception e)
            {
                throw new Exception("Unable to initialize the webcam!", e);
            }
        }
Example #13
        static void Main(string[] args)
        {
            _cameraCapture = new VideoCapture(1);


            _fgDetector   = new Emgu.CV.VideoSurveillance.BackgroundSubtractorMOG2();
            _blobDetector = new CvBlobDetector();
            _tracker      = new CvTracks();


            Task.Run(() =>
            {
                DetectFaces();
            })
            .ContinueWith((p) =>
            {
                if (p != null && p.IsFaulted)
                {
                    Console.WriteLine(p.Exception.InnerException.Message);
                }
            });

            Task.Run(() =>
            {
                IdentifyFaces();
            })
            .ContinueWith((p) =>
            {
                if (p != null && p.IsFaulted)
                {
                    Console.WriteLine(p.Exception.InnerException.Message);
                }
            });

            Console.ReadKey();
        }
Example #14

        protected override void inicializarVariaveis()
        {
            base.inicializarVariaveis();
            mDetector = new Emgu.CV.VideoSurveillance.BackgroundSubtractorMOG2();
            mBlobDetector = new CvBlobDetector();

            Size vTamanhoDasImagens = mImagemColorida.Size;
            mImagemCinzaSemPlanoDeFundo = new Mat();
            mImagemDoPlanoDeFundo = new Mat(vTamanhoDasImagens.Height, vTamanhoDasImagens.Width, DepthType.Cv32F, 3); // Mat(rows, cols, type, channels): rows = height, cols = width
            mImagemSemPlanoDeFundo = null;// = cvCreateImage(gTamanhoDaImagem, IPL_DEPTH_32F, 3);;
            mCopiaImagemPlanoDeFundo = null;
            mImagemBinariaSemPlanoDeFundo = new Mat();
            vHist = new Emgu.CV.UI.HistogramBox();
            vHist.Show();
            vHist.Visible = true;
            mPrimeiraExecucao = true;
            dicionarioMonitores = new Dictionary<int, MonitorDePessoa>();
            dicionarioBlobs = new Dictionary<int, MCvBlob>();
        }
Example #15

        public static WriteableBitmap DrawBlobBoundingBoxsAroundCroppedBitmap(WriteableBitmap writeableBitmap,
            WriteableBitmap gradientBitmapRef, WriteableBitmap realBitmapRef, int returnBitmapIndex)
        {
            Bitmap normalBitmap = BitmapFromWriteableBitmap(writeableBitmap);
            var cvImage = new Image<Gray, byte>(new Bitmap(normalBitmap));

            if (cvImage != null)
            {
                Image<Gray, byte> greyImg = cvImage.Convert<Gray, byte>();

                Image<Gray, Byte> greyThreshImg = greyImg.ThresholdBinaryInv(new Gray(150), new Gray(255));

                Emgu.CV.Cvb.CvBlobs resultingImgBlobs = new Emgu.CV.Cvb.CvBlobs();
                Emgu.CV.Cvb.CvBlobDetector bDetect = new Emgu.CV.Cvb.CvBlobDetector();
                uint numWebcamBlobsFound = bDetect.Detect(greyThreshImg, resultingImgBlobs);

                Image<Rgb, byte> blobImg = greyThreshImg.Convert<Rgb, byte>();
                Rgb red = new Rgb(255, 0, 0);

                int blobNumber = 0;

                foreach (Emgu.CV.Cvb.CvBlob targetBlob in resultingImgBlobs.Values)
                {
                    int imageArea = blobImg.Width * blobImg.Height;
                    int blobArea = targetBlob.Area;
                    int blobBoundingBoxArea = targetBlob.BoundingBox.Width * targetBlob.BoundingBox.Height;

                    if (blobArea > 200.0 && blobBoundingBoxArea < (imageArea * 0.99))
                    {
                        blobNumber++;
                        Rectangle rectangle = targetBlob.BoundingBox;
                        Rect convertedRect = new Rect(rectangle.X - 10, rectangle.Y - 10, rectangle.Width + 20,
                            rectangle.Height + 20);
                        //BitmapColorer.DrawRectangle(writeableBitmap, convertedRect);

                        writeableBitmap = writeableBitmap.Crop(rectangle.X - 10, rectangle.Y - 10, rectangle.Width + 20,
                            rectangle.Height + 20);
                        gradientBitmapRef = gradientBitmapRef.Crop(rectangle.X - 10, rectangle.Y - 10,
                            rectangle.Width + 20, rectangle.Height + 20);
                        realBitmapRef = realBitmapRef.Crop(rectangle.X - 10, rectangle.Y - 10, rectangle.Width + 20,
                            rectangle.Height + 20);
                    }
                }
            }
            if (returnBitmapIndex == 1)
            {
                return writeableBitmap;
            }
            else if (returnBitmapIndex == 2)
            {
                return gradientBitmapRef;
            }
            else
            {
                return realBitmapRef;
            }
        }
Example #16

 public void mapear()
 {
     mDetector = new Emgu.CV.VideoSurveillance.BackgroundSubtractorMOG2();
     mBlobDetector = new CvBlobDetector();
     _capture = new Capture(mNomeDoArquivo);
     //_capture = new Capture();
     inicializarKalman();
     Application.Idle += ProcessFrame;
 }
Example #17

        /// <summary>
        /// Roughly find suitable contours in a frame from the TuYang (图漾) depth camera
        /// </summary>
        /// <param name="point_3d">3-D points</param>
        public static void getContours(float[] point_3d)
        {
            GLB.TitleStr = "";
            int AREA = GLB.BUFW * GLB.BUFH;               // total image area
            var gray = GLB.MyFrame.Convert<Gray, Byte>(); // convert to grayscale

            Emgu.CV.Cvb.CvBlobs        myBlobs = new Emgu.CV.Cvb.CvBlobs();
            Emgu.CV.Cvb.CvBlobDetector bd      = new Emgu.CV.Cvb.CvBlobDetector();
            uint n = bd.Detect(gray, myBlobs); // detect the blobs

            // iterate over the blobs:
            for (uint i = 1; i <= myBlobs.Count; i++)
            {
                int         area   = myBlobs[i].Area;                                                  // blob area
                RotatedRect rect   = CvInvoke.MinAreaRect(new VectorOfPoint(myBlobs[i].GetContour())); // minimum-area rectangle
                float       width  = rect.Size.Width;                                                  // width in pixels
                float       height = rect.Size.Height;                                                 // height in pixels
                if (height < width)
                {
                    float temp = height;
                    height = width;
                    width  = temp;
                }
                float H2W = height / width;
                if (area > 0.02 * AREA && area < 0.75 * AREA && H2W > 1 && H2W < 2) // rough filter by area and aspect ratio
                {
                    if (getProduceInfo(myBlobs[i], point_3d) == true)
                    {
                        if (ProduceMacth() == true) // match succeeded
                        {
                            ////######################################### queue averaging -- obtain the center coordinates #################################################
                            //GLB.avgCameraPoint3.Enqueue(new Point3(GLB.camera_device_point.X, GLB.camera_device_point.Y, GLB.camera_device_point.Z));
                            //if (GLB.avgCameraPoint3.Count > 5)
                            //{
                            //    GLB.avgCameraPoint3.Dequeue();
                            //}
                            //else
                            //{
                            //    return ;
                            //}
                            //GLB.camera_device_point.Z = (int)GLB.avgCameraPoint3.Average(o => o.Z); // depth of the center point // Z
                            //GLB.camera_device_point.Y = (int)GLB.avgCameraPoint3.Average(o => o.Y); // Y
                            //GLB.camera_device_point.X = (int)GLB.avgCameraPoint3.Average(o => o.X); // X
                            //RotatedRect boxCenter = new RotatedRect(new PointF((float)GLB.obj.jd.Average(o => o.X), (float)GLB.obj.jd.Average(o => o.Y)), new Size(8, 8), 0);
                            //CvInvoke.Ellipse(GLB.MyFrame, boxCenter, new MCvScalar(255, 0, 0), 4); // draw a small circle at the center
                            //CvInvoke.PutText(GLB.MyFrame, "x:" + (float)GLB.obj.jd.Average(o => o.X) + "y:" + (float)GLB.obj.jd.Average(o => o.Y) + "XC=" + GLB.obj.xCenter + "YC=" + GLB.obj.yCenter + "Depth=" + GLB.obj.Depth, new System.Drawing.Point((int)GLB.obj.jd.Average(o => o.X) - 176, (int)GLB.obj.jd.Average(o => o.Y) + 25), Emgu.CV.CvEnum.FontFace.HersheyDuplex, .75, new MCvScalar(255, 255, 255), 2); // depth overlay

                            //////// queue averaging
                            //GLB.avgAngle.Enqueue((float)GLB.obj.Angle);
                            //if (GLB.avgAngle.Count > 5)
                            //{
                            //    GLB.avgAngle.Dequeue();
                            //}
                            //else
                            //{
                            //    return ;
                            //}
                            //GLB.obj.Angle = GLB.avgAngle.Average(); // rotation angle
                            //CvInvoke.PutText(GLB.MyFrame, "Angl=" + GLB.obj.Angle, new System.Drawing.Point((int)GLB.obj.jd[3].X, (int)GLB.obj.jd[3].Y), Emgu.CV.CvEnum.FontFace.HersheyDuplex, .75, new MCvScalar(0, 0, 255), 2);


                            ////######################################### coordinate conversion #################################################
                            GLB.robot_device_point.X = GLB.MatTuYangCam[0] * GLB.camera_device_point.X + GLB.MatTuYangCam[1] * GLB.camera_device_point.Y + GLB.MatTuYangCam[2];
                            GLB.robot_device_point.Y = GLB.MatTuYangCam[3] * GLB.camera_device_point.X + GLB.MatTuYangCam[4] * GLB.camera_device_point.Y + GLB.MatTuYangCam[5];
                            GLB.robot_device_point.Z = 2818 - GLB.camera_device_point.Z;
                            GLB.device_angl         += -2.6f; // 2.6-degree angle between the camera and the robot
                            GLB.device_angl          = (float)(GLB.device_angl * Math.PI / 180f);
                            // restrict to the valid working range
                            if (GLB.robot_device_point.X < -600 || GLB.robot_device_point.X > 600 ||
                                GLB.robot_device_point.Y < -2200 || GLB.robot_device_point.Y > -800 ||
                                GLB.robot_device_point.Z < 280 || GLB.robot_device_point.Z > 1100)
                            {
                                GLB.Match_success = false;
                                GLB.TitleStr     += ", but out of range";
                            }
                            else
                            {
                                GLB.Match_success = true;
                            }
                        }
                        else
                        {
                            GLB.Match_success = false;
                        }
                    }
                }
            }
        }
Example #18

 private void Form1_Load(object sender, EventArgs e)
 {
     blobDetector = new CvBlobDetector();
 }
Example #19

        public static WriteableBitmap DrawBlobBoundingBoxsCV(WriteableBitmap writeableBitmap, WriteableBitmap gradientBitmapRef, WriteableBitmap realBitmapRef)
        {
            Bitmap normalBitmap = BitmapFromWriteableBitmap(writeableBitmap);
            var cvImage = new Image<Gray, byte>(new Bitmap(normalBitmap));

            //var classifications = ClassifyBitmap( writeableBitmap, cvImage );
            if (cvImage != null)
            {
                // This takes our nice looking color png and converts it to black and white
                Image<Gray, byte> greyImg = cvImage.Convert<Gray, byte>();

                // We again threshold it based on brightness... BUT WE INVERT THE PNG, because the blob detector detects white, not black.
                // This essentially eliminates the color differences.
                // You could also threshold only certain colors here for a color-based blob detector.
                Image<Gray, Byte> greyThreshImg = greyImg.ThresholdBinaryInv(new Gray(150), new Gray(255));

                Emgu.CV.Cvb.CvBlobs resultingImgBlobs = new Emgu.CV.Cvb.CvBlobs();
                Emgu.CV.Cvb.CvBlobDetector bDetect = new Emgu.CV.Cvb.CvBlobDetector();
                uint numWebcamBlobsFound = bDetect.Detect(greyThreshImg, resultingImgBlobs);

                // A simple way of drawing all blobs regardless of their size, without iterating through them.
                // It draws on top of whatever you input; here that is the threshold image, with an alpha of 0.5 so it's half transparent.
                //Emgu.CV.Image<Bgr, byte> blobImg = bDetect.DrawBlobs(webcamThreshImg, resultingWebcamBlobs, Emgu.CV.Cvb.CvBlobDetector.BlobRenderType.Default, 0.5);

                // Here we iterate through each blob, apply a size threshold, and draw a red box around it
                Image<Rgb, byte> blobImg = greyThreshImg.Convert<Rgb, byte>();
                Rgb red = new Rgb(255, 0, 0);

                int blobNumber = 0;

                // Iterate the blobs
                foreach (Emgu.CV.Cvb.CvBlob targetBlob in resultingImgBlobs.Values)
                {
                    int imageArea = blobImg.Width * blobImg.Height;
                    int blobArea = targetBlob.Area;
                    int blobBoundingBoxArea = targetBlob.BoundingBox.Width * targetBlob.BoundingBox.Height;

                    // Only use blobs with area greater than some threshold.
                    // If the blob bounding rect is basically the size of the whole image, ignore it for now.
                    if (blobArea > 200.0 && blobBoundingBoxArea < (imageArea * 0.99))
                    {
                        Rectangle rectangle = targetBlob.BoundingBox;

                        int CentroidX = (int)targetBlob.Centroid.X;
                        int CentroidY = (int)targetBlob.Centroid.Y;

                        int croppedWidth = rectangle.Width + 50;
                        int croppedHeight = rectangle.Height + 50;

                        int CroppedX = CentroidX - (int)(croppedWidth / 2.0);
                        int CroppedY = CentroidY - (int)(croppedHeight / 2.0);

                        var croppedBlobBitmap = writeableBitmap.Crop(CroppedX, CroppedY, croppedWidth, croppedHeight);
                        var croppedGradientBlobBitmap = gradientBitmapRef.Crop(CroppedX, CroppedY, croppedWidth, croppedHeight);
                        var croppedRealBitmapRef = realBitmapRef.Crop(CroppedX, CroppedY, croppedWidth, croppedHeight);

                        double blobAngle = -RadianToDegree(CalculateBlobAngle(targetBlob));

                        CroppedX = (int)(croppedWidth / 2.0);
                        CroppedY = (int)(croppedHeight / 2.0);

                        var rotatedandCroppedBlobBitmap = RotateWriteableBitmap(croppedBlobBitmap, blobAngle);
                        var rotatedGradientBlobBitmap = RotateColorWriteableBitmap(croppedGradientBlobBitmap, blobAngle);
                        var rotatedRealBitmapRef = RotateColorWriteableBitmap(croppedRealBitmapRef, blobAngle);

                        var refinedBitmap = DrawBlobBoundingBoxsAroundCroppedBitmap(rotatedandCroppedBlobBitmap, rotatedGradientBlobBitmap, rotatedRealBitmapRef, 1);
                        rotatedGradientBlobBitmap = DrawBlobBoundingBoxsAroundCroppedBitmap(rotatedandCroppedBlobBitmap, rotatedGradientBlobBitmap, rotatedRealBitmapRef, 2);
                        rotatedRealBitmapRef = DrawBlobBoundingBoxsAroundCroppedBitmap(rotatedandCroppedBlobBitmap, rotatedGradientBlobBitmap, rotatedRealBitmapRef, 3);

                        var areaCheck = refinedBitmap.PixelHeight * refinedBitmap.PixelWidth;

                        if (areaCheck >= 200)
                        {
                            blobNumber++;

                            var thresholded = refinedBitmap.Clone();
                            ThresholdBitmap(thresholded, 10, false);
                            BitmapColorer.ColorBitmap(thresholded);

                            System.Windows.Media.Color blobColor = PixelColorOfCentralBlob(thresholded);
                            BitmapColorer.EraseAllButCertainColorandWhite(thresholded, blobColor);

                            var shouldFlip = shouldFlipThresholdedBitmap(thresholded, blobColor);

                            if (shouldFlip)
                            {
                                thresholded = thresholded.Rotate(180);
                                rotatedGradientBlobBitmap = rotatedGradientBlobBitmap.Rotate(180);
                                rotatedRealBitmapRef = rotatedRealBitmapRef.Rotate(180);
                            }

                            var orientedBitmap = NormalizeBitmapSize(thresholded);
                            var orientedGradientBitmap = NormalizeBitmapSize(rotatedGradientBlobBitmap);
                            var orientedRealBitmap = NormalizeBitmapSize(rotatedRealBitmapRef);

                            ApplyBlobMaskToOtherBitmaps(orientedBitmap, orientedGradientBitmap, orientedRealBitmap, blobColor);

                            string fileName1 = saveDirectory + "\\croppedBlob" + blobNumber + ".png";

                            ExtensionMethods.Save(orientedRealBitmap, fileName1);
                        }
                    }
                }
            }

            return writeableBitmap;
        }
Example #20
      void Run()
      {
         try
         {
            _cameraCapture = new VideoCapture();
         }
         catch (Exception e)
         {
            MessageBox.Show(e.Message);
            return;
         }

         _fgDetector = new Emgu.CV.VideoSurveillance.BackgroundSubtractorMOG2();
         _blobDetector = new CvBlobDetector();
         _tracker = new CvTracks();

         Application.Idle += ProcessFrame;
      }