Пример #1
0
        /// <summary>
        /// Start/Stop toggle for camera capture and image processing.
        /// On start: allocates frame buffers, grabs a dark-field reference image,
        /// launches the processing loop, and wires alert/blink notifications.
        /// On stop: cancels processing and tears the services down.
        /// </summary>
        private void Button1_Click(object sender, EventArgs e)
        {
            if (!IsRunning)
            {
                if (capture.CaptureSource == VideoCapture.CaptureModuleType.Camera && capture.IsOpened)
                {
                    tokenSource = new CancellationTokenSource();

                    // Frame buffers sized to the camera resolution; the processed
                    // resized buffer is half-size grayscale.
                    currentFrame          = new Image <Bgr, byte>(capture.Width, capture.Height);
                    processedFrame        = new Image <Bgr, byte>(capture.Width, capture.Height);
                    processedResizedFrame = new Image <Gray, byte>(capture.Width / 2, capture.Height / 2);
                    imagesCircQ = new CircularImagesQueue(10, new Size(capture.Width, capture.Height));

                    oImageUtils = new ImageUtils();

                    capture.Start();

                    // Capture a dark-field reference frame used for background correction.
                    darkImage = SaveDarkFieldImage("darkfield.bmp");
                    oImageUtils.SetDarkFieldImage(darkImage);
                    swGlobal = Stopwatch.StartNew();
                    StartImageProcessing();
                    button1.Text = "Stop";
                    IsRunning    = true;

                    AlertService.Init();
                    eyeWatcher = new EyeWatcher(ref eyeRatios, 1000);
                    // NOTE(review): these handlers are re-subscribed on every start but never
                    // unsubscribed on stop — repeated start/stop cycles will invoke them
                    // multiple times per event. Confirm and unsubscribe in the stop path.
                    eyeWatcher.evBlinkDetected += OnReceivedMessage;
                    AlertService.evAlert       += OnReceivedMessage;
                }
                else
                {
                    IsRunning = false;
                    OnReceivedMessage("Camera was not initialized", EventArgs.Empty);
                    button1.Text = "Start";
                    capture.Stop();
                    AlertService.CloseAlertService();
                    // Null-conditional replaces the explicit null check — same behavior.
                    eyeWatcher?.CloseEyeWatcher();
                }
            }
            else
            {
                // FIX: guard both fields with null-conditionals. The error branch above
                // null-checks eyeWatcher, but this branch called CloseEyeWatcher()
                // unconditionally — an NRE if state ever gets out of sync.
                tokenSource?.Cancel();
                IsRunning    = false;
                button1.Text = "Start";
                capture.Stop();
                AlertService.CloseAlertService();
                eyeWatcher?.CloseEyeWatcher();
            }
        }
Пример #2
0
        /// <summary>
        /// Starts webcam capture (device 0) and loads the Haar cascade used for
        /// frontal-face detection.
        /// </summary>
        /// <param name="path">Unused; retained for signature compatibility with callers.</param>
        private void EmguFaceDetector(string path)
        {
            if (capture == null)
            {
                capture = new Emgu.CV.VideoCapture(0);
            }
            // FIX: unsubscribe before subscribing so repeated calls cannot stack
            // duplicate handlers (each grabbed frame would otherwise be processed
            // once per call). Removing a handler that is not attached is a no-op.
            capture.ImageGrabbed -= Capture_ImageGrabbed;
            capture.ImageGrabbed += Capture_ImageGrabbed;
            capture.Start();

            emguFaceClassifier = new Emgu.CV.CascadeClassifier(@"./haarcascade/haarcascade_frontalface_alt.xml");
        }
Пример #3
0
 /// <summary>
 /// UI handler: disables the Start button, enables Stop, lazily creates the
 /// camera capture (device 0) and begins asynchronous frame grabbing.
 /// </summary>
 private void StartCaptureButton(object sender, EventArgs e)
 {
     start_capture.Enabled = false;
     stop_capture.Enabled  = true;
     if (capture == null)
     {
         capture = new Emgu.CV.VideoCapture(0);
     }
     // FIX: remove before add so a second start cannot register the handler twice
     // (duplicate subscriptions would deliver every frame multiple times).
     capture.ImageGrabbed -= ImageReceived;
     capture.ImageGrabbed += ImageReceived;
     capture.Start();
 }
Пример #4
0
 /// <summary>
 /// Pause/resume toggle for video playback: pauses the capture when running,
 /// restarts it when paused, then refreshes the UI.
 /// </summary>
 private void toolStripButton6_Click(object sender, EventArgs e)
 {
     bool wasRunning = VideoState == VideoState.Running;

     if (wasRunning)
     {
         capture.Pause();
     }
     else
     {
         capture.Start();
     }

     VideoState = wasRunning ? VideoState.Paused : VideoState.Running;
     UpdateUI();
 }
Пример #5
0
        /// <summary>
        /// Opens <paramref name="fileName"/> as a still image; if decoding throws,
        /// falls back to treating it as a video file and starts asynchronous
        /// frame grabbing. Still images are processed immediately.
        /// </summary>
        private void OpenFile(string fileName)
        {
            Mat imgOriginal = null;

            try
            {
                imgOriginal = new Mat(fileName, ImreadModes.Color);
            }
            catch (Exception)
            {
                // Not a readable still image — try it as a video instead.
                capture = new VideoCapture(fileName);
                // FIX: 'new' never returns null in C#, so the old 'capture == null'
                // check could never detect a failed open. IsOpened reports whether
                // the backend actually opened the file.
                if (!capture.IsOpened)
                {
                    capture = null; // keep the later capture == null check meaningful
                    //lblTrackedColor.Text = "unable to open image";
                    return;
                }

                // Last valid frame index (FrameCount is a count, indices are 0-based).
                MaxFrames = Convert.ToInt32(capture.GetCaptureProperty(CapProp.FrameCount)) - 1;
                ActFrame  = 0;

                capture.ImageGrabbed += Capture_ImageGrabbed;
                capture.Start();
                VideoState = VideoState.Running;
            }

            if (imgOriginal == null)
            {
                if (capture == null)
                {
                    //lblTrackedColor.Text = "unable to open image";
                    return;
                }
                //imgOriginal = capture.QueryFrame();
            }
            else
            {
                ProcessFrame(imgOriginal);
            }

            UpdateUI();
        }
Пример #6
0
        /// <summary>
        /// Continuously grabs frames from the webcam, mirrors them horizontally,
        /// displays them through the WPF dispatcher, and forwards each frame to
        /// ParseWebCamFrame until the token is cancelled.
        /// </summary>
        /// <param name="token">Cancellation token that terminates the grab loop.</param>
        private async Task CaptureCamera(CancellationToken token)
        {
            if (capture == null)
            {
                capture = new OpenCvSharp.VideoCapture(CaptureDevice.DShow);
            }

            // (Re)opens device index 0 even when the capture object already existed.
            capture.Open(0);

            // NOTE(review): 'm_capture' is a different field from the 'capture' used
            // everywhere else in this method — possibly a leftover from an earlier
            // Emgu-based implementation (see the commented lines below); confirm it
            // is intentional and actually initialized before this call.
            m_capture.Start();

            if (capture.IsOpened())
            //  if(m_capture.IsOpened)
            {
                while (!token.IsCancellationRequested)
                {
                    // Flip around the Y axis so the preview behaves like a mirror.
                    using MemoryStream memoryStream = capture.RetrieveMat().Flip(FlipMode.Y).ToMemoryStream();
                    //  using MemoryStream memoryStream = m_capture.QueryFrame()..RetrieveMat().Flip(FlipMode.Y).ToMemoryStream();


                    await Application.Current.Dispatcher.InvokeAsync(() =>
                    {
                        var imageSource = new BitmapImage();

                        imageSource.BeginInit();
                        // OnLoad decodes the stream eagerly inside EndInit().
                        imageSource.CacheOption  = BitmapCacheOption.OnLoad;
                        imageSource.StreamSource = memoryStream;
                        imageSource.EndInit();

                        OpenCVSharpImageSource.Source = imageSource;
                    });

                    // NOTE(review): the same stream is decoded a second time here after
                    // BitmapImage consumed it; this assumes the Bitmap constructor can
                    // still read it (stream position) — verify, and note bitmapImage is
                    // never disposed in this loop unless ParseWebCamFrame takes ownership.
                    var bitmapImage = new Bitmap(memoryStream);

                    await ParseWebCamFrame(bitmapImage, token);
                }

                capture.Release();
            }
        }
Пример #7
0
        /// <summary>
        /// Toggles face capture between running and paused, flipping the associated
        /// timer and button caption. Does nothing when the capture device was never created.
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
            if (_capture == null)
            {
                return;
            }

            if (_captureInProgress)
            {
                // Stop the capture.
                button1.Text   = "Stop Face Capture";
                timer1.Enabled = true;
                timer1.Start();
                _capture.Pause();
            }
            else
            {
                // Start the capture.
                timer1.Enabled = false;
                button1.Text   = "Start Face Capture";
                timer1.Stop();
                _capture.Start();
            }

            _captureInProgress = !_captureInProgress;
        }
Пример #8
0
      /*
      public void TestGpuVibe()
      {
         int warmUpFrames = 20;

         GpuVibe<Gray> vibe = null;
         Image<Gray, Byte> mask = null;
         using (ImageViewer viewer = new ImageViewer()) //create an image viewer
         using (Capture capture = new Capture()) //create a camera captue
         {
            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {  
               //run this until application closed (close button click on image viewer)
               
               using(Image<Bgr, byte> frame = capture.RetrieveBgrFrame(0))
               using (CudaImage<Bgr, byte> gpuFrame = new CudaImage<Bgr, byte>(frame))
               using (CudaImage<Gray, Byte> gpuGray = gpuFrame.Convert<Gray, Byte>())
               {
                  if (warmUpFrames > 0)
                  {
                     warmUpFrames--;
                     return;
                  }
                  
                  if (vibe == null)
                  {
                     vibe = new GpuVibe<Gray>(1234567, gpuGray, null);
                     return;
                  }
                  else
                  {
                     vibe.Apply(gpuGray, null);
                     if (mask == null)
                        mask = new Image<Gray, byte>(vibe.ForgroundMask.Size);

                     vibe.ForgroundMask.Download(mask);
                     viewer.Image = frame.ConcateHorizontal(mask.Convert<Bgr, Byte>()); //draw the image obtained from camera

                  }
               }
            };
            capture.Start();
            viewer.ShowDialog(); //show the image viewer
         }
      }

      public void TestGpuBackgroundModel()
      {
         int warmUpFrames = 20;
         int totalFrames = 0;
         
         //CudaBackgroundSubtractorMOG2<Bgr>  bgModel = null;
         //CudaBackgroundSubtractorMOG<Bgr> bgModel = null;
         CudaBackgroundSubtractorGMG<Bgr> bgModel = null;
         //CudaBackgroundSubtractorFGD<Bgr> bgModel = null;

         Image<Gray, Byte> mask = null;
         using (ImageViewer viewer = new ImageViewer()) //create an image viewer
         using (Capture capture = new Capture()) //create a camera captue
         {
            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {
               //run this until application closed (close button click on image viewer)
               totalFrames++;

               if (viewer != null && !viewer.IsDisposed)
               {
                  if (viewer.InvokeRequired)
                  {
                     viewer.Invoke((Action)delegate { viewer.Text = String.Format("Processing {0}th frame.", totalFrames); });
                  }
                  else
                  {
                     viewer.Text = String.Format("Processing {0}th frame.", totalFrames); 
                  }
               }
               
               using (Image<Bgr, byte> frame = capture.RetrieveBgrFrame(0))
               using (CudaImage<Bgr, byte> gpuFrame = new CudaImage<Bgr, byte>(frame))
               {
                  if (warmUpFrames > 0)
                  {
                     warmUpFrames--;
                     return;
                  }

                  if (bgModel == null)
                  {
                     //bgModel = new CudaBackgroundSubtractorMOG2<Bgr>(500, 16, true);
                     //bgModel = new CudaBackgroundSubtractorMOG<Bgr>(200, 5, 0.7, 0);
                     bgModel = new CudaBackgroundSubtractorGMG<Bgr>(120, 0.8);
                     bgModel.Apply(gpuFrame, -1.0f, null);
                     //bgModel = new CudaBackgroundSubtractorFGD<Bgr>(128, 15, 25, 64, 25, 40, true, 1, 0.1f, 0.005f, 0.1f, 2.0f, 0.9f, 15.0f);
                     //bgModel.Apply(gpuFrame, -1.0f);
                     
                     return;
                  }
                  else
                  {
                     bgModel.Apply(gpuFrame, -1.0f, null);
                     //bgModel.Apply(gpuFrame, -1.0f);
                     
                     if (mask == null)
                        mask = new Image<Gray, byte>(bgModel.ForgroundMask.Size);

                     bgModel.ForgroundMask.Download(mask);
                     Image<Bgr, Byte> result = frame.ConcateHorizontal(mask.Convert<Bgr, Byte>());
                     if (viewer != null && !viewer.IsDisposed)
                     {
                        if (viewer.InvokeRequired)
                        {
                           viewer.Invoke((Action)delegate { viewer.Image = result; });
                        }
                        else
                        {
                           viewer.Image = result; //draw the image obtained from camera
                        }
                     }

                  }
               }
            };
            capture.Start();
            viewer.ShowDialog(); //show the image viewer
         }
      }*/

      /// <summary>
      /// Opens the default camera and streams its frames into an ImageViewer
      /// until the viewer window is closed.
      /// </summary>
      public void CameraTest()
      {
         using (ImageViewer viewer = new ImageViewer()) //create an image viewer
         using (VideoCapture capture = new VideoCapture()) //create a camera captue
         {
            // Invoked for every grabbed frame until the viewer is closed.
            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {
               Mat frame = new Mat();
               capture.Retrieve(frame);
               viewer.Image = frame; //draw the image obtained from camera
            };
            capture.Start();
            viewer.ShowDialog(); //show the image viewer
         }
      }
Пример #9
0
      /*
      public void TestCodeBookBGModel()
      {
         using (Capture capture = new Capture())
         using (BGCodeBookModel<Bgr> model = new BGCodeBookModel<Bgr>())
         {
            ImageViewer viewer = new ImageViewer();
            Image<Gray, byte> fgMask = capture.QueryFrame().Convert<Gray, Byte>();

            Application.Idle += delegate(Object sender, EventArgs args)
            {
               Mat frame = capture.QueryFrame();
               model.Apply(frame);
               viewer.Image = model.ForegroundMask; 
            };
            viewer.ShowDialog();
         }
      }

      public void TestBlobTracking()
      {
         MCvFGDStatModelParams fgparam = new MCvFGDStatModelParams();
         fgparam.alpha1 = 0.1f;
         fgparam.alpha2 = 0.005f;
         fgparam.alpha3 = 0.1f;
         fgparam.delta = 2;
         fgparam.is_obj_without_holes = 1;
         fgparam.Lc = 32;
         fgparam.Lcc = 16;
         fgparam.minArea = 15;
         fgparam.N1c = 15;
         fgparam.N1cc = 25;
         fgparam.N2c = 25;
         fgparam.N2cc = 35;
         fgparam.perform_morphing = 0;
         fgparam.T = 0.9f;

         BlobTrackerAutoParam<Bgr> param = new BlobTrackerAutoParam<Bgr>();
         param.BlobDetector = new BlobDetector(Emgu.CV.CvEnum.BlobDetectorType.CC);
         param.FGDetector = new FGDetector<Bgr>(Emgu.CV.CvEnum.ForgroundDetectorType.Fgd, fgparam);
         param.BlobTracker = new BlobTracker(Emgu.CV.CvEnum.BLOBTRACKER_TYPE.MSFG);
         param.FGTrainFrames = 10;
         BlobTrackerAuto<Bgr> tracker = new BlobTrackerAuto<Bgr>(param);

         //MCvFont font = new MCvFont(Emgu.CV.CvEnum.FontFace.HersheySimplex, 1.0, 1.0);

         using(ImageViewer viewer = new ImageViewer())
         using (Capture capture = new Capture())
         {
            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {
               tracker.Process(capture.RetrieveBgrFrame());
               
               //Image<Bgr, Byte> img = capture.RetrieveBgrFrame();

               Image<Bgr, Byte> img = tracker.ForegroundMask.Convert<Bgr, Byte>();
               foreach (MCvBlob blob in tracker)
               {
                  img.Draw((Rectangle)blob, new Bgr(255.0, 255.0, 255.0), 2);
                  img.Draw(blob.ID.ToString(), Point.Round(blob.Center), CvEnum.FontFace.HersheySimplex, 1.0, new Bgr(255.0, 255.0, 255.0));
               }
               viewer.Image = img;
            };
            capture.Start();
            viewer.ShowDialog();
         }
      }*/
      
      /// <summary>
      /// Demonstrates blob detection and tracking: MOG2 background subtraction
      /// produces a foreground mask, CvBlobDetector extracts blobs from it,
      /// CvTracks follows them across frames, and the composite result is shown
      /// in an ImageViewer until the window is closed.
      /// </summary>
      public void TestCvBlob()
      {
         using (CvTracks tracks = new CvTracks())
         using (ImageViewer viewer = new ImageViewer())
         using (VideoCapture capture = new VideoCapture())
         using (Mat fgMask = new Mat())
         // FIX: BackgroundSubtractorMOG2 and CvBlobDetector are disposable — put them
         // in the using chain, and create the detector once instead of allocating a
         // new one on every grabbed frame.
         using (BackgroundSubtractorMOG2 bgModel = new BackgroundSubtractorMOG2(0, 0, true))
         using (CvBlobDetector detector = new CvBlobDetector())
         {
            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {
               Mat frame = new Mat();
               capture.Retrieve(frame);
               bgModel.Apply(frame, fgMask);

               using (CvBlobs blobs = new CvBlobs())
               {
                  detector.Detect(fgMask.ToImage<Gray, Byte>(), blobs);
                  blobs.FilterByArea(100, int.MaxValue);

                  tracks.Update(blobs, 20.0, 10, 0);

                  Image<Bgr, Byte> result = new Image<Bgr, byte>(frame.Size);

                  // Copy only the pixels belonging to detected blobs into the result.
                  using (Image<Gray, Byte> blobMask = detector.DrawBlobsMask(blobs))
                  {
                     frame.CopyTo(result, blobMask);
                  }

                  foreach (KeyValuePair<uint, CvTrack> pair in tracks)
                  {
                     if (pair.Value.Inactive == 0) //only draw the active tracks.
                     {
                        CvBlob b = blobs[pair.Value.BlobLabel];
                        Bgr color = detector.MeanColor(b, frame.ToImage<Bgr, Byte>());
                        result.Draw(pair.Key.ToString(), pair.Value.BoundingBox.Location, CvEnum.FontFace.HersheySimplex, 0.5, color);
                        result.Draw(pair.Value.BoundingBox, color, 2);
                        Point[] contour = b.GetContour();
                        result.Draw(contour, new Bgr(0, 0, 255), 1);
                     }
                  }

                  // Layout: original frame on top; mask and annotated result side by side below.
                  viewer.Image = frame.ToImage<Bgr, Byte>().ConcateVertical(fgMask.ToImage<Bgr, Byte>().ConcateHorizontal(result));
               }
            };
            capture.Start();
            viewer.ShowDialog();
         }
      }
Пример #10
0
      /// <summary>
      /// Verifies that VideoCapture.Pause()/Start() can suspend and resume file
      /// playback: capture1 reads a file straight through while capture2 pauses
      /// for one second after every two frames; the test succeeds once both have
      /// delivered the same number of frames (or fails after a 2-minute timeout).
      /// </summary>
      public void TestFileCapturePause()
      {
         
         int totalFrames1 = 0;
         String fileName = EmguAssert.GetFile("tree.avi");
         String fileName2 = fileName.Replace("tree.avi", "tree2.avi");
         // Work on a private copy so the two captures never read the same file.
         File.Copy(fileName, fileName2, true);

         VideoCapture capture1 = new VideoCapture(fileName);
        
         // Capture one will continue capturing all the frames without pausing.
         EventHandler captureHandle1 = delegate
         {
            Mat img = new Mat();
            capture1.Retrieve(img);
            // NOTE(review): incremented on the grab thread and read from the polling
            // loop below without synchronization — assumed benign for this test;
            // confirm (e.g. Interlocked) if it ever turns flaky.
            totalFrames1++;
            Trace.WriteLine(String.Format("capture 1 frame {0}: {1}", totalFrames1, DateTime.Now.ToString()));
         };
         capture1.ImageGrabbed += captureHandle1;
         capture1.Start();

         // Give capture1 a small head start.
         System.Threading.Thread.Sleep(2);



         int totalFrames2 = 0;
         VideoCapture capture2 = new VideoCapture(fileName2);
         int counter = 0;
         // Capture 2 will capture 2 frames, pause for 1 second, then continue.
         EventHandler captureHandle = delegate
         {
            counter++;
            totalFrames2++;

            bool needPause = (counter >= 2);
            if (needPause)
            {
               // Pause before retrieving so no further grab events are queued.
               capture2.Pause();
               counter = 0;
            }

            Mat img = new Mat();
             capture2.Retrieve(img);
            Trace.WriteLine(String.Format("capture 2 frame {0}: {1}", totalFrames2, DateTime.Now.ToString()));

            if (needPause)
            {
               // Resume from a worker thread so this grab callback is not blocked
               // by the one-second sleep.
               System.Threading.ThreadPool.QueueUserWorkItem(delegate
                  {
                     Trace.WriteLine("Sleep for 1 sec");
                     System.Threading.Thread.Sleep(1000);
                     capture2.Start();
                  });
            }

         };

         capture2.ImageGrabbed += captureHandle;
         capture2.Start();


         // Poll until both captures report the same frame count, failing after 2 minutes.
         Stopwatch s = Stopwatch.StartNew();
         while (! (totalFrames1 == totalFrames2))
         {
            System.Threading.Thread.Sleep(1000);

            if (s.ElapsedMilliseconds > 120 * 1000)
            {
               EmguAssert.IsTrue(false, "Unable to finished reading frames in 2 mins");
               break;
            }
         }
         capture1.Dispose();
         capture2.Dispose();
      }