Start() public method

Starts the grab process in a separate thread. Once started, subscribe to the ImageGrabbed event and call RetrieveGrayFrame/RetrieveBgrFrame to obtain the images.
public void Start()
Returns: void
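A minimal usage sketch (assuming the Emgu CV 2.x/3.0-era Capture API used throughout the examples below; CaptureStartSketch and ProcessFrame are illustrative names, not library members): attach an ImageGrabbed handler before calling Start(), then retrieve each grabbed frame inside the handler.

    using System;
    using Emgu.CV;
    using Emgu.CV.Structure;

    public class CaptureStartSketch
    {
        private static Capture _capture;

        public static void Main()
        {
            using (_capture = new Capture()) // default camera
            {
                // Attach the handler first so no grabbed frames are missed.
                _capture.ImageGrabbed += ProcessFrame;
                _capture.Start(); // grabbing now runs on a separate thread
                Console.ReadKey(); // keep the main thread alive while grabbing
                _capture.Stop();
            }
        }

        private static void ProcessFrame(object sender, EventArgs e)
        {
            // Retrieve the frame that triggered this ImageGrabbed event.
            Image<Bgr, byte> frame = _capture.RetrieveBgrFrame();
            // ... process the frame here ...
        }
    }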
Example #1
      /*
      public void TestCodeBookBGModel()
      {
         using (Capture capture = new Capture())
         using (BGCodeBookModel<Bgr> model = new BGCodeBookModel<Bgr>())
         {
            ImageViewer viewer = new ImageViewer();
            Image<Gray, byte> fgMask = capture.QueryFrame().Convert<Gray, Byte>();

            Application.Idle += delegate(Object sender, EventArgs args)
            {
               Mat frame = capture.QueryFrame();
               model.Apply(frame);
               viewer.Image = model.ForegroundMask; 
            };
            viewer.ShowDialog();
         }
      }

      public void TestBlobTracking()
      {
         MCvFGDStatModelParams fgparam = new MCvFGDStatModelParams();
         fgparam.alpha1 = 0.1f;
         fgparam.alpha2 = 0.005f;
         fgparam.alpha3 = 0.1f;
         fgparam.delta = 2;
         fgparam.is_obj_without_holes = 1;
         fgparam.Lc = 32;
         fgparam.Lcc = 16;
         fgparam.minArea = 15;
         fgparam.N1c = 15;
         fgparam.N1cc = 25;
         fgparam.N2c = 25;
         fgparam.N2cc = 35;
         fgparam.perform_morphing = 0;
         fgparam.T = 0.9f;

         BlobTrackerAutoParam<Bgr> param = new BlobTrackerAutoParam<Bgr>();
         param.BlobDetector = new BlobDetector(Emgu.CV.CvEnum.BlobDetectorType.CC);
         param.FGDetector = new FGDetector<Bgr>(Emgu.CV.CvEnum.ForgroundDetectorType.Fgd, fgparam);
         param.BlobTracker = new BlobTracker(Emgu.CV.CvEnum.BLOBTRACKER_TYPE.MSFG);
         param.FGTrainFrames = 10;
         BlobTrackerAuto<Bgr> tracker = new BlobTrackerAuto<Bgr>(param);

         //MCvFont font = new MCvFont(Emgu.CV.CvEnum.FontFace.HersheySimplex, 1.0, 1.0);

         using(ImageViewer viewer = new ImageViewer())
         using (Capture capture = new Capture())
         {
            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {
               tracker.Process(capture.RetrieveBgrFrame());
               
               //Image<Bgr, Byte> img = capture.RetrieveBgrFrame();

               Image<Bgr, Byte> img = tracker.ForegroundMask.Convert<Bgr, Byte>();
               foreach (MCvBlob blob in tracker)
               {
                  img.Draw((Rectangle)blob, new Bgr(255.0, 255.0, 255.0), 2);
                  img.Draw(blob.ID.ToString(), Point.Round(blob.Center), CvEnum.FontFace.HersheySimplex, 1.0, new Bgr(255.0, 255.0, 255.0));
               }
               viewer.Image = img;
            };
            capture.Start();
            viewer.ShowDialog();
         }
      }*/
      
      public void TestCvBlob()
      {
         //MCvFont font = new MCvFont(Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, 0.5);
         using (CvTracks tracks = new CvTracks())
         using (ImageViewer viewer = new ImageViewer())
         using (Capture capture = new Capture())
         using (Mat fgMask = new Mat())
         {
            //BGStatModel<Bgr> bgModel = new BGStatModel<Bgr>(capture.QueryFrame(), Emgu.CV.CvEnum.BG_STAT_TYPE.GAUSSIAN_BG_MODEL);
            BackgroundSubtractorMOG2 bgModel = new BackgroundSubtractorMOG2(0, 0, true);
            //BackgroundSubstractorMOG bgModel = new BackgroundSubstractorMOG(0, 0, 0, 0);

            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {
               Mat frame = new Mat();
               capture.Retrieve(frame);
               bgModel.Apply(frame, fgMask);

               using (CvBlobDetector detector = new CvBlobDetector())
               using (CvBlobs blobs = new CvBlobs())
               {
                  detector.Detect(fgMask.ToImage<Gray, Byte>(), blobs);
                  blobs.FilterByArea(100, int.MaxValue);

                  tracks.Update(blobs, 20.0, 10, 0);

                  Image<Bgr, Byte> result = new Image<Bgr, byte>(frame.Size);

                  using (Image<Gray, Byte> blobMask = detector.DrawBlobsMask(blobs))
                  {
                     frame.CopyTo(result, blobMask);
                  }
                  //CvInvoke.cvCopy(frame, result, blobMask);

                  foreach (KeyValuePair<uint, CvTrack> pair in tracks)
                  {
                     if (pair.Value.Inactive == 0) //only draw the active tracks.
                     {
                        CvBlob b = blobs[pair.Value.BlobLabel];
                        Bgr color = detector.MeanColor(b, frame.ToImage<Bgr, Byte>());
                        result.Draw(pair.Key.ToString(), pair.Value.BoundingBox.Location, CvEnum.FontFace.HersheySimplex, 0.5, color);
                        result.Draw(pair.Value.BoundingBox, color, 2);
                        Point[] contour = b.GetContour();
                        result.Draw(contour, new Bgr(0, 0, 255), 1);
                     }
                  }

                  viewer.Image = frame.ToImage<Bgr, Byte>().ConcateVertical(fgMask.ToImage<Bgr, Byte>().ConcateHorizontal(result));
               }
            };
            capture.Start();
            viewer.ShowDialog();
         }
      }
Example #2
        private void YokalmaSistemi_Load(object sender, EventArgs e)
        {
            Capture capture1 = new Capture();

            if (capture1 == null)
            {
                MessageBox.Show("Kamera Açılamadı"); // "The camera could not be opened"
            }
            else
            {
                capture1.ImageGrabbed += (a, b) =>
                {
                    var            image      = capture1.RetrieveBgrFrame();
                    var            grayimage1 = image.Convert <Gray, byte>();
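                    // NOTE: creating the HaarCascade and MCvFont on every frame is expensive; consider caching them outside the handler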
                    HaarCascade    haaryuz    = new HaarCascade("haarcascade_frontalface_default.xml");
                    MCvAvgComp[][] Yuzler     = grayimage1.DetectHaarCascade(haaryuz, 1.2, 5, HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(15, 15));
                    MCvFont        font       = new MCvFont(FONT.CV_FONT_HERSHEY_COMPLEX, 0.5, 0.5);
                    foreach (MCvAvgComp yuz in Yuzler[0])
                    {
                        var sadeyuz = grayimage1.Copy(yuz.rect).Convert <Gray, byte>().Resize(100, 100, INTER.CV_INTER_CUBIC);
                        pic_kucuk_res.Image = sadeyuz.ToBitmap();
                        if (train.IsTrained)
                        {
                            name = train.Recognise(sadeyuz);
                            int match_value = (int)train.Get_Eigen_Distance;
                            image.Draw(name + " ", ref font, new Point(yuz.rect.X - 2, yuz.rect.Y - 2), new Bgr(Color.SteelBlue));
                        }
                        image.Draw(yuz.rect, new Bgr(Color.Purple), 2);
                        //  textBox1.Text = name;
                    }
                    pic_kamera.Image = image.ToBitmap();
                };
                capture1.Start(); // start grabbing only after the handler is attached
            }
        }
Example #3
        public Camera(int num)
        {
            lens = new Capture(num);
            lens.ImageGrabbed += this.Process;

            lens.Start();
        }
Example #4
        public RileeCapture()
        {
            InitializeComponent();

            btnDrawMasterImage.Enabled = false;

            //initialization for recognition boxes
            _limgMasters.Add(imbMaster1);
            _limgMasters.Add(imbMaster2);
            _limgMasters.Add(imbMaster3);
            _limgMasters.Add(imbMaster4);
            _limgMasters.Add(imbMaster5);

            try
            {
                _capture = new Capture();
                _capture.ImageGrabbed += ProcessFrame;
                _capture.Start();
                tslStatus.Text = "Capture started";
                //flip horizontal to natural
                //if ((_capture != null)&&(!_capture.FlipHorizontal)) _capture.FlipHorizontal = true;
            }
            catch (NullReferenceException ex)
            {
                tslStatus.Text = "Capture initialization failed...";
                MessageBox.Show(ex.Message);
            }
        }
Example #5
        public Admin()
        {
            InitializeComponent();

            face = new HaarCascade("haarcascade_frontalface_default.xml");

            Loaded += (s, e) =>
            {
                this.DataContext = CommonData.PicturesVM;
                if (grabber == null)
                {
                    CommonData.LoadSavedData();
                    //check how many faces we already have
                    _countFaces = CommonData.PicturesVM.Pictures.Count;

                    grabber = new Capture();
                    grabber.QueryFrame();
                    grabber.Start();
                }
                else
                {
                    grabber.Start();
                }

            };
            Unloaded += (s, e) =>
            {
                grabber.Stop();
            };

            CompositionTarget.Rendering += CompositionTarget_Rendering;
        }
Example #6
 /// <summary>
 /// Initializes the form and sets up the camera capture
 /// </summary>
 public Form1()
 {
     try
     {
         IC = new IntrinsicCameraParameters();
     }
     catch (Exception ex)
     {
         MessageBox.Show("Error: " + ex.Message);
     }
     InitializeComponent();
     //fill line colour array
     Random R = new Random();
     for (int i = 0; i < line_colour_array.Length; i++)
     {
         line_colour_array[i] = new Bgr(R.Next(0, 255), R.Next(0, 255), R.Next(0, 255));
     }
     //set up capture as normal
     try
     {
         _Capture = new Capture();
         _Capture.ImageGrabbed += new Emgu.CV.Capture.GrabEventHandler(_Capture_ImageGrabbed);
         _Capture.Start();
     }
     catch (Exception ex)
     {
         MessageBox.Show("Error: " + ex.Message);
     }
 }
Example #7
        public OgrKayit()
        {
            InitializeComponent();
            updateComboBoxClass();
            Capture capture = new Capture();

            if (capture == null)
            {
                MessageBox.Show("Kamera Açılamadı"); // "The camera could not be opened"
            }
            else
            {
                capture.ImageGrabbed += (a, b) =>
                {
                    var            image     = capture.RetrieveBgrFrame();
                    var            grayimage = image.Convert <Gray, byte>();
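                    // NOTE: creating the HaarCascade and MCvFont on every frame is expensive; consider caching them outside the handler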
                    HaarCascade    haaryuz   = new HaarCascade("haarcascade_frontalface_default.xml");
                    MCvAvgComp[][] Yuzler    = grayimage.DetectHaarCascade(haaryuz, 1.2, 5, HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(15, 15));
                    MCvFont        font      = new MCvFont(FONT.CV_FONT_HERSHEY_COMPLEX, 0.5, 0.5);
                    foreach (MCvAvgComp yuz in Yuzler[0])
                    {
                        var sadeyuz = grayimage.Copy(yuz.rect).Convert <Gray, byte>().Resize(100, 100, INTER.CV_INTER_CUBIC);
                        image.Draw(yuz.rect, new Bgr(Color.Red), 2);
                        pic_kucuk_res.Image = sadeyuz.ToBitmap();
                    }
                    pic_box_kamera.Image = image.ToBitmap();
                };
                capture.Start(); // start grabbing only after the handler is attached
            }
        }
Example #8
        public AIRecognition()
        {
            InitializeComponent();

            _faceClassifier = new CascadeClassifier("haarcascade_frontalface_default.xml");

            Loaded += (s, e) =>
            {
                _vmodel.Pictures.Clear();
                _vmodel.PersonRecognized = 0;
                this.DataContext = _vmodel;

                if (grabber == null)
                {
                    CommonData.LoadSavedData();
                    //check how many faces we already have
                    _countFaces = CommonData.PicturesVM.Pictures.Count;

                    grabber = new Capture();
                    grabber.QueryFrame();
                    grabber.Start();
                }
                else
                {
                    grabber.Start();
                }

            };
            Unloaded += (s, e) =>
            {
                grabber.Stop();
            };

            CompositionTarget.Rendering += CompositionTarget_Rendering;
        }
Example #9
 private void Window_Loaded(object sender, RoutedEventArgs e)
 {
     Capture capture = new Capture(7);
     capture.Start();
     ComponentDispatcher.ThreadIdle += (o, arg) =>
         {
             var img = capture.QueryFrame();
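              // NOTE: 'con' below is created but never used, and its MemStorage is never disposed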
             Emgu.CV.Contour<Bgr> con = new Contour<Bgr>(new MemStorage());
             Display.Source = BitmapSourceConvert.ToBitmapSource(img);
         };
 }
Example #10
 private void button1_Click(object sender, EventArgs e)
 {
     button1.Enabled = false;
     button2.Enabled = true;
     button3.Enabled = true;
     if (captura == null)
     {
         captura = new Emgu.CV.Capture(0);
     }
     captura.ImageGrabbed += activarWebCam;
     captura.Start();
 }
Example #11
 public void StartCamera(int cameraIndex)
 {
     if (isCapturing)
     {
         return;
     }
     CvInvoke.UseOpenCL = false;
     camera = new Capture(cameraIndex);
     camera.ImageGrabbed += CapOnImageGrabbed;
     camera.Start();
     isCapturing = true;
 }
Example #12
 public Form1()
 {
     InitializeComponent();
     try
     {
         _capture = new Capture();
         _capture.ImageGrabbed += ProcessFrame;
         _capture.Start();
         processing = false;
     }
     catch (NullReferenceException excpt)
     {
         MessageBox.Show(excpt.Message);
     }
 }
Example #13
        int _width; //chessboard width: number of squares along the width - 1

        #endregion Fields

        #region Constructors

        public Form1()
        {
            InitializeComponent();

            CvInvoke.UseOpenCL = false;
            //set up capture as normal
            try
            {
                _capture = new Capture();
                _capture.ImageGrabbed += ProcessFrame;
                _capture.Start();
            }
            catch (NullReferenceException excpt)
            {
                MessageBox.Show(excpt.Message);
            }
        }
Example #14
        public static void StartCapture()
        {
            CvInvoke.cvNamedWindow("Capture");
            try {
                _capture = new Capture (1);

            } catch (NullReferenceException excpt) {
                Console.Out.WriteLine (excpt.Message);
                _capture = new Capture (0);

            } finally {
                watch = Stopwatch.StartNew ();
                Application.Idle += ProcessFrame;
                _capture.Start ();

            }
        }
Example #15
        public bool GetVideoData(System.Windows.Forms.PictureBox picturebox1, System.Windows.Forms.PictureBox picturebox2)
        {
            CameraHasData = false;
            frm.SetText(frm.Controls["textBoxImageY"], "0");
            frm.SetText(frm.Controls["textBoxDeg"], "0");
            frm.SetText(frm.Controls["textBoxImageX"], "0");

            capture.Start();
            int captureCounter = 1;

            while (!CameraHasData || captureCounter <= 20)
            {
                GetCameraXY(picturebox1, picturebox2);
                captureCounter++;
            }
            capture.Stop();
            return(true);
        }
Example #16
 public void InitVideoCapture(string path)
 {
     try
     {
         m_FrameMat = new Mat();
         m_VideoCaptureFilename = path;
         m_VideoCaptureInterface = null;
         m_VideoCaptureInterface = new Capture(m_VideoCaptureFilename);
         m_VideoCaptureInterface.SetCaptureProperty(CapProp.FrameHeight, 640);
         m_VideoCaptureInterface.SetCaptureProperty(CapProp.FrameWidth, 360);
         m_VideoCaptureInterface.SetCaptureProperty(CapProp.Fps, 5);
         m_VideoCaptureInterface.ImageGrabbed += VideoCaptureInterface_ImageGrabbed;
         m_VideoCaptureFrameCount = (int)m_VideoCaptureInterface.GetCaptureProperty(CapProp.FrameCount);
         m_VideoCaptureInterface.Start();
     }
     catch (Exception e)
     {
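          // NOTE: silently swallowing exceptions hides capture-initialization failures; consider logging e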
     }
 }
Example #17
        private void StartCapture()
        {
            try
            {
                _cameraCapture = new Capture();
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message);
                return;
            }
            if (_cameraCapture != null)
            {
                _cameraCapture.ImageGrabbed += _cameraCapture_ImageGrabbed;
                _cameraCapture.Start();

                startBtn.Text = "Stop";
            }
        }
Example #18
        private void buttonStart_Click(object sender, EventArgs e)
        {
            try
            {
                capture = new Capture();
                capture.FlipHorizontal = true;

                timer = new DispatcherTimer();
                //Event for processing each frame in 20 ms interval
                timer.Tick += ProcessFrame;
                timer.Interval = new TimeSpan(0, 0, 0, 0, 20);
                timer.Start();

                capture.Start();
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }

            buttonStart.Enabled = false;
            buttonStop.Enabled = true;
        }
Example #19
        public SalmonCounter()
        {
            InitializeComponent();
            _capture = new Capture(videoOne);

            counter = new Counter(_capture.Width);

            bImage = new BlobImage();
            fgDetector = new ForegroundDetector(bImage);
            sTracker = new SalmonTracker(bImage, counter);

            watch = new Stopwatch();
            time = new TimeSpan();

            FPS = (int)_capture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.Fps);
            frameCount = (int)_capture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameCount);
            pictureBox1.Width = _capture.Width;
            pictureBox1.Height = _capture.Height;

            show.Width = _capture.Width;
            show.Height = _capture.Height;

            //msec between frames
            msec = (int)(1000 / FPS);

            //set the event handler
            _capture.ImageGrabbed += grabImage;
            _capture.Start();
            watch.Start();
            _frame = new Mat();

            //Start foreground segmenter thread and salmon tracker thread
            backgroundSubtractorThread = new Thread(fgDetector.detect);
            backgroundSubtractorThread.Start();
            sTrackerThread = new Thread(sTracker.updateSalmons);
            sTrackerThread.Start();
        }
Example #20
        public YokalmaSistemi()
        {
            InitializeComponent();
            list_view_var_olanlar.Columns.Add("Adı Soyadı", 100);
            ComboBoxUpdate();
            Capture capture = new Capture();

            if (capture == null)
            {
                MessageBox.Show("Kamera Açılamadı"); // "The camera could not be opened"
            }
            else
            {
                capture.ImageGrabbed += (a, b) =>
                {
                    var            image     = capture.RetrieveBgrFrame();
                    var            grayimage = image.Convert <Gray, byte>();
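                    // NOTE: creating the HaarCascade and MCvFont on every frame is expensive; consider caching them outside the handler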
                    HaarCascade    haaryuz   = new HaarCascade("haarcascade_frontalface_default.xml");
                    MCvAvgComp[][] Yuzler    = grayimage.DetectHaarCascade(haaryuz, 1.2, 5, HAAR_DETECTION_TYPE.DO_CANNY_PRUNING, new Size(15, 15));
                    MCvFont        font      = new MCvFont(FONT.CV_FONT_HERSHEY_COMPLEX, 0.5, 0.5);
                    foreach (MCvAvgComp yuz in Yuzler[0])
                    {
                        var sadeyuz = grayimage.Copy(yuz.rect).Convert <Gray, byte>().Resize(100, 100, INTER.CV_INTER_CUBIC);
                        pic_kucuk_res.Image = sadeyuz.ToBitmap();
                        if (train.IsTrained)
                        {
                            name = train.Recognise(sadeyuz);
                            int match_value = (int)train.Get_Eigen_Distance;
                            image.Draw(name + " ", ref font, new Point(yuz.rect.X - 2, yuz.rect.Y - 2), new Bgr(Color.SteelBlue));
                        }
                        image.Draw(yuz.rect, new Bgr(Color.Purple), 2);
                    }
                    pic_kamera.Image = image.ToBitmap();
                };
                capture.Start(); // start grabbing only after the handler is attached
            }
        }
Example #21
        public async void StartCapture()
        {
            // Can only access the first camera without CL Eye SDK
            if (_camera.TrackerId == 0 && !_camera.Design)
            {
                _capture = new Capture(_camera.TrackerId);
                _ctsCameraCalibration = new CancellationTokenSource();
                CancellationToken token = _ctsCameraCalibration.Token;

                _capture.Start();
                try
                {
                    // needed to avoid bitmapsource access violation?
                    _captureTask = Task.Run(() =>
                    {
                        while (!token.IsCancellationRequested)
                        {
                            ImageGrabbed();
                        }
                    }, token);
                    await _captureTask;
                }
                catch (OperationCanceledException)
                {

                }
                catch (Exception ex)
                {
                    Console.WriteLine(ex.StackTrace);
                }
                finally
                {
                    _capture.Stop();
                    _capture.Dispose();
                }
            }
        }
Example #22
 private void useWebCam_Click(object sender, EventArgs e)
 {
     //set up capture as normal
     useWebCam= true;
     useNao = false;
     try
     {
         _Capture = new Capture();
         _Capture.ImageGrabbed += new Emgu.CV.Capture.GrabEventHandler(_Capture_ImageGrabbed);
         _Capture.Start();
     }
     catch (Exception ex)
     {
         MessageBox.Show("Error: " + ex.Message);
     }
     
     useWebCamButton.Enabled = false;
     useNaoButton.Enabled = true;
 }
Example #23
        //hardHatToolStripMenuItem_Click ends
        //To start capturing from webcam
        private void startBtn_Click(object sender, EventArgs e)
        {
            Console.Out.WriteLine("Start Webcam...");
            //Initialize camera to the webcam capture

            try
            {
                camera = new Capture(0);
                //camera.FlipHorizontal = true;

                //Start the webcam
                camera.Start();
                camcapture = true;
                //Start capturing
                //Create new thread for capturing images
                webcam = new Thread(captureImageFromWebcam);
                webcam.IsBackground = true;
                webcam.Start();
            }
            catch (Exception ex)
            {
                Console.Out.WriteLine("Error in camera initialization: " + ex.Message);
            }
        }
Example #24
        private void button1_Click(object sender, EventArgs e)
        {
            if (_captureInProgress)
            {
                //stop the capture
                button1.Text = "Start";
                camListComboBox.Enabled = true;
                fpsBox.Enabled = true;
                widthBox.Enabled = true;
                heightBox.Enabled = true;
                _capture.Dispose();
                _frames = 0;
                _fps = 0;
                _dFps = 1;
            }
            else
            {
                try
                {
                    _capture = new Capture(_camIndex);
                    _capture.ImageGrabbed += ProcessFrame;
                }
                catch (NullReferenceException excpt)
                {
                    MessageBox.Show(excpt.Message);
                }
                //start the capture
                button1.Text = "Stop";
                camListComboBox.Enabled = false;
                fpsBox.Enabled = false;
                widthBox.Enabled = false;
                heightBox.Enabled = false;
                if (String.IsNullOrWhiteSpace(fpsBox.Text))
                {
                    fpsBox.Text = sfps.ToString();
                    printConsole("FPS set automaticaly: " + fpsBox.Text + "\n");
                }
                else
                {
                    sfps = Convert.ToInt32(fpsBox.Text);
                }
                if (String.IsNullOrWhiteSpace(widthBox.Text) || String.IsNullOrWhiteSpace(heightBox.Text))
                {
                    widthBox.Text = width.ToString();
                    heightBox.Text = height.ToString();
                    printConsole("Frame size set automaticaly: " + widthBox.Text + " x " + heightBox.Text+ "\n");
                }
                else
                {
                    width = Convert.ToInt32(widthBox.Text);
                    height = Convert.ToInt32(heightBox.Text);
                }
                _capture.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.Fps, sfps);
                _capture.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameWidth, width);
                _capture.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameHeight, height);
                _capture.Start();
                _currentTime = DateTime.Now.Ticks;
            }

            _captureInProgress = !_captureInProgress;
        }
Example #25
        public void Detect()
        {
            capture = new Capture();
            capture.FlipHorizontal = true;
            capture.Start();

            timer = new DispatcherTimer();
            timer.Tick += ProcessFrame;
            timer.Interval = new TimeSpan(0, 0, 0, 0, 20);
            timer.Start();
        }
Example #26
File: MainForm.cs Project: akx/ltag
 private void StartOrStopCapture(bool flag)
 {
     if (flag)
     {
         Util.Dispose(ref _capture);
         _capture = new Capture((int) cameraIndexUpDown.Value);
         _captureProps = new CapturePropertyProxy(_capture);
         _capture.ImageGrabbed += ProcessGrabbedImage;
         _capture.Start();
         SetStatus("Capture started.");
     }
     else
     {
         if(_capture != null) _capture.Stop();
         Util.Dispose(ref _capture);
         SetStatus("Capture stopped.");
     }
     UpdatePropertyGrids();
 }
Example #27
 private void Form1_Load(object sender, EventArgs e)
 {
     //serPort = new SerialPort(ComPort);
     //serPort.BaudRate = 9600;
     //serPort.DataBits = 8;
     //serPort.Parity = Parity.None;
     //serPort.StopBits = StopBits.One;
     //serPort.Open();
     //serPort.DataReceived += new SerialDataReceivedEventHandler(serPort_DataReceived);
     _capture = new Capture();
     _capture.ImageGrabbed += Display_Captured;
     _capture.Start();
 }
Example #28
        private void recordVideoToolStripMenuItem_Click_1(object sender, EventArgs e)
        {
            //set up filter
            SF.Filter = "Video Files|*.avi;*.mp4;*.mpg";
            //Get information about the video file save location
            if (SF.ShowDialog() == System.Windows.Forms.DialogResult.OK)
            {
                //check to see if capture exists if it does dispose of it
                if (_Capture != null)
                {
                    if (_Capture.GrabProcessState == System.Threading.ThreadState.Running) _Capture.Stop(); //Stop current capture if running
                    _Capture.Dispose();//dispose of current capture
                }
                try
                {
                    //record the save location
                    this.Text = "Saving Video: " + SF.FileName; //display the save method and location

                    //set the current video state
                    CurrentState = VideoMethod.Recording;

                    //set up new capture
                    _Capture = new Capture(); //Use the default device
                    _Capture.ImageGrabbed += ProcessFrame; //attach event call to process frames

                    //get/set the capture video information

                    Frame_width = (int)_Capture.GetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_WIDTH);
                    Frame_Height = (int)_Capture.GetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT);

                    FrameRate = 15; //Set the framerate manually as a camera would return 0 if we use GetCaptureProperty()

                    //Set up a video writer component
                    /*                                        ---USE----
                     * VideoWriter(string fileName, int compressionCode, int fps, int width, int height, bool isColor)
                     *
                     * Compression code:
                     *      Usually computed using CvInvoke.CV_FOURCC. On Windows, use -1 to open a codec selection dialog.
                     *      On Linux, use CvInvoke.CV_FOURCC('I', 'Y', 'U', 'V') for the default codec for the specific file name.
                     *      -1: allows the user to choose the codec from a dialog at runtime
                     *       0: creates an uncompressed AVI file (the filename must have a .avi extension)
                     *
                     * isColor:
                     *      true if this is a color video, false otherwise
                     */
                    VW = new VideoWriter(@SF.FileName, -1, (int)FrameRate, Frame_width, Frame_Height, true);

                    //set up the trackerbar
                    UpdateVideo_CNTRL(false);//disable the trackbar

                    //set up the button and images
                    play_pause_BTN1.BackgroundImage = VideoCapture.Properties.Resources.Record;
                    recordstate = false;

                    //Start acquiring from the webcam
                    _Capture.Start();

                }
                catch (NullReferenceException excpt)
                {
                    MessageBox.Show(excpt.Message);
                }
            }
        }
Example #29
      /*
      public void TestGpuVibe()
      {
         int warmUpFrames = 20;

         GpuVibe<Gray> vibe = null;
         Image<Gray, Byte> mask = null;
         using (ImageViewer viewer = new ImageViewer()) //create an image viewer
         using (Capture capture = new Capture()) //create a camera capture
         {
            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {  
               //runs until the application is closed (close button click on the image viewer)
               
               using(Image<Bgr, byte> frame = capture.RetrieveBgrFrame(0))
               using (CudaImage<Bgr, byte> gpuFrame = new CudaImage<Bgr, byte>(frame))
               using (CudaImage<Gray, Byte> gpuGray = gpuFrame.Convert<Gray, Byte>())
               {
                  if (warmUpFrames > 0)
                  {
                     warmUpFrames--;
                     return;
                  }
                  
                  if (vibe == null)
                  {
                     vibe = new GpuVibe<Gray>(1234567, gpuGray, null);
                     return;
                  }
                  else
                  {
                     vibe.Apply(gpuGray, null);
                     if (mask == null)
                        mask = new Image<Gray, byte>(vibe.ForgroundMask.Size);

                     vibe.ForgroundMask.Download(mask);
                     viewer.Image = frame.ConcateHorizontal(mask.Convert<Bgr, Byte>()); //draw the image obtained from camera

                  }
               }
            };
            capture.Start();
            viewer.ShowDialog(); //show the image viewer
         }
      }

      public void TestGpuBackgroundModel()
      {
         int warmUpFrames = 20;
         int totalFrames = 0;
         
         //CudaBackgroundSubtractorMOG2<Bgr>  bgModel = null;
         //CudaBackgroundSubtractorMOG<Bgr> bgModel = null;
         CudaBackgroundSubtractorGMG<Bgr> bgModel = null;
         //CudaBackgroundSubtractorFGD<Bgr> bgModel = null;

         Image<Gray, Byte> mask = null;
         using (ImageViewer viewer = new ImageViewer()) //create an image viewer
         using (Capture capture = new Capture()) //create a camera capture
         {
            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {
               //runs until the application is closed (close button click on the image viewer)
               totalFrames++;

               if (viewer != null && !viewer.IsDisposed)
               {
                  if (viewer.InvokeRequired)
                  {
                     viewer.Invoke((Action)delegate { viewer.Text = String.Format("Processing {0}th frame.", totalFrames); });
                  }
                  else
                  {
                     viewer.Text = String.Format("Processing {0}th frame.", totalFrames); 
                  }
               }
               
               using (Image<Bgr, byte> frame = capture.RetrieveBgrFrame(0))
               using (CudaImage<Bgr, byte> gpuFrame = new CudaImage<Bgr, byte>(frame))
               {
                  if (warmUpFrames > 0)
                  {
                     warmUpFrames--;
                     return;
                  }

                  if (bgModel == null)
                  {
                     //bgModel = new CudaBackgroundSubtractorMOG2<Bgr>(500, 16, true);
                     //bgModel = new CudaBackgroundSubtractorMOG<Bgr>(200, 5, 0.7, 0);
                     bgModel = new CudaBackgroundSubtractorGMG<Bgr>(120, 0.8);
                     bgModel.Apply(gpuFrame, -1.0f, null);
                     //bgModel = new CudaBackgroundSubtractorFGD<Bgr>(128, 15, 25, 64, 25, 40, true, 1, 0.1f, 0.005f, 0.1f, 2.0f, 0.9f, 15.0f);
                     //bgModel.Apply(gpuFrame, -1.0f);
                     
                     return;
                  }
                  else
                  {
                     bgModel.Apply(gpuFrame, -1.0f, null);
                     //bgModel.Apply(gpuFrame, -1.0f);
                     
                     if (mask == null)
                        mask = new Image<Gray, byte>(bgModel.ForgroundMask.Size);

                     bgModel.ForgroundMask.Download(mask);
                     Image<Bgr, Byte> result = frame.ConcateHorizontal(mask.Convert<Bgr, Byte>());
                     if (viewer != null && !viewer.IsDisposed)
                     {
                        if (viewer.InvokeRequired)
                        {
                           viewer.Invoke((Action)delegate { viewer.Image = result; });
                        }
                        else
                        {
                           viewer.Image = result; //draw the image obtained from camera
                        }
                     }

                  }
               }
            };
            capture.Start();
            viewer.ShowDialog(); //show the image viewer
         }
      }*/

      public void CameraTest()
      {
         using (ImageViewer viewer = new ImageViewer()) //create an image viewer
         using (Capture capture = new Capture()) //create a camera capture
         {
            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {  //runs until the application is closed (close button click on the image viewer)
               Mat m = new Mat();
               capture.Retrieve(m);
               viewer.Image = m; //draw the image obtained from camera
            };
            capture.Start();
            viewer.ShowDialog(); //show the image viewer
         }
      }
Example #30
      public void TestFileCapturePause()
      {
         
         int totalFrames1 = 0;

         Capture capture1 = new Capture(EmguAssert.GetFile("tree.avi"));
        
         //capture 1 will continue capturing all the frames.
         EventHandler captureHandle1 = delegate
         {
            Mat img = new Mat();
            capture1.Retrieve(img);
            totalFrames1++;
            Trace.WriteLine(String.Format("capture 1 frame {0}: {1}", totalFrames1, DateTime.Now.ToString()));
         };
         capture1.ImageGrabbed += captureHandle1;
         capture1.Start();

         System.Threading.Thread.Sleep(2);
         int totalFrames2 = 0;
         Capture capture2 = new Capture(EmguAssert.GetFile("tree.avi"));
         int counter = 0;
         //capture 2 will capture 2 frames, pause for 1 second, then continue;
         EventHandler captureHandle = delegate
         {
            counter++;
            totalFrames2++;

            bool needPause = (counter >= 2);
            if (needPause)
            {
               capture2.Pause();
               counter = 0;
            }

            Mat img = new Mat();
            capture2.Retrieve(img);
            Trace.WriteLine(String.Format("capture 2 frame {0}: {1}", totalFrames2, DateTime.Now.ToString()));

            if (needPause)
            {
               System.Threading.ThreadPool.QueueUserWorkItem(delegate
                  {
                     Trace.WriteLine("Sleep for 1 sec");
                     System.Threading.Thread.Sleep(1000);
                     capture2.Start();
                  });
            }

         };

         capture2.ImageGrabbed += captureHandle;
         capture2.Start();


         //int totalFrames = 69;
         Stopwatch s = Stopwatch.StartNew();
         while (totalFrames1 != totalFrames2)
         {
            System.Threading.Thread.Sleep(1000);

            if (s.ElapsedMilliseconds > 120 * 1000)
            {
               EmguAssert.IsTrue(false, "Unable to finish reading frames in 2 mins");
               break;
            }
         }
         capture1.Dispose();
         capture2.Dispose();
      }
Example #31
        protected Boolean openWebCam(int NomCamera, int indexResolution)
        {
            LimiteTerrain.Clear();
            ratioCmParPixel = new double[2] { 1, 1 };
            /* Open the video stream and initialize the EventHandler */

            // TODO: camera selection
            _capture = new Capture(); // Use the default webcam

            // Event raised when an image is grabbed
            _capture.ImageGrabbed += ProcessFrame;

            // Switch to MJPG encoding
            _capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FOURCC, CvInvoke.CV_FOURCC('M', 'J', 'P', 'G'));
            // Resolution
            VideoCaptureDevice tmpVideo = new VideoCaptureDevice(VideoCaptureDevices[NomCamera].MonikerString);

            _capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_WIDTH, tmpVideo.VideoCapabilities[indexResolution].FrameSize.Width);
            _capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT, tmpVideo.VideoCapabilities[indexResolution].FrameSize.Height);

            _capture.Start();

            return true;
        }