Example #1
        public static List<Mat> LoadVideo()
        {
            Emgu.CV.VideoCapture videoCapture;
            List<Mat> framesFromVideo = new List<Mat>();

            FileOp.LoadFromFile((s, path) =>
            {
                videoCapture = new Emgu.CV.VideoCapture(path);
                while (true)
                {
                    Mat mat = new Mat();
                    videoCapture.Read(mat);
                    if (mat.IsEmpty) // end of stream: Read() produced no frame
                    {
                        return;
                    }

                    framesFromVideo.Add(mat);

                    // Skip the next four frames. Grab() advances the stream
                    // without overwriting the Mat just added to the list.
                    for (int p = 0; p < 4; p++)
                    {
                        videoCapture.Grab();
                    }
                }
            });
            return(framesFromVideo);
        }
Example #2
        private bool LoadVideo()
        {
            String szExe = System.Reflection.Assembly.GetExecutingAssembly().GetName().CodeBase;

            m_szExeDir = new Uri(System.IO.Path.GetDirectoryName(szExe)).LocalPath;

            m_szLeftVideo  = Path.Combine(new string[] { m_szExeDir, "data", "left.mp4" });
            m_szRightVideo = Path.Combine(new string[] { m_szExeDir, "data", "right.mp4" });
            if (!File.Exists(m_szLeftVideo) || !File.Exists(m_szRightVideo))
            {
                return(false);
            }

            Emgu.CV.VideoCapture lCap = new Emgu.CV.VideoCapture(m_szLeftVideo);
            Emgu.CV.VideoCapture rCap = new Emgu.CV.VideoCapture(m_szRightVideo);
            if (!lCap.IsOpened || !rCap.IsOpened)
            {
                return(false);
            }

            m_imgLeft  = lCap.QueryFrame();
            m_imgRight = rCap.QueryFrame();

            m_szLeftImg  = new Size(m_imgLeft.Cols, m_imgLeft.Rows);
            m_szRightImg = new Size(m_imgRight.Cols, m_imgRight.Rows);
            lCap.Stop();
            rCap.Stop();
            //DrawLeftImage(ref m_imgLeft);
            return(true);
        }
Example #3
        private void dispatcherTimer_Tick(object sender, EventArgs e)
        {
            if (m_capture == null)
            {
                m_capture = new Emgu.CV.VideoCapture();
            }

            // QueryFrame() returns null when no frame is available,
            // so check before converting.
            using (Mat frame = m_capture.QueryFrame())
            {
                if (frame == null)
                {
                    return;
                }

                using (Image<Bgr, byte> nextFrame = frame.ToImage<Bgr, byte>())
                {
                    TestImage1.Source = ToBitmapSource(nextFrame);

                    if (takeSnapshot)
                    {
                        nextFrame.Save(SnapshotLocation_textbox.Text + "\\image" + snapshotIndex.ToString().PadLeft(3, '0') + ".jpg");
                        takeSnapshot = false;
                        snapshotIndex++;
                    }
                }
            }
        }
Example #4
 public ImageCapture()
 {
     lock (ReadSettingsLocker)
     {
         capture = new VideoCapture(0);
         thread  = new Thread(new ParameterizedThreadStart(grab));
     }
 }
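Note: the constructor above only creates the grab worker; neither the grab body nor the thread.Start() call appears in this example. A minimal sketch of what such a worker might look like, assuming a running flag and a frame hand-off that are not part of the original:

 // Hypothetical worker body (sketch) matching the ParameterizedThreadStart above.
 private void grab(object state)
 {
     Mat frame = new Mat();
     while (running) // assumed volatile bool, cleared on shutdown
     {
         lock (ReadSettingsLocker)
         {
             if (capture == null || !capture.Read(frame) || frame.IsEmpty)
             {
                 continue; // no frame available yet
             }
         }
         OnFrameGrabbed(frame.Clone()); // assumed consumer callback
     }
 }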
Example #5
        private void FrameProcedure(object sender, EventArgs e)
        {
            camera = new Emgu.CV.VideoCapture();
            _Users.Add("");
            //Frame = camera.QueryFrame().Resize(320,240,Emgu.CV.CvEnum.Inter.Cubic);

            throw new NotImplementedException();
        }
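The handler above is an unfinished stub. Following the hint in the commented-out line, a completed frame procedure might look like the sketch below; Frame and CameraBox are assumed members that do not appear in the source, and camera is assumed to be created once elsewhere rather than on every call:

        // Possible completion (sketch; Frame and CameraBox are assumptions):
        private void FrameProcedure(object sender, EventArgs e)
        {
            using (Mat mat = camera.QueryFrame())
            {
                if (mat == null || mat.IsEmpty)
                {
                    return;
                }

                Frame = mat.ToImage<Bgr, byte>().Resize(320, 240, Emgu.CV.CvEnum.Inter.Cubic);
                CameraBox.Image = Frame.ToBitmap();
            }
        }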
Example #6
 private void Capture_ImageGrabbed(object sender, EventArgs e)
 {
     capture = (sender as VideoCapture);
     if (capture != null)
     {
         capture.Retrieve(currentFrame);
         // Enqueue a copy: Retrieve() reuses currentFrame's buffer on the
         // next grab, so queued references would all see the latest frame.
         imagesCircQ.Enqueue(currentFrame.Clone());
         areGetNewImage.Set();
     }
 }
Example #7
 public void UpdateCamera(int CameraNumber)
 {
     lock (ReadSettingsLocker)
     {
         if (this.CameraNumber != CameraNumber)
         {
             capture?.Dispose(); // release the previous device first
             capture = new VideoCapture(CameraNumber);
             this.CameraNumber = CameraNumber;
         }
     }
 }
Example #8
        private void EmguFaceDetector(string path)
        {
            if (capture == null)
            {
                capture = new Emgu.CV.VideoCapture(0);
                // Attach the handler only when the capture is first created,
                // so repeated calls do not stack duplicate subscriptions.
                capture.ImageGrabbed += Capture_ImageGrabbed;
            }
            capture.Start();

            emguFaceClassifier = new Emgu.CV.CascadeClassifier(@"./haarcascade/haarcascade_frontalface_alt.xml");
        }
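The classifier is loaded above, but the detection itself happens in Capture_ImageGrabbed, which this example does not show. A minimal sketch of such a handler, following the standard Emgu CV cascade pattern (an assumption, not code from this project):

        // Sketch of a matching grab handler (assumed, not from the source):
        private void Capture_ImageGrabbed(object sender, EventArgs e)
        {
            Mat frame = new Mat();
            capture.Retrieve(frame);

            using (Mat gray = new Mat())
            {
                CvInvoke.CvtColor(frame, gray, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
                CvInvoke.EqualizeHist(gray, gray);

                // DetectMultiScale returns one rectangle per detected face.
                Rectangle[] faces = emguFaceClassifier.DetectMultiScale(gray, 1.1, 4);
                foreach (Rectangle face in faces)
                {
                    CvInvoke.Rectangle(frame, face, new MCvScalar(0, 255, 0), 2);
                }
            }
        }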
Example #9
 public CameraCapture(int index, PictureBox imageBox, int width, int height, int interval = 10)
 {
     _frameImage = new Mat();
     Interval    = interval;
     _imageBox   = imageBox;
     _camera     = new Emgu.CV.VideoCapture(index);
     //_camera.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_FRAME_WIDTH, width);
     //_camera.SetCaptureProperty(CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT, height);
     _refreshMethodInvoker = Refresh;
     _refreshThread        = new Thread(CallBack);
 }
Example #10
 private void StartCaptureButton(object sender, EventArgs e)
 {
     start_capture.Enabled = false;
     stop_capture.Enabled  = true;
     if (capture == null)
     {
         capture = new Emgu.CV.VideoCapture(0);
         // Subscribe once: re-adding the handler on every click would
         // invoke ImageReceived multiple times per grabbed frame.
         capture.ImageGrabbed += ImageReceived;
     }
     capture.Start();
 }
Example #11
        public void StartCamera()
        {
            capture = new Emgu.CV.VideoCapture();

            /*
             * capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_AUTO_EXPOSURE, 0);
             *
             * capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_BRIGHTNESS, 33);
             * capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_CONTRAST, 54);
             * capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_EXPOSURE, -7);
             */
        }
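The commented-out calls above use the pre-3.x CAP_PROP enum names. With the CapProp enum used elsewhere on this page, the equivalents would look roughly like this sketch; whether each property takes effect depends on the camera and capture backend:

            // CapProp equivalents of the commented-out calls (sketch):
            capture.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.AutoExposure, 0);
            capture.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.Brightness, 33);
            capture.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.Contrast, 54);
            capture.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.Exposure, -7);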
Example #12
        // OnEnable is called just after the object is enabled
        void OnEnable()
        {
            if (useCamera)
            {
                capture = new Emgu.CV.VideoCapture(cameraId);
            }
            else if (videoPath != "")
            {
                capture         = new Emgu.CV.VideoCapture(videoPath);
                videoFrameCount = (int)capture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameCount);
                videoCaptureFps = (int)capture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.Fps);
            }
            else
            {
                return;
            }

            cc = new CascadeClassifier(CASCADE_PATH);
        }
Example #13
        public Form1()
        {
            facePath = Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.Desktop), "haarcascade_frontalface_default.xml");
            face     = new CascadeClassifier(facePath);

            InitializeComponent();
            CvInvoke.UseOpenCL = false;
            try
            {
                _capture = new Emgu.CV.VideoCapture(1);
                _capture.ImageGrabbed += ProcessFrame;
            }
            catch (NullReferenceException excpt)
            {
                MessageBox.Show(excpt.Message);
            }

            _frame = new Mat();
        }
Example #14
 public CameraCapture()
 {
    InitializeComponent();
    CvInvoke.UseOpenCL = false;
    try
    {
       _capture = new VideoCapture();
       _capture.ImageGrabbed += ProcessFrame;
    }
    catch (NullReferenceException excpt)
    {
       MessageBox.Show(excpt.Message);
    }
    _frame = new Mat();
    _grayFrame = new Mat();
    _smallGrayFrame = new Mat();
    _smoothedGrayFrame = new Mat();
    _cannyFrame = new Mat();
 }
Example #15
        protected override void OnShown(EventArgs e)
        {
            try
            {
                if (capt == null)
                {
                    capt = new Emgu.CV.VideoCapture();
                }
            }
            catch (NullReferenceException excpt)
            {
                MessageBox.Show(excpt.Message);
            }
            if (capt != null)
            {
                Application.Idle += new EventHandler(GetFrame);
            }

            base.OnShown(e);
        }
Example #16
        private void OpenFile(string fileName)
        {
            Mat imgOriginal = null;

            try
            {
                imgOriginal = new Mat(fileName, ImreadModes.Color);
            }
            catch (Exception ex)
            {
                capture = new VideoCapture(fileName);
                if (!capture.IsOpened) // the constructor never returns null; check IsOpened instead
                {
                    //lblTrackedColor.Text = "unable to open image, error: " + ex.Message;
                    return;
                }

                MaxFrames = Convert.ToInt32(capture.GetCaptureProperty(CapProp.FrameCount)) - 1;
                ActFrame  = 0;

                capture.ImageGrabbed += Capture_ImageGrabbed;
                capture.Start();
                VideoState = VideoState.Running;
            }

            if (imgOriginal == null)
            {
                if (capture == null)
                {
                    //lblTrackedColor.Text = "unable to open image";
                    return;
                }
                //imgOriginal = capture.QueryFrame();
            }
            else
            {
                ProcessFrame(imgOriginal);
            }

            UpdateUI();
        }
Example #17
        private bool InitObjects()
        {
            bool bRes = false;

            capture = new VideoCapture(0, VideoCapture.API.DShow);

            if (capture.CaptureSource == VideoCapture.CaptureModuleType.Camera && capture.IsOpened)
            {
                capture.SetCaptureProperty(CapProp.FrameWidth, 640);
                capture.SetCaptureProperty(CapProp.FrameHeight, 480);
                capture.SetCaptureProperty(CapProp.Fps, SettingsHolder.Instance.FPS);
                capture.ImageGrabbed += Capture_ImageGrabbed;
                bRes = true;
            }
            else
            {
                bRes = false;
            }

            return(bRes);
        }
Example #18
        private void btm_Start_Click(object sender, EventArgs e)
        {
            SF.Filter = "All files (*.*)|*.*";
            if (SF.ShowDialog() == DialogResult.OK)
            {
                int Index = SF.FileName.IndexOf(".");
                SaveFileName = (Index == -1 ? String.Concat(SF.FileName, ext) : String.Concat(SF.FileName.Substring(0, SF.FileName.IndexOf(".")), ext));
                SF.Dispose();
                if (ext == ".jpg")
                {
                    btm_Play.Enabled  = false;
                    btm_Pause.Enabled = false;
                    TSFaceDetection.TSFaceDetection tSFaceDetection = new TSFaceDetection.TSFaceDetection();
                    tSFaceDetection.FaceReduction(Text_FilePath.Text, "D:\\test.txt", SaveFileName);
                    showFrame();
                }
                else
                {
                    btm_Play.Enabled  = false;
                    btm_Pause.Enabled = false;
                    objVideoCapture   = null;
                    objVideoCapture   = new Emgu.CV.VideoCapture(Text_FilePath.Text);
                    TSFaceDetection.TSFaceDetection tSFaceDetection = new TSFaceDetection.TSFaceDetection();
                    tSFaceDetection.FaceReduction(Text_FilePath.Text, "D:\\test.txt", SaveFileName);

                    objVideoCapture_Before = new Emgu.CV.VideoCapture(Text_FilePath.Text);
                    objVideoCapture_After  = new Emgu.CV.VideoCapture(SaveFileName);
                    FrameCount             = 0;
                    objVideoCapture_Before.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.PosFrames, FrameCount);
                    objVideoCapture_After.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.PosFrames, FrameCount);
                    showFrame();

                    btm_Play.Enabled  = true;
                    btm_Pause.Enabled = true;
                }
            }
        }
Example #19
      public void Process()
      {
         Mat m = new Mat();
         while (true)
         {
            if (_captureEnabled)
            {
               if (_capture == null)
                  _capture = new VideoCapture();

               //Read the camera data to the mat
               //Must use VideoCapture.Read function for UWP to read image from capture.
               _capture.Read(m);
               if (!m.IsEmpty)
               {
                  //some simple image processing; let's just invert the pixels
                  CvInvoke.BitwiseNot(m, m);

                  //The data in the mat that is read from the camera will 
                  //be drawn to the Image control
                  CvInvoke.WinrtImshow();
               }
            }
            else
            {
               if (_capture != null)
               {
                  _capture.Dispose();
                  _capture = null;
               }

               Task t = Task.Delay(100);
               t.Wait();
            }
         }
      }
Example #20
      public void TestFileCaptureNonAscii()
      {
         String fileName = EmguAssert.GetFile("tree.avi");
         String newName = fileName.Replace("tree.avi", "树.avi");
         File.Copy(fileName, newName, true);
         int counter = 0;
         using (VideoCapture capture = new VideoCapture(EmguAssert.GetFile(newName)))
         using (Mat m = new Mat())
         {
            while (capture.Grab())
            {
               capture.Retrieve(m);
               counter++;
            }
         }

         Trace.WriteLine(String.Format("{0} frames found in file {1}", counter, newName));
      }
Example #21
 private void btn_detect_Click(object sender, EventArgs e)
 {
     camera = new Emgu.CV.VideoCapture();
     camera.QueryFrame();
     Application.Idle += new EventHandler(FrameProcedure);
 }
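For comparison, the same start-up can use the push-based ImageGrabbed pattern from Example #10 instead of polling on Application.Idle (a sketch, assuming FrameProcedure keeps its (object, EventArgs) signature):

 // Push-based alternative (sketch), mirroring Example #10:
 private void btn_detect_Click(object sender, EventArgs e)
 {
     camera = new Emgu.CV.VideoCapture();
     camera.ImageGrabbed += FrameProcedure;
     camera.Start();
 }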
Example #22
      public void TestFileCapturePause()
      {
         
         int totalFrames1 = 0;
         String fileName = EmguAssert.GetFile("tree.avi");
         String fileName2 = fileName.Replace("tree.avi", "tree2.avi");
         File.Copy(fileName, fileName2, true);

         VideoCapture capture1 = new VideoCapture(fileName);
        
         //capture one will continue capturing all the frames.
         EventHandler captureHandle1 = delegate
         {
            Mat img = new Mat();
            capture1.Retrieve(img);
            totalFrames1++;
            Trace.WriteLine(String.Format("capture 1 frame {0}: {1}", totalFrames1, DateTime.Now.ToString()));
         };
         capture1.ImageGrabbed += captureHandle1;
         capture1.Start();

         System.Threading.Thread.Sleep(2);



         int totalFrames2 = 0;
         VideoCapture capture2 = new VideoCapture(fileName2);
         int counter = 0;
         //capture 2 will capture 2 frames, pause for 1 second, then continue.
         EventHandler captureHandle = delegate
         {
            counter++;
            totalFrames2++;

            bool needPause = (counter >= 2);
            if (needPause)
            {
               capture2.Pause();
               counter = 0;
            }

            Mat img = new Mat();
             capture2.Retrieve(img);
            Trace.WriteLine(String.Format("capture 2 frame {0}: {1}", totalFrames2, DateTime.Now.ToString()));

            if (needPause)
            {
               System.Threading.ThreadPool.QueueUserWorkItem(delegate
                  {
                     Trace.WriteLine("Sleep for 1 sec");
                     System.Threading.Thread.Sleep(1000);
                     capture2.Start();
                  });
            }

         };

         capture2.ImageGrabbed += captureHandle;
         capture2.Start();


         //int totalFrames = 69;
         Stopwatch s = Stopwatch.StartNew();
         while (totalFrames1 != totalFrames2)
         {
            System.Threading.Thread.Sleep(1000);

            if (s.ElapsedMilliseconds > 120 * 1000)
            {
               EmguAssert.IsTrue(false, "Unable to finish reading frames in 2 mins");
               break;
            }
         }
         capture1.Dispose();
         capture2.Dispose();
      }
Example #23
 protected CaptureEmgu(VideoCapture capture)
 {
     _capture = capture;
 }
Example #24
 /// <summary>
 /// Create a Capture frame source
 /// </summary>
 /// <param name="capture">The capture object that will be converted to a FrameSource</param>
 public CaptureFrameSource(VideoCapture capture)
 {
    _ptr = VideoStabInvoke.VideostabCaptureFrameSourceCreate(capture, ref FrameSourcePtr);
    CaptureSource = capture.CaptureSource;
 }
Example #25
      /*
      public void TestCodeBookBGModel()
      {
         using (Capture capture = new Capture())
         using (BGCodeBookModel<Bgr> model = new BGCodeBookModel<Bgr>())
         {
            ImageViewer viewer = new ImageViewer();
            Image<Gray, byte> fgMask = capture.QueryFrame().Convert<Gray, Byte>();

            Application.Idle += delegate(Object sender, EventArgs args)
            {
               Mat frame = capture.QueryFrame();
               model.Apply(frame);
               viewer.Image = model.ForegroundMask; 
            };
            viewer.ShowDialog();
         }
      }

      public void TestBlobTracking()
      {
         MCvFGDStatModelParams fgparam = new MCvFGDStatModelParams();
         fgparam.alpha1 = 0.1f;
         fgparam.alpha2 = 0.005f;
         fgparam.alpha3 = 0.1f;
         fgparam.delta = 2;
         fgparam.is_obj_without_holes = 1;
         fgparam.Lc = 32;
         fgparam.Lcc = 16;
         fgparam.minArea = 15;
         fgparam.N1c = 15;
         fgparam.N1cc = 25;
         fgparam.N2c = 25;
         fgparam.N2cc = 35;
         fgparam.perform_morphing = 0;
         fgparam.T = 0.9f;

         BlobTrackerAutoParam<Bgr> param = new BlobTrackerAutoParam<Bgr>();
         param.BlobDetector = new BlobDetector(Emgu.CV.CvEnum.BlobDetectorType.CC);
         param.FGDetector = new FGDetector<Bgr>(Emgu.CV.CvEnum.ForgroundDetectorType.Fgd, fgparam);
         param.BlobTracker = new BlobTracker(Emgu.CV.CvEnum.BLOBTRACKER_TYPE.MSFG);
         param.FGTrainFrames = 10;
         BlobTrackerAuto<Bgr> tracker = new BlobTrackerAuto<Bgr>(param);

         //MCvFont font = new MCvFont(Emgu.CV.CvEnum.FontFace.HersheySimplex, 1.0, 1.0);

         using(ImageViewer viewer = new ImageViewer())
         using (Capture capture = new Capture())
         {
            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {
               tracker.Process(capture.RetrieveBgrFrame());
               
               //Image<Bgr, Byte> img = capture.RetrieveBgrFrame();

               Image<Bgr, Byte> img = tracker.ForegroundMask.Convert<Bgr, Byte>();
               foreach (MCvBlob blob in tracker)
               {
                  img.Draw((Rectangle)blob, new Bgr(255.0, 255.0, 255.0), 2);
                  img.Draw(blob.ID.ToString(), Point.Round(blob.Center), CvEnum.FontFace.HersheySimplex, 1.0, new Bgr(255.0, 255.0, 255.0));
               }
               viewer.Image = img;
            };
            capture.Start();
            viewer.ShowDialog();
         }
      }*/
      
      public void TestCvBlob()
      {
         //MCvFont font = new MCvFont(Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, 0.5);
         using (CvTracks tracks = new CvTracks())
         using (ImageViewer viewer = new ImageViewer())
         using (VideoCapture capture = new VideoCapture())
         using (Mat fgMask = new Mat())
         {
            //BGStatModel<Bgr> bgModel = new BGStatModel<Bgr>(capture.QueryFrame(), Emgu.CV.CvEnum.BG_STAT_TYPE.GAUSSIAN_BG_MODEL);
            BackgroundSubtractorMOG2 bgModel = new BackgroundSubtractorMOG2(0, 0, true);
            //BackgroundSubstractorMOG bgModel = new BackgroundSubstractorMOG(0, 0, 0, 0);

            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {
               Mat frame = new Mat();
               capture.Retrieve(frame);
               bgModel.Apply(frame, fgMask);

               using (CvBlobDetector detector = new CvBlobDetector())
               using (CvBlobs blobs = new CvBlobs())
               {
                  detector.Detect(fgMask.ToImage<Gray, Byte>(), blobs);
                  blobs.FilterByArea(100, int.MaxValue);

                  tracks.Update(blobs, 20.0, 10, 0);

                  Image<Bgr, Byte> result = new Image<Bgr, byte>(frame.Size);

                  using (Image<Gray, Byte> blobMask = detector.DrawBlobsMask(blobs))
                  {
                     frame.CopyTo(result, blobMask);
                  }
                  //CvInvoke.cvCopy(frame, result, blobMask);

                  foreach (KeyValuePair<uint, CvTrack> pair in tracks)
                  {
                     if (pair.Value.Inactive == 0) //only draw the active tracks.
                     {
                        CvBlob b = blobs[pair.Value.BlobLabel];
                        Bgr color = detector.MeanColor(b, frame.ToImage<Bgr, Byte>());
                        result.Draw(pair.Key.ToString(), pair.Value.BoundingBox.Location, CvEnum.FontFace.HersheySimplex, 0.5, color);
                        result.Draw(pair.Value.BoundingBox, color, 2);
                        Point[] contour = b.GetContour();
                        result.Draw(contour, new Bgr(0, 0, 255), 1);
                     }
                  }

                  viewer.Image = frame.ToImage<Bgr, Byte>().ConcateVertical(fgMask.ToImage<Bgr, Byte>().ConcateHorizontal(result));
               }
            };
            capture.Start();
            viewer.ShowDialog();
         }
      }
Example #26
      /*
      public void TestPyrLK()
      {
         const int MAX_CORNERS = 500;
         Capture c = new Capture();
         ImageViewer viewer = new ImageViewer();
         Image<Gray, Byte> oldImage = null;
         Image<Gray, Byte> currentImage = null;
         Application.Idle += new EventHandler(delegate(object sender, EventArgs e)
         {
            if (oldImage == null)
            {
               oldImage = c.QueryGrayFrame();
            }

            currentImage = c.QueryGrayFrame();
            Features2D.GFTTDetector detector = new Features2D.GFTTDetector(MAX_CORNERS, 0.05, 3, 3);
            
            //PointF[] features = oldImage.GoodFeaturesToTrack(MAX_CORNERS, 0.05, 3.0, 3, false, 0.04)[0];
            PointF[] shiftedFeatures;
            Byte[] status;
            float[] trackErrors;
            CvInvoke.CalcOpticalFlowPyrLK(oldImage, currentImage, features, new Size(9, 9), 3, new MCvTermCriteria(20, 0.05),
               out shiftedFeatures, out status, out trackErrors);

            Image<Gray, Byte> displayImage = currentImage.Clone();
            for (int i = 0; i < features.Length; i++)
               displayImage.Draw(new LineSegment2DF(features[i], shiftedFeatures[i]), new Gray(), 2);

            oldImage = currentImage;
            viewer.Image = displayImage;
         });
         viewer.ShowDialog();
      }*/

     
      public void TestPyrLKGPU()
      {
         if (!CudaInvoke.HasCuda)
            return;

         const int MAX_CORNERS = 500;
         VideoCapture c = new VideoCapture();
         ImageViewer viewer = new ImageViewer();
         GpuMat oldImage = null;
         GpuMat currentImage = null;
         using (CudaGoodFeaturesToTrackDetector detector = new CudaGoodFeaturesToTrackDetector(DepthType.Cv8U, 1, MAX_CORNERS, 0.05, 3.0, 3, false, 0.04))
         using (CudaDensePyrLKOpticalFlow flow = new CudaDensePyrLKOpticalFlow(new Size(21, 21), 3, 30, false))
         {
            Application.Idle += new EventHandler(delegate(object sender, EventArgs e)
            {
               if (oldImage == null)
               {
                  Mat bgrFrame = c.QueryFrame();
                  using (GpuMat oldBgrImage = new GpuMat(bgrFrame))
                  {
                     oldImage = new GpuMat();
                     CudaInvoke.CvtColor(oldBgrImage, oldImage, ColorConversion.Bgr2Gray);
                  }
               }

               using (Mat tmpFrame = c.QueryFrame())
               using (GpuMat tmp = new GpuMat(tmpFrame))
               {
                  currentImage = new GpuMat();
                  CudaInvoke.CvtColor(tmp, currentImage, ColorConversion.Bgr2Gray);
               }
               using (GpuMat f = new GpuMat())
               
               using (GpuMat vertex = new GpuMat())
               using (GpuMat colors = new GpuMat())
               using(GpuMat corners = new GpuMat())
               {
                  flow.Calc(oldImage, currentImage, f);

                  //CudaInvoke.CreateOpticalFlowNeedleMap(u, v, vertex, colors);
                  detector.Detect(oldImage, corners, null);
                  //GpuMat<float> detector.Detect(oldImage, null);
                  /*
                  //PointF[] features = oldImage.GoodFeaturesToTrack(MAX_CORNERS, 0.05, 3.0, 3, false, 0.04)[0];
                  PointF[] shiftedFeatures;
                  Byte[] status;
                  float[] trackErrors;
                  OpticalFlow.PyrLK(oldImage, currentImage, features, new Size(9, 9), 3, new MCvTermCriteria(20, 0.05),
                     out shiftedFeatures, out status, out trackErrors);
                  */

                  Mat displayImage = new Mat();
                  currentImage.Download(displayImage);
                      
                  /*
                  for (int i = 0; i < features.Length; i++)
                     displayImage.Draw(new LineSegment2DF(features[i], shiftedFeatures[i]), new Gray(), 2);
                  */
                  oldImage = currentImage;
                  viewer.Image = displayImage;
               }
            });
            viewer.ShowDialog();
         }
      }
Example #27
      /*
      public void TestGpuVibe()
      {
         int warmUpFrames = 20;

         GpuVibe<Gray> vibe = null;
         Image<Gray, Byte> mask = null;
         using (ImageViewer viewer = new ImageViewer()) //create an image viewer
         using (Capture capture = new Capture()) //create a camera capture
         {
            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {  
               //run this until application closed (close button click on image viewer)
               
               using(Image<Bgr, byte> frame = capture.RetrieveBgrFrame(0))
               using (CudaImage<Bgr, byte> gpuFrame = new CudaImage<Bgr, byte>(frame))
               using (CudaImage<Gray, Byte> gpuGray = gpuFrame.Convert<Gray, Byte>())
               {
                  if (warmUpFrames > 0)
                  {
                     warmUpFrames--;
                     return;
                  }
                  
                  if (vibe == null)
                  {
                     vibe = new GpuVibe<Gray>(1234567, gpuGray, null);
                     return;
                  }
                  else
                  {
                     vibe.Apply(gpuGray, null);
                     if (mask == null)
                        mask = new Image<Gray, byte>(vibe.ForgroundMask.Size);

                     vibe.ForgroundMask.Download(mask);
                     viewer.Image = frame.ConcateHorizontal(mask.Convert<Bgr, Byte>()); //draw the image obtained from camera

                  }
               }
            };
            capture.Start();
            viewer.ShowDialog(); //show the image viewer
         }
      }

      public void TestGpuBackgroundModel()
      {
         int warmUpFrames = 20;
         int totalFrames = 0;
         
         //CudaBackgroundSubtractorMOG2<Bgr>  bgModel = null;
         //CudaBackgroundSubtractorMOG<Bgr> bgModel = null;
         CudaBackgroundSubtractorGMG<Bgr> bgModel = null;
         //CudaBackgroundSubtractorFGD<Bgr> bgModel = null;

         Image<Gray, Byte> mask = null;
         using (ImageViewer viewer = new ImageViewer()) //create an image viewer
         using (Capture capture = new Capture()) //create a camera capture
         {
            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {
               //run this until application closed (close button click on image viewer)
               totalFrames++;

               if (viewer != null && !viewer.IsDisposed)
               {
                  if (viewer.InvokeRequired)
                  {
                     viewer.Invoke((Action)delegate { viewer.Text = String.Format("Processing {0}th frame.", totalFrames); });
                  }
                  else
                  {
                     viewer.Text = String.Format("Processing {0}th frame.", totalFrames); 
                  }
               }
               
               using (Image<Bgr, byte> frame = capture.RetrieveBgrFrame(0))
               using (CudaImage<Bgr, byte> gpuFrame = new CudaImage<Bgr, byte>(frame))
               {
                  if (warmUpFrames > 0)
                  {
                     warmUpFrames--;
                     return;
                  }

                  if (bgModel == null)
                  {
                     //bgModel = new CudaBackgroundSubtractorMOG2<Bgr>(500, 16, true);
                     //bgModel = new CudaBackgroundSubtractorMOG<Bgr>(200, 5, 0.7, 0);
                     bgModel = new CudaBackgroundSubtractorGMG<Bgr>(120, 0.8);
                     bgModel.Apply(gpuFrame, -1.0f, null);
                     //bgModel = new CudaBackgroundSubtractorFGD<Bgr>(128, 15, 25, 64, 25, 40, true, 1, 0.1f, 0.005f, 0.1f, 2.0f, 0.9f, 15.0f);
                     //bgModel.Apply(gpuFrame, -1.0f);
                     
                     return;
                  }
                  else
                  {
                     bgModel.Apply(gpuFrame, -1.0f, null);
                     //bgModel.Apply(gpuFrame, -1.0f);
                     
                     if (mask == null)
                        mask = new Image<Gray, byte>(bgModel.ForgroundMask.Size);

                     bgModel.ForgroundMask.Download(mask);
                     Image<Bgr, Byte> result = frame.ConcateHorizontal(mask.Convert<Bgr, Byte>());
                     if (viewer != null && !viewer.IsDisposed)
                     {
                        if (viewer.InvokeRequired)
                        {
                           viewer.Invoke((Action)delegate { viewer.Image = result; });
                        }
                        else
                        {
                           viewer.Image = result; //draw the image obtained from camera
                        }
                     }

                  }
               }
            };
            capture.Start();
            viewer.ShowDialog(); //show the image viewer
         }
      }*/

      public void CameraTest()
      {
         using (ImageViewer viewer = new ImageViewer()) //create an image viewer
         using (VideoCapture capture = new VideoCapture()) //create a camera capture
         {
            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {  //run this until application closed (close button click on image viewer)
               Mat m = new Mat();
               capture.Retrieve(m);
               viewer.Image = m; //draw the image obtained from camera
            };
            capture.Start();
            viewer.ShowDialog(); //show the image viewer
         }
      }
Example #28
 public static void TestCaptureFrameSource()
 {
    ImageViewer viewer = new ImageViewer();
    using (VideoCapture capture = new VideoCapture())
    using (CaptureFrameSource frameSource = new CaptureFrameSource(capture))
    {
       Application.Idle += delegate(object sender, EventArgs e)
       {
          Mat frame = frameSource.NextFrame();
          if (frame != null)
             viewer.Image = frame;
       };
       viewer.ShowDialog();
    }
 }
Example #29
      /*
      public void CameraTest2()
      {
         using (ImageViewer viewer = new ImageViewer())
         using (Capture capture = new Capture())
         {
            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {
               Image<Bgr, Byte> img = capture.RetrieveBgrFrame(0);
               img = img.Resize(0.8, Emgu.CV.CvEnum.Inter.Linear);
               Image<Gray, Byte> gray = img.Convert<Gray, Byte>();
               gray._EqualizeHist();
               viewer.Image = gray;

               capture.Pause();
               System.Threading.ThreadPool.QueueUserWorkItem(delegate
               {
                  Thread.Sleep(1000);
                  capture.Start();
               });
            };
            capture.Start();
            viewer.ShowDialog();
         }
      }*/

      public void CameraTest3()
      {
         ImageViewer viewer = new ImageViewer();
         using (VideoCapture capture = new VideoCapture())
         {
            Application.Idle += delegate(object sender, EventArgs e)
            {
               Mat frame = capture.QueryFrame();
               if (frame != null)
               {
                  Bitmap bmp = frame.ToImage<Bgr, Byte>().ToBitmap();

                  viewer.Image = new Image<Bgr, Byte>(bmp);
                  
               }
            };
            viewer.ShowDialog();
         }
      }
Example #30
      /*
      public static void TestOnePassVideoStabilizerCamera()
      {
         ImageViewer viewer = new ImageViewer();
         using (Capture capture = new Capture())
         using (GaussianMotionFilter motionFilter = new GaussianMotionFilter())
         //using (Features2D.FastDetector detector = new Features2D.FastDetector(10, true))
         using (Features2D.SURF detector = new Features2D.SURF(500, false))
         //using (Features2D.ORBDetector detector = new Features2D.ORBDetector(500))
         using (OnePassStabilizer stabilizer = new OnePassStabilizer(capture))
         {
            stabilizer.SetMotionFilter(motionFilter);
            //motionEstimator.SetDetector(detector);

            //stabilizer.SetMotionEstimator(motionEstimator);
            Application.Idle += delegate(object sender, EventArgs e)
            {
               Image<Bgr, byte> frame = stabilizer.NextFrame();
               if (frame != null)
                  viewer.Image = frame;
            };
            viewer.ShowDialog();
         }
      }*/

      public static void TestOnePassVideoStabilizer()
      {
         ImageViewer viewer = new ImageViewer();
         using (VideoCapture capture = new VideoCapture("tree.avi"))
         using (CaptureFrameSource frameSource = new CaptureFrameSource(capture))
         using (OnePassStabilizer stabilizer = new OnePassStabilizer(frameSource))
         {
            Stopwatch watch = new Stopwatch();
            //stabilizer.SetMotionEstimator(motionEstimator);
            Application.Idle += delegate(object sender, EventArgs e)
            {
               watch.Reset();
               watch.Start();
               Mat frame = stabilizer.NextFrame();
               watch.Stop();
               if (watch.ElapsedMilliseconds < 200)
               {
                  Thread.Sleep(200 - (int)watch.ElapsedMilliseconds);
               }
               if (frame != null)
                  viewer.Image = frame;
            };
            viewer.ShowDialog();
         }
      }
Example #31
 public void TestCaptureFromFile()
 {
    using (VideoCapture capture = new VideoCapture(EmguAssert.GetFile( "tree.avi")))
    using (VideoWriter writer = new VideoWriter("tree_invert.avi", 10, new Size(capture.Width, capture.Height), true))
    {
       int maxCount = 10;
       Mat img = new Mat();
       while (capture.Grab() && maxCount > 0)
       {
          capture.Retrieve(img);
          CvInvoke.BitwiseNot(img, img);
          writer.Write(img);
          maxCount--;
       }
    }
 }
Example #32
 public void TestPlayVideo()
 {
    VideoCapture capture = new VideoCapture("car.avi");
    ImageViewer viewer = new ImageViewer(null);
     
    Application.Idle += delegate(Object sender, EventArgs e)
    {
       Mat m = capture.QueryFrame();
       if (m != null && !m.IsEmpty)
       {
          viewer.Image = m;
          Thread.Sleep(300);
       }
    };
    viewer.ShowDialog();
 }
Example #33
 public void TestCapture()
 {
    VideoCapture capture = new VideoCapture("abc.efg");
    Mat image = capture.QueryFrame();
 }
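Constructing a VideoCapture from a path that cannot be opened ("abc.efg") typically does not throw; the capture just fails to open. A guarded variant would check IsOpened before reading (a sketch):

 // Guarded variant (sketch): verify the source opened before reading.
 public void TestCaptureGuarded()
 {
     using (VideoCapture capture = new VideoCapture("abc.efg"))
     {
         if (capture.IsOpened)
         {
             Mat image = capture.QueryFrame();
         }
     }
 }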
Example #34
 public static void TestTwoPassVideoStabilizer()
 {
    ImageViewer viewer = new ImageViewer();
    using (VideoCapture capture = new VideoCapture("tree.avi"))
    using (GaussianMotionFilter motionFilter = new GaussianMotionFilter(15, -1.0f))
    //using (Features2D.FastDetector detector = new Features2D.FastDetector(10, true))
    //using (Features2D.SURF detector = new Features2D.SURF(500, false))
    //using (Features2D.ORBDetector detector = new Features2D.ORBDetector(500))
    using (CaptureFrameSource frameSource = new CaptureFrameSource(capture))
    using (TwoPassStabilizer stabilizer = new TwoPassStabilizer(frameSource))
    {
       Stopwatch watch = new Stopwatch();
       //stabilizer.SetMotionEstimator(motionEstimator);
       Application.Idle += delegate(object sender, EventArgs e)
       {
          watch.Reset();
          watch.Start();
          Mat frame = stabilizer.NextFrame();
          watch.Stop();
          if (watch.ElapsedMilliseconds < 200)
          {
             Thread.Sleep(200 - (int) watch.ElapsedMilliseconds);
          }
          if (frame != null)
             viewer.Image = frame;
       };
       viewer.ShowDialog();
    }
 }
Example #35
      public Form1()
      {
         InitializeComponent();

         //try to create the capture
         if (_capture == null)
         {
            try
            {
               _capture = new VideoCapture();
            }
            catch (NullReferenceException excpt)
            {   //show errors if there is any
               MessageBox.Show(excpt.Message);
            }
         }

         if (_capture != null) //if camera capture has been successfully created
         {
            _motionHistory = new MotionHistory(
                1.0, //in seconds, the duration of motion history you want to keep
                0.05, //in seconds, maxDelta for cvCalcMotionGradient
                0.5); //in seconds, minDelta for cvCalcMotionGradient

            _capture.ImageGrabbed += ProcessFrame;
            _capture.Start();
         }
      }
Example #36
      public Form1()
      {
         InitializeComponent();

         try
         {
            _capture = new VideoCapture();
            _capture.ImageGrabbed += ProcessFrame;
         }
         catch (NullReferenceException excpt)
         {
            MessageBox.Show(excpt.Message);
         }
         UpdateMessage(String.Empty);
      }
Example #37
      void Run()
      {
         try
         {
            _cameraCapture = new VideoCapture();
         }
         catch (Exception e)
         {
            MessageBox.Show(e.Message);
            return;
         }

         _fgDetector = new Emgu.CV.VideoSurveillance.BackgroundSubtractorMOG2();
         _blobDetector = new CvBlobDetector();
         _tracker = new CvTracks();

         Application.Idle += ProcessFrame;
      }