Example #1
        private void dispatcherTimer_Tick(object sender, EventArgs e)
        {
            if (m_capture == null)
            {
                m_capture = new Emgu.CV.VideoCapture();
            }

            // QueryFrame() returns null when no frame is available; convert only
            // after the check, otherwise ToImage() would throw a NullReferenceException.
            using (Mat rawFrame = m_capture.QueryFrame())
            {
                if (rawFrame == null)
                {
                    return;
                }

                using (Image<Bgr, byte> nextFrame = rawFrame.ToImage<Bgr, byte>())
                {
                    Image<Gray, byte> grayframe = nextFrame.Convert<Gray, byte>();

                    TestImage1.Source = ToBitmapSource(nextFrame);

                    if (takeSnapshot)
                    {
                        nextFrame.Save(SnapshotLocation_textbox.Text + "\\image" + snapshotIndex.ToString().PadLeft(3, '0') + ".jpg");
                        takeSnapshot = false;
                        snapshotIndex++;
                    }
                }
            }
        }
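
The `ToBitmapSource` helper used above is not shown in the example. A minimal sketch of the common Emgu CV/WPF interop pattern, assuming the standard gdi32 P/Invoke (the original helper may differ, and depending on the Emgu CV version `ToBitmap()` may require the Emgu.CV.Bitmap package):

        [System.Runtime.InteropServices.DllImport("gdi32.dll")]
        private static extern bool DeleteObject(IntPtr hObject);

        // Converts an Emgu CV image to a WPF BitmapSource via a temporary GDI bitmap.
        public static System.Windows.Media.Imaging.BitmapSource ToBitmapSource(Image<Bgr, byte> image)
        {
            using (System.Drawing.Bitmap bitmap = image.ToBitmap())
            {
                IntPtr hBitmap = bitmap.GetHbitmap();
                try
                {
                    return System.Windows.Interop.Imaging.CreateBitmapSourceFromHBitmap(
                        hBitmap, IntPtr.Zero, System.Windows.Int32Rect.Empty,
                        System.Windows.Media.Imaging.BitmapSizeOptions.FromEmptyOptions());
                }
                finally
                {
                    DeleteObject(hBitmap); // the GDI handle is not freed by Dispose()
                }
            }
        }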
Example #2
        private void timer1_tick(object sender, EventArgs e)
        {
            tick_count++;

            var dstMat = new Mat();
            var frame  = _capture.QueryFrame();
            if (frame == null)
            {
                return; // no frame available (e.g. end of the stream)
            }

            CvInvoke.Resize(frame, dstMat, new Size(640, 480), interpolation: Inter.Cubic);
            Image<Bgr, byte> currentframe = dstMat.ToImage<Bgr, byte>();

            if (tick_count == 1 || tick_count > 5)
            {
                detectFace(currentframe);

                if (tick_count != 1)
                {
                    tick_count = 0;
                }
            }
            else
            {
                foreach (Rectangle faceFound in faceDetected)
                {
                    currentframe.Draw(faceFound, new Bgr(Color.Cyan), 2);
                }
                imageBox1.Image = currentframe;
            }
        }
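
`detectFace` and the `faceDetected` collection are assumed by this example but not shown. A hypothetical sketch, assuming a Haar cascade (the `_faceCascade` field, the cascade file name, and the field types are assumptions, not from the original):

        private CascadeClassifier _faceCascade =
            new CascadeClassifier("haarcascade_frontalface_default.xml"); // assumed cascade file
        private Rectangle[] faceDetected = new Rectangle[0];

        private void detectFace(Image<Bgr, byte> frame)
        {
            // Detect on a grayscale copy; the parameters mirror typical defaults.
            using (Image<Gray, byte> gray = frame.Convert<Gray, byte>())
            {
                faceDetected = _faceCascade.DetectMultiScale(gray, 1.1, 10);
            }
        }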
Example #3
        private bool LoadVideo()
        {
            String szExe = System.Reflection.Assembly.GetExecutingAssembly().GetName().CodeBase;

            m_szExeDir = new Uri(System.IO.Path.GetDirectoryName(szExe)).LocalPath;

            m_szLeftVideo  = Path.Combine(new string[] { m_szExeDir, "data", "left.mp4" });
            m_szRightVideo = Path.Combine(new string[] { m_szExeDir, "data", "right.mp4" });
            if (!File.Exists(m_szLeftVideo) || !File.Exists(m_szRightVideo))
            {
                return false;
            }

            Emgu.CV.VideoCapture lCap = new Emgu.CV.VideoCapture(m_szLeftVideo);
            Emgu.CV.VideoCapture rCap = new Emgu.CV.VideoCapture(m_szRightVideo);
            if (!lCap.IsOpened || !rCap.IsOpened)
            {
                return false;
            }

            m_imgLeft  = lCap.QueryFrame();
            m_imgRight = rCap.QueryFrame();

            m_szLeftImg  = new Size(m_imgLeft.Cols, m_imgLeft.Rows);
            m_szRightImg = new Size(m_imgRight.Cols, m_imgRight.Rows);
            lCap.Stop();
            rCap.Stop();
            lCap.Dispose(); // VideoCapture holds native resources; release them explicitly
            rCap.Dispose();
            //DrawLeftImage(ref m_imgLeft);
            return true;
        }
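
If only the frame dimensions were needed, decoding a frame could be avoided by reading the capture properties instead; a sketch (the helper name is hypothetical, and the values depend on the backend reporting them correctly — LoadVideo above also keeps the first frames, so it needs QueryFrame anyway):

        private Size GetFrameSize(Emgu.CV.VideoCapture cap)
        {
            // Backend-reported dimensions; no frame is decoded.
            int w = (int)cap.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameWidth);
            int h = (int)cap.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameHeight);
            return new Size(w, h);
        }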
Example #4
        // Update is called once per frame
        void Update()
        {
            if (capture == null || cc == null)
            {
                return;
            }


            if (!useCamera)
            {//seek to correct video frame
                var vframe = (Time.time * videoCaptureFps) % videoFrameCount;
                capture.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.PosFrames, vframe);
            }

            using (Mat raw = capture.QueryFrame())
            {
                if (raw == null)
                {
                    return; // no frame available this update
                }

                using (Image<Bgr, byte> img = raw.ToImage<Bgr, byte>())
                {
                    imageSize = img.Size;

                    var faces = cc.DetectMultiScale(img, 1.1, 10);
                    foreach (Rectangle face in faces)
                    {
                        faceRectCenter = GetRectCenter(face);
                        DrawFaceMarkers(img, face);
                    }

                    if (texture == null) // use Unity's overloaded == rather than 'is null'
                    {
                        texture = new Texture2D(img.Width, img.Height);
                    }
                    texture.LoadImage(img.ToJpegData());

                    if (imageBox != null && imageBox.isActiveAndEnabled)
                    {
                        imageBox.texture = texture;
                    }
                }
            }

            playerData.EyePosition = FacePos;
        }
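
`GetRectCenter`, `DrawFaceMarkers`, and `FacePos` are helpers assumed by this Unity example. A minimal sketch of the center computation (the name and return type are assumptions, not from the original):

        // Hypothetical helper: center point of a detection rectangle.
        private static Vector2 GetRectCenter(Rectangle r)
        {
            return new Vector2(r.X + r.Width / 2f, r.Y + r.Height / 2f);
        }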
Example #5
        private void GetFrame(object sender, EventArgs e)
        {
            // QueryFrame() can return null; guard before converting to avoid a NullReferenceException.
            Mat raw = capt.QueryFrame();
            if (raw == null)
            {
                return;
            }
            Image<Bgr, byte> frame = raw.ToImage<Bgr, byte>();

            imageBox1.Image = frame;
            if (sw)
            {
                if (!System.IO.Directory.Exists(System.IO.Path.Combine(Application.StartupPath, "Images")))
                {
                    try
                    {
                        System.IO.Directory.CreateDirectory(System.IO.Path.Combine(Application.StartupPath, "Images"));
                    }
                    catch { }
                }
                var fn = System.IO.Path.Combine(Application.StartupPath, "Images",
                                                DateTime.Now.Hour.ToString().PadLeft(2, '0')
                                                + DateTime.Now.Minute.ToString().PadLeft(2, '0')
                                                + DateTime.Now.Second.ToString().PadLeft(2, '0')
                                                + DateTime.Now.Millisecond.ToString() + ".jpg");
                frame.Save(fn);
            }
        }
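
The file name above concatenates unpadded milliseconds, so names created within the same second do not sort reliably. A compact alternative sketch using a standard .NET format string ("fff" zero-pads the milliseconds):

                var fn = System.IO.Path.Combine(Application.StartupPath, "Images",
                                                DateTime.Now.ToString("HHmmssfff") + ".jpg");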
Example #6
        private void btn_detect_Click(object sender, EventArgs e)
        {
            camera = new Emgu.CV.VideoCapture();
            camera.QueryFrame(); // grab (and discard) an initial frame so the capture is primed
            Application.Idle += new EventHandler(FrameProcedure);
        }
        public void showFrame()
        {
            if (this.InvokeRequired)
            {
                try
                {
                    ShowFrameDelegate SD = new ShowFrameDelegate(showFrame);
                    this.BeginInvoke(SD);
                }
                catch { }
            }
            else
            {
                if (ext == ".jpg")
                {
                    Emgu.CV.Mat matBefore = CvInvoke.Imread(Text_FilePath.Text);
                    Emgu.CV.Mat matAfter  = CvInvoke.Imread(SaveFileName);

                    // CvInvoke.Imread returns an empty Mat rather than null when the file
                    // cannot be read, so check IsEmpty as well.
                    if (matBefore != null && !matBefore.IsEmpty &&
                        matAfter != null && !matAfter.IsEmpty)
                    {
                        // The HBITMAP returned by GetHbitmap() is a native Windows GDI handle;
                        // Dispose() on the managed Bitmap does not free it, so it must be
                        // released explicitly with the GDI DeleteObject call.
                        // Convert the Bitmap objects to native GDI handles.
                        IntPtr gdibitmapBefore = matBefore.Bitmap.GetHbitmap();
                        IntPtr gdibitmapAfter  = matAfter.Bitmap.GetHbitmap();
                        // Assign the bitmaps to the PictureBoxes' Image properties.
                        File_Before.Image = Image.FromHbitmap(gdibitmapBefore);
                        File_After.Image  = Image.FromHbitmap(gdibitmapAfter);
                        // Release the GDI bitmap handles.
                        DeleteObject(gdibitmapBefore);
                        DeleteObject(gdibitmapAfter);
                    }
                    if (matBefore != null)
                    {
                        matBefore.Dispose();
                    }
                    if (matAfter != null)
                    {
                        matAfter.Dispose();
                    }
                    GC.Collect();
                }
                else if (FrameCount == 0 || State == (int)PlayState.PlayState_Play)
                {
                    Emgu.CV.Mat matBefore = objVideoCapture_Before.QueryFrame();
                    Emgu.CV.Mat matAfter  = objVideoCapture_After.QueryFrame();

                    if (matBefore != null && matAfter != null)
                    {
                        FrameCount = objVideoCapture_Before.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.PosFrames);

                        // The HBITMAP returned by GetHbitmap() is a native Windows GDI handle;
                        // Dispose() on the managed Bitmap does not free it, so it must be
                        // released explicitly with the GDI DeleteObject call.
                        // Convert the Bitmap objects to native GDI handles.
                        IntPtr gdibitmapBefore = matBefore.Bitmap.GetHbitmap();
                        IntPtr gdibitmapAfter  = matAfter.Bitmap.GetHbitmap();
                        // Assign the bitmaps to the PictureBoxes' Image properties.
                        File_Before.Image = Image.FromHbitmap(gdibitmapBefore);
                        File_After.Image  = Image.FromHbitmap(gdibitmapAfter);
                        // Release the GDI bitmap handles.
                        DeleteObject(gdibitmapBefore);
                        DeleteObject(gdibitmapAfter);
                    }
                    if (matBefore != null)
                    {
                        matBefore.Dispose();
                    }
                    if (matAfter != null)
                    {
                        matAfter.Dispose();
                    }
                    GC.Collect();
                }
            }
        }
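
This example depends on a `ShowFrameDelegate` declaration and a `DeleteObject` P/Invoke that are not shown. A sketch of the assumed declarations (the delegate shape is inferred from the parameterless BeginInvoke call):

        private delegate void ShowFrameDelegate();

        [System.Runtime.InteropServices.DllImport("gdi32.dll")]
        private static extern bool DeleteObject(IntPtr hObject);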
Example #8
 public void TestPlayVideo()
 {
    VideoCapture capture = new VideoCapture("car.avi");
    ImageViewer viewer = new ImageViewer(null);
     
    Application.Idle += delegate(Object sender, EventArgs e)
    {
       Mat m = capture.QueryFrame();
       if (m != null && !m.IsEmpty)
       {
          viewer.Image = m;
          Thread.Sleep(300);
       }
    };
    viewer.ShowDialog();
 }
Example #9
 public void TestCapture()
 {
    VideoCapture capture = new VideoCapture("abc.efg");
    Mat image = capture.QueryFrame();
 }
Example #10
      /*
      public void CameraTest2()
      {
         using (ImageViewer viewer = new ImageViewer())
         using (Capture capture = new Capture())
         {
            capture.ImageGrabbed += delegate(object sender, EventArgs e)
            {
               Image<Bgr, Byte> img = capture.RetrieveBgrFrame(0);
               img = img.Resize(0.8, Emgu.CV.CvEnum.Inter.Linear);
               Image<Gray, Byte> gray = img.Convert<Gray, Byte>();
               gray._EqualizeHist();
               viewer.Image = gray;

               capture.Pause();
               System.Threading.ThreadPool.QueueUserWorkItem(delegate
               {
                  Thread.Sleep(1000);
                  capture.Start();
               });
            };
            capture.Start();
            viewer.ShowDialog();
         }
      }*/

      public void CameraTest3()
      {
         ImageViewer viewer = new ImageViewer();
         using (VideoCapture capture = new VideoCapture())
         {
            Application.Idle += delegate(object sender, EventArgs e)
            {
               Mat frame = capture.QueryFrame();
               if (frame != null)
               {
                  // Convert the Mat directly; the round trip through a GDI Bitmap is unnecessary.
                  viewer.Image = frame.ToImage<Bgr, Byte>();
               }
            };
            viewer.ShowDialog();
         }
      }
Example #11
      /*
      public void TestPyrLK()
      {
         const int MAX_CORNERS = 500;
         Capture c = new Capture();
         ImageViewer viewer = new ImageViewer();
         Image<Gray, Byte> oldImage = null;
         Image<Gray, Byte> currentImage = null;
         Application.Idle += new EventHandler(delegate(object sender, EventArgs e)
         {
            if (oldImage == null)
            {
               oldImage = c.QueryGrayFrame();
            }

            currentImage = c.QueryGrayFrame();
            Features2D.GFTTDetector detector = new Features2D.GFTTDetector(MAX_CORNERS, 0.05, 3, 3);
            
            //PointF[] features = oldImage.GoodFeaturesToTrack(MAX_CORNERS, 0.05, 3.0, 3, false, 0.04)[0];
            PointF[] shiftedFeatures;
            Byte[] status;
            float[] trackErrors;
            CvInvoke.CalcOpticalFlowPyrLK(oldImage, currentImage, features, new Size(9, 9), 3, new MCvTermCriteria(20, 0.05),
               out shiftedFeatures, out status, out trackErrors);

            Image<Gray, Byte> displayImage = currentImage.Clone();
            for (int i = 0; i < features.Length; i++)
               displayImage.Draw(new LineSegment2DF(features[i], shiftedFeatures[i]), new Gray(), 2);

            oldImage = currentImage;
            viewer.Image = displayImage;
         });
         viewer.ShowDialog();
      }*/

     
      public void TestPyrLKGPU()
      {
         if (!CudaInvoke.HasCuda)
            return;

         const int MAX_CORNERS = 500;
         VideoCapture c = new VideoCapture();
         ImageViewer viewer = new ImageViewer();
         GpuMat oldImage = null;
         GpuMat currentImage = null;
         using (CudaGoodFeaturesToTrackDetector detector = new CudaGoodFeaturesToTrackDetector(DepthType.Cv8U, 1, MAX_CORNERS, 0.05, 3.0, 3, false, 0.04))
         using (CudaDensePyrLKOpticalFlow flow = new CudaDensePyrLKOpticalFlow(new Size(21, 21), 3, 30, false))
         {
            Application.Idle += new EventHandler(delegate(object sender, EventArgs e)
            {
               if (oldImage == null)
               {
                  Mat bgrFrame = c.QueryFrame();
                  using (GpuMat oldBgrImage = new GpuMat(bgrFrame))
                  {
                     oldImage = new GpuMat();
                     CudaInvoke.CvtColor(oldBgrImage, oldImage, ColorConversion.Bgr2Gray);
                  }
               }

               using (Mat tmpFrame = c.QueryFrame())
               using (GpuMat tmp = new GpuMat(tmpFrame))
               {
                  currentImage = new GpuMat();
                  CudaInvoke.CvtColor(tmp, currentImage, ColorConversion.Bgr2Gray);
               }
               using (GpuMat f = new GpuMat())
               using (GpuMat vertex = new GpuMat())
               using (GpuMat colors = new GpuMat())
               using (GpuMat corners = new GpuMat())
               {
                  flow.Calc(oldImage, currentImage, f);

                  //CudaInvoke.CreateOpticalFlowNeedleMap(u, v, vertex, colors);
                  detector.Detect(oldImage, corners, null);
                  //GpuMat<float> detector.Detect(oldImage, null);
                  /*
                  //PointF[] features = oldImage.GoodFeaturesToTrack(MAX_CORNERS, 0.05, 3.0, 3, false, 0.04)[0];
                  PointF[] shiftedFeatures;
                  Byte[] status;
                  float[] trackErrors;
                  OpticalFlow.PyrLK(oldImage, currentImage, features, new Size(9, 9), 3, new MCvTermCriteria(20, 0.05),
                     out shiftedFeatures, out status, out trackErrors);
                  */

                  Mat displayImage = new Mat();
                  currentImage.Download(displayImage);
                      
                  /*
                  for (int i = 0; i < features.Length; i++)
                     displayImage.Draw(new LineSegment2DF(features[i], shiftedFeatures[i]), new Gray(), 2);
                  */
                  oldImage.Dispose(); // release the previous frame before swapping
                  oldImage = currentImage;
                  viewer.Image = displayImage;
               }
            });
            viewer.ShowDialog();
         }
      }