/// <summary>
/// Frame callback: copies the most recently grabbed frame into <c>_frame</c>.
/// Does nothing when the capture is absent or its native handle is gone.
/// </summary>
private void ProcessFrame(object sender, EventArgs arg)
{
    // Guard: bail out unless the capture exists and still owns a native handle.
    if (_capture == null || _capture.Ptr == IntPtr.Zero)
        return;

    _capture.Retrieve(_frame, 0);
}
/// <summary>
/// ImageGrabbed handler for <c>m_capture</c>: when the target file has changed,
/// (re)creates the <c>videoWriter</c> sized to the capture, then appends the
/// current frame to the writer.
/// NOTE(review): this runs on the capture thread; reading
/// SaveRecordingLocation_textbox.Text here is a cross-thread UI access — confirm.
/// </summary>
private void M_capture_ImageGrabbed(object sender, EventArgs e)
{
    destin = SaveRecordingLocation_textbox.Text;

    if (fileChanged)
    {
        totalFrames = m_capture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameCount);
        fps = m_capture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.Fps);
        int frameHeight = Convert.ToInt32(m_capture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameHeight));
        int frameWidth = Convert.ToInt32(m_capture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameWidth));

        string destination = destin + i + ".avi";

        // Fix: dispose the previous writer before replacing it, otherwise its
        // native handle leaks and the previous output file may stay locked.
        if (videoWriter != null)
        {
            videoWriter.Dispose();
        }
        videoWriter = new Emgu.CV.VideoWriter(
            destination,
            Emgu.CV.VideoWriter.Fourcc('I', 'Y', 'U', 'V'), // uncompressed IYUV output
            fps,
            new System.Drawing.Size(frameWidth, frameHeight),
            true); // isColor
        fileChanged = false;
    }

    // Fix: the per-frame Mat was never disposed — native memory leaked on every
    // grabbed frame. (Also removed the unused `fourcc` local.)
    using (Emgu.CV.Mat m = new Emgu.CV.Mat())
    {
        m_capture.Retrieve(m);
        videoWriter.Write(m);
    }
}
/// <summary>
/// ImageGrabbed handler: records the capture's current frame index in
/// <c>ActFrame</c>, pulls the grabbed frame into <c>Frame</c> and forwards it
/// to the ball-search pipeline.
/// </summary>
private void Capture_ImageGrabbed(object sender, EventArgs e)
{
    double framePosition = capture.GetCaptureProperty(CapProp.PosFrames);
    ActFrame = Convert.ToInt32(framePosition);

    capture.Retrieve(Frame);
    ProcessFrameBallSearch(Frame);
}
/// <summary>
/// ImageGrabbed handler: retrieves the grabbed frame, queues a copy of it for
/// the consumer thread, and signals that a new image is available.
/// </summary>
private void Capture_ImageGrabbed(object sender, EventArgs e)
{
    capture = sender as VideoCapture;
    if (capture == null)
        return;

    capture.Retrieve(currentFrame);

    // Fix: enqueue a copy, not the shared Mat. Retrieve() overwrites
    // currentFrame's pixel buffer on every grab, so enqueueing the same
    // instance made every pending queue entry alias the latest frame (a data
    // race with the consumer). The consumer owns the clone and must dispose it.
    imagesCircQ.Enqueue(currentFrame.Clone());
    areGetNewImage.Set();
}
/// <summary>
/// Grabs and retrieves one frame from the capture and hands it to the
/// <c>rec</c> callback, which takes ownership of the Mat. Frames with no
/// subscriber or an empty size are disposed here.
/// </summary>
private void Grab()
{
    Mat mat = new Mat();
    capture.Grab();
    capture.Retrieve(mat);

    if (rec != null && !mat.Size.IsEmpty)
    {
        rec.Invoke(mat);
    }
    else
    {
        // Fix: the Mat was previously abandoned on this path — native memory leak.
        mat.Dispose();
    }
}
/// <summary>
/// Frame-received handler: converts the retrieved frame to a JPEG-backed
/// Bitmap and displays it in <c>pictureBox1</c>.
/// NOTE(review): this likely runs on the capture thread; assigning
/// pictureBox1.Image is a cross-thread UI access — confirm.
/// </summary>
private void ImageReceived(object sender, EventArgs e)
{
    // Fix: Mat and Image<,> wrap native buffers; they leaked on every frame.
    using (Mat frame = new Mat())
    {
        capture.Retrieve(frame, 0); // copy the grabbed picture into the matrix
        using (Image<Bgr, byte> image = frame.ToImage<Bgr, byte>())
        {
            var imgBytes = image.ToJpegData();
            var img = (Bitmap)new ImageConverter().ConvertFrom(imgBytes);

            // Fix: dispose the previously displayed Bitmap after swapping it
            // out, otherwise GDI handles accumulate frame after frame.
            var oldImage = pictureBox1.Image;
            pictureBox1.Image = img;
            if (oldImage != null)
            {
                oldImage.Dispose();
            }
        }
    }
    // The original `catch (Exception) { throw; }` added no handling and reset
    // nothing — removed; exceptions still propagate exactly as before.
}
/// <summary>
/// ImageGrabbed handler: runs the Haar-cascade face classifier on a grayscale
/// copy of the grabbed frame and outlines each detected face in red (BGR 0,0,255).
/// </summary>
private void Capture_ImageGrabbed(object sender, EventArgs e)
{
    try
    {
        // Fix: both Mats wrap native buffers and were leaked on every frame.
        using (Emgu.CV.Mat frame = new Emgu.CV.Mat())
        {
            if (!capture.Retrieve(frame))
                return;

            using (Emgu.CV.Mat grayFrame = new Emgu.CV.Mat())
            {
                // The cascade classifier expects a single-channel image.
                Emgu.CV.CvInvoke.CvtColor(frame, grayFrame, Emgu.CV.CvEnum.ColorConversion.Bgr2Gray);
                Rectangle[] faces = emguFaceClassifier.DetectMultiScale(grayFrame, ScaleFactor, Neighbors);
                foreach (var face in faces)
                {
                    Emgu.CV.CvInvoke.Rectangle(frame, face, new MCvScalar(0, 0, 255));
                }
                //ImageSource = ToBitmapSource(currentFrame);
                //Bitmap bmi = frame.ToBitmap();
                //ImageSource = ToBitmapImage(bmi);
            }
        }
    }
    catch (Exception)
    {
        // Deliberately swallowed (as in the original) so the capture loop keeps
        // running; NOTE(review): consider logging instead of silent discard.
    }
}
/* public void TestGpuVibe() { int warmUpFrames = 20; GpuVibe<Gray> vibe = null; Image<Gray, Byte> mask = null; using (ImageViewer viewer = new ImageViewer()) //create an image viewer using (Capture capture = new Capture()) //create a camera captue { capture.ImageGrabbed += delegate(object sender, EventArgs e) { //run this until application closed (close button click on image viewer) using(Image<Bgr, byte> frame = capture.RetrieveBgrFrame(0)) using (CudaImage<Bgr, byte> gpuFrame = new CudaImage<Bgr, byte>(frame)) using (CudaImage<Gray, Byte> gpuGray = gpuFrame.Convert<Gray, Byte>()) { if (warmUpFrames > 0) { warmUpFrames--; return; } if (vibe == null) { vibe = new GpuVibe<Gray>(1234567, gpuGray, null); return; } else { vibe.Apply(gpuGray, null); if (mask == null) mask = new Image<Gray, byte>(vibe.ForgroundMask.Size); vibe.ForgroundMask.Download(mask); viewer.Image = frame.ConcateHorizontal(mask.Convert<Bgr, Byte>()); //draw the image obtained from camera } } }; capture.Start(); viewer.ShowDialog(); //show the image viewer } } public void TestGpuBackgroundModel() { int warmUpFrames = 20; int totalFrames = 0; //CudaBackgroundSubtractorMOG2<Bgr> bgModel = null; //CudaBackgroundSubtractorMOG<Bgr> bgModel = null; CudaBackgroundSubtractorGMG<Bgr> bgModel = null; //CudaBackgroundSubtractorFGD<Bgr> bgModel = null; Image<Gray, Byte> mask = null; using (ImageViewer viewer = new ImageViewer()) //create an image viewer using (Capture capture = new Capture()) //create a camera captue { capture.ImageGrabbed += delegate(object sender, EventArgs e) { //run this until application closed (close button click on image viewer) totalFrames++; if (viewer != null && !viewer.IsDisposed) { if (viewer.InvokeRequired) { viewer.Invoke((Action)delegate { viewer.Text = String.Format("Processing {0}th frame.", totalFrames); }); } else { viewer.Text = String.Format("Processing {0}th frame.", totalFrames); } } using (Image<Bgr, byte> frame = capture.RetrieveBgrFrame(0)) using (CudaImage<Bgr, byte> 
gpuFrame = new CudaImage<Bgr, byte>(frame)) { if (warmUpFrames > 0) { warmUpFrames--; return; } if (bgModel == null) { //bgModel = new CudaBackgroundSubtractorMOG2<Bgr>(500, 16, true); //bgModel = new CudaBackgroundSubtractorMOG<Bgr>(200, 5, 0.7, 0); bgModel = new CudaBackgroundSubtractorGMG<Bgr>(120, 0.8); bgModel.Apply(gpuFrame, -1.0f, null); //bgModel = new CudaBackgroundSubtractorFGD<Bgr>(128, 15, 25, 64, 25, 40, true, 1, 0.1f, 0.005f, 0.1f, 2.0f, 0.9f, 15.0f); //bgModel.Apply(gpuFrame, -1.0f); return; } else { bgModel.Apply(gpuFrame, -1.0f, null); //bgModel.Apply(gpuFrame, -1.0f); if (mask == null) mask = new Image<Gray, byte>(bgModel.ForgroundMask.Size); bgModel.ForgroundMask.Download(mask); Image<Bgr, Byte> result = frame.ConcateHorizontal(mask.Convert<Bgr, Byte>()); if (viewer != null && !viewer.IsDisposed) { if (viewer.InvokeRequired) { viewer.Invoke((Action)delegate { viewer.Image = result; }); } else { viewer.Image = result; //draw the image obtained from camera } } } } }; capture.Start(); viewer.ShowDialog(); //show the image viewer } }*/

/// <summary>
/// Opens the default camera and streams live frames into an ImageViewer until
/// the viewer window is closed.
/// NOTE(review): a new Mat is allocated per grabbed frame and never disposed —
/// acceptable for a manual demo, but it leaks native memory over time.
/// </summary>
public void CameraTest()
{
    using (ImageViewer viewer = new ImageViewer()) //create an image viewer
    using (VideoCapture capture = new VideoCapture()) //create a camera captue
    {
        capture.ImageGrabbed += delegate(object sender, EventArgs e)
        {
            //run this until application closed (close button click on image viewer)
            Mat m = new Mat();
            capture.Retrieve(m);
            viewer.Image = m; //draw the image obtained from camera
        };
        capture.Start();
        viewer.ShowDialog(); //show the image viewer
    }
}
/* public void TestCodeBookBGModel() { using (Capture capture = new Capture()) using (BGCodeBookModel<Bgr> model = new BGCodeBookModel<Bgr>()) { ImageViewer viewer = new ImageViewer(); Image<Gray, byte> fgMask = capture.QueryFrame().Convert<Gray, Byte>(); Application.Idle += delegate(Object sender, EventArgs args) { Mat frame = capture.QueryFrame(); model.Apply(frame); viewer.Image = model.ForegroundMask; }; viewer.ShowDialog(); } } public void TestBlobTracking() { MCvFGDStatModelParams fgparam = new MCvFGDStatModelParams(); fgparam.alpha1 = 0.1f; fgparam.alpha2 = 0.005f; fgparam.alpha3 = 0.1f; fgparam.delta = 2; fgparam.is_obj_without_holes = 1; fgparam.Lc = 32; fgparam.Lcc = 16; fgparam.minArea = 15; fgparam.N1c = 15; fgparam.N1cc = 25; fgparam.N2c = 25; fgparam.N2cc = 35; fgparam.perform_morphing = 0; fgparam.T = 0.9f; BlobTrackerAutoParam<Bgr> param = new BlobTrackerAutoParam<Bgr>(); param.BlobDetector = new BlobDetector(Emgu.CV.CvEnum.BlobDetectorType.CC); param.FGDetector = new FGDetector<Bgr>(Emgu.CV.CvEnum.ForgroundDetectorType.Fgd, fgparam); param.BlobTracker = new BlobTracker(Emgu.CV.CvEnum.BLOBTRACKER_TYPE.MSFG); param.FGTrainFrames = 10; BlobTrackerAuto<Bgr> tracker = new BlobTrackerAuto<Bgr>(param); //MCvFont font = new MCvFont(Emgu.CV.CvEnum.FontFace.HersheySimplex, 1.0, 1.0); using(ImageViewer viewer = new ImageViewer()) using (Capture capture = new Capture()) { capture.ImageGrabbed += delegate(object sender, EventArgs e) { tracker.Process(capture.RetrieveBgrFrame()); //Image<Bgr, Byte> img = capture.RetrieveBgrFrame(); Image<Bgr, Byte> img = tracker.ForegroundMask.Convert<Bgr, Byte>(); foreach (MCvBlob blob in tracker) { img.Draw((Rectangle)blob, new Bgr(255.0, 255.0, 255.0), 2); img.Draw(blob.ID.ToString(), Point.Round(blob.Center), CvEnum.FontFace.HersheySimplex, 1.0, new Bgr(255.0, 255.0, 255.0)); } viewer.Image = img; }; capture.Start(); viewer.ShowDialog(); } }*/

/// <summary>
/// Live camera demo: background-subtracts each frame (MOG2), detects blobs in
/// the foreground mask, tracks them across frames, and displays the original
/// frame stacked above (foreground mask | annotated tracking result).
/// Runs until the viewer window is closed.
/// </summary>
public void TestCvBlob()
{
    //MCvFont font = new MCvFont(Emgu.CV.CvEnum.FontFace.HersheySimplex, 0.5, 0.5);
    using (CvTracks tracks = new CvTracks())
    using (ImageViewer viewer = new ImageViewer())
    using (VideoCapture capture = new VideoCapture())
    using (Mat fgMask = new Mat())
    {
        //BGStatModel<Bgr> bgModel = new BGStatModel<Bgr>(capture.QueryFrame(), Emgu.CV.CvEnum.BG_STAT_TYPE.GAUSSIAN_BG_MODEL);
        BackgroundSubtractorMOG2 bgModel = new BackgroundSubtractorMOG2(0, 0, true);
        //BackgroundSubstractorMOG bgModel = new BackgroundSubstractorMOG(0, 0, 0, 0);
        capture.ImageGrabbed += delegate(object sender, EventArgs e)
        {
            // Per-frame pipeline: grab -> background model -> blob detect -> track.
            Mat frame = new Mat();
            capture.Retrieve(frame);
            bgModel.Apply(frame, fgMask);
            using (CvBlobDetector detector = new CvBlobDetector())
            using (CvBlobs blobs = new CvBlobs())
            {
                detector.Detect(fgMask.ToImage<Gray, Byte>(), blobs);
                // Drop tiny blobs (noise) below 100 px area.
                blobs.FilterByArea(100, int.MaxValue);
                tracks.Update(blobs, 20.0, 10, 0);
                Image<Bgr, Byte> result = new Image<Bgr, byte>(frame.Size);
                // Copy only the blob pixels from the frame into the result image.
                using (Image<Gray, Byte> blobMask = detector.DrawBlobsMask(blobs))
                {
                    frame.CopyTo(result, blobMask);
                }
                //CvInvoke.cvCopy(frame, result, blobMask);
                foreach (KeyValuePair<uint, CvTrack> pair in tracks)
                {
                    if (pair.Value.Inactive == 0) //only draw the active tracks.
                    {
                        CvBlob b = blobs[pair.Value.BlobLabel];
                        Bgr color = detector.MeanColor(b, frame.ToImage<Bgr, Byte>());
                        // Annotate each active track with its id, bounding box and contour.
                        result.Draw(pair.Key.ToString(), pair.Value.BoundingBox.Location, CvEnum.FontFace.HersheySimplex, 0.5, color);
                        result.Draw(pair.Value.BoundingBox, color, 2);
                        Point[] contour = b.GetContour();
                        result.Draw(contour, new Bgr(0, 0, 255), 1);
                    }
                }
                // Layout: frame on top; (mask | result) side-by-side underneath.
                viewer.Image = frame.ToImage<Bgr, Byte>().ConcateVertical(fgMask.ToImage<Bgr, Byte>().ConcateHorizontal(result));
            }
        };
        capture.Start();
        viewer.ShowDialog();
    }
}
/// <summary>
/// Regression test for VideoCapture.Pause()/Start(): plays two copies of the
/// same file; capture 1 reads continuously while capture 2 pauses for one
/// second after every two frames. The test passes when both captures end up
/// having delivered the same number of frames (within a 2-minute budget).
/// NOTE(review): the loop compares running counters, so it assumes both
/// captures eventually stop producing frames at the same total — confirm the
/// test files are identical copies.
/// </summary>
public void TestFileCapturePause()
{
    int totalFrames1 = 0;
    String fileName = EmguAssert.GetFile("tree.avi");
    String fileName2 = fileName.Replace("tree.avi", "tree2.avi");
    File.Copy(fileName, fileName2, true);

    VideoCapture capture1 = new VideoCapture(fileName);

    //capture one will continue capturing all the frames.
    EventHandler captureHandle1 = delegate
    {
        Mat img = new Mat();
        capture1.Retrieve(img);
        totalFrames1++;
        Trace.WriteLine(String.Format("capture 1 frame {0}: {1}", totalFrames1, DateTime.Now.ToString()));
    };
    capture1.ImageGrabbed += captureHandle1;
    capture1.Start();

    // Small stagger so the two captures don't start at exactly the same instant.
    System.Threading.Thread.Sleep(2);

    int totalFrames2 = 0;
    VideoCapture capture2 = new VideoCapture(fileName2);
    int counter = 0;

    //capture 2 will capture 2 frames, pause for 1 second, then continue;
    EventHandler captureHandle = delegate
    {
        counter++;
        totalFrames2++;
        bool needPause = (counter >= 2);
        if (needPause)
        {
            // Pause immediately so no further ImageGrabbed events fire while we sleep.
            capture2.Pause();
            counter = 0;
        }
        Mat img = new Mat();
        capture2.Retrieve(img);
        Trace.WriteLine(String.Format("capture 2 frame {0}: {1}", totalFrames2, DateTime.Now.ToString()));
        if (needPause)
        {
            // Resume on a worker thread after one second; restarting from inside
            // the grab callback itself would deadlock/re-enter the capture.
            System.Threading.ThreadPool.QueueUserWorkItem(delegate
            {
                Trace.WriteLine("Sleep for 1 sec");
                System.Threading.Thread.Sleep(1000);
                capture2.Start();
            });
        }
    };
    capture2.ImageGrabbed += captureHandle;
    capture2.Start();

    //int totalFrames = 69;
    // Poll until both captures have delivered the same frame count, giving up
    // (and failing the test) after 2 minutes.
    Stopwatch s = Stopwatch.StartNew();
    while (! (totalFrames1 == totalFrames2))
    {
        System.Threading.Thread.Sleep(1000);
        if (s.ElapsedMilliseconds > 120 * 1000)
        {
            EmguAssert.IsTrue(false, "Unable to finished reading frames in 2 mins");
            break;
        }
    }
    capture1.Dispose();
    capture2.Dispose();
}
/// <summary>
/// Verifies that a video file whose name contains non-ASCII (CJK) characters
/// can be opened and read frame-by-frame to the end.
/// </summary>
public void TestFileCaptureNonAscii()
{
    String fileName = EmguAssert.GetFile("tree.avi");
    String newName = fileName.Replace("tree.avi", "树.avi");
    File.Copy(fileName, newName, true);

    int counter = 0;
    // Fix: VideoCapture wraps a native handle and was never disposed.
    using (VideoCapture capture = new VideoCapture(EmguAssert.GetFile(newName)))
    using (Mat m = new Mat())
    {
        while (capture.Grab())
        {
            capture.Retrieve(m);
            counter++;
        }
    }
    Trace.WriteLine(String.Format("{0} frames found in file {1}", counter, newName));
}
/// <summary>
/// Reads up to 10 frames from tree.avi, inverts each frame in place, and
/// writes the results to tree_invert.avi at 10 fps.
/// </summary>
public void TestCaptureFromFile()
{
    using (VideoCapture capture = new VideoCapture(EmguAssert.GetFile("tree.avi")))
    using (VideoWriter writer = new VideoWriter("tree_invert.avi", 10, new Size(capture.Width, capture.Height), true))
    // Fix: the Mat holds native memory and was never disposed.
    using (Mat img = new Mat())
    {
        int maxCount = 10;
        while (capture.Grab() && maxCount > 0)
        {
            capture.Retrieve(img);
            CvInvoke.BitwiseNot(img, img); // in-place pixel inversion
            writer.Write(img);
            maxCount--;
        }
    }
}