/// <summary>
/// Plays the loaded video: grabs frames in a loop and displays them until the
/// user pauses or the stream ends. async void is acceptable here only because
/// this is a top-level event handler.
/// </summary>
private async void btnPlay_Click(object sender, EventArgs e)
{
    pausa = false;
    if (video == null)
    {
        Importar();
    }
    try
    {
        while (!pausa)
        {
            Mat mFrame1 = new Mat();
            // Grab must precede Retrieve: Retrieve decodes the most recently
            // grabbed frame, so the original order (Retrieve, then Grab)
            // produced a stale/empty frame on the first iteration.
            if (!video.Grab())
            {
                video.Stop();
                break;
            }
            video.Retrieve(mFrame1);
            frame = mFrame1.ToImage<Bgr, byte>();
            if (!mFrame1.IsEmpty)
            {
                pictureOriginal.Image = frame.ToBitmap();
                // `tiempo` is the playback rate (frames per second); clamp so a
                // zero value cannot raise DivideByZeroException.
                await Task.Delay(1000 / Math.Max(1, tiempo));
            }
            else
            {
                video.Stop();
                break;
            }
        }
    }
    catch (Exception ex)
    {
        // The original swallowed all exceptions silently; surface the failure.
        MessageBox.Show(ex.Message);
    }
}
// Per-frame tick: pumps the webcam and tracks a mouse-drag selection box.
void Update()
{
    if (!webcam.IsOpened)
    {
        return;
    }

    webcam.Grab();

    if (Input.GetKey(KeyCode.Mouse0))
    {
        // The first frame of the press anchors the box; subsequent frames
        // move the opposite corner with the cursor.
        if (Input.GetKeyDown(KeyCode.Mouse0))
        {
            orgBoxPos = Input.mousePosition;
        }
        else
        {
            endBoxPos = Input.mousePosition;
        }
    }

    if (Input.GetKeyUp(KeyCode.Mouse0))
    {
        bool dragOccurred = endBoxPos != Vector2.zero && orgBoxPos != Vector2.zero;
        if (dragOccurred)
        {
            HandleUnitSelection();
        }
        // Reset both corners so a plain click does not select.
        endBoxPos = orgBoxPos = Vector2.zero;
    }

    if (colorDetect)
    {
        // NOTE(review): intentionally empty in the original — placeholder branch.
    }
}
/// <summary>
/// Opens the video stream at <paramref name="url"/> and wires up frame delivery
/// through a shared-memory-backed InteropBitmap. Returns false if the camera
/// does not answer within 5 seconds or the first frame cannot be retrieved.
/// </summary>
public bool StartStream(string url)
{
    // Wrap the VideoCapture constructor in a task and wait up to 5 seconds.
    // VideoCapture has no timeout of its own: if the camera does not answer,
    // the application would hang otherwise.
    var CaptureTask = Task.Factory.StartNew(() => Capture = new VideoCapture(url));
    if (!CaptureTask.Wait(new TimeSpan(0, 0, 5)))
    {
        return(false);
    }
    // Grab the first frame from the camera.
    Capture.Grab();
    // If it was stored successfully, continue.
    if (!Capture.Retrieve(MatFrame, 3))
    {
        return(false);
    }
    // Determine the frame's pixel format.
    System.Windows.Media.PixelFormat pixelFormat = GetPixelFormat(MatFrame);
    // Compute how many bytes one frame occupies.
    pcount = (uint)(MatFrame.Width * MatFrame.Height * pixelFormat.BitsPerPixel / 8);
    // Create a pagefile-backed memory-mapped object (0x04 = PAGE_READWRITE,
    // INVALID_HANDLE_VALUE = new IntPtr(-1) per the Win32 CreateFileMapping API).
    source = CreateFileMapping(new IntPtr(-1), IntPtr.Zero, 0x04, 0, pcount, null);
    // Map a view of the object into this process (0xF001F = FILE_MAP_ALL_ACCESS).
    map = MapViewOfFile(source, 0xF001F, 0, 0, pcount);
    // Initialise the InteropBitmap over the memory section created above so that
    // WPF renders directly from the buffer the grab callback writes into.
    Frame = Imaging.CreateBitmapSourceFromMemorySection(source, MatFrame.Width, MatFrame.Height, pixelFormat, MatFrame.Width * pixelFormat.BitsPerPixel / 8, 0) as InteropBitmap;
    Capture.ImageGrabbed += Capture_ImageGrabbed;
    Capture.Start();
    return(true);
}
/// <summary>
/// Captures 1080p/30fps frames from the default camera and passes each one,
/// with its timestamp and index, to ProcessFrame. Press 'q' to quit.
/// </summary>
static void Main()
{
    // Dispose the capture device and the reusable frame buffer deterministically
    // (the original leaked both).
    using (VideoCapture cap = new VideoCapture(0))
    using (Mat frame = new Mat())
    {
        if (!cap.IsOpened)
        {
            return;
        }
        cap.SetCaptureProperty(CapProp.FrameWidth, 1920);
        cap.SetCaptureProperty(CapProp.FrameHeight, 1080);
        cap.SetCaptureProperty(CapProp.Fps, 30);
        long msecCounter = 0;
        long frameNumber = 0;
        for (;;)
        {
            if (cap.Grab())
            {
                // Read the position properties between Grab and Retrieve so they
                // describe the frame about to be decoded.
                msecCounter = (long)cap.GetCaptureProperty(CapProp.PosMsec);
                frameNumber = (long)cap.GetCaptureProperty(CapProp.PosFrames);
                if (cap.Retrieve(frame))
                {
                    ProcessFrame(frame, msecCounter, frameNumber);
                }
            }
            // Resolves the original TODO: quit the loop when the user presses 'q'.
            if (Console.KeyAvailable && Console.ReadKey(true).Key == ConsoleKey.Q)
            {
                break;
            }
        }
    }
}
/// <summary>
/// Grabs a frame from the camera (if it is active), runs detection on it and
/// updates the QR/Aruco counters, the preview image and the tilt-angle label.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void Camera_ImageGrab(object sender, EventArgs e)
{
    if ((Camera != null) && Camera.Grab() && Camera.Retrieve(Frame.Mat))
    {
        // Process a clone so the detector cannot mutate the live capture buffer.
        FrameResult fr = Proc.ProcessFrame(Frame.Clone());
        if (fr.QRLocation != null)
        {
            lb_qr.Visible = true;
            countQR += 1;
            // Fixed: the original rendered as "QR found: 3times" (missing space).
            lb_qr.Text = ("QR found: " + countQR + " times");
        }
        if (fr.ArucoLocation != null)
        {
            lb_aruco.Visible = true;
            countAruco += 1;
            // Same missing-space fix as the QR label above.
            lb_aruco.Text = ("Aruco found: " + countAruco + " times");
        }
        // Show the annotated result and its metadata.
        ViewBox.Image = fr.Visual;
        this.Text = "Data: " + fr.Commentary;
        TiltAngleLbl.Text = double.IsNaN(fr.TiltAngle)
            ? "------"
            : string.Format("Наклон {0:f2} гр", fr.TiltAngle * 180.0 / Math.PI);
    }
}
/// <summary>
/// Background capture loop: pulls one frame per iteration into <c>frame</c> and
/// reports progress until cancellation is requested or the device closes.
/// </summary>
private void backgroundWorker_DoWork(object sender, DoWorkEventArgs e)
{
    BackgroundWorker bw = (BackgroundWorker)sender;
    while (!backgroundWorker.CancellationPending)
    {
        // Check the device BEFORE acquiring; the original called Read first and
        // then decoded the frame again via Grab + operator>>, discarding two of
        // the three decoded frames every iteration.
        if (!capture.IsOpened())
        {
            this.backgroundWorker.CancelAsync();
            // Pump the message loop until the worker acknowledges cancellation.
            while (this.backgroundWorker.IsBusy)
            {
                Application.DoEvents();
            }
            return;
        }
        // operator>> performs grab + retrieve into `frame` in one call.
        NativeMethods.videoio_VideoCapture_operatorRightShift_Mat(capture.CvPtr, frame.CvPtr);
        bw.ReportProgress(0);
    }
}
// Late-frame hook: advance the webcam by one grabbed frame while the device is open.
private void LateUpdate()
{
    if (!webcam.IsOpened)
    {
        return;
    }
    webcam.Grab();
}
/// <summary>
/// Reads the video feed at <paramref name="url"/> and forwards roughly one
/// frame per second to the FrameProcessor actor.
/// </summary>
/// <param name="url">Video source (file path or stream URL).</param>
/// <param name="feedId">Identifier echoed in log lines and messages.</param>
private void CaptureFeed(string url, int feedId)
{
    // TODO: on the real webcam use https://getakka.net/articles/utilities/scheduler.html
    // to send a message itself every 1 sec.
    using (var capture = new VideoCapture(url))
    {
        var fps = (int)capture.Fps;
        Console.WriteLine($"Feed {feedId} FPS: {capture.Fps}");
        int i = -1;
        using (var image = new Mat())
        {
            while (true)
            {
                // Grab advances the stream one frame; failure means end of video.
                if (!capture.Grab())
                {
                    Console.WriteLine($"Feed {feedId}: end of video");
                    break;
                }
                i++;
                if (i % fps != 0)
                {
                    continue;
                }
                // Retrieve decodes the frame just grabbed. The original called
                // Read() here, which grabbed a SECOND frame and discarded the
                // one actually sampled by the fps counter.
                capture.Retrieve(image);
                if (image.Empty())
                {
                    Console.WriteLine($"Feed {feedId}: end of video");
                    break;
                }
                Console.WriteLine($"Feed {feedId}: sending frame {i}");
                // Clone: the actor gets its own copy. The original shared the
                // live Mat, which this loop keeps overwriting and disposes at
                // the end of the using block — a use-after-free race.
                FrameProcessor.Tell(new ProcessFrame { Frame = image.Clone(), FrameNo = i, Id = feedId });
            }
        }
    }
}
/// <summary>
/// Opens the camera at the configured resolution and records the actual frame
/// size. Returns false (after an error dialog) when no frame can be grabbed.
/// </summary>
private bool setupCamera()
{
    Camera = new VideoCapture();
    Camera.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameWidth, parameters.cameraResolution.Width);
    Camera.SetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameHeight, parameters.cameraResolution.Height);
    // Verify the device actually delivers frames BEFORE reading one; the
    // original called Read() first, so on a dead camera the Size comparison
    // below ran against an empty Mat.
    if (!Camera.Grab())
    {
        MessageBox.Show("Unable to open camera!", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return(false);
    }
    Mat temp = new Mat();
    Camera.Read(temp);
    // Warn (but continue) when the driver negotiated a different resolution.
    if (temp.Size != parameters.cameraResolution)
    {
        MessageBox.Show("Camera resolution " + parameters.cameraResolution + " doesn't fit given resolution: " + temp.Size, "Warning", MessageBoxButtons.OK, MessageBoxIcon.Warning);
    }
    FrameSize = temp.Size;
    return(true);
}
// Per-frame tick: pull the next frame from the capture device while it is open.
void Update()
{
    if (!capture.IsOpened)
    {
        return;
    }
    capture.Grab();
}
// Per-frame tick: advance the video capture by one frame while the device is open.
void Update()
{
    if (!videoCapture.IsOpened)
    {
        return;
    }
    videoCapture.Grab();
}
/// <summary>
/// Converts the video at <paramref name="path"/> into an ASCII-art rendition,
/// keeping one frame out of every FRAMES, and writes the result to
/// "<paramref name="filename"/>.mp4".
/// </summary>
void generateVideo(string path, string filename)
{
    int i = -1;
    var fcc = VideoWriter.Fourcc('m', 'p', '4', 'v');
    Stopwatch watch = new Stopwatch();
    watch.Start();
    using (var video = new VideoCapture(path))
    // Fixed: the output path ignored the `filename` parameter entirely.
    using (var writer = new VideoWriter($"{filename}.mp4", fcc,
        video.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.Fps) / FRAMES,
        new Size((int)video.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameWidth),
                 (int)video.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameHeight)),
        true))
    using (var img = new Mat())
    {
        // Hoisted: the frame size is loop-invariant.
        var frameSize = new Size(
            (int)video.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameWidth),
            (int)video.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameHeight));
        while (video.Grab())
        {
            if (++i % FRAMES == 0)
            {
                // Retrieve decodes the frame just grabbed; the original Read()
                // grabbed a second frame, throwing off the FRAMES sampling.
                video.Retrieve(img);
                // Dispose the GDI bitmap and the intermediate image to avoid
                // handle/memory leaks across the frame loop.
                using (Bitmap bmp = new Bitmap(convertIntoAscii(img.ToBitmap()), frameSize.Width, frameSize.Height))
                using (Image<Bgr, byte> frame = bmp.ToImage<Bgr, byte>())
                {
                    writer.Write(frame.Mat);
                }
            }
        }
    }
    watch.Stop();
    // Fixed: ElapsedMilliseconds / 60 is neither seconds nor minutes; /1000
    // matches the "sec." label.
    Console.WriteLine($"\u001b[36mTime: {watch.ElapsedMilliseconds / 1000} sec.\u001b[0m");
}
/// <summary>
/// Grabs one synchronized frame pair from the stereo cameras, saves both images
/// into the temp stacks, registers each in its list viewer and returns the two
/// created input-file models (left first, then right).
/// </summary>
private List<InputFileModel> GetInputFromStereoCamera(VideoCapture LeftCamera, VideoCapture RightCamera, int countInputFile = 0)
{
    // Grab both cameras back-to-back first so the pair is as synchronized as
    // possible, then decode each grabbed frame.
    LeftCamera.Grab();
    RightCamera.Grab();

    Mat LeftImage = new Mat();
    Mat RightImage = new Mat();
    LeftCamera.Retrieve(LeftImage);
    RightCamera.Retrieve(RightImage);

    LeftImage.Save(Path.Combine($@"{tempLeftStackDirectory}", $"Left_{countInputFile}.JPG"));
    RightImage.Save(Path.Combine($@"{tempRightStackDirectory}", $"Right_{countInputFile}.JPG"));

    var inputFileLeft = new InputFileModel(Path.Combine($@"{tempLeftStackDirectory}", $"Left_{countInputFile}.JPG"));
    var imageList = _winForm.ImageList[(int)EListViewGroup.LeftCameraStack];
    var listViewer = _winForm.ListViews[(int)EListViewGroup.LeftCameraStack];
    _fileManager.AddInputFileToList(inputFileLeft, _fileManager.listViewerModel.ListOfListInputFolder[(int)EListViewGroup.LeftCameraStack], imageList, listViewer);

    var inputFileRight = new InputFileModel(Path.Combine($@"{tempRightStackDirectory}", $"Right_{countInputFile}.JPG"));
    imageList = _winForm.ImageList[(int)EListViewGroup.RightCameraStack];
    listViewer = _winForm.ListViews[(int)EListViewGroup.RightCameraStack];
    // Fixed copy-paste bug: the right camera stack was being populated with
    // inputFileLeft instead of inputFileRight.
    _fileManager.AddInputFileToList(inputFileRight, _fileManager.listViewerModel.ListOfListInputFolder[(int)EListViewGroup.RightCameraStack], imageList, listViewer);

    var returnList = new List<InputFileModel> { inputFileLeft, inputFileRight };
    return(returnList);
}
/// <summary>
/// Background scan loop: grabs camera frames, runs ZXing barcode decoding on a
/// grayscale copy of each frame and marshals any hit to the UI thread, until
/// cancellation is requested.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void ScanBackgroundWorker_DoWork(object sender, DoWorkEventArgs e)
{
    BackgroundWorker bw = (BackgroundWorker)sender;
    // The reader is configuration-only state: create it once, not per frame.
    ZXing.BarcodeReader reader = new ZXing.BarcodeReader();
    reader.AutoRotate = true;
    reader.Options.TryHarder = true;
    while (!scanBackgroundWorker.CancellationPending)
    {
        // Acquire a frame (operator>> performs grab + retrieve).
        capture.Grab();
        OpenCvSharp.NativeMethods.videoio_VideoCapture_operatorRightShift_Mat(capture.CvPtr, frame.CvPtr);
        // Windows Forms interop expects a Bitmap; dispose the temporaries so
        // GDI handles and native Mat memory are not leaked every iteration.
        ZXing.Result result;
        using (var gray = frame.CvtColor(ColorConversionCodes.BGR2GRAY))
        using (var bmp = BitmapConverter.ToBitmap(gray))
        {
            result = reader.Decode(bmp);
        }
        if (result != null)
        {
            // Marshal the UI update back to the owning thread.
            this.Invoke(new Action<ZXing.Result>(this.UpdateBarcodeFormatText), result);
        }
        bw.ReportProgress(0);
    }
}
// Update is called once per frame.
// Per-frame color segmentation: builds the HSV thresholds, grabs a frame,
// extracts the target mask and the blue mask, dilates both, then detects
// triangles and buildings and shows the intermediate images.
void Update()
{
    // setup HSV color thresholds
    seuilbasHsv = new Hsv(seuilBas.x, seuilBas.y, seuilBas.z);
    seuilhautHsv = new Hsv(seuilHaut.x, seuilHaut.y, seuilHaut.z);
    seuilbasHsvBleu = new Hsv(seuilBasBleu.x, seuilBasBleu.y, seuilBasBleu.z);
    // Fixed copy-paste bug: the HIGH blue threshold was built from the LOW
    // bound (seuilBasBleu), collapsing the blue HSV range to a single value.
    seuilhautHsvBleu = new Hsv(seuilHautBleu.x, seuilHautBleu.y, seuilHautBleu.z);

    fluxVideo.Grab();

    // threshold both masks
    Image<Gray, byte> imageSeuilLimit = Convert(seuilBas, seuilHaut);
    Image<Gray, byte> imageSeuilLimitBleu = Convert(seuilBasBleu, seuilHautBleu);

    // dilate to thicken the detected strokes
    var strutElement = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(5, 5), new Point(2, 2));
    CvInvoke.Dilate(imageSeuilLimit, imageSeuilLimit, strutElement, new Point(2, 2), 1, BorderType.Default, new MCvScalar());
    var strutElementBlue = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(5, 5), new Point(2, 2));
    CvInvoke.Dilate(imageSeuilLimitBleu, imageSeuilLimitBleu, strutElementBlue, new Point(2, 2), 1, BorderType.Default, new MCvScalar());

    // shape recognition: buildings from the blue mask, triangles from the other
    buildings = DrawRectangle(imageSeuilLimitBleu, "Building");
    ListTriangles = DrawTriangle(imageSeuilLimit, "triangle");

    CvInvoke.Imshow("Image seuile POV", imageSeuilLimit.Mat);
    CvInvoke.Imshow("Image seuile Bleu", imageSeuilLimitBleu.Mat);
    CvInvoke.Imshow("Image", imageMat);
    CvInvoke.WaitKey(24);
}
/// <summary>
/// Demo: opens camera 0, shows one grabbed still frame, then streams frames
/// into a window until 'b' is pressed.
/// </summary>
private static void VideoCaptureSample()
{
    // Dispose the capture and frame deterministically (the original leaked both).
    using var cap = new VideoCapture(0);
    if (!cap.IsOpened())
    {
        Console.WriteLine("Can't use camera.");
        return;
    }
    using var frame = new Mat();
    // Show one still frame first (operator>> performs grab + retrieve).
    cap.Grab();
    NativeMethods.videoio_VideoCapture_operatorRightShift_Mat(cap.CvPtr, frame.CvPtr);
    Window.ShowImages(frame);
    using (var window = new Window("window"))
    {
        while (true)
        {
            cap.Read(frame);
            window.ShowImage(frame);
            int key = Cv2.WaitKey(50);
            if (key == 'b')
            {
                break;
            }
        }
    }
}
// Update is called once per frame: grab the next webcam frame while the device is open.
void Update()
{
    if (!webCam.IsOpened)
    {
        return;
    }
    webCam.Grab();
}
// Advances the webcam by one grabbed frame when the device is open.
private void FaceDetection()
{
    if (!webcam.IsOpened)
    {
        return;
    }
    webcam.Grab();
}
// -- cam 1 -- //
/// <summary>
/// Opens camera 0 for the "in" direction, wires the frame handler and starts
/// the continuous background grab loop.
/// </summary>
public void kam_in()
{
    camera_in = new VideoCapture(0);
    //camera_in = new Emgu.CV.VideoCapture("rtsp://*****:*****@192.168.8.113/live/stream");
    camera_in.ImageGrabbed += camera_process;
    // NOTE(review): this manual Grab() happens before Start() begins the
    // continuous loop — presumably to deliver one frame immediately via the
    // handler; confirm the extra first frame is intended.
    camera_in.Grab();
    camera_in.Start();
}
// Per-frame tick: refresh the image from the webcam while the device is open.
void Update()
{
    if (!webcam.IsOpened)
    {
        return;
    }
    // update the image from the webcam
    webcam.Grab();
}
// Per-frame tick: advance the webcam by one grabbed frame while the device is open.
void Update()
{
    if (_webcam.IsOpened)
    {
        _webcam.Grab();
    }
}
/// <summary>
/// Window-loaded handler: positions this window and the camera preview window,
/// creates the detector and frame buffers, opens the camera (with an optional
/// recording writer), binds the drawing canvases and wires the start button.
/// </summary>
private void OnLoaded(object sender, RoutedEventArgs e)
{
    // Screen geometry used to center the window horizontally.
    // NOTE(review): screenY and offsetY are computed but never used below.
    double screenX = Screen.PrimaryScreen.WorkingArea.Width / 2;
    double screenY = Screen.PrimaryScreen.WorkingArea.Height / 2;
    double offsetY = Screen.PrimaryScreen.Bounds.Y;
    Left = Math.Max((screenX - ActualWidth) / 2, 0);
    Top = 40;
    // Preview window is docked directly below this window.
    resultWindow = new CamerPreviewWindow();
    resultWindow.Show();
    resultWindow.Left = Left;
    resultWindow.Top = Top + ActualHeight + 10;
    detector = new CascadeDetector(FACE_FILE, EYE_FILE);
    cameraFrame = new Mat();
    grayFrame = new Mat();
    try
    {
        capture = new VideoCapture();
        // Only record when a target path was configured at app level.
        if (App.VideoPath != null)
        {
            captureOutput = new VideoWriter(
                App.VideoPath,
                App.FrameRate,
                new System.Drawing.Size((int)capture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameWidth), (int)capture.GetCaptureProperty(Emgu.CV.CvEnum.CapProp.FrameHeight)),
                true);
        }
        // Mirror the image so it behaves like a mirror for the user.
        capture.FlipHorizontal = true;
        capture.ImageGrabbed += OnFrameCaptured;
        // Kick off the first frame; OnFrameCaptured handles delivery.
        capture.Grab();
    }
    catch (Exception excpt)
    {
        System.Windows.MessageBox.Show(excpt.Message);
    }
    // Bind each canvas to its backing data buffer.
    canv_left.DataBuffer = leftData;
    canv_right.DataBuffer = rightData;
    canv_lt.DataBuffer = leftTopData;
    canv_rt.DataBuffer = rightTopData;
    canv_lt_dist.DataBuffer = leftDistData;
    canv_rt_dist.DataBuffer = rightDistData;
    // Start button: "开始" (start) flips state and relabels to "停止" (stop).
    btn_start.Click += (_sender, _e) =>
    {
        string content = btn_start.Content as string;
        if (content == "开始")
        {
            btn_start.IsEnabled = false;
            isStarted = true;
            startTime = DateTime.Now;
            btn_start.Content = "停止";
        }
    };
}
/// <summary>
/// Background draw loop: grabs camera frames into <c>frame</c> and reports
/// progress until cancellation is requested.
/// </summary>
public void VideoDrawWork_DoWork(object sender, DoWorkEventArgs e)
{
    BackgroundWorker bw = (BackgroundWorker)sender;
    while (!videoDrawWorker.CancellationPending)
    {
        // frame acquisition
        try
        {
            capture.Grab();
            // operator>> performs grab + retrieve into `frame`.
            NativeMethods.videoio_VideoCapture_operatorRightShift_Mat(capture.CvPtr, frame.CvPtr);
            bw.ReportProgress(0);
        }
        catch
        {
            // NOTE(review): this runs on the worker thread — assigning
            // resultTextBlock.Text here without marshalling (Invoke/Dispatcher)
            // is a cross-thread UI access; confirm and fix on the UI thread.
            // (Message text: "failed to acquire image".)
            resultTextBlock.Text = "画像取得に失敗しました";
        }
    }
}
// Update is called once per frame.
// Per-frame pipeline: grabs a webcam frame, builds threshold masks, and on
// key presses either extrudes the background into a platform sprite (S) or
// builds a texture from the blue mask (A); always shows the raw webcam feed.
void Update()
{
    fluxVideo.Grab();
    image = imageMat.ToImage <Bgra, byte>();
    // threshold masks from the configured HSV bounds
    Image <Gray, byte> imageSeuilLimit = Convert(seuilBas, seuilhaut);
    Image <Gray, byte> imageSeuilBlue = Convert(seuilBasBlue, seuilhautBlue);
    // resize to a fixed working resolution
    CvInvoke.Resize(image, image, new Size(1280, 720));
    CvInvoke.Resize(imageSeuilBlue, imageSeuilBlue, new Size(1280, 720));
    // dilate to thicken the detected strokes
    // NOTE(review): imageSeuilLimit is dilated here but never used afterwards
    // in this method — confirm whether this work is dead.
    var strutElement = CvInvoke.GetStructuringElement(ElementShape.Rectangle, new Size(5, 5), new Point(2, 2));
    CvInvoke.Dilate(imageSeuilLimit, imageSeuilLimit, strutElement, new Point(2, 2), 1, BorderType.Default, new MCvScalar());
    if (Input.GetKeyDown(KeyCode.S))
    {
        // remove the background so only the platforms remain
        ExtrudeBackGround(image, imageSeuilBlue);
        // create the sprite from the generated PNG and rebuild its collider
        gameObject.GetComponent <SpriteRenderer>().sprite = PNG2Sprite.LoadNewSprite("./Assets/test.png", 100.0f);
        var pc = gameObject.GetComponent <PolygonCollider2D>();
        Destroy(pc);
        gameObject.AddComponent <PolygonCollider2D>();
        isDrawing = true;
    }
    // texture from the blue mask
    if (Input.GetKeyDown(KeyCode.A))
    {
        Texture2D tex = new Texture2D(fluxVideo.Width, fluxVideo.Height, TextureFormat.BGRA32, false);
        Mat hh = new Mat();
        CvInvoke.CvtColor(imageSeuilBlue, hh, ColorConversion.Gray2Bgra);
        //tex.LoadImage(hh.ToImage<Bgra, byte>().Bytes);
        tex.LoadRawTextureData(hh.ToImage <Bgra, byte>().Bytes);
        tex.Apply();
        //Level.sprite = Sprite.Create(tex, new Rect(0.0f, 0.0f, tex.width, tex.height), new Vector2(0.5f, 0.5f), 1.0f);
    }
    // raw (unprocessed) webcam feed shown in the UI
    Texture2D tex2 = new Texture2D(fluxVideo.Width, fluxVideo.Height, TextureFormat.BGRA32, false);
    tex2.LoadRawTextureData(imageMat.ToImage <Bgra, byte>().Bytes);
    tex2.Apply();
    UiCamera.sprite = Sprite.Create(tex2, new Rect(0.0f, 0.0f, tex2.width, tex2.height), new Vector2(0.5f, 0.5f), 1.0f);
    //CvInvoke.Flip(imageSeuilBlue, imageSeuilBlue, FlipType.Horizontal);
    CvInvoke.Imshow("image de base", imageSeuilBlue);
    CvInvoke.WaitKey(24);
}
// Update is called once per frame: grab the next webcam frame; nothing to do
// when the device is closed.
void Update()
{
    if (!webCam.IsOpened)
    {
        return;
    }
    webCam.Grab();
}
// Per-frame tick: pump the webcam, then bail out while no frame is available yet.
private void Update()
{
    bool deviceReady = webcam.IsOpened;
    if (deviceReady)
    {
        webcam.Grab();
    }
    // Nothing further to do until a frame has been delivered.
    if (webcamFrame.IsEmpty)
    {
        return;
    }
}
// Initializes the webcam and face classifier; frames are delivered through the
// ImageGrabbed event handler.
void Start()
{
    video = new VideoCapture(0);
    classifier = new CascadeClassifier(fileName: path);
    if (video.IsOpened)
    {
        // Subscribe BEFORE the first Grab: in Emgu CV, Grab raises
        // ImageGrabbed, so the original order dropped the first frame.
        video.ImageGrabbed += new EventHandler(handleWebcamQueryFrame);
        video.Grab();
    }
}
// Verifies that Grab/Retrieve walk an image sequence ("shapes%d.png") frame by
// frame: the first three grab/retrieve pairs succeed and match the source
// images, and the fourth pair fails with an empty frame.
public void GrabAndRetrieveImageSequence()
{
    using var capture = new VideoCapture("_data/image/blob/shapes%d.png");
    using var image1 = new Mat("_data/image/blob/shapes1.png", ImreadModes.Color);
    using var image2 = new Mat("_data/image/blob/shapes2.png", ImreadModes.Color);
    using var image3 = new Mat("_data/image/blob/shapes3.png", ImreadModes.Color);

    Assert.True(capture.IsOpened());
    Assert.Equal("CV_IMAGES", capture.GetBackendName());
    Assert.Equal(3, capture.FrameCount);

    using var frame1 = new Mat();
    using var frame2 = new Mat();
    using var frame3 = new Mat();
    using var frame4 = new Mat();

    // Three frames are available...
    foreach (var frame in new[] { frame1, frame2, frame3 })
    {
        Assert.True(capture.Grab());
        Assert.True(capture.Retrieve(frame));
    }
    // ...and the sequence is exhausted on the fourth attempt.
    Assert.False(capture.Grab());
    Assert.False(capture.Retrieve(frame4));

    Assert.False(frame1.Empty());
    Assert.False(frame2.Empty());
    Assert.False(frame3.Empty());
    Assert.True(frame4.Empty());

    // Normalize the decoded frames to BGR before pixel comparison.
    foreach (var frame in new[] { frame1, frame2, frame3 })
    {
        Cv2.CvtColor(frame, frame, ColorConversionCodes.BGRA2BGR);
    }
    ImageEquals(image1, frame1);
    ImageEquals(image2, frame2);
    ImageEquals(image3, frame3);

    if (Debugger.IsAttached)
    {
        Window.ShowImages(frame1, frame2, frame3, frame4);
    }
}
/// <summary>
/// Advances the video by one frame and returns it, or null when the stream is
/// exhausted or the grabbed frame cannot be decoded.
/// </summary>
public Mat getNextFrame()
{
    if (!inputVideo.Grab())
    {
        return(null);
    }
    var img = new Mat();
    // The original ignored Retrieve's return value and could hand back an
    // empty Mat; treat decode failure as end-of-stream and don't leak the Mat.
    if (!inputVideo.Retrieve(img))
    {
        img.Dispose();
        return(null);
    }
    return(img);
}
// Smoke test: starts the DirectShow capture loop and forces ten grabs, one per
// second; frames are delivered through CaptureOnImageGrabbed.
public async Task Can_capture_camera()
{
    using var capture = new VideoCapture(captureApi: VideoCapture.API.DShow);
    capture.ImageGrabbed += CaptureOnImageGrabbed;
    capture.Start();
    var remaining = 10;
    while (remaining-- > 0)
    {
        capture.Grab();
        await Task.Delay(1000);
    }
}
/// <summary>
/// Background loop: repeatedly pulls a frame from <c>_video</c> into
/// <c>_frame</c> and reports progress until cancellation is requested.
/// </summary>
private void backgroundWorker1_DoWork(object sender, DoWorkEventArgs e)
{
    BackgroundWorker bw = (BackgroundWorker)sender;
    while (!backgroundWorker1.CancellationPending)
    {
        // NOTE(review): operator>> below already performs grab + retrieve, so
        // this explicit Grab() advances (skips) one extra frame per iteration —
        // confirm whether the frame skipping is intentional.
        _video.Grab();
        NativeMethods.videoio_VideoCapture_operatorRightShift_Mat(_video.CvPtr, _frame.CvPtr);
        bw.ReportProgress(0);
    }
}