// Initializes the webcam preview that feeds the profile PictureBox (perfilPB).
private void Ini_tirar_foto()
{
    var videosources = new AForge.Video.DirectShow.FilterInfoCollection(AForge.Video.DirectShow.FilterCategory.VideoInputDevice);
    // BUG FIX: the FilterInfoCollection constructor never returns null, so the
    // old "videosources != null" check could not prevent the exception thrown
    // by videosources[0] when no camera is attached. Guard on Count instead.
    if (videosources.Count > 0)
    {
        videoSource = new AForge.Video.DirectShow.VideoCaptureDevice(videosources[0].MonikerString);
        videoSource.NewFrame += (s, f) =>
        {
            // BUG FIX: dispose the previously displayed frame; NewFrame fires
            // many times per second and each clone leaked a Bitmap before.
            // NOTE(review): NewFrame runs on a capture thread — direct control
            // access matches the rest of this file, but confirm it is safe here.
            var previous = perfilPB.Image;
            perfilPB.Image = (Bitmap)f.Frame.Clone();
            if (previous != null) { previous.Dispose(); }
        };
        videoSource.Start();
    }
}
// Form constructor: wires the first available webcam to pbImagem and starts streaming.
public TFCapturaVideo()
{
    InitializeComponent();
    var videosources = new AForge.Video.DirectShow.FilterInfoCollection(AForge.Video.DirectShow.FilterCategory.VideoInputDevice);
    // BUG FIX: FilterInfoCollection is never null; the old "!= null" check did
    // not stop videosources[0] from throwing when no camera is connected.
    if (videosources.Count > 0)
    {
        videoSource = new AForge.Video.DirectShow.VideoCaptureDevice(videosources[0].MonikerString);
        videoSource.NewFrame += (s, e) =>
        {
            // BUG FIX: dispose the previous frame so the preview does not leak
            // one Bitmap per delivered frame.
            var previous = pbImagem.Image;
            pbImagem.Image = (Bitmap)e.Frame.Clone();
            if (previous != null) { previous.Dispose(); }
        };
        videoSource.Start();
    }
}
/// <summary>
/// Lazily creates the capture device on first use, then starts it
/// when a device is actually available.
/// </summary>
public void StartRecording()
{
    if (created_ == false)
    {
        Create();
    }
    if (device_ == null)
    {
        return;
    }
    device_.Start();
}
// Toggle button: freezes the capture ("Nova Cap.") or resumes it ("Capturar").
private void toolStripButton1_Click(object sender, EventArgs e)
{
    // BUG FIX: videoSource is never assigned when no camera was found at
    // startup, so clicking this button crashed with a NullReferenceException.
    if (videoSource == null)
    {
        return;
    }
    if (videoSource.IsRunning)
    {
        // Ask the capture thread to stop; the last delivered frame stays shown.
        videoSource.SignalToStop();
        bbCaptura.Text = " (F3)\n Nova Cap.";
    }
    else
    {
        videoSource.Start();
        bbCaptura.Text = " (F3)\n Capturar";
    }
}
/// <summary>
/// Streams the first webcam into <paramref name="pb"/>, drawing a red
/// rectangle around the single most prominent face detected in each frame.
/// </summary>
/// <param name="pb">PictureBox that receives the annotated frames.</param>
public void FindFace(PictureBox pb)
{
    AForge.Video.DirectShow.FilterInfoCollection videoCaptureDevices =
        new AForge.Video.DirectShow.FilterInfoCollection(AForge.Video.DirectShow.FilterCategory.VideoInputDevice);
    AForge.Video.DirectShow.VideoCaptureDevice finalVideoSource =
        new AForge.Video.DirectShow.VideoCaptureDevice(videoCaptureDevices[0].MonikerString);

    // BUG FIX: the original subscribed TWO NewFrame handlers — one showing the
    // raw frame and one showing the annotated frame — and both raced to set
    // pb.Image, so the detection rectangle flickered or was overwritten.
    // A single handler now detects and displays.
    finalVideoSource.NewFrame += new AForge.Video.NewFrameEventHandler((sender, eventArgs) =>
    {
        Bitmap image = (Bitmap)eventArgs.Frame.Clone();

        // (translated) Specifies that faces are searched for in the image.
        var cascade = new Accord.Vision.Detection.Cascades.FaceHaarCascade();
        // (translated) Scans the image; the second parameter is the minimum search window size.
        var detector = new Accord.Vision.Detection.HaarObjectDetector(cascade, 1);
        detector.SearchMode = Accord.Vision.Detection.ObjectDetectorSearchMode.Single;
        detector.ScalingFactor = 1.5f;
        detector.ScalingMode = Accord.Vision.Detection.ObjectDetectorScalingMode.GreaterToSmaller;
        detector.UseParallelProcessing = true;
        detector.Suppression = 3;

        Rectangle[] faces = detector.ProcessFrame(image);
        // BUG FIX: Graphics was disposed manually but a new Pen leaked per
        // face; both GDI objects are now released deterministically.
        using (Graphics g = Graphics.FromImage(image))
        using (Pen p = new Pen(Color.Red, 10f))
        {
            foreach (var face in faces)
            {
                g.DrawRectangle(p, face);
            }
        }
        pb.Image = image;
    });
    // NOTE(review): the device handle is a local, so the stream can never be
    // stopped from outside this method — consider storing it in a field.
    finalVideoSource.Start();
}
// Form constructor: previews the first webcam in pictureBox1 and keeps a
// second clone of each frame in takenPicture for later saving.
public TakePicture(MainForm mainForm)
{
    InitializeComponent();
    this.mainForm = mainForm;
    var videosources = new AForge.Video.DirectShow.FilterInfoCollection(AForge.Video.DirectShow.FilterCategory.VideoInputDevice);
    // BUG FIX: FilterInfoCollection is never null; guard on Count so that
    // videosources[0] cannot throw when no camera is connected.
    if (videosources.Count > 0)
    {
        videoSource = new AForge.Video.DirectShow.VideoCaptureDevice(videosources[0].MonikerString);
        videoSource.NewFrame += (s, e) =>
        {
            if (pictureBox1.Image != null)
            {
                pictureBox1.Image.Dispose();
            }
            // BUG FIX: takenPicture's lifetime was tied to pictureBox1.Image;
            // if takenPicture was null (or assigned elsewhere) while the
            // preview held an image, the old code threw / double-disposed.
            // Each Bitmap is now guarded independently.
            if (takenPicture != null)
            {
                takenPicture.Dispose();
            }
            pictureBox1.Image = (Bitmap)e.Frame.Clone();
            takenPicture = (Bitmap)e.Frame.Clone();
        };
        videoSource.Start();
    }
}
// Form constructor: starts the webcam preview and stores the timed-capture
// settings (interval, shot count, output path, base file name).
public Form1(int tempo, int qtd, string path, string nome)
{
    InitializeComponent();
    var videosources = new AForge.Video.DirectShow.FilterInfoCollection(AForge.Video.DirectShow.FilterCategory.VideoInputDevice);
    // BUG FIX: FilterInfoCollection is never null; the old "!= null" check did
    // not stop videosources[0] from throwing when no camera is connected.
    if (videosources.Count > 0)
    {
        videoSource = new AForge.Video.DirectShow.VideoCaptureDevice(videosources[0].MonikerString);
        videoSource.NewFrame += (s, e) =>
        {
            // Release the previous frame to avoid leaking one Bitmap per frame.
            if (pictureBox1.Image != null)
            {
                pictureBox1.Image.Dispose();
            }
            pictureBox1.Image = (Bitmap)e.Frame.Clone();
        };
        videoSource.Start();
    }
    timer1.Interval = tempo;
    _qtd = qtd;
    _path = path;
    _nomeOriginal = nome;
}
// Switches the live preview to the camera selected in the combo box.
private void cmbDispositivos_SelectedIndexChanged(object sender, EventArgs e)
{
    try
    {
        // (translated) Only react when an item is actually selected.
        if (cmbDispositivos.SelectedIndex > -1)
        {
            // BUG FIX: stop the previously selected device before opening a
            // new one; otherwise every selection change left another camera
            // stream running and pushing frames at the form.
            if (_VideoSource != null && _VideoSource.IsRunning)
            {
                _VideoSource.SignalToStop();
            }
            // (translated) Re-create the video source using the combo-box
            // selection as the device moniker.
            _VideoSource = new AForge.Video.DirectShow.VideoCaptureDevice(_VideoSources[cmbDispositivos.SelectedIndex].MonikerString);
            // (translated) Route incoming frames to VideoSourceNewFrame.
            _VideoSource.NewFrame += VideoSourceNewFrame;
            // (translated) Start streaming.
            _VideoSource.Start();
            this.Text = "Dispositivo : " + cmbDispositivos.SelectedItem.ToString();
        }
    }
    catch (Exception ex)
    {
        MetroMessageBox.Show(this, ex.Message, "Ocorreu um erro", MessageBoxButtons.OK, MessageBoxIcon.Error, 150);
    }
}
// Toggles between freezing the current camera frame ("take picture") and
// resuming the live preview ("try again"). The button caption doubles as
// the toggle state.
private void btnTirar_Click(object sender, EventArgs e)
{
    try
    {
        bool previewIsLive = btnTirar.Text == "Tirar!";
        if (previewIsLive)
        {
            // Freeze: stop the stream and keep the last rendered frame.
            _VideoSource.Stop();
            btnTirar.Text = "Tentar de novo";
            ImagemSelecionada = ptbCamera.BackgroundImage;
        }
        else
        {
            // Retry: resume the live preview.
            _VideoSource.Start();
            btnTirar.Text = "Tirar!";
        }
    }
    catch (Exception ex)
    {
        MetroMessageBox.Show(this, ex.Message, "Ocorreu um erro", MessageBoxButtons.OK, MessageBoxIcon.Error, 150);
    }
}
/// <summary>Starts streaming from the underlying video capture device.</summary>
public override void Start() => videoDevice.Start();
// Performs one complete (re)initialization attempt of the AForge capture
// device: disposes any previous instance, creates a new one from the stored
// device path, validates the capability list, selects the capability matching
// the configured camera-view bitmap size, applies it, and starts streaming.
// Returns true on success; on any failure logs the exception and returns false
// (the "Once" in the name suggests callers retry — TODO confirm).
bool TrySetupDeviceOnce()
{
    try
    {
        // Tear down whatever instance a previous attempt may have left behind.
        DisposeAForgeVideoCaptureInstance();
        AForgeVideoCaptureInstance = new AForge.Video.DirectShow.VideoCaptureDevice(VideoCaptureDeviceDevicePath);
        // NOTE(review): 'new' cannot return null in C#; this guard is defensive
        // dead code, preserved as-is.
        if (AForgeVideoCaptureInstance == null) { if (ApplicationCommonSettings.IsDebugging) { Debugger.Break(); } throw new EgsDeviceOperationException("AForgeVideoCaptureInstance == null."); }
        AForgeVideoCaptureInstance.NewFrame += AForgeVideoCaptureInstance_NewFrame;
        StartUvcIsWorkingMonitorTimer();
        // NOTE(review): if this method runs more than once, Tick is subscribed
        // again here with no prior unsubscribe — confirm the handler does not
        // accumulate across retries.
        UvcIsWorkingMonitorTimer.Tick += UvcIsWorkingMonitorTimer_Tick;
        // TODO: MUSTDO (translated from Japanese): setting
        // AForgeVideoCaptureInstance.VideoResolution = VideoCapabilities[someIndex]
        // changes the resolution, but it cannot be changed while
        // AForgeVideoCaptureInstance.IsRunning == true. Also, WaitForStop()
        // following SignalToStop() sometimes never completes. Is this perhaps
        // not a problem on devices newer than the KS version (after the later
        // negotiation update)?
        if (AForgeVideoCaptureInstance.VideoCapabilities == null) { if (ApplicationCommonSettings.IsDebugging) { Debugger.Break(); } throw new EgsDeviceOperationException("AForgeVideoCaptureInstance.VideoCapabilities == null"); }
        if (VideoCaptureDeviceIndex < 0) { if (ApplicationCommonSettings.IsDebugging) { Debugger.Break(); } throw new EgsDeviceOperationException("VideoCaptureDeviceIndex < 0"); }
        // Index of the capability chosen in the user's camera-view size setting.
        var videoCapabilityIndex = Device.Settings.CameraViewImageSourceBitmapSize.OptionalValue.SelectedIndex;
        if (videoCapabilityIndex < 0) { if (ApplicationCommonSettings.IsDebugging) { Debugger.Break(); } throw new EgsDeviceOperationException("videoCapabilityIndex < 0"); }
        if (videoCapabilityIndex >= AForgeVideoCaptureInstance.VideoCapabilities.Length) { if (ApplicationCommonSettings.IsDebugging) { Debugger.Break(); } throw new EgsDeviceOperationException("videoCapabilityIndex >= AForgeVideoCaptureInstance.VideoCapabilities.Length"); }
        var capability = AForgeVideoCaptureInstance.VideoCapabilities[videoCapabilityIndex];
        if (capability == null) { if (ApplicationCommonSettings.IsDebugging) { Debugger.Break(); } throw new EgsDeviceOperationException("capability == null"); }
        // Sanity-check the negotiated frame size before committing to it.
        int width = capability.FrameSize.Width;
        if (width == 0) { if (ApplicationCommonSettings.IsDebugging) { Debugger.Break(); } throw new EgsDeviceOperationException("width == 0"); }
        int height = capability.FrameSize.Height;
        if (height == 0) { if (ApplicationCommonSettings.IsDebugging) { Debugger.Break(); } throw new EgsDeviceOperationException("height == 0"); }
        // Apply the resolution while the device is stopped (see TODO above),
        // then start streaming and mark the camera as connected.
        AForgeVideoCaptureInstance.VideoResolution = capability;
        AForgeVideoCaptureInstance.Start();
        IsCameraDeviceConnected = true;
        return(true);
    }
    catch (Exception ex)
    {
        Debug.WriteLine(ex.Message);
        if (ApplicationCommonSettings.IsDebugging) { Debugger.Break(); }
        return(false);
    }
}