private void btnLigarCamera_Click(object sender, EventArgs e)
{
    // Probes every available video input device and starts previewing on the
    // first one that initializes successfully; a faulty device is skipped.
    CamContainer = new DirectX.Capture.Filters();
    try
    {
        int no_of_cam = CamContainer.VideoInputDevices.Count;
        for (int i = 0; i < no_of_cam; i++)
        {
            try
            {
                // Get the i-th video input device.
                Camera = CamContainer.VideoInputDevices[i];
                // Build the capture graph for video only (no audio device).
                CaptureInfo = new DirectX.Capture.Capture(Camera, null);
                // Render the live preview into the PictureBox.
                CaptureInfo.PreviewWindow = pcCamera;
                // Hook the frame-captured callback.
                CaptureInfo.FrameCaptureComplete += AtualizaImagem;
                // Grab one frame from the device.
                CaptureInfo.CaptureFrame();
                // Device found and initialized — stop probing the rest.
                break;
            }
            catch (Exception)
            {
                // BUG FIX: the original did `throw ex;` here, which both reset
                // the stack trace and aborted the probe loop on the first
                // faulty device (contradicting the break-on-success design).
                // Swallow and try the next device instead.
            }
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(this, ex.Message);
    }
}
public ChooseCaptureAndAudioDeviceForVideoRecordingWindow()
{
    InitializeComponent();

    // Enumerate the DirectShow devices once and fill both selection combos.
    _filters = new Filters();

    foreach (Filter device in _filters.VideoInputDevices)
    {
        cbCamera.Items.Add(device.Name);
    }

    foreach (Filter device in _filters.AudioInputDevices)
    {
        cbMicrophone.Items.Add(device.Name);
    }
}
public MainWindow()
{
    // Bootstrap the IoC kernel before anything is resolved from it.
    KernelUtil.CreateKernel();

    // Enumerate capture devices up front.
    _cameraFilters = new Filters();

    InitializeComponent();

    // Resolve the view model and bind it as this window's DataContext.
    _vm = KernelUtil.Kernel.Get<MainWindowViewModel>();
    DataContext = _vm;

    LoadCamera();
    KeyDown += MainWindow_KeyDown;
}
private SieuAmManager()
{
    // Enumerates capture devices and opens the first camera. Shows a
    // (Vietnamese) "no video card detected" message and bails out when
    // enumeration fails or no camera is present.
    try
    {
        filters = new Filters();
    }
    catch
    {
        MessageBox.Show("Không nhận diện được bất kỳ card video! Xin kiểm tra lại");
        return;
    }

    // BUG FIX: the original indexed VideoInputDevices[0] unconditionally,
    // which throws when enumeration succeeds but the device list is empty.
    if (filters.VideoInputDevices == null || filters.VideoInputDevices.Count == 0)
    {
        MessageBox.Show("Không nhận diện được bất kỳ card video! Xin kiểm tra lại");
        return;
    }

    // Open a capture graph on the first camera; no audio device.
    capture = new Capture(filters.VideoInputDevices[0], null);
}
public DataTable LoadDeviceList()
{
    // Builds a table of the available video input devices:
    //   id         — zero-based device index
    //   CameraName — the DirectShow friendly name
    // On enumeration failure an empty table WITH the same schema is returned.
    DataTable dt = new DataTable();
    dt.Columns.Add("id");
    dt.Columns.Add("CameraName");

    try
    {
        DirectX.Capture.Filters dispositive = new DirectX.Capture.Filters();
        for (int i = 0; i < dispositive.VideoInputDevices.Count; i++)
        {
            Filter f = dispositive.VideoInputDevices[i];
            DataRow dr = dt.NewRow();
            dr["id"] = i;
            dr["CameraName"] = f.Name;
            dt.Rows.Add(dr);
        }
    }
    catch (Exception)
    {
        // BUG FIX: the original replaced dt with a brand-new DataTable,
        // silently dropping the "id"/"CameraName" schema callers rely on.
        // Keep the schema and just discard any partially-added rows.
        dt.Rows.Clear();
    }

    return dt;
}
/// <summary>
/// Creates a new UDP streaming client instance.
/// </summary>
/// <param name="video_window">PictureBox in which the camera/stream image is displayed.</param>
/// <param name="remote_ip">Remote IP of the server or of another client.</param>
public UdpStreamClient(PictureBox video_window, string remote_ip)
{
    videoCapture = null;
    filters = new Filters();
    microphone = null;
    speaker = null;
    streaming = false;
    recieving = false;
    inColor = true;
    timeInterval = 1000;
    framesCount = 0;
    videoWindow = video_window;
    audioCodec = new PCMU();

    #region Initial compression settings
    //#################################################
    encoderParams = new EncoderParameters(2);
    // Compress frames as JPEG.
    jpegEncoder = GetEncoder(ImageFormat.Jpeg);
    // Set the image quality.
    this.SetImageQuality(20L);
    // Set the color depth.
    this.SetImageColorDepth(8L);
    //#################################################
    #endregion // Initial compression settings

    // Resolve the local IP address.
    // BUG FIX: Dns.GetHostByName is obsolete; Dns.GetHostEntry is the
    // supported replacement with the same resolution semantics.
    strHostName = Dns.GetHostName();
    this.ipHostEntry = Dns.GetHostEntry(strHostName);
    this.localEndPoint = new IPEndPoint(ipHostEntry.AddressList[0], localUdpPort);

    // Parse and store the remote endpoint; on a malformed address the user
    // is notified and remoteEndPoint stays null (original behavior kept).
    try
    {
        this.remoteEndPoint = new IPEndPoint(IPAddress.Parse(remote_ip), remoteUdpPort);
    }
    catch (FormatException ex)
    {
        MessageBox.Show(ex.Message, "Streaming");
    }
}
private void Form3_Load(object sender, EventArgs e)
{
    CamContainer = new DirectX.Capture.Filters();
    try
    {
        // Probe every video input device and keep the first one that
        // initializes successfully.
        int deviceCount = CamContainer.VideoInputDevices.Count;
        for (int index = 0; index < deviceCount; index++)
        {
            try
            {
                // Pick the candidate device and wire up the capture graph.
                Camera = CamContainer.VideoInputDevices[index];
                CaptureInfo = new DirectX.Capture.Capture(Camera, null);
                CaptureInfo.PreviewWindow = this.pictureBox1;
                CaptureInfo.FrameCaptureComplete += RefreshImage;
                CaptureInfo.CaptureFrame();
                break; // device works — no need to probe the rest
            }
            catch (Exception)
            {
                // Deliberate best-effort: a device that fails to initialize
                // is skipped and the next one is tried.
            }
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(this, ex.Message);
    }
}
protected void Preview()
{
    // Shows the camera preview in _panel, building the capture graph on
    // first use and re-targeting it on later calls.
    if (capture != null)
    {
        // A capture graph already exists — stop it and retarget the preview.
        capture.Stop();
        capture.PreviewWindow = _panel;
        return;
    }

    filters = new Filters();
    if (filters.VideoInputDevices == null)
    {
        MessageBox.Show("No video device connected to your PC!");
        return;
    }

    capture = new Capture(filters.VideoInputDevices[_settings.VideoInputDeviceIndex], null);
#if DEBUG
    Console.WriteLine("****************\nPREVIEW FUNCTION\n****************");
    Console.WriteLine("****************\nUSING VIDEO INPUT DEVICE: "
        + filters.VideoInputDevices[_settings.VideoInputDeviceIndex].Name
        + "\n****************");
#endif
    capture.PreviewWindow = _panel;
}
public DataTable LoadDeviceList()
{
    // Builds a table of the available video input devices:
    //   id         — zero-based device index
    //   CameraName — the DirectShow friendly name
    // On enumeration failure an empty table WITH the same schema is returned.
    DataTable dt = new DataTable();
    dt.Columns.Add("id");
    dt.Columns.Add("CameraName");

    try
    {
        DirectX.Capture.Filters dispositive = new DirectX.Capture.Filters();
        for (int i = 0; i < dispositive.VideoInputDevices.Count; i++)
        {
            Filter f = dispositive.VideoInputDevices[i];
            DataRow dr = dt.NewRow();
            dr["id"] = i;
            dr["CameraName"] = f.Name;
            dt.Rows.Add(dr);
        }
    }
    catch (Exception)
    {
        // BUG FIX: the original replaced dt with a brand-new DataTable,
        // silently dropping the "id"/"CameraName" schema callers rely on.
        // Keep the schema and just discard any partially-added rows.
        dt.Rows.Clear();
    }

    return dt;
}
public void InitVideoCapture(int videoDeviceID, FrameRate framerate, Resolution resolution,
    ImageFormat format, bool grayscale)
{
    // Initializes DirectShow video capture on the given device with the
    // requested frame rate and resolution. Throws GoblinException when no
    // device exists, the device ID is out of range, or the resolution /
    // frame rate cannot be applied. No-op when already initialized.
    // videoDeviceID < 0 means "no video device" (capture graph without video).
    if (cameraInitialized)
        return;

    this.resolution = resolution;
    this.grayscale = grayscale;
    this.frameRate = framerate;
    this.videoDeviceID = videoDeviceID;
    this.format = format;

    // Map the resolution enum to pixel dimensions.
    switch (resolution)
    {
        case Resolution._160x120: cameraWidth = 160; cameraHeight = 120; break;
        case Resolution._320x240: cameraWidth = 320; cameraHeight = 240; break;
        case Resolution._640x480: cameraWidth = 640; cameraHeight = 480; break;
        case Resolution._800x600: cameraWidth = 800; cameraHeight = 600; break;
        case Resolution._1024x768: cameraWidth = 1024; cameraHeight = 768; break;
        case Resolution._1280x1024: cameraWidth = 1280; cameraHeight = 1024; break;
        case Resolution._1600x1200: cameraWidth = 1600; cameraHeight = 1200; break;
    }

    Filters filters = null;
    Filter videoDevice = null;
    Filter audioDevice = null;
    try
    {
        filters = new Filters();
    }
    catch (Exception)
    {
        throw new GoblinException("No video capturing devices are found");
    }

    try
    {
        videoDevice = (videoDeviceID >= 0) ? filters.VideoInputDevices[videoDeviceID] : null;
    }
    catch (Exception)
    {
        String suggestion = "Try the following device IDs:";
        for (int i = 0; i < filters.VideoInputDevices.Count; i++)
        {
            suggestion += " " + i + ":" + filters.VideoInputDevices[i].Name + ", ";
        }
        // BUG FIX: the original message contained a raw line break inside the
        // string literal (which does not compile); use an explicit \n instead.
        throw new GoblinException("VideoDeviceID " + videoDeviceID +
            " is out of the range. \n" + suggestion);
    }

    // BUG FIX: the original re-indexed VideoInputDevices[videoDeviceID]
    // unconditionally here, which throws for the legal videoDeviceID < 0 case.
    if (videoDevice != null)
        selectedVideoDeviceName = videoDevice.Name;

    capture = new DCapture(videoDevice, audioDevice);

    // Map the frame-rate enum to a numeric rate.
    double frame_rate = 0;
    switch (frameRate)
    {
        case FrameRate._15Hz: frame_rate = 15; break;
        case FrameRate._30Hz: frame_rate = 30; break;
        case FrameRate._50Hz: frame_rate = 50; break;
        case FrameRate._60Hz: frame_rate = 60; break;
        case FrameRate._120Hz: frame_rate = 120; break;
        case FrameRate._240Hz: frame_rate = 240; break;
    }

    if (videoDevice != null)
    {
        // Using MPEG compressor
        //capture.VideoCompressor = filters.VideoCompressors[2];
        capture.FrameRate = frame_rate;
        try
        {
            capture.FrameSize = new Size(cameraWidth, cameraHeight);
        }
        catch (Exception)
        {
            throw new GoblinException("Resolution._" + cameraWidth + "x" + cameraHeight +
                " is not supported for " + selectedVideoDeviceName +
                ". Maximum resolution supported is " + capture.VideoCaps.MaxFrameSize);
        }
    }

    if (capture.FrameSize.Width != cameraWidth || capture.FrameSize.Height != cameraHeight)
        throw new GoblinException("Failed to set the resolution to " + cameraWidth + "x" + cameraHeight);

    // Off-screen panel used as the required preview target for the graph.
    tmpPanel = new Panel();
    tmpPanel.Size = new Size(cameraWidth, cameraHeight);

    try
    {
        capture.PreviewWindow = tmpPanel;
    }
    catch (Exception)
    {
        throw new GoblinException("Specified framerate or/and resolution is/are not supported " +
            "for " + selectedVideoDeviceName);
    }

    // Receive decoded frames via the FrameEvent2 callback and start grabbing.
    capture.FrameEvent2 += new DCapture.HeFrame(CaptureDone);
    capture.GrapImg();

    cameraInitialized = true;
}
void cbMenu_DropDownClosed(object sender, EventArgs e)
{
    // Dispatches on the closed menu's selection: "Full Screen" (pop the video
    // out into a TestFullScreen window), "Cancel" (remove this user's video),
    // or "Video Configuration" (pick/re-pick the capture device).
    try
    {
        if (cbMenu.SelectedItem != null && cbMenu.SelectedItem.ToString() == "Full Screen")
        {
            try
            {
                if (FullScreenMe || FullScreenOther)
                {
                    // NOTE(review): "all ready" should read "already" — runtime
                    // string left untouched in this documentation-only pass.
                    MessageBox.Show("Video is all ready running in Full Screen");
                }
                else
                {
                    // Remember whose video went full screen (local vs remote).
                    if (MyVidType == "Me")
                    {
                        FullScreenMe = true;
                    }
                    else
                    {
                        FullScreenOther = true;
                    }
                    FSUname = lblUName.Content.ToString();
                    Tfs = new TestFullScreen();
                    Tfs.Title = lblUName.Content.ToString() + " Video In Full Screen Mode";
                    Tfs.Show();
                    Tfs.Closed += new EventHandler(Tfs_Closed);
                }
            }
            catch (Exception ex)
            {
                VMuktiAPI.VMuktiHelper.ExceptionHandler(ex, "cbMenu_DropDownClosed", "ctlUserVideo.xaml.cs");
            }
        }
        else if (cbMenu.SelectedItem != null && cbMenu.SelectedItem.ToString() == "Cancel")
        {
            try
            {
                // Confirm with the user, then notify listeners and close the
                // P2P video channel; also close any full-screen window.
                MessageBoxResult result = MessageBox.Show("Do You Really Want To Remove " + lblUName.Content.ToString() + "'s Video", "Remove Video", MessageBoxButton.YesNo);
                if (result == MessageBoxResult.Yes)
                {
                    if (EntRemoveUser != null)
                    {
                        EntRemoveUser(lblUName.Content.ToString());
                    }
                    netp2pDirectXVideoChannel.Close();
                }
                if (Tfs != null)
                {
                    Tfs.Close();
                }
            }
            catch (Exception ex)
            {
                VMuktiAPI.VMuktiHelper.ExceptionHandler(ex, "cbMenu_DropDownClosed", "ctlUserVideo.xaml.cs");
            }
        }
        else if (cbMenu.SelectedItem != null && cbMenu.SelectedItem.ToString() == "Video Configuration")
        {
            try
            {
                // More than one camera: show a chooser dialog; otherwise start
                // capture on device 0 at "Low" quality.
                filters = new Filters();
                if (filters.VideoInputDevices.Count > 1)
                {
                    lstVideoInput = new List<string>();
                    lstVideoInput = filters.VideoInputDevices.GetVideoInputDevices();
                    objShowVideo = new ShowVideoInputs(lstVideoInput);
                    objShowVideo.EntSelectedDevice += new ShowVideoInputs.delSelectedDevice(objShowVideo_EntSelectedDevice);
                    objShowVideo.Show();
                }
                else
                {
                    InitCapture("Low", 0);
                }
            }
            catch (Exception ex)
            {
                // NOTE(review): presumably matches the exact message Filters
                // throws when no video devices exist — confirm the thrown
                // message never carries extra text, since string.Compare
                // requires an exact match. Also "Tour" looks like a typo for
                // "Your" (runtime string, not changed in this pass).
                if (string.Compare(ex.Message, "No devices of the category") == 0)
                {
                    MessageBox.Show("Sorry You Dont Have Any Video Device Attached To Tour System", "VMukti Says: Video Conference");
                }
                else
                {
                    VMuktiAPI.VMuktiHelper.ExceptionHandler(ex, "cbMenu_DropDownClosed", "ctlUserVideo.xaml.cs");
                }
            }
        }
    }
    catch (Exception ex)
    {
        VMuktiAPI.VMuktiHelper.ExceptionHandler(ex, "cbMenu_DropDownClosed", "ctlUserVideo.xaml.cs");
    }
}
/// <summary>
/// Loads Recording devices
/// </summary>
private void loadRecordingDevices()
{
    try
    {
        filters = new Filters();
        // Marshal the UI mutation onto the combo box's dispatcher thread.
        cb_recordingDevices.Dispatcher.Invoke(
            System.Windows.Threading.DispatcherPriority.Normal,
            new Action(delegate()
            {
                foreach (Filter device in filters.AudioInputDevices)
                {
                    cb_recordingDevices.Items.Add(device.Name);
                }
            }));
    }
    catch (Exception)
    {
        MessageBox.Show("No recording devices found!", "Error",
            MessageBoxButton.OK, MessageBoxImage.Error);
    }
}
void UserVideo_Loaded(object sender, RoutedEventArgs e)
{
    // Loaded handler. First load: wires up capture (own video) or the
    // remote-image delegate (another user's video). Subsequent loads (flg
    // set): toggles the preview window based on its reported position.
    try
    {
        // BUG FIX: the original used the non-short-circuiting `&` operator on
        // boolean operands; `&&` expresses the intent and short-circuits.
        if (capture == null && flg == false)
        {
            if (MyVidType == "Me")
            {
                filters = new Filters();
                cnvUserVideo.Visibility = Visibility.Visible;
                m_DelSendImage = new DelSendImage(SetMyImage);
                try
                {
                    if (filters.VideoInputDevices.Count > 1)
                    {
                        // Several cameras: let the user pick one.
                        lstVideoInput = new List<string>();
                        lstVideoInput = filters.VideoInputDevices.GetVideoInputDevices();
                        objShowVideo = new ShowVideoInputs(lstVideoInput);
                        objShowVideo.EntSelectedDevice += new ShowVideoInputs.delSelectedDevice(objShowVideo_EntSelectedDevice);
                        objShowVideo.Show();
                    }
                    else if (filters.VideoInputDevices.Count == 0)
                    {
                        // No camera at all: nothing to initialize.
                    }
                    else
                    {
                        // Exactly one camera: start it at "Low" quality.
                        InitCapture("Low", 0);
                    }
                }
                catch (Exception ex)
                {
                    VMuktiAPI.VMuktiHelper.ExceptionHandler(ex, "UserVideo_Loaded", "ctlUserVideo.xaml.cs");
                }
            }
            else
            {
                picUserVideo.Visibility = Visibility.Visible;
                m_DelSendImage = new DelSendImage(SetOtherImage);
            }
        }
        else if (flg)
        {
            // BUG FIX (minor): dropped the `new double[2]` that was
            // immediately overwritten by the call's return value.
            double[] winXY = capture.PreviewWidowTag();
            if (winXY[0] != 0 && winXY[1] != 0)
            {
                if (MyVidType == "Me")
                {
                    flg = false;
                    capture.showVideo();
                }
            }
            else
            {
                capture.hideVideo();
                flg = true;
            }
        }
    }
    catch (Exception ex)
    {
        VMuktiAPI.VMuktiHelper.ExceptionHandler(ex, "UserVideo_Loaded", "ctlUserVideo.xaml.cs");
    }
}
private void InitCapture()
{
    // Best-effort enumeration: a DirectShow failure leaves the list null
    // and the method exits without creating a capture graph.
    Filters deviceFilters = null;
    try
    {
        deviceFilters = new Filters();
    }
    catch (Exception)
    {
    }

    if (deviceFilters == null || deviceFilters.VideoInputDevices.Count <= 0)
        return;

    // Open the device whose friendly name matches the configured source.
    foreach (Filter candidate in deviceFilters.VideoInputDevices)
    {
        if (candidate.Name.Equals(me.videoCaptureStr))
        {
            capture = new Capture(candidate, null, false);
            break;
        }
    }
    //capture = new Capture(filters.VideoInputDevices[0], null, false);

    if (capture == null)
        return;

    // Apply the configured analog standard; unsupported frame sizes are
    // deliberately tolerated (device keeps its default format).
    if (me.videosystem == Singleton.VideoSystem.PAL)
    {
        capture.dxUtils.VideoStandard = DShowNET.AnalogVideoStandard.PAL_B;
        try
        {
            capture.FrameSize = new Size(720, 576);
            capture.PreviewFrameSize = new Size(720, 576);
        }
        catch (Exception)
        {
        }
    }
    else
    {
        capture.dxUtils.VideoStandard = DShowNET.AnalogVideoStandard.NTSC_M;
        try
        {
            capture.FrameSize = new Size(720, 480);
        }
        catch (Exception)
        {
        }
    }

    capture.AllowSampleGrabber = true;
    //capture.PreviewWindow = pictureBox1;
}
public void StartCapture()
{
    // Starts (or restarts) recording to a numbered .avi file.
#if DEBUG
    Console.WriteLine("****************\nSTART CAPTURE FUNCTION\n****************");
#endif
    // Lazily build the capture graph on first use; otherwise stop the
    // running capture and re-apply the compressor before restarting.
    if (capture == null)
    {
        filters = new Filters();
        tryCapture();
    }
    else
    {
        capture.Stop();
        StartCompressorCapture(capture);
    }
    // NOTE(review): if tryCapture() fails to assign `capture`, the
    // dereferences below throw NullReferenceException — confirm tryCapture's
    // contract before relying on this path.
    FileNameCounter++;
    // Only assign a fresh filename when the graph is not already cued.
    if (!capture.Cued)
        capture.Filename = _FileNamePrepender + "_" + FileNameCounter + ".avi";
    try
    {
        capture.Cue();
        //need to make an option to rechoose the codec, if program refuses to capture with selected one
        capture.Start();
    }
    catch (Exception ex)
    {
        ExceptionHandler(ex);
    }
}
/// <summary>
/// Records a track to the specified directory
/// </summary>
/// <param name="track">Track which should be saved</param>
protected void recordTrack(SpotTrack track)
{
    // Abort any recording already in progress before starting a new one.
    stopRecording();

    if (recorder == null)
    {
        // NOTE(review): this Filters instance is never used afterwards; it
        // may exist only for a device-enumeration side effect — confirm.
        Filters deviceFilters = new Filters();
        recorder = new Capture(null, RecordingDevice);
        recorder.CaptureComplete += new EventHandler(recorder_CaptureComplete);
    }

    // Target file: "<Artist> - <Title>.wav" inside the configured directory.
    recorder.Filename = FileDirectory + string.Format("{0} - {1}", track.Artist, track.Title) + ".wav";
    track.Path = recorder.Filename;
    lastRecordedTrack = track;

    recorder.Start();
}
private void InitVideo()
{
    this.filters = new Filters();

    int deviceCount = this.filters.VideoInputDevices.Count;
    if (deviceCount <= 0)
    {
        // No camera: tell the user they can only browse existing photos.
        if (this.toolStripStatusLabel2 != null)
        {
            this.toolStripStatusLabel2.Text = "系统没有找到摄像头设备,你只能查看已经拍好的照片";
        }
        return;
    }

    LoggingService.DebugFormatted("共检测到系统有{0}个视频设备...", new object[] { deviceCount });

    // Use the last enumerated device and preview it at 640x480.
    this.videoDevice = this.filters.VideoInputDevices[deviceCount - 1];
    this.capture = new DirectX.Capture.Capture(this.videoDevice, null);
    this.capture.FrameSize = new Size(640, 480);
    this.capture.PreviewWindow = this.pictureBox1;
    this.capture.RenderPreview();
}
public FormIkarusMain()
{
    // Main ground-station window bootstrap: instruments, map waypoints,
    // video capture, antenna tracker, telemetry modem, uplink controls,
    // optional UDP sender and web server.
    InitializeComponent();

    // Altimeter calibration: home altitude as-is when metric, otherwise
    // converted with the meters-to-feet factor 3.28.
    if (me.SistemaMetrico == (int)Singleton.SistemasMetricos.Metrico)
        instrumento_Altimeter1.Calibration = me.HomeAlt;
    else
        instrumento_Altimeter1.Calibration = me.HomeAlt * 3.28f;

    // Double-buffered painting to reduce flicker on the instrument panels.
    this.SetStyle(ControlStyles.AllPaintingInWmPaint | ControlStyles.UserPaint | ControlStyles.DoubleBuffer, true);

    // 5x5 table of uplink tuning values: four PID rows (elevator, aileron,
    // tail, throttle; columns P, I, D, IL, DL) plus a row of IR offsets/gain
    // and heading/altitude gains.
    debug_values = new float[5, 5]{{me.uplink_pid_ele_P, me.uplink_pid_ele_I, me.uplink_pid_ele_D, me.uplink_pid_ele_IL, me.uplink_pid_ele_DL},
                                   {me.uplink_pid_ail_P , me.uplink_pid_ail_I, me.uplink_pid_ail_D, me.uplink_pid_ail_IL, me.uplink_pid_ail_DL},
                                   {me.uplink_pid_tail_P, me.uplink_pid_tail_I, me.uplink_pid_tail_D, me.uplink_pid_tail_IL, me.uplink_pid_tail_DL},
                                   {me.uplink_pid_thr_P, me.uplink_pid_thr_I, me.uplink_pid_thr_D, me.uplink_pid_thr_IL, me.uplink_pid_thr_DL },
                                   {me.uplink_IR_offX, me.uplink_IR_offY, me.uplink_IR_gain, me.uplink_rumbo_ail, me.uplink_altura_ele}};

    comboBox3.SelectedIndex = 0;
    comboBox2.SelectedIndex = 0;
    comboBox1.SelectedIndex = 0;

    // Map waypoints: plane icon, target and home, all starting at the home position.
    this.planeWpt = new WayPoint("Avion", me.HomeLon, me.HomeLat, global::UAVConsole.Properties.Resources.plane3);
    this.planeWpt.icon.MakeTransparent(Color.White);
    this.planeWpt.heading = 0;
    this.targetWpt = new WayPoint("Avion", me.HomeLon, me.HomeLat);
    this.homeWpt = new WayPoint("Home", me.HomeLon, me.HomeLat);
    mapControl1.plane = this.planeWpt;
    mapControl1.target = this.targetWpt;
    mapControl1.ruta = me.Ruta;

    // Battery meters get their configured cell counts.
    medidorBaterias1.num_cells = me.cells1;
    medidorBaterias2.num_cells = me.cells2;

    // Best-effort device enumeration: with no capture hardware, `filters`
    // stays null and the whole video setup below is skipped.
    try { filters = new Filters(); }
    catch (Exception) { }

    if (filters != null && filters.VideoInputDevices.Count > 0)
    {
        // Open the device whose name matches the configured capture source.
        foreach (Filter f in filters.VideoInputDevices)
        {
            if (f.Name.Equals(me.videoCaptureStr))
            {
                capture = new Capture(f, null, false);
                break;
            }
        }
        if (capture != null)
        {
            // Prefer the composite input; matched on the "omposi" substring,
            // presumably to catch both "Composite" and "composite" — confirm.
            foreach (Source vs in capture.VideoSources)
            {
                if (vs.ToString().Contains("omposi")) // Video Composite
                {
                    capture.VideoSource = vs;
                    break;
                }
            }
            // PAL: 25 fps 720x576; NTSC: 30 fps 720x480. Frame-size failures
            // are tolerated — the device keeps its default format.
            if (me.videosystem == Singleton.VideoSystem.PAL)
            {
                capture.dxUtils.VideoStandard = DShowNET.AnalogVideoStandard.PAL_B;
                try
                {
                    capture.FrameRate = 25;
                    capture.FrameSize = new Size(720, 576);
                    capture.PreviewFrameSize = new Size(720, 576);
                }
                catch (Exception) { }
            }
            else
            {
                capture.dxUtils.VideoStandard = DShowNET.AnalogVideoStandard.NTSC_M;
                try
                {
                    capture.FrameRate = 30;
                    capture.FrameSize = new Size(720, 480);
                }
                catch (Exception) { }
            }
            capture.AllowSampleGrabber = true;
            capture.PreviewWindow = panel1;
            capture.FrameEvent2 += new Capture.HeFrame(CaptureDone);
            capture.GrapImg();
            //capture.ShowPropertyPage(1, this);
        }
    }

    // Antenna-tracker panel: enable its battery meter and control knob.
    if (me.enableAntTrack)
    {
        antTracker = new AntTracker();
        medidorBaterias3.Enabled = true;
        medidorBaterias3.AutoCalculate = false;
        label6.Enabled = true;
        knob_anttracker.Visible = true;
        knob_anttracker.Manual = false;
        medidorRSSI.Height = 97;
    }
    else
    {
        knob_anttracker.Visible = false;
        medidorRSSI.Height = 137;
    }

    // Telemetry modem selection: video, XBee serial, or the antenna tracker.
    if (me.telemetria == Singleton.Telemetria.Video)
    {
        modem = new ModemVideo();
    }
    else if (me.telemetria == Singleton.Telemetria.XBEE)
        modem = new ModemXbee(me.commPort, me.commBps);
    else if (me.telemetria == Singleton.Telemetria.AntTracker)
        modem = new ModemAntTracker(antTracker);

    // Feed incoming telemetry into the instrument panels.
    if (me.telemetria != Singleton.Telemetria.None)
        modem.listeners += RefreshInstruments;

    if (me.moduloTX == Singleton.ModuloControl.Uplink)
    {
        // Uplink mode: start the joystick thread and preset the altitude
        // spinner to home altitude + 100, clamped to the control's range.
        if (jthread == null)
            jthread = new JoystickThread(this);
        decimal altura = (decimal)(me.HomeAlt + 100);
        if (altura < numericUpDown1.Minimum)
            altura = numericUpDown1.Minimum;
        else if (altura > numericUpDown1.Maximum)
            altura = numericUpDown1.Maximum;
        numericUpDown1.Value = altura;
        label7.Enabled = true;
        medidorBaterias4.Enabled = true;
        medidorBaterias4.volts_max = me.uplinkVmax;
        medidorBaterias4.volts_min = me.uplinkVmin;
    }
    else
    {
        // No uplink: collapse the uplink panel and shift the others up.
        splitContainer1.SplitterDistance+=panel4.Height;
        panel5.Location = panel4.Location;
        panel6.Location = new Point(panel6.Location.X, panel5.Location.Y);
        panel4.Visible = false;
    }

    // Optional UDP in/out: create the sender once and hook the team refresh.
    if (me.enableUDPinout)
    {
        if (this.sender == null)
            this.sender = new SenderUDP(me.portUDPinout);
        this.sender.listeners += TeamRefresh;
    }
    else
    {
        comboBoxTeam.Visible = false;
        button6.Visible = false;
        button7.Visible = false;
        //panel8.Width -= 56;
    }

    // Optional embedded web server (named KmlWebServer — presumably serves
    // KML map data; confirm against MyWebServer).
    if (me.enableWebServer)
    {
        this.KmlWebServer = new MyWebServer(me.portWebServer);
    }

    timer1.Enabled = true;
}
private void InitVideo()
{
    // Enumerate devices and wire up a preview with optional compression.
    this.filters = new Filters();

    // Pick the first video and audio devices when available; a null device
    // is simply passed through to Capture otherwise.
    if (this.filters.VideoInputDevices.Count > 0)
    {
        this.videoDevice = this.filters.VideoInputDevices[0];
    }
    if (this.filters.AudioInputDevices.Count > 0)
    {
        this.audioDevice = this.filters.AudioInputDevices[0];
    }

    this.capture = new DirectX.Capture.Capture(this.videoDevice, this.audioDevice);

    // Apply the first available video compressor, if any.
    if (this.filters.VideoCompressors.Count > 0)
    {
        this.videoComprossor = this.filters.VideoCompressors[0];
        this.capture.VideoCompressor = this.videoComprossor;
    }

    this.capture.PreviewWindow = this.pictureBox1;
    this.capture.RenderPreview();
}
void frmCaptureImage_Disposed(object sender, EventArgs e)
{
    // Tear down the capture device when the form is disposed.
    if (!isStarted)
        return;

    if (captureDevice != null)
    {
        if (captureDevice.Capturing)
            captureDevice.Stop();
        captureDevice.Dispose();
        this.filters = null;
    }

    isStarted = false;
}
private void initDevide()
{
    try
    {
        // Enumerate capture devices and fill the selection combo; bail out
        // (with a Vietnamese "no device found" entry) when none exist.
        filters = new Filters();
        if (filters.VideoInputDevices.Count <= 0)
        {
            cboSelectDevide.Items.Add("Không tìm thấy thiết bị...");
            return;
        }
        hasDevice = true;

        foreach (Filter device in filters.VideoInputDevices)
        {
            cboSelectDevide.Items.Add(device.Name);
        }
        // Preselect the last device (same index the original arithmetic chose).
        this.cboSelectDevide.SelectedIndex = filters.VideoInputDevices.Count - 1;
    }
    catch (Exception e)
    {
        hasDevice = false;
        MessageBox.Show(e.Message.ToString());
        return;
    }

    isStarted = false;

    // Off-screen buffer used while grabbing frames.
    memPicture = new PictureBox();
    memPicture.Size = new Size(640, 480);

    // Wire up the temporary picture frames and their checkboxes, located
    // by their conventional control names.
    for (int index = 1; index <= NO_OF_TEMPORARY_PICTURE; index++)
    {
        PictureBox destination = (PictureBox)this.Controls["pic_Frame" + index.ToString()];
        CheckBox selector = (CheckBox)this.Controls["chkSelect_" + index.ToString()];
        destination.DoubleClick += new EventHandler(picDesination_DoubleClick);
        selector.CheckStateChanged += new EventHandler(ChkSelect_CheckedChanged);
    }

    this.Disposed += new EventHandler(frmCaptureImage_Disposed);
    this.btnOK.Click += BtnOKClick;
    this.FormClosed += new FormClosedEventHandler(frmCaptureImage_FormClosed);

    LblCaptureLinkClicked(null, null);
}
public VideoRecorder(Filter camera, Filter microphone)
{
    // Builds a capture graph for the given camera/microphone pair with an
    // NTSC frame rate, 640x480 video, and 44.1 kHz 16-bit stereo audio.
    previewControl = new VideoPreview();
    filters = new Filters();
    capture = new Capture(camera, microphone);

    // BUG FIX: the original indexed [0] unconditionally and threw on
    // machines with no installed video or audio compressor.
    if (filters.VideoCompressors.Count > 0)
        capture.VideoCompressor = filters.VideoCompressors[0];
    if (filters.AudioCompressors.Count > 0)
        capture.AudioCompressor = filters.AudioCompressors[0];

    capture.FrameRate = NTSC;                 // NTSC
    capture.FrameSize = new Size(640, 480);   // 640x480
    capture.AudioSamplingRate = 44100;        // 44.1 kHz
    capture.AudioSampleSize = 16;             // 16-bit
    capture.AudioChannels = 2;                // Stereo
}