/// <summary>
/// "Turn on camera" button handler: probes each video input device and
/// starts frame capture on the first one that initializes successfully.
/// </summary>
private void btnLigarCamera_Click(object sender, EventArgs e)
{
    CamContainer = new DirectX.Capture.Filters();
    try
    {
        int deviceCount = CamContainer.VideoInputDevices.Count;
        for (int i = 0; i < deviceCount; i++)
        {
            try
            {
                // Get the i-th video input device.
                Camera = CamContainer.VideoInputDevices[i];
                // Initialize capture with video only (no audio device).
                CaptureInfo = new DirectX.Capture.Capture(Camera, null);
                // Surface that shows the live preview.
                CaptureInfo.PreviewWindow = pcCamera;
                // Notify when a frame grab completes.
                CaptureInfo.FrameCaptureComplete += AtualizaImagem;
                // Grab a frame from the device.
                CaptureInfo.CaptureFrame();
                // Device found and initialized — stop probing the rest.
                break;
            }
            catch (Exception)
            {
                // FIX: the original did "throw ex;" here, which both reset the
                // stack trace and aborted the loop on the first failing device,
                // defeating the whole point of probing each device in turn.
                // Skip to the next device instead.
                continue;
            }
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(this, ex.Message);
    }
}
/// <summary>
/// Form load: starts a 640x480 capture from the first video/audio devices
/// and labels each detected card with its best database match.
/// </summary>
private void Form1_Load(object sender, EventArgs e)
{
    capture = new Capture(cameraFilters.VideoInputDevices[0], cameraFilters.AudioInputDevices[0]);
    VideoCapabilities vc = capture.VideoCaps;
    capture.FrameSize = new Size(640, 480);
    capture.PreviewWindow = cam;
    var referenceCards = sql.GetCards();
    capture.FrameEvent2 += new Capture.HeFrame((Bitmap bitmap) =>
    {
        var magicCards = Utilities.DetectCardArt(bitmap);
        foreach (var card in magicCards)
        {
            camWindow.Image = bitmap;
            image_output.Image = card.GetDrawnCorners();
            cardArtImage.Image = card.CardArtBitmap;
            var bestMatch = Utilities.MatchCard(card, referenceCards);
            // FIX: the original leaked two Font instances per card; GDI handles
            // are a finite resource, so dispose Graphics and Font deterministically.
            using (Graphics g = Graphics.FromImage(bitmap))
            using (Font labelFont = new Font("Tahoma", 25))
            {
                // Black "shadow" offset by 1px, then the yellow label on top.
                g.DrawString(bestMatch.name, labelFont, Brushes.Black,
                    new PointF(card.Corners[0].X - 29, card.Corners[0].Y - 39));
                g.DrawString(bestMatch.name, labelFont, Brushes.Yellow,
                    new PointF(card.Corners[0].X - 30, card.Corners[0].Y - 40));
            }
            image_output.Image = bitmap;
        }
    });
    capture.GrapImg();
}
/// <summary>
/// Previews and records video to Videos/savedVideo.wmv under the
/// application base directory.
/// </summary>
public void saveVideo()
{
    capture = new Capture(filters.VideoInputDevices[0], null);
    Filter f = null;
    // List the installed video compressors (diagnostic output).
    for (int i = 0; i < filters.VideoCompressors.Count; i++)
    {
        Console.WriteLine(filters.VideoCompressors[i].Name);
    }
    try
    {
        if (capture == null)
            throw new ApplicationException("Pas de périphériques audios ou vidéos détectés.\n\n");
        if (!capture.Cued)
        {
            // FIX: the original indexed VideoCompressors[7] unconditionally,
            // which throws on machines with fewer than 8 compressors installed.
            // Fall back to recording without a compressor in that case.
            if (filters.VideoCompressors.Count > 7)
            {
                f = filters.VideoCompressors[7];
                capture.VideoCompressor = f;
            }
            capture.FrameSize = new System.Drawing.Size(640, 480);
            capture.Filename = AppDomain.CurrentDomain.BaseDirectory + "Videos/savedVideo.wmv";
            capture.PreviewWindow = panelVideo;
        }
        capture.Start();
        progressBar();
    }
    catch (Exception ex)
    {
        System.Windows.MessageBox.Show("Erreur :" + ex.Message + "\n\n" + ex.ToString());
    }
}
/// <summary>
/// Form load: starts previewing the first video/audio devices at 60 fps
/// and enables form-level key preview.
/// </summary>
void Eyeform_Load(object sender, EventArgs e)
{
    var videoDevice = filter.VideoInputDevices[0];
    var audioDevice = filter.AudioInputDevices[0];
    cap = new Capture(videoDevice, audioDevice);
    cap.PreviewWindow = pnpreview;
    cap.FrameRate = 60;
    cap.FrameCaptureComplete += cap_FrameCaptureComplete;
    KeyPreview = true;
}
/// <summary>
/// (Re)initializes video capture on the selected device, applies the
/// requested quality preset, and starts grabbing frames, routing them to
/// the Net-P2P or HTTP frame handler depending on the current peer type.
/// </summary>
/// <param name="Quality">"High" = device maximum frame size, "Low" = 160x120;
/// any other value leaves the device default.</param>
/// <param name="videoIndex">Index into filters.VideoInputDevices.</param>
void InitCapture(string Quality, int videoIndex)
{
    try
    {
        // Tear down any previous capture session before re-initializing.
        if (capture != null)
        {
            capture.Stop();
            capture.Dispose();
        }
        capture = new DirectX.Capture.Capture(filters.VideoInputDevices[videoIndex], null);
        #region make these lines comment for using professional cam
        if (Quality == "High")
        {
            capture.FrameSize = new System.Drawing.Size(capture.VideoCaps.MaxFrameSize.Width, capture.VideoCaps.MaxFrameSize.Height);
        }
        else if (Quality == "Low")
        {
            capture.FrameSize = new System.Drawing.Size(160, 120);
        }
        capture.FrameRate = 30;
        #endregion
        // Read the preview element's actual size on the UI thread.
        // NOTE(review): BeginInvoke is asynchronous, so these locals are not
        // guaranteed to be populated before this method returns, and they are
        // never read afterwards — confirm whether this block is still needed.
        double dblPicWidth = 0.0, dblPicHeight = 0.0;
        this.Dispatcher.BeginInvoke(DispatcherPriority.Background, new DispatcherOperationCallback(delegate(Object state)
        {
            dblPicWidth = picUserVideo.ActualWidth;
            dblPicHeight = picUserVideo.ActualHeight;
            return(null);
        }), null);
        // Remember the preview element's position relative to the main window.
        Point pt = picUserVideo.TranslatePoint(new Point(), Application.Current.MainWindow);
        picUserVideo.Tag = pt;
        capture.PreviewWindow = cnvUserVideo;
        disptgetVideoWind.Start(); //timer of getting video window is start
        // Bootstrap / super-node / Net-P2P peers stream frames over P2P;
        // every other peer type falls back to the HTTP handler.
        if (VMuktiAPI.VMuktiInfo.CurrentPeer.CurrPeerType == PeerType.BootStrap || VMuktiAPI.VMuktiInfo.CurrentPeer.CurrPeerType == PeerType.SuperNode || VMuktiInfo.CurrentPeer.CurrPeerType == PeerType.NodeWithNetP2P)
        {
            capture.FrameEvent2 += new DirectX.Capture.Capture.HeFrame(capture_FrameEventNetP2P);
        }
        else
        {
            capture.FrameEvent2 += new DirectX.Capture.Capture.HeFrame(capture_FrameEventHttp);
        }
        capture.GrapImg();
    }
    catch (Exception ex)
    {
        VMuktiAPI.VMuktiHelper.ExceptionHandler(ex, "InitCapture", "ctlUserVideo.xaml.cs");
    }
}
/// <summary>
/// Opens the first video/audio input devices at 640x480 and begins
/// grabbing frames into CaptureDone.
/// </summary>
private void LoadCamera()
{
    var camera = new Capture(_cameraFilters.VideoInputDevices[0], _cameraFilters.AudioInputDevices[0]);
    camera.FrameSize = new System.Drawing.Size(640, 480);
    camera.PreviewWindow = _cam;
    camera.FrameEvent2 += CaptureDone;
    _capturer = camera;
    _capturer.GrapImg();
}
/// <summary>
/// Private constructor: enumerates capture devices and opens the first
/// video input device (video only). Shows a message and bails out if no
/// capture hardware is available.
/// </summary>
private SieuAmManager()
{
    try
    {
        filters = new Filters();
    }
    catch
    {
        MessageBox.Show("Không nhận diện được bất kỳ card video! Xin kiểm tra lại");
        return;
    }
    // FIX: the original indexed VideoInputDevices[0] unconditionally, which
    // throws an unhandled exception on machines with an empty device list.
    if (filters.VideoInputDevices.Count == 0)
    {
        MessageBox.Show("Không nhận diện được bất kỳ card video! Xin kiểm tra lại");
        return;
    }
    capture = new Capture(filters.VideoInputDevices[0], null);
}
/// <summary>
/// Creates a recorder that saves tracks into <paramref name="fileDirectory"/>
/// using the given recording device, and subscribes to track-change events.
/// </summary>
/// <param name="fileDirectory">Target directory; a trailing backslash is appended if missing.</param>
/// <param name="recordingDevice">Audio device used for recording.</param>
public SpotRecorder(string fileDirectory, Filter recordingDevice)
{
    // FIX: the original used Substring(Length - 1, 1), which throws
    // ArgumentOutOfRangeException for an empty string; EndsWith handles
    // that case safely and reads more clearly.
    this.FileDirectory = fileDirectory.EndsWith(@"\") ? fileDirectory : fileDirectory + @"\";
    this.RecordingDevice = recordingDevice;
    this.recorder = null;
    spotHandler = new SpotHandler(processName);
    spotHandler.TrackChanged += new SpotHandlerBase.TrackChangedEventHandler(spotHandler_TrackChanged);
}
/// <summary>
/// Sample form constructor: opens the first video/audio devices, wires the
/// load/close handlers, and builds the menu.
/// </summary>
public CaptureTest()
{
    //
    // Required for Windows Form Designer support
    //
    InitializeComponent();
    // Start with the first video/audio devices
    // Don't do this in the Release build in case the
    // first devices cause problems.
    // NOTE(review): contrary to the comment above, this line is NOT guarded
    // by #if DEBUG — it runs in every build configuration; confirm intent.
    capture = new Capture(filters.VideoInputDevices[0], filters.AudioInputDevices[0]);
    Load += new EventHandler(CaptureTest_Load);
    FormClosing += new FormClosingEventHandler(CaptureTest_FormClosing);
    // Update the main menu
    // Much of the interesting work of this sample occurs here
    // NOTE(review): failures in updateMenu() are silently swallowed.
    try
    {
        updateMenu();
    }
    catch
    {
    }
}
/// <summary>
/// Creates a new UDP streaming client.
/// </summary>
/// <param name="video_window">PictureBox that displays the camera/stream image.</param>
/// <param name="remote_ip">Remote IP of the server or of another client.</param>
public UdpStreamClient(PictureBox video_window, string remote_ip)
{
    videoCapture = null;
    filters = new Filters();
    microphone = null;
    speaker = null;
    streaming = false;
    recieving = false;
    inColor = true;
    timeInterval = 1000;
    framesCount = 0;
    videoWindow = video_window;
    audioCodec = new PCMU();

    #region Initial compression settings
    //#################################################
    encoderParams = new EncoderParameters(2);
    // Compress frames as JPEG.
    jpegEncoder = GetEncoder(ImageFormat.Jpeg);
    // Image quality.
    this.SetImageQuality(20L);
    // Color depth.
    this.SetImageColorDepth(8L);
    //#################################################
    #endregion

    // Resolve the local IP address.
    // FIX: Dns.GetHostByName is obsolete; Dns.GetHostEntry is the
    // documented replacement for resolving the local host name.
    // NOTE(review): AddressList[0] may be an IPv6 address on modern
    // systems — verify the socket code handles that, or filter for IPv4.
    strHostName = Dns.GetHostName();
    this.ipHostEntry = Dns.GetHostEntry(strHostName);
    this.localEndPoint = new IPEndPoint(ipHostEntry.AddressList[0], localUdpPort);

    // Configure the remote endpoint.
    try
    {
        this.remoteEndPoint = new IPEndPoint(IPAddress.Parse(remote_ip), remoteUdpPort);
    }
    catch (FormatException ex)
    {
        MessageBox.Show(ex.Message, "Streaming");
    }
}
/// <summary>
/// On form load, probes each video input device and starts capturing from
/// the first one that initializes successfully.
/// </summary>
private void Form3_Load(object sender, EventArgs e)
{
    CamContainer = new DirectX.Capture.Filters();
    try
    {
        int deviceCount = CamContainer.VideoInputDevices.Count;
        for (int index = 0; index < deviceCount; index++)
        {
            try
            {
                // Try this video input device.
                Camera = CamContainer.VideoInputDevices[index];
                CaptureInfo = new DirectX.Capture.Capture(Camera, null);
                CaptureInfo.PreviewWindow = this.pictureBox1;
                CaptureInfo.FrameCaptureComplete += RefreshImage;
                CaptureInfo.CaptureFrame();
                // Device works — no need to check the remaining ones.
                break;
            }
            catch (Exception)
            {
                // This device failed to initialize; fall through to the next.
            }
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(this, ex.Message);
    }
}
/// <summary>
/// Sample form constructor: wires up the first video device (debug builds
/// only) and builds the menu.
/// </summary>
public CaptureTest()
{
    // Required for Windows Form Designer support.
    InitializeComponent();

#if DEBUG
    // Start with the first video device and no audio device.
    // Skipped in Release builds in case the first device causes problems.
    capture = new Capture( filters.VideoInputDevices[0], null);
    capture.CaptureComplete += new EventHandler( OnCaptureComplete );
#endif

    // Update the main menu; much of the interesting work of this sample
    // occurs here. Failures are deliberately ignored.
    try
    {
        updateMenu();
    }
    catch
    {
    }
}
/// <summary>
/// (Re)initializes video capture on the selected device, applies the
/// requested quality preset, and starts grabbing frames, routing them to
/// the Net-P2P or HTTP frame handler depending on the current peer type.
/// </summary>
/// <param name="Quality">"High" = device maximum frame size, "Low" = 160x120;
/// any other value leaves the device default.</param>
/// <param name="videoIndex">Index into filters.VideoInputDevices.</param>
void InitCapture(string Quality, int videoIndex)
{
    try
    {
        // Tear down any previous capture session before re-initializing.
        if (capture != null)
        {
            capture.Stop();
            capture.Dispose();
        }
        capture = new DirectX.Capture.Capture(filters.VideoInputDevices[videoIndex], null);
        #region make these lines comment for using professional cam
        if (Quality == "High")
        {
            capture.FrameSize = new System.Drawing.Size(capture.VideoCaps.MaxFrameSize.Width, capture.VideoCaps.MaxFrameSize.Height);
        }
        else if (Quality == "Low")
        {
            capture.FrameSize = new System.Drawing.Size(160, 120);
        }
        capture.FrameRate = 30;
        #endregion
        // Read the preview element's actual size on the UI thread.
        // NOTE(review): BeginInvoke is asynchronous, so these locals are not
        // guaranteed to be populated before this method returns, and they are
        // never read afterwards — confirm whether this block is still needed.
        double dblPicWidth = 0.0, dblPicHeight = 0.0;
        this.Dispatcher.BeginInvoke(DispatcherPriority.Background, new DispatcherOperationCallback(delegate(Object state)
        {
            dblPicWidth = picUserVideo.ActualWidth;
            dblPicHeight = picUserVideo.ActualHeight;
            return null;
        }), null);
        // Remember the preview element's position relative to the main window.
        Point pt = picUserVideo.TranslatePoint(new Point(), Application.Current.MainWindow);
        picUserVideo.Tag = pt;
        capture.PreviewWindow = cnvUserVideo;
        disptgetVideoWind.Start(); //timer of getting video window is start
        // Bootstrap / super-node / Net-P2P peers stream frames over P2P;
        // every other peer type falls back to the HTTP handler.
        if (VMuktiAPI.VMuktiInfo.CurrentPeer.CurrPeerType == PeerType.BootStrap || VMuktiAPI.VMuktiInfo.CurrentPeer.CurrPeerType == PeerType.SuperNode || VMuktiInfo.CurrentPeer.CurrPeerType == PeerType.NodeWithNetP2P)
        {
            capture.FrameEvent2 += new DirectX.Capture.Capture.HeFrame(capture_FrameEventNetP2P);
        }
        else
        {
            capture.FrameEvent2 += new DirectX.Capture.Capture.HeFrame(capture_FrameEventHttp);
        }
        capture.GrapImg();
    }
    catch (Exception ex)
    {
        VMuktiAPI.VMuktiHelper.ExceptionHandler(ex, "InitCapture", "ctlUserVideo.xaml.cs");
    }
}
/// <summary>
/// Initializes video capture on the given device with the requested frame
/// rate, resolution, image format and color mode, then starts grabbing frames.
/// </summary>
/// <param name="videoDeviceID">Index of the video input device; negative means no video device.</param>
/// <param name="framerate">Requested capture frame rate.</param>
/// <param name="resolution">Requested capture resolution.</param>
/// <param name="format">Image format of the captured frames.</param>
/// <param name="grayscale">Whether frames are treated as grayscale.</param>
/// <exception cref="GoblinException">No devices found, device ID out of range,
/// or the requested resolution/framerate is unsupported.</exception>
public void InitVideoCapture(int videoDeviceID, FrameRate framerate, Resolution resolution,
    ImageFormat format, bool grayscale)
{
    if (cameraInitialized)
        return;

    this.resolution = resolution;
    this.grayscale = grayscale;
    this.frameRate = framerate;
    this.videoDeviceID = videoDeviceID;
    this.format = format;

    // Map the resolution enum to pixel dimensions.
    switch (resolution)
    {
        case Resolution._160x120: cameraWidth = 160; cameraHeight = 120; break;
        case Resolution._320x240: cameraWidth = 320; cameraHeight = 240; break;
        case Resolution._640x480: cameraWidth = 640; cameraHeight = 480; break;
        case Resolution._800x600: cameraWidth = 800; cameraHeight = 600; break;
        case Resolution._1024x768: cameraWidth = 1024; cameraHeight = 768; break;
        case Resolution._1280x1024: cameraWidth = 1280; cameraHeight = 1024; break;
        case Resolution._1600x1200: cameraWidth = 1600; cameraHeight = 1200; break;
    }

    Filters filters = null;
    Filter videoDevice, audioDevice = null;
    try
    {
        filters = new Filters();
    }
    catch (Exception)
    {
        throw new GoblinException("No video capturing devices are found");
    }

    try
    {
        videoDevice = (videoDeviceID >= 0) ? filters.VideoInputDevices[videoDeviceID] : null;
    }
    catch (Exception)
    {
        String suggestion = "Try the following device IDs:";
        for (int i = 0; i < filters.VideoInputDevices.Count; i++)
        {
            suggestion += " " + i + ":" + filters.VideoInputDevices[i].Name + ", ";
        }
        // FIX: the original string literal contained a raw line break, which
        // does not compile; use the \n escape instead.
        throw new GoblinException("VideoDeviceID " + videoDeviceID +
            " is out of the range. \n" + suggestion);
    }

    // FIX: the original read filters.VideoInputDevices[videoDeviceID].Name
    // unconditionally, which throws when videoDeviceID < 0 (no video device).
    selectedVideoDeviceName = (videoDevice != null) ? videoDevice.Name : null;

    capture = new DCapture(videoDevice, audioDevice);

    // Map the frame-rate enum to Hz.
    double frame_rate = 0;
    switch (frameRate)
    {
        case FrameRate._15Hz: frame_rate = 15; break;
        case FrameRate._30Hz: frame_rate = 30; break;
        case FrameRate._50Hz: frame_rate = 50; break;
        case FrameRate._60Hz: frame_rate = 60; break;
        case FrameRate._120Hz: frame_rate = 120; break;
        case FrameRate._240Hz: frame_rate = 240; break;
    }

    if (videoDevice != null)
    {
        // Using MPEG compressor
        //capture.VideoCompressor = filters.VideoCompressors[2];
        capture.FrameRate = frame_rate;
        try
        {
            capture.FrameSize = new Size(cameraWidth, cameraHeight);
        }
        catch (Exception)
        {
            throw new GoblinException("Resolution._" + cameraWidth + "x" + cameraHeight +
                " is not supported for " + selectedVideoDeviceName +
                ". Maximum resolution supported is " + capture.VideoCaps.MaxFrameSize);
        }
    }

    if (capture.FrameSize.Width != cameraWidth || capture.FrameSize.Height != cameraHeight)
        throw new GoblinException("Failed to set the resolution to " + cameraWidth + "x" + cameraHeight);

    // The DirectShow graph needs a window to render previews into;
    // use a detached panel sized to the capture resolution.
    tmpPanel = new Panel();
    tmpPanel.Size = new Size(cameraWidth, cameraHeight);
    try
    {
        capture.PreviewWindow = tmpPanel;
    }
    catch (Exception)
    {
        throw new GoblinException("Specified framerate or/and resolution is/are not supported " +
            "for " + selectedVideoDeviceName);
    }

    capture.FrameEvent2 += new DCapture.HeFrame(CaptureDone);
    capture.GrapImg();

    cameraInitialized = true;
}
/// <summary>
/// Creates the capture object for the video device whose name matches the
/// configured videoCaptureStr and applies the PAL or NTSC frame geometry.
/// </summary>
private void InitCapture()
{
    Filters filters=null;
    try
    {
        filters = new Filters();
    }
    catch (Exception)
    {
        // No capture hardware / DirectShow failure: leave filters null and bail.
    }
    if (filters != null && filters.VideoInputDevices.Count > 0)
    {
        // Pick the device whose name matches the configured capture device.
        foreach (Filter f in filters.VideoInputDevices)
        {
            if (f.Name.Equals(me.videoCaptureStr))
            {
                capture = new Capture(f, null, false);
                break;
            }
        }
        //capture = new Capture(filters.VideoInputDevices[0], null, false);
        if (capture != null)
        {
            if (me.videosystem == Singleton.VideoSystem.PAL)
            {
                capture.dxUtils.VideoStandard = DShowNET.AnalogVideoStandard.PAL_B;
                try
                {
                    // PAL geometry: 720x576.
                    capture.FrameSize = new Size(720, 576);
                    capture.PreviewFrameSize = new Size(720, 576);
                }
                catch (Exception)
                {
                    // Device may not support this size; keep its default.
                }
            }
            else
            {
                capture.dxUtils.VideoStandard = DShowNET.AnalogVideoStandard.NTSC_M;
                try
                {
                    // NTSC geometry: 720x480.
                    capture.FrameSize = new Size(720, 480);
                }
                catch (Exception)
                {
                    // Device may not support this size; keep its default.
                }
            }
            capture.AllowSampleGrabber = true;
            //capture.PreviewWindow = pictureBox1;
        }
    }
}
/// <summary>
/// Stops the recorder (if it is still running) and destroys it.
/// </summary>
protected void stopRecording()
{
    if (recorder == null)
        return;

    if (!recorder.Stopped)
        recorder.Stop();
    recorder.Dispose();
    recorder = null;
}
/// <summary>
/// Starts capturing video from the first available input device at
/// 320x240 / 30 fps and begins grabbing frames into CapturedFrame.
/// </summary>
private void StartVideoCapture()
{
    if (this.filters.VideoInputDevices.Count == 0)
        throw new Exception("Brak urządzeń do przechwytywania obrazu!");

    var device = filters.VideoInputDevices[0];
    videoCapture = new Capture(device, null);
    videoCapture.FrameSize = new Size(320, 240);
    videoCapture.FrameRate = 30.0;
    videoCapture.PreviewWindow = videoWindow;
    videoCapture.FrameEvent2 += new Capture.HeFrame(CapturedFrame);
    videoCapture.GrapImg();

    // Pause to allow the camera to finish initializing.
    Thread.Sleep(1000);
}
/// <summary>
/// Switches capture to the video input device at <paramref name="Index"/>.
/// Returns 0 on success and 1 on failure (index out of range or device error).
/// </summary>
private int SelectVideoDevice(int Index)
{
    try
    {
        // Devices can only be changed by building a new Capture object, so
        // keep the current devices around and dispose the old capture first.
        Filter newVideo = null;
        Filter keptAudio = null;
        if (capture != null)
        {
            newVideo = capture.VideoDevice;
            keptAudio = capture.AudioDevice;
            capture.Dispose();
            capture = null;
        }

        // Select the new video device, bailing out on an out-of-range index.
        if (filters.VideoInputDevices.Count <= Index)
            return 1;
        newVideo = filters.VideoInputDevices[Index];

        if (newVideo != null || keptAudio != null)
        {
            capture = new Capture(newVideo, keptAudio, false);
            capture.AllowSampleGrabber = true;
            capture.CaptureComplete += new EventHandler(OnCaptureComplete);
            SampleGrabber(true);
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show("Video device not supported.\n\n" + ex.Message + "\n\n" + ex.ToString());
        return 1;
    }
    return 0;
}
/// <summary>
/// Detects video devices and, if any exist, previews the last one in the
/// list at 640x480; otherwise shows a status message.
/// </summary>
private void InitVideo()
{
    this.filters = new Filters();
    int deviceCount = this.filters.VideoInputDevices.Count;
    if (deviceCount > 0)
    {
        LoggingService.DebugFormatted("共检测到系统有{0}个视频设备...", new object[] { deviceCount });
        // Use the last device in the list.
        this.videoDevice = this.filters.VideoInputDevices[deviceCount - 1];
        this.capture = new DirectX.Capture.Capture(this.videoDevice, null);
        this.capture.FrameSize = new Size(640, 480);
        this.capture.PreviewWindow = this.pictureBox1;
        this.capture.RenderPreview();
    }
    else if (this.toolStripStatusLabel2 != null)
    {
        this.toolStripStatusLabel2.Text = "系统没有找到摄像头设备,你只能查看已经拍好的照片";
    }
}
/// <summary>
/// Releases the capture device when the form closes.
/// </summary>
private void LiveShow_FormClosed(object sender, FormClosedEventArgs e)
{
    if (this.capture == null)
        return;

    this.capture.Dispose();
    this.capture = null;
}
/// <summary>
/// Initializes video capture on the given device with the requested frame
/// rate, resolution, image format and color mode, then starts grabbing frames.
/// </summary>
/// <param name="videoDeviceID">Index of the video input device; negative means no video device.</param>
/// <param name="framerate">Requested capture frame rate.</param>
/// <param name="resolution">Requested capture resolution.</param>
/// <param name="format">Image format of the captured frames.</param>
/// <param name="grayscale">Whether frames are treated as grayscale.</param>
/// <exception cref="GoblinException">No devices found, device ID out of range,
/// or the requested resolution/framerate is unsupported.</exception>
public void InitVideoCapture(int videoDeviceID, FrameRate framerate, Resolution resolution,
    ImageFormat format, bool grayscale)
{
    if (cameraInitialized)
        return;

    this.resolution = resolution;
    this.grayscale = grayscale;
    this.frameRate = framerate;
    this.videoDeviceID = videoDeviceID;
    this.format = format;

    // Map the resolution enum to pixel dimensions.
    switch (resolution)
    {
        case Resolution._160x120: cameraWidth = 160; cameraHeight = 120; break;
        case Resolution._320x240: cameraWidth = 320; cameraHeight = 240; break;
        case Resolution._640x480: cameraWidth = 640; cameraHeight = 480; break;
        case Resolution._800x600: cameraWidth = 800; cameraHeight = 600; break;
        case Resolution._1024x768: cameraWidth = 1024; cameraHeight = 768; break;
        case Resolution._1280x1024: cameraWidth = 1280; cameraHeight = 1024; break;
        case Resolution._1600x1200: cameraWidth = 1600; cameraHeight = 1200; break;
    }

    Filters filters = null;
    Filter videoDevice, audioDevice = null;
    try
    {
        filters = new Filters();
    }
    catch (Exception)
    {
        throw new GoblinException("No video capturing devices are found");
    }

    try
    {
        videoDevice = (videoDeviceID >= 0) ? filters.VideoInputDevices[videoDeviceID] : null;
    }
    catch (Exception)
    {
        String suggestion = "Try the following device IDs:";
        for (int i = 0; i < filters.VideoInputDevices.Count; i++)
        {
            suggestion += " " + i + ":" + filters.VideoInputDevices[i].Name + ", ";
        }
        // FIX: the original string literal contained a raw line break, which
        // does not compile; use the \n escape instead.
        throw new GoblinException("VideoDeviceID " + videoDeviceID +
            " is out of the range. \n" + suggestion);
    }

    // FIX: the original read filters.VideoInputDevices[videoDeviceID].Name
    // unconditionally, which throws when videoDeviceID < 0 (no video device).
    selectedVideoDeviceName = (videoDevice != null) ? videoDevice.Name : null;

    capture = new DCapture(videoDevice, audioDevice);

    // Map the frame-rate enum to Hz.
    double frame_rate = 0;
    switch (frameRate)
    {
        case FrameRate._15Hz: frame_rate = 15; break;
        case FrameRate._30Hz: frame_rate = 30; break;
        case FrameRate._50Hz: frame_rate = 50; break;
        case FrameRate._60Hz: frame_rate = 60; break;
        case FrameRate._120Hz: frame_rate = 120; break;
        case FrameRate._240Hz: frame_rate = 240; break;
    }

    if (videoDevice != null)
    {
        // Using MPEG compressor
        //capture.VideoCompressor = filters.VideoCompressors[2];
        capture.FrameRate = frame_rate;
        try
        {
            capture.FrameSize = new Size(cameraWidth, cameraHeight);
        }
        catch (Exception)
        {
            throw new GoblinException("Resolution._" + cameraWidth + "x" + cameraHeight +
                " is not supported for " + selectedVideoDeviceName +
                ". Maximum resolution supported is " + capture.VideoCaps.MaxFrameSize);
        }
    }

    if (capture.FrameSize.Width != cameraWidth || capture.FrameSize.Height != cameraHeight)
        throw new GoblinException("Failed to set the resolution to " + cameraWidth + "x" + cameraHeight);

    // The DirectShow graph needs a window to render previews into;
    // use a detached panel sized to the capture resolution.
    tmpPanel = new Panel();
    tmpPanel.Size = new Size(cameraWidth, cameraHeight);
    try
    {
        capture.PreviewWindow = tmpPanel;
    }
    catch (Exception)
    {
        throw new GoblinException("Specified framerate or/and resolution is/are not supported " +
            "for " + selectedVideoDeviceName);
    }

    capture.FrameEvent2 += new DCapture.HeFrame(CaptureDone);
    capture.GrapImg();

    cameraInitialized = true;
}
/// <summary>
/// Starts (or restarts) live preview in the preview panel using the
/// configured video input device.
/// </summary>
protected void Preview()
{
    if (capture == null)
    {
        filters = new Filters();
        // FIX: also verify the configured device index actually exists — the
        // original only checked the list for null and could throw an
        // out-of-range exception on machines with fewer devices installed.
        if (filters.VideoInputDevices != null
            && filters.VideoInputDevices.Count > _settings.VideoInputDeviceIndex)
        {
            capture = new Capture(filters.VideoInputDevices[_settings.VideoInputDeviceIndex], null);
#if DEBUG
            Console.WriteLine("****************\nPREVIEW FUNCTION\n****************");
            Console.WriteLine("****************\nUSING VIDEO INPUT DEVICE: " + filters.VideoInputDevices[_settings.VideoInputDeviceIndex].Name + "\n****************");
#endif
            capture.PreviewWindow = _panel;
        }
        else
        {
            MessageBox.Show("No video device connected to your PC!");
        }
    }
    else
    {
        // Already have a capture: stop it and re-attach the preview window.
        capture.Stop();
        capture.PreviewWindow = _panel;
    }
}
/// <summary>
/// Stops frame capture, detaches the preview, and releases the device.
/// Safe to call when capture is not running.
/// </summary>
private void StopVideoCapture()
{
    // FIX: guard against a double stop (or stop-before-start), which would
    // have thrown a NullReferenceException in the original.
    if (videoCapture == null)
        return;

    videoCapture.FrameEvent2 -= new Capture.HeFrame(CapturedFrame);
    videoCapture.PreviewWindow = null;
    videoCapture.Dispose();
    videoCapture = null;
}
/// <summary>
/// Applies the configured video compressor to the capture and shows the
/// preview in the panel; errors go to the common exception handler.
/// </summary>
protected void StartCompressorCapture(Capture capture)
{
    try
    {
        var compressor = filters.VideoCompressors[_settings.VideoCompressorIndex];
        capture.VideoCompressor = compressor;
#if DEBUG
        Console.WriteLine("****************\nUSING COMPRESSOR: " + filters.VideoCompressors[_settings.VideoCompressorIndex].Name + "\n****************");
#endif
        capture.PreviewWindow = _panel;
    }
    catch (Exception ex)
    {
        ExceptionHandler(ex);
    }
}
/// <summary>
/// Records a track to the configured directory as "&lt;Artist&gt; - &lt;Title&gt;.wav".
/// </summary>
/// <param name="track">Track which should be saved.</param>
protected void recordTrack(SpotTrack track)
{
    stopRecording();
    if (recorder == null)
    {
        // FIX: removed the unused local "Filters f = new Filters();" — it
        // enumerated all capture devices on every call for no purpose.
        recorder = new Capture(null, RecordingDevice);
        recorder.CaptureComplete += new EventHandler(recorder_CaptureComplete);
    }
    recorder.Filename = FileDirectory + string.Format("{0} - {1}", track.Artist, track.Title) + ".wav";
    track.Path = recorder.Filename;
    lastRecordedTrack = track;
    recorder.Start();
}
/// <summary>
/// Attempts to create a capture from the configured video input device and
/// start a compressed preview; reports errors via the exception handler.
/// </summary>
protected void tryCapture()
{
    if (filters.VideoInputDevices != null)
    {
        try
        {
            capture = new Capture(filters.VideoInputDevices[_settings.VideoInputDeviceIndex], null);
            StartCompressorCapture(capture);
        }
        catch (Exception ex)
        {
            ExceptionHandler(ex);
        }
    }
    else
    {
        // FIX: corrected the typo "to you PC" in the user-facing message.
        MessageBox.Show("No video device connected to your PC!");
    }
}
/// <summary>
/// Picks the first available video/audio devices (and video compressor, if
/// any is installed) and starts rendering a preview into pictureBox1.
/// </summary>
private void InitVideo()
{
    this.filters = new Filters();

    if (this.filters.VideoInputDevices.Count > 0)
        this.videoDevice = this.filters.VideoInputDevices[0];
    if (this.filters.AudioInputDevices.Count > 0)
        this.audioDevice = this.filters.AudioInputDevices[0];

    this.capture = new DirectX.Capture.Capture(this.videoDevice, this.audioDevice);

    if (this.filters.VideoCompressors.Count > 0)
    {
        this.videoComprossor = this.filters.VideoCompressors[0];
        this.capture.VideoCompressor = this.videoComprossor;
    }

    this.capture.PreviewWindow = this.pictureBox1;
    this.capture.RenderPreview();
}
/// <summary>
/// Form load: sets up a 640x480 capture from the first video/audio devices,
/// hooks the frame handler, and loads the source cards.
/// </summary>
private void Form1_Load(object sender, EventArgs e)
{
    cameraBitmap = new Bitmap(640, 480);

    var videoDevice = cameraFilters.VideoInputDevices[0];
    var audioDevice = cameraFilters.AudioInputDevices[0];
    capture = new Capture(videoDevice, audioDevice);
    VideoCapabilities vc = capture.VideoCaps;
    capture.FrameSize = new Size(640, 480);
    capture.PreviewWindow = cam;
    capture.FrameEvent2 += new Capture.HeFrame(CaptureDone);
    capture.GrapImg();

    loadSourceCards();
}
/// <summary>
/// Main ground-station form: configures the altimeter, PID debug table,
/// map waypoints, battery meters, video capture (PAL/NTSC), antenna
/// tracker, telemetry modem, uplink controls, UDP output and web server.
/// </summary>
public FormIkarusMain()
{
    InitializeComponent();
    // Altimeter calibration: metric uses meters directly; otherwise convert
    // meters to feet (x 3.28).
    if (me.SistemaMetrico == (int)Singleton.SistemasMetricos.Metrico)
        instrumento_Altimeter1.Calibration = me.HomeAlt;
    else
        instrumento_Altimeter1.Calibration = me.HomeAlt * 3.28f;
    // Reduce flicker on the custom-drawn instruments.
    this.SetStyle(ControlStyles.AllPaintingInWmPaint | ControlStyles.UserPaint | ControlStyles.DoubleBuffer, true);
    // 5x5 table of uplink tuning values: rows are elevator/aileron/tail/
    // throttle PID (P, I, D, IL, DL) plus a row of IR/heading/altitude gains.
    debug_values = new float[5, 5]{{me.uplink_pid_ele_P, me.uplink_pid_ele_I, me.uplink_pid_ele_D, me.uplink_pid_ele_IL, me.uplink_pid_ele_DL},
        {me.uplink_pid_ail_P , me.uplink_pid_ail_I, me.uplink_pid_ail_D, me.uplink_pid_ail_IL, me.uplink_pid_ail_DL},
        {me.uplink_pid_tail_P, me.uplink_pid_tail_I, me.uplink_pid_tail_D, me.uplink_pid_tail_IL, me.uplink_pid_tail_DL},
        {me.uplink_pid_thr_P, me.uplink_pid_thr_I, me.uplink_pid_thr_D, me.uplink_pid_thr_IL, me.uplink_pid_thr_DL },
        {me.uplink_IR_offX, me.uplink_IR_offY, me.uplink_IR_gain, me.uplink_rumbo_ail, me.uplink_altura_ele}};
    comboBox3.SelectedIndex = 0;
    comboBox2.SelectedIndex = 0;
    comboBox1.SelectedIndex = 0;
    // Map waypoints: the plane (with transparent icon), the target, and home,
    // all initially at the home coordinates.
    this.planeWpt = new WayPoint("Avion", me.HomeLon, me.HomeLat, global::UAVConsole.Properties.Resources.plane3);
    this.planeWpt.icon.MakeTransparent(Color.White);
    this.planeWpt.heading = 0;
    this.targetWpt = new WayPoint("Avion", me.HomeLon, me.HomeLat);
    this.homeWpt = new WayPoint("Home", me.HomeLon, me.HomeLat);
    mapControl1.plane = this.planeWpt;
    mapControl1.target = this.targetWpt;
    mapControl1.ruta = me.Ruta;
    medidorBaterias1.num_cells = me.cells1;
    medidorBaterias2.num_cells = me.cells2;
    // Video capture: pick the configured device by name, prefer the
    // composite video source, and apply PAL or NTSC geometry.
    try
    {
        filters = new Filters();
    }
    catch (Exception)
    {
        // No capture hardware: leave filters null and skip video setup.
    }
    if (filters != null && filters.VideoInputDevices.Count > 0)
    {
        foreach (Filter f in filters.VideoInputDevices)
        {
            if (f.Name.Equals(me.videoCaptureStr))
            {
                capture = new Capture(f, null, false);
                break;
            }
        }
        if (capture != null)
        {
            // Substring match on "omposi" catches "Composite"/"composite".
            foreach (Source vs in capture.VideoSources)
            {
                if (vs.ToString().Contains("omposi")) // Video Composite
                {
                    capture.VideoSource = vs;
                    break;
                }
            }
            if (me.videosystem == Singleton.VideoSystem.PAL)
            {
                capture.dxUtils.VideoStandard = DShowNET.AnalogVideoStandard.PAL_B;
                try
                {
                    // PAL: 25 fps, 720x576.
                    capture.FrameRate = 25;
                    capture.FrameSize = new Size(720, 576);
                    capture.PreviewFrameSize = new Size(720, 576);
                }
                catch (Exception)
                {
                    // Device may not support this mode; keep its defaults.
                }
            }
            else
            {
                capture.dxUtils.VideoStandard = DShowNET.AnalogVideoStandard.NTSC_M;
                try
                {
                    // NTSC: 30 fps, 720x480.
                    capture.FrameRate = 30;
                    capture.FrameSize = new Size(720, 480);
                }
                catch (Exception)
                {
                    // Device may not support this mode; keep its defaults.
                }
            }
            capture.AllowSampleGrabber = true;
            capture.PreviewWindow = panel1;
            capture.FrameEvent2 += new Capture.HeFrame(CaptureDone);
            capture.GrapImg();
            //capture.ShowPropertyPage(1, this);
        }
    }
    // Antenna tracker: enable its battery meter and knob, and shrink the
    // RSSI meter to make room; otherwise hide the knob.
    if (me.enableAntTrack)
    {
        antTracker = new AntTracker();
        medidorBaterias3.Enabled = true;
        medidorBaterias3.AutoCalculate = false;
        label6.Enabled = true;
        knob_anttracker.Visible = true;
        knob_anttracker.Manual = false;
        medidorRSSI.Height = 97;
    }
    else
    {
        knob_anttracker.Visible = false;
        medidorRSSI.Height = 137;
    }
    // Telemetry modem selection; any modem feeds RefreshInstruments.
    if (me.telemetria == Singleton.Telemetria.Video)
    {
        modem = new ModemVideo();
    }
    else if (me.telemetria == Singleton.Telemetria.XBEE)
        modem = new ModemXbee(me.commPort, me.commBps);
    else if (me.telemetria == Singleton.Telemetria.AntTracker)
        modem = new ModemAntTracker(antTracker);
    if (me.telemetria != Singleton.Telemetria.None)
        modem.listeners += RefreshInstruments;
    // Uplink mode: start the joystick thread, seed the altitude spinner
    // (home + 100, clamped to the control's range) and enable the uplink
    // battery meter; otherwise collapse the uplink panel.
    if (me.moduloTX == Singleton.ModuloControl.Uplink)
    {
        if (jthread == null)
            jthread = new JoystickThread(this);
        decimal altura = (decimal)(me.HomeAlt + 100);
        if (altura < numericUpDown1.Minimum)
            altura = numericUpDown1.Minimum;
        else if (altura > numericUpDown1.Maximum)
            altura = numericUpDown1.Maximum;
        numericUpDown1.Value = altura;
        label7.Enabled = true;
        medidorBaterias4.Enabled = true;
        medidorBaterias4.volts_max = me.uplinkVmax;
        medidorBaterias4.volts_min = me.uplinkVmin;
    }
    else
    {
        splitContainer1.SplitterDistance+=panel4.Height;
        panel5.Location = panel4.Location;
        panel6.Location = new Point(panel6.Location.X, panel5.Location.Y);
        panel4.Visible = false;
    }
    // Optional UDP team output; otherwise hide the team controls.
    if (me.enableUDPinout)
    {
        if (this.sender == null)
            this.sender = new SenderUDP(me.portUDPinout);
        this.sender.listeners += TeamRefresh;
    }
    else
    {
        comboBoxTeam.Visible = false;
        button6.Visible = false;
        button7.Visible = false;
        //panel8.Width -= 56;
    }
    // Optional KML web server.
    if (me.enableWebServer)
    {
        this.KmlWebServer = new MyWebServer(me.portWebServer);
    }
    timer1.Enabled = true;
}
/// <summary>
/// Menu handler: switches capture to the video device picked in the menu
/// (menu index 0 means "no video device") and rebuilds the menu.
/// </summary>
private void mnuVideoDevices_Click(object sender, System.EventArgs e)
{
    try
    {
        // Devices can only be changed by creating a new Capture object, so
        // remember the current devices and dispose the old capture first.
        Filter videoDevice = null;
        Filter audioDevice = null;
        if (capture != null)
        {
            videoDevice = capture.VideoDevice;
            audioDevice = capture.AudioDevice;
            capture.Dispose();
            capture = null;
        }

        // Menu index 0 is the "none" entry; device i lives at menu index i+1.
        MenuItem m = sender as MenuItem;
        videoDevice = (m.Index > 0) ? filters.VideoInputDevices[m.Index - 1] : null;

        // Rebuild the capture only if at least one device remains selected.
        if (videoDevice != null || audioDevice != null)
        {
            capture = new Capture(videoDevice, audioDevice);
            capture.CaptureComplete += new EventHandler(OnCaptureComplete);
        }

        // Update the menu to reflect the new selection.
        updateMenu();
    }
    catch (Exception ex)
    {
        MessageBox.Show("Video device not supported.\n\n" + ex.Message + "\n\n" + ex.ToString());
    }
}
/// <summary>
/// Toggles frame capture from the device selected in the combo box: stops
/// and releases it if running, otherwise starts grabbing at 640x480 / 30 fps.
/// </summary>
void LblCaptureLinkClicked(object sender, LinkLabelLinkClickedEventArgs e)
{
    // Nothing to do without a usable device selection.
    if (!hasDevice)
        return;
    if (cboSelectDevide.Items.Count == 0)
        return;
    if (cboSelectDevide.SelectedIndex < 0)
        return;

    if (isStarted)
    {
        // Stop and release the running device.
        if (captureDevice != null)
        {
            if (captureDevice.Capturing)
                captureDevice.Stop();
            captureDevice.Dispose();
        }
        this.lblCaptureStart.Text = MSG_START;
        isStarted = false;
        return;
    }

    // Start the selected device (video only).
    captureDevice = new Capture(filters.VideoInputDevices[cboSelectDevide.SelectedIndex], null);
    captureDevice.FrameSize = new Size(640, 480);
    captureDevice.FrameRate = 30; // 30 frames/sec
    captureDevice.FrameEvent2 += new Capture.HeFrame(captureDevice_FrameEvent2);
    captureDevice.PreviewWindow = picPreview;
    //captureDevice.Cue();
    captureDevice.GrapImg();
    //captureDevice.Start();
    isStarted = true;
    this.lblCaptureStart.Text = MSG_STOP;
}
/// <summary>
/// Creates a recorder for the given camera/microphone pair configured for
/// NTSC 640x480 video and CD-quality 16-bit stereo audio.
/// </summary>
/// <param name="camera">Video input device.</param>
/// <param name="microphone">Audio input device.</param>
public VideoRecorder(Filter camera, Filter microphone)
{
    previewControl = new VideoPreview();
    filters = new Filters();
    capture = new Capture(camera, microphone);
    // FIX: only assign compressors when at least one is installed; the
    // original indexed [0] unconditionally and threw on systems with no
    // video or audio compressors.
    if (filters.VideoCompressors.Count > 0)
        capture.VideoCompressor = filters.VideoCompressors[0];
    if (filters.AudioCompressors.Count > 0)
        capture.AudioCompressor = filters.AudioCompressors[0];
    capture.FrameRate = NTSC;                   // NTSC frame-rate constant
    capture.FrameSize = new Size(640, 480);     // 640x480
    capture.AudioSamplingRate = 44100;          // 44.1 kHz
    capture.AudioSampleSize = 16;               // 16-bit
    capture.AudioChannels = 2;                  // Stereo
}