/// <summary>
/// Toggles the "FlipRight" video effect to match the checkbox state,
/// creating and registering the effect on first use.
/// </summary>
private void CbFlipY_Checked(object sender, RoutedEventArgs e)
{
    bool enabled = cbFlipY.IsChecked == true;
    var existing = VideoCapture1.Video_Effects_Get("FlipRight");

    if (existing is IVFVideoEffectFlipRight flip)
    {
        // Effect already registered - just update its enabled state.
        flip.Enabled = enabled;
    }
    else if (existing == null)
    {
        // First use - register a new effect instance.
        VideoCapture1.Video_Effects_Add(new VFVideoEffectFlipRight(enabled));
    }
}
/// <summary>
/// Toggles the "Invert" video effect to match the checkbox state,
/// creating and registering the effect on first use.
/// </summary>
private void cbInvert_CheckedChanged(object sender, RoutedEventArgs e)
{
    bool enabled = cbInvert.IsChecked == true;
    var existing = VideoCapture1.Video_Effects_Get("Invert");

    if (existing is IVFVideoEffectInvert invert)
    {
        // Effect already registered - just update its enabled state.
        invert.Enabled = enabled;
    }
    else if (existing == null)
    {
        // First use - register a new effect instance.
        VideoCapture1.Video_Effects_Add(new VFVideoEffectInvert(enabled));
    }
}
/// <summary>
/// Applies the lightness slider value to the "Lightness" video effect,
/// creating the effect (enabled) on first use.
/// </summary>
private void tbLightness_Scroll(object sender, RoutedPropertyChangedEventArgs<double> e)
{
    var value = (int)tbLightness.Value;
    var existing = VideoCapture1.Video_Effects_Get("Lightness");

    if (existing is IVFVideoEffectLightness lightness)
    {
        lightness.Value = value;
    }
    else if (existing == null)
    {
        VideoCapture1.Video_Effects_Add(new VFVideoEffectLightness(true, value));
    }
}
/// <summary>
/// Applies the contrast slider value to the "Contrast" video effect,
/// creating the effect (enabled) on first use.
/// </summary>
private void tbContrast_Scroll(object sender, RoutedPropertyChangedEventArgs<double> e)
{
    var value = (int)tbContrast.Value;
    var existing = VideoCapture1.Video_Effects_Get("Contrast");

    if (existing is IVFVideoEffectContrast contrast)
    {
        contrast.Value = value;
    }
    else if (existing == null)
    {
        VideoCapture1.Video_Effects_Add(new VFVideoEffectContrast(true, value));
    }
}
/// <summary>
/// Toggles the "Grayscale" video effect to match the checkbox state,
/// creating and registering the effect on first use.
/// </summary>
private void cbGreyscale_CheckedChanged(object sender, RoutedEventArgs e)
{
    bool enabled = cbGreyscale.IsChecked == true;
    var existing = VideoCapture1.Video_Effects_Get("Grayscale");

    if (existing is IVFVideoEffectGrayscale grayscale)
    {
        grayscale.Enabled = enabled;
    }
    else if (existing == null)
    {
        VideoCapture1.Video_Effects_Add(new VFVideoEffectGrayscale(enabled));
    }
}
/// <summary>
/// Starts screen capture/preview: configures debug output, the screen
/// capture source from the UI fields, disables audio, then starts the
/// engine asynchronously (MP4 recording when capture is checked).
/// </summary>
private async void btStart_Click(object sender, EventArgs e)
{
    // Debug / telemetry settings.
    VideoCapture1.Debug_Mode = cbDebugMode.Checked;
    VideoCapture1.Debug_Dir = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments) + "\\VisioForge\\";
    VideoCapture1.Debug_Telemetry = cbTelemetry.Checked;

    // Screen capture source built from the UI fields.
    var screenSource = new ScreenCaptureSourceSettings
    {
        Mode = VFScreenCaptureMode.Screen,
        FullScreen = rbScreenFullScreen.Checked,
        Top = Convert.ToInt32(edScreenTop.Text),
        Bottom = Convert.ToInt32(edScreenBottom.Text),
        Left = Convert.ToInt32(edScreenLeft.Text),
        Right = Convert.ToInt32(edScreenRight.Text),
        DisplayIndex = Convert.ToInt32(cbScreenCaptureDisplayIndex.Text),
        FrameRate = Convert.ToInt32(edScreenFrameRate.Text),
        GrabMouseCursor = cbScreenCapture_GrabMouseCursor.Checked,
        AllowDesktopDuplicationEngine = cbScreenCapture_DesktopDuplication.Checked
    };
    VideoCapture1.Screen_Capture_Source = screenSource;

    // Audio is not captured or played in this sample.
    VideoCapture1.Audio_PlayAudio = false;
    VideoCapture1.Audio_RecordAudio = false;

    // Output: record to MP4 or preview only.
    if (cbCapture.Checked)
    {
        VideoCapture1.Mode = VFVideoCaptureMode.ScreenCapture;
        VideoCapture1.Output_Format = new VFMP4v8v10Output();
        VideoCapture1.Output_Filename = edOutput.Text;
    }
    else
    {
        VideoCapture1.Mode = VFVideoCaptureMode.ScreenPreview;
    }

    await VideoCapture1.StartAsync();
}
/// <summary>
/// Stops capture and searches the accumulated live fingerprint for matches
/// against the known ad fingerprints, adding a result row per match.
/// </summary>
private void StopVideoDelegateMethod()
{
    // done. searching for fingerprints.
    VideoCapture1.Stop();

    // Build the fingerprint from the live search data; Build returns a pointer
    // to unmanaged memory of length n that must be copied out before Free().
    long n;
    IntPtr p = VFPSearch.Build(out n, ref searchLiveData);
    VFPFingerPrint fvp = new VFPFingerPrint() { Data = new byte[n], OriginalFilename = string.Empty };
    Marshal.Copy(p, fvp.Data, 0, (int)n);
    searchLiveData.Free();

    foreach (var ad in adVFPList)
    {
        List<int> positions;
        // Search the live fingerprint for this ad; slDifference sets the
        // allowed difference threshold.
        bool found = VFPAnalyzer.Search(ad, fvp, ad.Duration, (int)slDifference.Value, out positions, true);
        if (found)
        {
            foreach (var pos in positions)
            {
                // NOTE(review): 'pos' (the match position) is unused - the row's
                // TimeStamp is the wall-clock analysis time, not the position of
                // the match. The leftover fragment below suggests a
                // minutes:seconds rendering of 'pos' was intended; confirm.
                results.Add(
                    new ResultsViewModel()
                    {
                        Sample = ad.OriginalFilename,
                        TimeStamp = DateTime.Now.ToString(CultureInfo.InvariantCulture) // minutes + ":" + seconds
                    });
            }
        }
    }

    MessageBox.Show("Analyze completed!");
}
/// <summary>
/// Applies the saturation slider value to the "Saturation" video effect,
/// creating the effect on first use. No-op before the capture object exists.
/// </summary>
private void tbSaturation_Scroll(object sender, RoutedPropertyChangedEventArgs<double> e)
{
    if (VideoCapture1 == null)
    {
        return;
    }

    var value = (int)tbSaturation.Value;
    var existing = VideoCapture1.Video_Effects_Get("Saturation");

    if (existing is IVFVideoEffectSaturation saturation)
    {
        saturation.Value = value;
    }
    else if (existing == null)
    {
        VideoCapture1.Video_Effects_Add(new VFVideoEffectSaturation(value));
    }
}
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
    // Managed objects must only be touched on the explicit Dispose() path;
    // the original disposed them unconditionally, which is unsafe if this
    // ever runs from a finalizer (disposing == false).
    if (disposing)
    {
        components?.Dispose();

        VideoCapture1?.Dispose();
        VideoCapture1 = null;

        VideoCapture2?.Dispose();
        VideoCapture2 = null;

        tmRecording1?.Dispose();
        tmRecording1 = null;

        tmRecording2?.Dispose();
        tmRecording2 = null;
    }

    base.Dispose(disposing);
}
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
    // Fixes: gifSettingsDialog was disposed twice in the original; managed
    // objects are now guarded by 'disposing' as the dispose pattern requires.
    if (disposing)
    {
        components?.Dispose();

        mp4SettingsDialog?.Dispose();
        mp4SettingsDialog = null;

        mp4HWSettingsDialog?.Dispose();
        mp4HWSettingsDialog = null;

        mpegTSSettingsDialog?.Dispose();
        mpegTSSettingsDialog = null;

        movSettingsDialog?.Dispose();
        movSettingsDialog = null;

        aviSettingsDialog?.Dispose();
        aviSettingsDialog = null;

        wmvSettingsDialog?.Dispose();
        wmvSettingsDialog = null;

        gifSettingsDialog?.Dispose();
        gifSettingsDialog = null;

        windowCaptureForm?.Dispose();
        windowCaptureForm = null;

        VideoCapture1?.Dispose();
        VideoCapture1 = null;
    }

    base.Dispose(disposing);
}
/// <summary>
/// Starts video preview with face tracking: configures debug output, the
/// video capture device from the UI, disables audio, and applies the face
/// tracking settings before starting the engine.
/// </summary>
private void btStart_Click(object sender, EventArgs e)
{
    // set debug settings
    VideoCapture1.Debug_Mode = cbDebugMode.Checked;
    VideoCapture1.Debug_Dir = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments) + "\\VisioForge\\";
    VideoCapture1.Debug_Telemetry = cbTelemetry.Checked;
    mmLog.Clear();

    // configure video source
    VideoCapture1.Video_CaptureDevice = cbVideoInputDevice.Text;
    VideoCapture1.Video_CaptureDevice_Format = cbVideoInputFormat.Text;
    VideoCapture1.Video_CaptureDevice_Format_UseBest = cbUseBestVideoInputFormat.Checked;
    VideoCapture1.Video_CaptureDevice_FrameRate = Convert.ToDouble(cbVideoFrameRate.Text);

    // disable audio
    VideoCapture1.Audio_PlayAudio = false;
    VideoCapture1.Audio_RecordAudio = false;

    // set video preview mode
    VideoCapture1.Mode = VFVideoCaptureMode.VideoPreview;

    // set face tracking settings; enum values are mapped directly from the
    // combo box indexes, so the combo item order must match the enums.
    VideoCapture1.Face_Tracking = new FaceTrackingSettings
    {
        ColorMode = (CamshiftMode)cbFaceTrackingColorMode.SelectedIndex,
        Highlight = cbFaceTrackingCHL.Checked,
        MinimumWindowSize = int.Parse(edFaceTrackingMinimumWindowSize.Text),
        ScalingMode = (ObjectDetectorScalingMode)cbFaceTrackingScalingMode.SelectedIndex,
        SearchMode = (ObjectDetectorSearchMode)cbFaceTrackingSearchMode.SelectedIndex
    };

    // start
    VideoCapture1.Start();
}
/// <summary>
/// Saves the current video frame to the screenshots folder, named by the
/// current time (hour_minute_second_millisecond), in the format selected
/// in the image-type combo box. JPEG uses the quality slider value.
/// </summary>
private void btSaveScreenshot_Click(object sender, EventArgs e)
{
    DateTime now = DateTime.Now;
    string baseName = now.Hour + "_" + now.Minute + "_" + now.Second + "_" + now.Millisecond;
    string pathWithoutExt = edScreenshotsFolder.Text + "\\" + baseName;

    switch (cbImageType.SelectedIndex)
    {
        case 0:
            VideoCapture1.Frame_Save(pathWithoutExt + ".bmp", VFImageFormat.BMP, 0);
            break;
        case 1:
            VideoCapture1.Frame_Save(pathWithoutExt + ".jpg", VFImageFormat.JPEG, tbJPEGQuality.Value);
            break;
        case 2:
            VideoCapture1.Frame_Save(pathWithoutExt + ".gif", VFImageFormat.GIF, 0);
            break;
        case 3:
            VideoCapture1.Frame_Save(pathWithoutExt + ".png", VFImageFormat.PNG, 0);
            break;
        case 4:
            VideoCapture1.Frame_Save(pathWithoutExt + ".tiff", VFImageFormat.TIFF, 0);
            break;
    }
}
/// <summary>
/// Enables/disables the "TextLogo" video effect and pushes the current
/// text, position, font, color and transparency settings from the UI.
/// Creates the effect on first use; shows an error if the registered
/// effect is not a text logo.
/// </summary>
private void cbTextLogo_CheckedChanged(object sender, RoutedEventArgs e)
{
    if (VideoCapture1 == null)
    {
        return;
    }

    IVFVideoEffectTextLogo textLogo = null;
    var existing = VideoCapture1.Video_Effects_Get("TextLogo");

    if (existing is IVFVideoEffectTextLogo found)
    {
        textLogo = found;
    }
    else if (existing == null)
    {
        // First use - register a new text logo effect.
        textLogo = new VFVideoEffectTextLogo(cbTextLogo.IsChecked == true);
        VideoCapture1.Video_Effects_Add(textLogo);
    }

    if (textLogo == null)
    {
        MessageBox.Show("Unable to configure text logo effect.");
        return;
    }

    // Push all UI settings to the effect, then commit with Update().
    textLogo.Enabled = cbTextLogo.IsChecked == true;
    textLogo.Text = edTextLogo.Text;
    textLogo.Left = Convert.ToInt32(edTextLogoLeft.Text);
    textLogo.Top = Convert.ToInt32(edTextLogoTop.Text);
    textLogo.Font = fontDialog.Font;
    textLogo.FontColor = fontDialog.Color;
    textLogo.TransparencyLevel = (int)tbTextLogoTransp.Value;
    textLogo.Update();
}
/// <summary>
/// Configures the capture engine: debug output, preview mode (device or IP
/// camera depending on the radio selection), the matching source, disabled
/// audio, sample grabber and automatic video renderer.
/// </summary>
private void ConfigureVideoCapture()
{
    // Debug output and external VLC path.
    VideoCapture1.Debug_Mode = cbDebugMode.Checked;
    VideoCapture1.Debug_Dir = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments) + "\\VisioForge\\";
    VideoCapture1.VLC_Path = Environment.GetEnvironmentVariable("VFVLCPATH");

    // Preview mode follows the source radio button.
    VideoCapture1.Mode = rbVideoCaptureDevice.Checked
        ? VFVideoCaptureMode.VideoPreview
        : VFVideoCaptureMode.IPPreview;

    switch (VideoCapture1.Mode)
    {
        case VFVideoCaptureMode.IPCapture:
        case VFVideoCaptureMode.IPPreview:
            // from IP camera
            IPCameraSourceSettings settings;
            SelectIPCameraSource(out settings);
            VideoCapture1.IP_Camera_Source = settings;
            break;
        case VFVideoCaptureMode.VideoCapture:
        case VFVideoCaptureMode.VideoPreview:
        case VFVideoCaptureMode.AudioCapture:
        case VFVideoCaptureMode.AudioPreview:
            // from video capture device
            SelectVideoCaptureSource();
            break;
    }

    VideoCapture1.Audio_RecordAudio = false;
    VideoCapture1.Audio_PlayAudio = false;
    VideoCapture1.Video_Sample_Grabber_Enabled = true;
    VideoCapture1.Video_Renderer_SetAuto();
}
/// <summary>
/// Starts screen capture/preview synchronously: configures the screen
/// capture source from the UI fields, disables audio, then starts the
/// engine (MP4 recording when capture is checked).
/// </summary>
private void btStart_Click(object sender, EventArgs e)
{
    // Screen capture source built from the UI fields.
    var screenSource = new ScreenCaptureSourceSettings
    {
        Mode = VFScreenCaptureMode.Screen,
        FullScreen = rbScreenFullScreen.Checked,
        Top = Convert.ToInt32(edScreenTop.Text),
        Bottom = Convert.ToInt32(edScreenBottom.Text),
        Left = Convert.ToInt32(edScreenLeft.Text),
        Right = Convert.ToInt32(edScreenRight.Text),
        DisplayIndex = Convert.ToInt32(cbScreenCaptureDisplayIndex.Text),
        FrameRate = Convert.ToInt32(edScreenFrameRate.Text),
        GrabMouseCursor = cbScreenCapture_GrabMouseCursor.Checked,
        AllowDesktopDuplicationEngine = cbScreenCapture_DesktopDuplication.Checked
    };
    VideoCapture1.Screen_Capture_Source = screenSource;

    // Audio is not captured or played in this sample.
    VideoCapture1.Audio_PlayAudio = false;
    VideoCapture1.Audio_RecordAudio = false;

    // Output: record to MP4 or preview only.
    if (cbCapture.Checked)
    {
        VideoCapture1.Mode = VFVideoCaptureMode.ScreenCapture;
        VideoCapture1.Output_Format = new VFMP4v8v10Output();
        VideoCapture1.Output_Filename = edOutput.Text;
    }
    else
    {
        VideoCapture1.Mode = VFVideoCaptureMode.ScreenPreview;
    }

    VideoCapture1.Start();
}
/// <summary>
/// Toggles the "DeinterlaceCAVT" video effect to match the checkbox state,
/// creating it (with a threshold of 20) on first use. No-op before the
/// capture object exists.
/// </summary>
private void CbDeinterlaceCAVT_Checked(object sender, RoutedEventArgs e)
{
    if (VideoCapture1 == null)
    {
        return;
    }

    bool enabled = cbDeinterlaceCAVT.IsChecked == true;
    var existing = VideoCapture1.Video_Effects_Get("DeinterlaceCAVT");

    if (existing is IVFVideoEffectDeinterlaceCAVT cavt)
    {
        cavt.Enabled = enabled;
    }
    else if (existing == null)
    {
        VideoCapture1.Video_Effects_Add(new VFVideoEffectDeinterlaceCAVT(enabled, 20));
    }
}
/// <summary>Resumes the paused capture/preview asynchronously.</summary>
private async void btResume_Click(object sender, EventArgs e) => await VideoCapture1.ResumeAsync();
/// <summary>
/// Pushes the current capture-rectangle origin and mouse-cursor option
/// to the running screen capture.
/// </summary>
private async void btScreenCaptureUpdate_Click(object sender, EventArgs e)
{
    int left = Convert.ToInt32(edScreenLeft.Text);
    int top = Convert.ToInt32(edScreenTop.Text);
    await VideoCapture1.Screen_Capture_UpdateParametersAsync(left, top, cbScreenCapture_GrabMouseCursor.Checked);
}
/// <summary>Pauses the running capture/preview asynchronously.</summary>
private async void btPause_Click(object sender, EventArgs e) => await VideoCapture1.PauseAsync();
/// <summary>
/// Starts screen capture/preview: resets zoom, configures one or more
/// screen sources (all displays via PIP, or a single display), optional
/// audio capture, video effects, and the selected output format, then
/// starts the engine and the recording timer.
/// </summary>
private async void btStart_Click(object sender, EventArgs e)
{
    mmLog.Clear();

    // Reset renderer zoom to defaults.
    VideoCapture1.Video_Renderer.Zoom_Ratio = 0;
    VideoCapture1.Video_Renderer.Zoom_ShiftX = 0;
    VideoCapture1.Video_Renderer.Zoom_ShiftY = 0;

    VideoCapture1.Debug_Mode = cbDebugMode.Checked;
    VideoCapture1.Debug_Dir = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments) + "\\VisioForge\\";
    VideoCapture1.Video_Sample_Grabber_Enabled = true;

    // from screen
    // The last combo item means "all screens"; in that case the first display
    // is the main source and the remaining displays are added as horizontal
    // PIP sources.
    bool allScreens = cbScreenCaptureDisplayIndex.SelectedIndex == cbScreenCaptureDisplayIndex.Items.Count - 1;
    if (allScreens)
    {
        int n = cbScreenCaptureDisplayIndex.Items.Count - 1;
        VideoCapture1.Screen_Capture_Source = CreateScreenCaptureSource(
            Convert.ToInt32(cbScreenCaptureDisplayIndex.Items[0]),
            true);

        if (n > 1)
        {
            for (int i = 1; i < n; i++)
            {
                var source = CreateScreenCaptureSource(
                    Convert.ToInt32(cbScreenCaptureDisplayIndex.Items[i]),
                    true);
                VideoCapture1.PIP_Mode = VFPIPMode.Horizontal;
                VideoCapture1.PIP_Sources_Add_ScreenSource(source, 0, 0, 0, 0);
            }
        }
    }
    else
    {
        VideoCapture1.Screen_Capture_Source = CreateScreenCaptureSource(
            Convert.ToInt32(cbScreenCaptureDisplayIndex.Text),
            false);
    }

    // audio source
    if (cbRecordAudio.Checked)
    {
        VideoCapture1.Audio_RecordAudio = true;
        VideoCapture1.Audio_PlayAudio = false;
        VideoCapture1.Audio_CaptureDevice = cbAudioInputDevice.Text;
        VideoCapture1.Audio_CaptureDevice_Format = cbAudioInputFormat.Text;
        VideoCapture1.Audio_CaptureDevice_Line = cbAudioInputLine.Text;
    }
    else
    {
        VideoCapture1.Audio_RecordAudio = false;
        VideoCapture1.Audio_PlayAudio = false;
    }

    // apply capture params
    VideoCapture1.Video_Effects_Enabled = true;
    VideoCapture1.Video_Effects_Clear();
    lbLogos.Items.Clear();
    ConfigureVideoEffects();

    if (rbPreview.Checked)
    {
        VideoCapture1.Mode = VFVideoCaptureMode.ScreenPreview;
    }
    else
    {
        VideoCapture1.Mode = VFVideoCaptureMode.ScreenCapture;
        VideoCapture1.Output_Filename = edOutput.Text;

        // Output format selected by combo index; each Set*Output helper fills
        // the format object from its settings dialog/UI.
        switch (cbOutputFormat.SelectedIndex)
        {
            case 0:
                {
                    var aviOutput = new VFAVIOutput();
                    SetAVIOutput(ref aviOutput);
                    VideoCapture1.Output_Format = aviOutput;
                    break;
                }
            case 1:
                {
                    var mkvOutput = new VFMKVv1Output();
                    SetMKVOutput(ref mkvOutput);
                    VideoCapture1.Output_Format = mkvOutput;
                    break;
                }
            case 2:
                {
                    var wmvOutput = new VFWMVOutput();
                    SetWMVOutput(ref wmvOutput);
                    VideoCapture1.Output_Format = wmvOutput;
                    break;
                }
            case 3:
                {
                    var dvOutput = new VFDVOutput();
                    SetDVOutput(ref dvOutput);
                    VideoCapture1.Output_Format = dvOutput;
                    break;
                }
            case 4:
                {
                    var webmOutput = new VFWebMOutput();
                    SetWebMOutput(ref webmOutput);
                    VideoCapture1.Output_Format = webmOutput;
                    break;
                }
            case 5:
                {
                    var ffmpegDLLOutput = new VFFFMPEGDLLOutput();
                    SetFFMPEGDLLOutput(ref ffmpegDLLOutput);
                    VideoCapture1.Output_Format = ffmpegDLLOutput;
                    break;
                }
            case 6:
                {
                    var ffmpegOutput = new VFFFMPEGEXEOutput();
                    SetFFMPEGEXEOutput(ref ffmpegOutput);
                    VideoCapture1.Output_Format = ffmpegOutput;
                    break;
                }
            case 7:
                {
                    var mp4Output = new VFMP4v8v10Output();
                    SetMP4Output(ref mp4Output);
                    VideoCapture1.Output_Format = mp4Output;
                    break;
                }
            case 8:
                {
                    var mp4Output = new VFMP4v11Output();
                    SetMP4v11Output(ref mp4Output);
                    VideoCapture1.Output_Format = mp4Output;
                    break;
                }
            case 9:
                {
                    var gifOutput = new VFAnimatedGIFOutput();
                    SetGIFOutput(ref gifOutput);
                    VideoCapture1.Output_Format = gifOutput;
                    break;
                }
            case 10:
                {
                    // Encrypted MP4 output.
                    var encOutput = new VFMP4v8v10Output();
                    SetMP4Output(ref encOutput);
                    encOutput.Encryption = true;
                    encOutput.Encryption_Format = VFEncryptionFormat.MP4_H264_SW_AAC;
                    VideoCapture1.Output_Format = encOutput;
                    break;
                }
            case 11:
                {
                    var tsOutput = new VFMPEGTSOutput();
                    SetMPEGTSOutput(ref tsOutput);
                    VideoCapture1.Output_Format = tsOutput;
                    break;
                }
            case 12:
                {
                    var movOutput = new VFMOVOutput();
                    SetMOVOutput(ref movOutput);
                    VideoCapture1.Output_Format = movOutput;
                    break;
                }
        }
    }

    await VideoCapture1.StartAsync();

    // Switch to the preview tab and start the recording timer.
    tcMain.SelectedIndex = 3;
    tmRecording.Start();
}
/// <summary>Stops the recording timer, then stops capture asynchronously.</summary>
private async void btStop_Click(object sender, EventArgs e)
{
    // Stop the UI timer first so it does not tick against a stopped engine.
    tmRecording.Stop();
    await VideoCapture1.StopAsync();
}
/// <summary>
/// Starts IP camera capture/preview: disconnects any ONVIF control,
/// configures the IP camera source (engine type, credentials, VLC options,
/// ONVIF profile), optional recording output format, and video effects,
/// then starts the engine and the recording timer.
/// </summary>
private async void btStart_Click(object sender, EventArgs e)
{
#if !NETCOREAPP
    // Drop any existing ONVIF connection before (re)starting.
    if (onvifControl != null)
    {
        onvifControl.Disconnect();
        onvifControl.Dispose();
        onvifControl = null;
        btONVIFConnect.Text = "Connect";
    }
#endif

    mmLog.Clear();

    VideoCapture1.Debug_Mode = cbDebugMode.Checked;
    VideoCapture1.Debug_Dir = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments) + "\\VisioForge\\";

    VideoCapture1.Audio_RecordAudio = cbIPAudioCapture.Checked;
    VideoCapture1.Audio_PlayAudio = cbIPAudioCapture.Checked;

    VideoCapture1.Video_Renderer_SetAuto();
    VideoCapture1.Video_Sample_Grabber_Enabled = true;

    // source
    VideoCapture1.IP_Camera_Source = new IPCameraSourceSettings
    {
        URL = cbIPURL.Text
    };

    // Source engine selected by combo index; the combo item order must
    // match this mapping.
    switch (cbIPCameraType.SelectedIndex)
    {
        case 0:
            VideoCapture1.IP_Camera_Source.Type = VFIPSource.Auto_VLC;
            break;
        case 1:
            VideoCapture1.IP_Camera_Source.Type = VFIPSource.Auto_FFMPEG;
            break;
        case 2:
            VideoCapture1.IP_Camera_Source.Type = VFIPSource.Auto_LAV;
            break;
        case 3:
            VideoCapture1.IP_Camera_Source.Type = VFIPSource.RTSP_Live555;
            break;
        case 4:
            VideoCapture1.IP_Camera_Source.Type = VFIPSource.HTTP_FFMPEG;
            break;
        case 5:
            VideoCapture1.IP_Camera_Source.Type = VFIPSource.MMS_WMV;
            break;
        case 6:
            VideoCapture1.IP_Camera_Source.Type = VFIPSource.RTSP_UDP_FFMPEG;
            break;
        case 7:
            VideoCapture1.IP_Camera_Source.Type = VFIPSource.RTSP_TCP_FFMPEG;
            break;
        case 8:
            VideoCapture1.IP_Camera_Source.Type = VFIPSource.RTSP_HTTP_FFMPEG;
            break;
        case 9:
            {
                // audio not supported
                VideoCapture1.IP_Camera_Source.Type = VFIPSource.HTTP_MJPEG_LowLatency;
                VideoCapture1.Audio_RecordAudio = false;
                VideoCapture1.Audio_PlayAudio = false;
                cbIPAudioCapture.Checked = false;
            }

            break;
        case 10:
            // RTSP low latency over TCP.
            VideoCapture1.IP_Camera_Source.Type = VFIPSource.RTSP_LowLatency;
            VideoCapture1.IP_Camera_Source.RTSP_LowLatency_UseUDP = false;
            break;
        case 11:
            // RTSP low latency over UDP.
            VideoCapture1.IP_Camera_Source.Type = VFIPSource.RTSP_LowLatency;
            VideoCapture1.IP_Camera_Source.RTSP_LowLatency_UseUDP = true;
            break;
        case 12:
            VideoCapture1.IP_Camera_Source.Type = VFIPSource.NDI;
            break;
    }

    // Credentials and VLC-specific options.
    VideoCapture1.IP_Camera_Source.AudioCapture = cbIPAudioCapture.Checked;
    VideoCapture1.IP_Camera_Source.Login = edIPLogin.Text;
    VideoCapture1.IP_Camera_Source.Password = edIPPassword.Text;
    VideoCapture1.IP_Camera_Source.VLC_ZeroClockJitterEnabled = cbVLCZeroClockJitter.Checked;
    VideoCapture1.IP_Camera_Source.VLC_CustomLatency = Convert.ToInt32(edVLCCacheSize.Text);

    if (cbIPCameraONVIF.Checked)
    {
        VideoCapture1.IP_Camera_Source.ONVIF_Source = true;
        if (cbONVIFProfile.SelectedIndex != -1)
        {
            VideoCapture1.IP_Camera_Source.ONVIF_SourceProfile = cbONVIFProfile.Text;
        }
    }

    if (rbPreview.Checked)
    {
        VideoCapture1.Mode = VFVideoCaptureMode.IPPreview;
    }
    else if (rbCapture.Checked)
    {
        VideoCapture1.Mode = VFVideoCaptureMode.IPCapture;
        VideoCapture1.Output_Filename = edOutput.Text;

        // Output format selected by combo index; each Set*Output helper fills
        // the format object from its settings dialog/UI.
        switch (cbOutputFormat.SelectedIndex)
        {
            case 0:
                {
                    var aviOutput = new VFAVIOutput();
                    SetAVIOutput(ref aviOutput);
                    VideoCapture1.Output_Format = aviOutput;
                    break;
                }
            case 1:
                {
                    var wmvOutput = new VFWMVOutput();
                    SetWMVOutput(ref wmvOutput);
                    VideoCapture1.Output_Format = wmvOutput;
                    break;
                }
            case 2:
                {
                    var mp4Output = new VFMP4v8v10Output();
                    SetMP4Output(ref mp4Output);
                    VideoCapture1.Output_Format = mp4Output;
                    break;
                }
            case 3:
                {
                    var mp4Output = new VFMP4v11Output();
                    SetMP4v11Output(ref mp4Output);
                    VideoCapture1.Output_Format = mp4Output;
                    break;
                }
            case 4:
                {
                    var gifOutput = new VFAnimatedGIFOutput();
                    SetGIFOutput(ref gifOutput);
                    VideoCapture1.Output_Format = gifOutput;
                    break;
                }
            case 5:
                {
                    var tsOutput = new VFMPEGTSOutput();
                    SetMPEGTSOutput(ref tsOutput);
                    VideoCapture1.Output_Format = tsOutput;
                    break;
                }
            case 6:
                {
                    var movOutput = new VFMOVOutput();
                    SetMOVOutput(ref movOutput);
                    VideoCapture1.Output_Format = movOutput;
                    break;
                }
        }
    }

    VideoCapture1.Video_Effects_Enabled = true;
    VideoCapture1.Video_Effects_Clear();
    lbLogos.Items.Clear();
    ConfigureVideoEffects();

    await VideoCapture1.StartAsync();

    // Switch to the preview tab and start the recording timer.
    tcMain.SelectedIndex = 3;
    tmRecording.Start();
}
/// <summary>Shows the driver settings dialog for the selected audio capture device.</summary>
private void btAudioInputDeviceSettings_Click(object sender, EventArgs e) =>
    VideoCapture1.Audio_CaptureDevice_SettingsDialog_Show(IntPtr.Zero, cbAudioInputDevice.Text);
/// <summary>Pauses the running capture/preview.</summary>
private void BtPause_Click(object sender, RoutedEventArgs e) => VideoCapture1.Pause();
/// <summary>Resumes the paused capture/preview.</summary>
private void BtResume_Click(object sender, RoutedEventArgs e) => VideoCapture1.Resume();
/// <summary>Stops the recording timer, then stops capture.</summary>
private void btStop_Click(object sender, RoutedEventArgs e)
{
    // Stop the UI timer first so it does not tick against a stopped engine.
    tmRecording.Stop();
    VideoCapture1.Stop();
}
/// <summary>
/// Starts video capture/preview in WPF: configures debug output, audio
/// options, the WPF renderer, the capture device and format from the UI,
/// the selected output format when recording, and video effects, then
/// starts the engine and the recording timer.
/// </summary>
private void btStart_Click(object sender, RoutedEventArgs e)
{
    VideoCapture1.Video_Sample_Grabber_Enabled = true;

    mmLog.Clear();

    VideoCapture1.Debug_Mode = cbDebugMode.IsChecked == true;
    VideoCapture1.Debug_Dir = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments) + "\\VisioForge\\";

    // Audio is recorded (not played) only when the checkbox is set.
    if (cbRecordAudio.IsChecked == true)
    {
        VideoCapture1.Audio_RecordAudio = true;
        VideoCapture1.Audio_PlayAudio = false;
    }
    else
    {
        VideoCapture1.Audio_RecordAudio = false;
        VideoCapture1.Audio_PlayAudio = false;
    }

    // WPF renderer; the capture device itself acts as the audio source.
    VideoCapture1.Video_Renderer.Video_Renderer = VFVideoRendererWPF.WPF;

    VideoCapture1.Video_CaptureDevice = cbVideoInputDevice.Text;
    VideoCapture1.Video_CaptureDevice_IsAudioSource = true;

    VideoCapture1.Audio_OutputDevice = cbAudioOutputDevice.Text;
    VideoCapture1.Audio_CaptureDevice_Format_UseBest = true;

    VideoCapture1.Video_CaptureFormat = cbVideoInputFormat.Text;
    VideoCapture1.Video_CaptureFormat_UseBest = cbUseBestVideoInputFormat.IsChecked == true;

    if (cbFramerate.SelectedIndex != -1)
    {
        VideoCapture1.Video_FrameRate = (float)Convert.ToDouble(cbFramerate.Text);
    }

    if (rbPreview.IsChecked == true)
    {
        VideoCapture1.Mode = VFVideoCaptureMode.VideoPreview;
    }
    else
    {
        VideoCapture1.Mode = VFVideoCaptureMode.VideoCapture;
        VideoCapture1.Output_Filename = edOutput.Text;

        // Output format selected by combo index; each Set*Output helper fills
        // the format object from its settings dialog/UI.
        switch (cbOutputFormat.SelectedIndex)
        {
            case 0:
                {
                    var aviOutput = new VFAVIOutput();
                    SetAVIOutput(ref aviOutput);
                    VideoCapture1.Output_Format = aviOutput;
                    break;
                }
            case 1:
                {
                    var mkvOutput = new VFMKVv1Output();
                    SetMKVOutput(ref mkvOutput);
                    VideoCapture1.Output_Format = mkvOutput;
                    break;
                }
            case 2:
                {
                    var wmvOutput = new VFWMVOutput();
                    SetWMVOutput(ref wmvOutput);
                    VideoCapture1.Output_Format = wmvOutput;
                    break;
                }
            case 3:
                {
                    var dvOutput = new VFDVOutput();
                    SetDVOutput(ref dvOutput);
                    VideoCapture1.Output_Format = dvOutput;
                    break;
                }
            case 4:
                {
                    // Direct stream copy to DV, no re-encoding settings needed.
                    VideoCapture1.Output_Format = new VFDirectCaptureDVOutput();
                    break;
                }
            case 5:
                {
                    var webmOutput = new VFWebMOutput();
                    SetWebMOutput(ref webmOutput);
                    VideoCapture1.Output_Format = webmOutput;
                    break;
                }
            case 6:
                {
                    var ffmpegDLLOutput = new VFFFMPEGDLLOutput();
                    SetFFMPEGDLLOutput(ref ffmpegDLLOutput);
                    VideoCapture1.Output_Format = ffmpegDLLOutput;
                    break;
                }
            case 7:
                {
                    var ffmpegOutput = new VFFFMPEGEXEOutput();
                    SetFFMPEGEXEOutput(ref ffmpegOutput);
                    VideoCapture1.Output_Format = ffmpegOutput;
                    break;
                }
            case 8:
                {
                    var mp4Output = new VFMP4v8v10Output();
                    SetMP4Output(ref mp4Output);
                    VideoCapture1.Output_Format = mp4Output;
                    break;
                }
            case 9:
                {
                    var mp4Output = new VFMP4v11Output();
                    SetMP4v11Output(ref mp4Output);
                    VideoCapture1.Output_Format = mp4Output;
                    break;
                }
            case 10:
                {
                    var gifOutput = new VFAnimatedGIFOutput();
                    SetGIFOutput(ref gifOutput);
                    VideoCapture1.Output_Format = gifOutput;
                    break;
                }
            case 11:
                {
                    // Encrypted MP4 output.
                    var encOutput = new VFMP4v8v10Output();
                    SetMP4Output(ref encOutput);
                    encOutput.Encryption = true;
                    encOutput.Encryption_Format = VFEncryptionFormat.MP4_H264_SW_AAC;
                    VideoCapture1.Output_Format = encOutput;
                    break;
                }
            case 12:
                {
                    var tsOutput = new VFMPEGTSOutput();
                    SetMPEGTSOutput(ref tsOutput);
                    VideoCapture1.Output_Format = tsOutput;
                    break;
                }
            case 13:
                {
                    var movOutput = new VFMOVOutput();
                    SetMOVOutput(ref movOutput);
                    VideoCapture1.Output_Format = movOutput;
                    break;
                }
        }
    }

    VideoCapture1.Video_Effects_Enabled = true;
    VideoCapture1.Video_Effects_Clear();
    lbLogos.Items.Clear();
    ConfigureVideoEffects();

    VideoCapture1.Start();

    // Switch to the preview tab and start the recording timer.
    tcMain.SelectedIndex = 3;
    tmRecording.Start();
}
/// <summary>Sends the Stop transport command to the DV camcorder.</summary>
private void btDVStop_Click(object sender, RoutedEventArgs e) => VideoCapture1.DV_SendCommand(VFDVCommand.Stop);
/// <summary>Sends the Rewind transport command to the DV camcorder.</summary>
private void btDVRewind_Click(object sender, RoutedEventArgs e) => VideoCapture1.DV_SendCommand(VFDVCommand.Rew);
/// <summary>Sends the Fast-Forward transport command to the DV camcorder.</summary>
private void btDVFF_Click(object sender, RoutedEventArgs e) => VideoCapture1.DV_SendCommand(VFDVCommand.FastForward);