/// <summary>
/// Installs the capture pipeline for the requested device, replacing any
/// previously attached <see cref="Capturer"/> components on this GameObject.
/// </summary>
/// <param name="device">Capture hardware configuration to initialise.</param>
public void Initialise_capture(Device device)
{
    this.device = device;

    // Remove any existing capturer components first. The original guarded this
    // with capturers.Any(), which is redundant: iterating an empty array is a no-op.
    foreach (var capturer in this.GetComponents<Capturer>())
    {
        Destroy(capturer);
    }

    switch (this.device)
    {
        case Device.MULTI_KINECT_WIMUS:
            this.capturer = this.gameObject.AddComponent<Kinect2Managing>();
            // 'as' already yields null when the interface is not implemented,
            // so the former "is X ? as X : null" ternaries were redundant.
            this.calibrator = this.capturer as ICalibrator;
            this.syncer = this.capturer as ISyncer;
            this.reconstructor = this.capturer as IReconstructor;
            this.recorder = CompositeRecorder.FromRecorders(this.capturer); // TODO: fusion capturer to be added here
            this.skeletonCapturer = new NoneSkeletonCapturer(); // TODO: add fusion here
            break;

        case Device.SINGLE_KINECT_2:
            var k2controller = this.gameObject.AddComponent<Kinect2Controlling>();
            k2controller.Streams = KinectStreams.Skeleton;
            // Toggle enabled to force the component to re-initialise its streams.
            k2controller.enabled = false;
            k2controller.enabled = true;
            this.capturer = k2controller;
            this.exporter = GetComponent<SkeletonExporting>();
            this.recorder = CompositeRecorder.FromRecorders(this.capturer);
            this.skeletonCapturer = FindObjectsOfType<Capturer>()
                .First(cap => cap is ISkeletonGenerator<SkeletonFrame>) as ISkeletonGenerator<SkeletonFrame>;
            break;
    }
}
/// <summary>
/// Configures and starts the capturer, hiding the video window when no
/// capturer is available.
/// </summary>
void Configure()
{
    if (Capturer == null)
    {
        videowindow.Visible = false;
        return;
    }

    /* Live replay needs a streamable container: temporarily switch Avi/Mp4 to
       Matroska for the capture itself and remux to the real muxer when done. */
    VideoMuxerType originalMuxer = settings.EncodingSettings.EncodingProfile.Muxer;
    if (originalMuxer == VideoMuxerType.Avi || originalMuxer == VideoMuxerType.Mp4)
    {
        settings.EncodingSettings.EncodingProfile.Muxer = VideoMuxerType.Matroska;
    }

    Capturer.Configure(settings, videowindow.WindowHandle);

    // Restore the user's muxer choice for the final remux step.
    settings.EncodingSettings.EncodingProfile.Muxer = originalMuxer;
    delayStart = false;
    Capturer.Run();
    videowindow.MessageVisible = true;
}
/// <summary>
/// Initializes a new instance of the <see cref="MainWindow"/> class.
/// </summary>
public MainWindow()
{
    InitializeComponent();
    Visible = false;

    _capturer = new Capturer();

    // TODO: Make this functionality better & work well for "folder path changed during runtime"...
    System.IO.Directory.CreateDirectory(Settings.Default.SaveFolder);
    _lastShotTaken = 0;

    // Install global (system-wide) keyboard and mouse hooks.
    _listenerKeyboard = new KeyboardHookListener(new GlobalHooker());
    _listenerMouse = new MouseHookListener(new GlobalHooker());
    _listenerKeyboard.Enabled = true;
    _listenerMouse.Enabled = true;

    // Subscribe to the input events we react to.
    _listenerKeyboard.KeyDown += ListenerKeyboardKeyDown;
    _listenerMouse.MouseDown += ListenerMouseMouseDown;
    _listenerMouse.MouseUp += ListenerMouseMouseUp;
}
/// <summary>
/// Opens the settings dialog of the currently selected AVI video codec.
/// </summary>
private void btnVideoCodecProps_Click(object sender, EventArgs e)
{
    // FIX: guard against no selection — the original dereferenced SelectedItem
    // unconditionally and threw NullReferenceException when the combo was empty.
    if (cmbAviVideoCodecs.SelectedItem == null)
    {
        return;
    }

    Capturer tempCapturer = new Capturer();
    tempCapturer.RegistrationName = "demo";
    tempCapturer.RegistrationKey = "demo";
    tempCapturer.CurrentVideoCodecName = cmbAviVideoCodecs.SelectedItem.ToString();
    try
    {
        tempCapturer.ShowVideoCodecSettingsDialog(0);
    }
    catch (InvalidCastException)
    {
        // The SDK raises this for codecs without a configuration UI.
        MessageBox.Show("This codec has no properties dialog.", Application.ProductName);
    }
    catch (Exception exception)
    {
        MessageBox.Show("Failed to open the codec properties dialog.\n" + exception.Message, Application.ProductName);
    }
}
/// <summary>
/// Ends the current recording period, accumulates its time, and restores the
/// idle capture UI.
/// </summary>
public void StopPeriod()
{
    // Nothing to stop if recording never started.
    if (currentPeriod == null)
    {
        string msg = Catalog.GetString("Period recording not started");
        Config.GUIToolkit.WarningMessage(msg, this);
        return;
    }

    GLib.Source.Remove(timeoutID);
    currentPeriod.Stop(CurrentCaptureTime);
    accumTime = CurrentCaptureTime;
    Log.Debug("Stop period stop=", accumTime.ToMSecondsString());
    currentTimeNode = null;
    currentPeriod = null;

    // Idle UI: only the record button remains visible.
    recbutton.Visible = true;
    pausebutton.Visible = false;
    resumebutton.Visible = false;
    stopbutton.Visible = false;

    if (Capturer != null && Capturing)
    {
        Capturer.TogglePause();
    }
    Capturing = false;
}
/// <summary>
/// Closes the currently opened project, optionally saving it first, and
/// notifies listeners that the project changed.
/// </summary>
/// <param name="save">When true the project is saved before closing.</param>
private void CloseOpenedProject(bool save)
{
    if (save)
    {
        SaveProject(OpenedProject, OpenedProjectType);
    }

    // File projects play back through the player; every other type captures live.
    if (OpenedProjectType == ProjectType.FileProject)
    {
        Player.Close();
    }
    else
    {
        Capturer.Close();
    }

    if (OpenedProject != null)
    {
        OpenedProject.Clear();
    }
    OpenedProject = null;
    OpenedProjectType = ProjectType.None;
    EmitProjectChanged();
}
/// <summary>
/// Walks every configured registry hive, writing each key that survives the
/// include/exclude filters to the database.
/// </summary>
public override void ExecuteInternal()
{
    foreach (var hive in Hives)
    {
        Log.Debug("Starting " + hive.ToString());
        if (!Filter.IsFiltered(AsaHelpers.GetPlatformString(), "Scan", "Registry", "Hive", "Include", hive.ToString()) && Filter.IsFiltered(AsaHelpers.GetPlatformString(), "Scan", "Registry", "Hive", "Exclude", hive.ToString(), out Regex capturer))
        {
            Log.Debug("{0} '{1}' {2} '{3}'.", Strings.Get("ExcludingHive"), hive.ToString(), Strings.Get("DueToFilter"), capturer.ToString());
            // FIX: was 'return', which aborted the whole scan and silently
            // skipped every remaining hive. Excluding one hive must only skip
            // that hive (matching the lambda-based versions of this collector,
            // where 'return' inside Parallel.ForEach skips a single element).
            continue;
        }
        Filter.IsFiltered(AsaHelpers.GetPlatformString(), "Scan", "Registry", "Key", "Exclude", hive.ToString());
        var registryInfoEnumerable = RegistryWalker.WalkHive(hive);
        Parallel.ForEach(registryInfoEnumerable, (registryKey =>
        {
            try
            {
                var regObj = RegistryKeyToRegistryObject(registryKey);
                if (regObj != null)
                {
                    DatabaseManager.Write(regObj, RunId);
                }
            }
            catch (InvalidOperationException e)
            {
                // Some registry keys cannot be serialized/converted; log and move on.
                Log.Debug(e, JsonConvert.SerializeObject(registryKey) + " invalid op exept");
            }
        }));
        Log.Debug("Finished " + hive.ToString());
    }
}
/// <summary>
/// Builds the main form: enumerates audio devices, web cameras and codecs via
/// the Bytescout <see cref="Capturer"/> and pre-selects its current settings.
/// </summary>
public Form1()
{
    InitializeComponent();

    // Create Capturer instance.
    capturer = new Capturer();
    capturer.RegistrationName = "demo"; // put your registration name here
    capturer.RegistrationKey = "demo";  // put your registration key here

    // Audio devices.
    for (int i = 0; i < capturer.AudioDeviceCount; i++)
    {
        string line = capturer.GetAudioDeviceName(i);
        cmbAudioDevices.Items.Add(line);
    }
    cmbAudioDevices.SelectedIndex = capturer.CurrentAudioDevice;

    // AVI audio codecs.
    for (int i = 0; i < capturer.AudioCodecsCount; i++)
    {
        string codec = capturer.GetAudioCodecName(i);
        cmbAviAudioCodecs.Items.Add(codec);
    }
    cmbAviAudioCodecs.SelectedIndex = capturer.CurrentAudioCodec;

    // Web cameras; disable the camera UI when none are attached.
    for (int i = 0; i < capturer.WebCamCount; i++)
    {
        string camera = capturer.GetWebCamName(i);
        cmbWebCameras.Items.Add(camera);
    }
    if (cmbWebCameras.Items.Count != 0)
    {
        cmbWebCameras.SelectedIndex = capturer.CurrentWebCam;
    }
    else
    {
        cmbWebCameras.Items.Add("No devices");
        cmbWebCameras.SelectedIndex = 0;
        cmbWebCameras.Enabled = false;
        cbShowWebCamOverlay.Enabled = false;
    }

    // AVI video codecs.
    for (int i = 0; i < capturer.VideoCodecsCount; i++)
    {
        string codec = capturer.GetVideoCodecName(i);
        cmbAviVideoCodecs.Items.Add(codec);
    }
    cmbAviVideoCodecs.SelectedIndex = capturer.CurrentVideoCodec;

    // WMV audio codecs.
    for (int i = 0; i < capturer.WMVAudioCodecsCount; i++)
    {
        string codec = capturer.GetWMVAudioCodecName(i);
        cmbWmvAudioCodecs.Items.Add(codec);
    }
    cmbWmvAudioCodecs.SelectedIndex = capturer.CurrentWMVAudioCodec;

    // WMV video codecs.
    for (int i = 0; i < capturer.WMVVideoCodecsCount; i++)
    {
        string codec = capturer.GetWMVVideoCodecName(i);
        cmbWmvVideoCodecs.Items.Add(codec);
    }
    cmbWmvVideoCodecs.SelectedIndex = capturer.CurrentWMVVideoCodec;

    // FPS values. FIX: the last entry was the int literal 60, which boxes as
    // Int32 while every other entry boxes as Single; lookups against the float
    // value 60f could therefore never match it. Use 60f for a homogeneous list.
    cmbFPS.Items.AddRange(new object[] { 5f, 7.5f, 10f, 12f, 14.985f, 15f, 19.98f, 20f, 23.976f, 24f, 25f, 29.97f, 30f, 50f, 59.94f, 60f });
    cmbFPS.SelectedItem = 14.985f;

    cbCaptureLayeredWindows.Checked = capturer.CaptureTransparentControls;
    nudMouseHotSpot.Value = capturer.MouseSpotRadius;
}
// Accepts a multipart upload containing an .mp4 file plus "title" and
// "description" form fields, converts the video via Capturer.ff(), and saves
// the resulting Video entity. Returns 400 on any validation failure.
public async Task <IHttpActionResult> PostFormData()
{
    Video video = new Video();
    video.UserId = UserId.Value;
    // Only multipart/form-data is accepted.
    if (!Request.Content.IsMimeMultipartContent())
    {
        throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
    }
    string root = HttpContext.Current.Server.MapPath("~/Uploads/Videos");
    var provider = new MyFormDataStreamProvider(root);
    try
    {
        await Request.Content.ReadAsMultipartAsync(provider);
        foreach (var key in provider.FormData.AllKeys)
        {
            // NOTE(review): this file-processing branch sits inside the form-key
            // loop, so every uploaded file is re-converted once per form field
            // (e.g. twice for title+description) — confirm whether it should run
            // once, before/outside this loop.
            if (provider.FileData.Count != 0)
            {
                foreach (MultipartFileData file in provider.FileData)
                {
                    if (Path.GetExtension(file.LocalFileName) == ".mp4")
                    {
                        // Capturer.ff() produces the stored video path and a screenshot.
                        var capturer = new Capturer(Path.GetFileNameWithoutExtension(file.LocalFileName));
                        var paths = capturer.ff();
                        video.Path = paths.videoPath;
                        video.ScreenshotPath = paths.imagePath;
                        video.DateAdded = DateTime.UtcNow;
                    }
                    else
                    {
                        return(BadRequest("Wrong file extension"));
                    }
                }
            }
            else
            {
                return(BadRequest("Select file"));
            }
            // Copy the recognised form fields onto the entity.
            foreach (var val in provider.FormData.GetValues(key))
            {
                if (key == "title")
                {
                    if (val == "")
                    {
                        return(BadRequest("Title is required"));
                    }
                    else
                    {
                        video.VideoName = val;
                    }
                }
                if (key == "description")
                {
                    if (val == "")
                    {
                        return(BadRequest("Description is required"));
                    }
                    else
                    {
                        video.Description = val;
                    }
                }
            }
        }
        if (!ModelState.IsValid)
        {
            return(BadRequest(ModelState));
        }
        var videoFromServer = videoService.SaveVideo(video);
        if (videoFromServer.Success)
        {
            return(Ok(videoFromServer.Data));
        }
        ModelState.AddModelError("", "All fields are required");
    }
    catch (Exception ex)
    {
        // Surface unexpected failures through model state rather than a 500.
        ModelState.AddModelError("", ex);
    }
    return(BadRequest(ModelState));
}
/// <summary>
/// Bytescout Screen Capturing demo: records a single application window
/// (Internet Explorer) to a WMV file for 15 seconds.
/// </summary>
static void Main(string[] args)
{
    Capturer capturer = new Capturer();
    capturer.CapturingType = CaptureAreaType.catWindow; // record one window
    capturer.OutputFileName = "GivenWindowCaptured.wmv"; // output must be .WMV or .AVI

    // Ask the user to start the window we are about to record, then target it.
    MessageBox.Show("Please run Internet Explorer (this sample will capture video from IE) and click OK");
    capturer.WindowToCapture = "Internet Explorer";

    // Output frame size.
    capturer.OutputWidth = 640;
    capturer.OutputHeight = 480;

    // WMV/WEBM bitrate is controlled by WMVVideoBitrate; raise it x2/x3 if the
    // resulting video looks laggy:
    // capturer.WMVVideoBitrate = capturer.WMVVideoBitrate * 2;

    // Lossless compression (switch the output file to .avi first):
    // capturer.CurrentVideoCodecName = "Bytescout Lossless";

    // Record semitransparent/layered windows (may cause cursor flickering):
    // capturer.CaptureTransparentControls = true;

    // Draw a dashed red border around the captured area when not recording the
    // whole screen.
    if (capturer.CapturingType != CaptureAreaType.catScreen &&
        capturer.CapturingType != CaptureAreaType.catWebcamFullScreen)
    {
        capturer.CaptureAreaBorderType = CaptureAreaBorderType.cabtDashed;
        capturer.CaptureAreaBorderColor = (uint)ColorTranslator.ToOle(Color.Red);
    }

    // Start recording. IMPORTANT: if you poll some condition to stop the
    // recording, keep a Thread.Sleep(1) inside the polling loop.
    capturer.Run();

    Console.WriteLine("Capturing entire screen for 15 seconds...");
    Thread.Sleep(15000);
    capturer.Stop();

    // Release the underlying COM object.
    System.Runtime.InteropServices.Marshal.ReleaseComObject(capturer);
    capturer = null;

    Console.WriteLine("Done");
    Process.Start("GivenWindowCaptured.wmv");
}
/// <summary>
/// Stops the global capture hooks when the application exits.
/// </summary>
protected override void OnExit(ExitEventArgs e)
{
    base.OnExit(e);
    Capturer.StopListening();
}
/// <summary>
/// Builds the settings dialog: enumerates devices/codecs through a temporary
/// <see cref="Capturer"/>, pre-selects the values stored in Program.Cfg, and
/// mirrors the remaining configuration into the controls.
/// </summary>
public SettingsForm()
{
    InitializeComponent();

    lblProductName.Text = Application.ProductName;
    lblProductVersion.Text = "Version " + Application.ProductVersion;

    _tempCapturer = new Capturer();
    _tempCapturer.RegistrationName = "demo";
    _tempCapturer.RegistrationKey = "demo";

    // FIX: the last FPS entry was the int literal 60, which boxes as Int32
    // while every other entry boxes as Single; use 60f for a homogeneous list.
    cmbFPS.Items.AddRange(new object[] { 5f, 7.5f, 10f, 12f, 14.985f, 15f, 19.98f, 20f, 23.976f, 24f, 25f, 29.97f, 30f, 50f, 59.94f, 60f });

    // Each codec/device combo follows the same pattern: fill from the capturer,
    // select the configured entry, fall back to the capturer's current value
    // (and persist the fallback). Factored into PopulateAndSelect below.
    string fallback;
    if ((fallback = PopulateAndSelect(cmbAudioDevices, _tempCapturer.AudioDeviceCount, _tempCapturer.GetAudioDeviceName, Program.Cfg.AudioDevice, () => _tempCapturer.CurrentAudioDeviceName)) != null)
    {
        Program.Cfg.AudioDevice = fallback;
    }
    if ((fallback = PopulateAndSelect(cmbAviAudioCodecs, _tempCapturer.AudioCodecsCount, _tempCapturer.GetAudioCodecName, Program.Cfg.AviAudioCodec, () => _tempCapturer.CurrentAudioCodecName)) != null)
    {
        Program.Cfg.AviAudioCodec = fallback;
    }

    // Web cameras do not fit the shared pattern: the fallback selects by index.
    if (_tempCapturer.WebCamCount > 0)
    {
        for (int i = 0; i < _tempCapturer.WebCamCount; i++)
        {
            cmbWebCameras.Items.Add(_tempCapturer.GetWebCamName(i));
        }
        for (int i = 0; i < cmbWebCameras.Items.Count; i++)
        {
            if (cmbWebCameras.Items[i].ToString() == Program.Cfg.WebCameraDevice)
            {
                cmbWebCameras.SelectedIndex = i;
                break;
            }
        }
        if (cmbWebCameras.SelectedIndex == -1 && cmbWebCameras.Items.Count > 0)
        {
            cmbWebCameras.SelectedIndex = _tempCapturer.CurrentWebCam;
            Program.Cfg.WebCameraDevice = _tempCapturer.CurrentWebCamName;
        }
    }
    else
    {
        // No camera attached: disable the whole web-camera group.
        cmbWebCameras.Enabled = false;
        tbWebCameraHeight.Enabled = false;
        tbWebCameraWidth.Enabled = false;
        tbWebCameraX.Enabled = false;
        tbWebCameraY.Enabled = false;
    }

    if ((fallback = PopulateAndSelect(cmbAviVideoCodecs, _tempCapturer.VideoCodecsCount, _tempCapturer.GetVideoCodecName, Program.Cfg.AviVideoCodec, () => _tempCapturer.CurrentVideoCodecName)) != null)
    {
        Program.Cfg.AviVideoCodec = fallback;
    }
    if ((fallback = PopulateAndSelect(cmbWmvAudioCodecs, _tempCapturer.WMVAudioCodecsCount, _tempCapturer.GetWMVAudioCodecName, Program.Cfg.WmvAudioCodec, () => _tempCapturer.CurrentWMVAudioCodecName)) != null)
    {
        Program.Cfg.WmvAudioCodec = fallback;
    }
    if ((fallback = PopulateAndSelect(cmbWmvVideoCodecs, _tempCapturer.WMVVideoCodecsCount, _tempCapturer.GetWMVVideoCodecName, Program.Cfg.WmvVideoCodec, () => _tempCapturer.CurrentWMVVideoCodecName)) != null)
    {
        Program.Cfg.WmvVideoCodec = fallback;
    }

    // Mirror the remaining configuration values into the controls.
    cbEnableAudio.Checked = Program.Cfg.EnableAudio;
    cbResizeVideo.Checked = Program.Cfg.ResizeOutputVideo;
    tbWidth.Text = Program.Cfg.OutputWidth.ToString();
    tbHeight.Text = Program.Cfg.OutputHeight.ToString();
    cbKeepAspectRatio.Checked = Program.Cfg.KeepAspectRatio;
    cmbFPS.Text = Program.Cfg.FPS.ToString();
    cbLog.Checked = Program.Cfg.WriteLog;
    tbWebCameraX.Text = Program.Cfg.WebCameraWindowX.ToString();
    tbWebCameraY.Text = Program.Cfg.WebCameraWindowY.ToString();
    tbWebCameraWidth.Text = Program.Cfg.WebCameraWindowWidth.ToString();
    tbWebCameraHeight.Text = Program.Cfg.WebCameraWindowHeight.ToString();
    cmbAudioLines.Enabled = cbEnableAudio.Checked;
    tbWidth.Enabled = cbResizeVideo.Checked;
    tbHeight.Enabled = cbResizeVideo.Checked;
    cbKeepAspectRatio.Enabled = cbResizeVideo.Checked;
    tabControl2.SelectedIndex = Program.Cfg.SelectedVideoCodecTab;
    cbCaptureMouseCursor.Checked = Program.Cfg.CaptureMouseCursor;
    cbShowMouseHotSpot.Checked = Program.Cfg.ShowMouseHotSpot;
    tbMouseAnimationDuration.Text = Program.Cfg.MouseAnimationDuration.ToString(CultureInfo.InvariantCulture);
    tbHotSpotRadius.Text = Program.Cfg.MouseSpotRadius.ToString(CultureInfo.InvariantCulture);
    ccMouseHotSpotColor.ForeColor = Program.Cfg.MouseHotSpotColor;
    cbAnimateMouseClicks.Checked = Program.Cfg.AnimateMouseClicks;
    cbAnimateMouseButtons.Checked = Program.Cfg.AnimateMouseButtons;
    ccMouseCursorLeftClickAnimationColor.ForeColor = Program.Cfg.MouseCursorLeftClickAnimationColor;
    ccMouseCursorRightClickAnimationColor.ForeColor = Program.Cfg.MouseCursorRightClickAnimationColor;
    cmbFrameType.Items.Add("None");
    cmbFrameType.Items.Add("Solid");
    cmbFrameType.Items.Add("Dashed");
    cmbFrameType.Items.Add("Dotted");
    cmbFrameType.SelectedIndex = (int)Program.Cfg.CaptureAreaBorderType;
    ccFrameColor.ForeColor = Program.Cfg.CaptureAreaBorderColor;
    tbFrameWidth.Text = Program.Cfg.CaptureAreaBorderWidth.ToString(CultureInfo.InvariantCulture);
}

/// <summary>
/// Fills <paramref name="combo"/> with <paramref name="count"/> names obtained
/// from <paramref name="getName"/> and selects the entry matching
/// <paramref name="configured"/>. Returns null when the configured entry was
/// found, otherwise selects and returns the capturer's current name so the
/// caller can persist it (getCurrent is evaluated lazily, matching the
/// original code which only read that property on fallback).
/// </summary>
private static string PopulateAndSelect(ComboBox combo, int count, Func<int, string> getName, string configured, Func<string> getCurrent)
{
    for (int i = 0; i < count; i++)
    {
        combo.Items.Add(getName(i));
    }
    for (int i = 0; i < combo.Items.Count; i++)
    {
        if (combo.Items[i].ToString() == configured)
        {
            combo.SelectedIndex = i;
            break;
        }
    }
    if (combo.SelectedIndex == -1)
    {
        string current = getCurrent();
        combo.SelectedItem = current;
        return current;
    }
    return null;
}
/// <summary>
/// Stops an in-progress recording and resets the send timer; either component
/// may still be null if capture never started.
/// </summary>
private void Stop()
{
    Capturer?.StopRecording();
    SendTimer?.Reset();
}
// Creates the face database: walks databaseDirPath (one sub-directory per
// person, containing face images and an optional name.txt), detects exactly
// one face per image, builds a recognition template for it, and fills the
// parallel lists vwElements / samples / thumbnails / names.
public Database(
    string databaseDirPath,
    Recognizer recognizer,
    Capturer capturer,
    float distanceThreshold)
{
    vwElements = new List <VideoWorker.DatabaseElement>();
    samples = new List <RawSample>();
    thumbnails = new List <OpenCvSharp.Mat>();
    names = new List <string>();

    // check paths
    MAssert.Check(Directory.Exists(databaseDirPath), "database not found");

    // get directory content
    List <string> path_l1 = new List <string>(Directory.EnumerateDirectories(databaseDirPath));

    // check every element in that directory
    ulong element_id_counter = 0;
    for (int il1 = 0; il1 < path_l1.Count; ++il1)
    {
        // ignore files
        if (!Directory.Exists(path_l1[il1]))
        {
            continue;
        }

        // so path_l1[il1] is supposed to be the path to the person directory;
        // get the files inside it
        List <string> path_l2 = new List <string>(Directory.EnumerateFiles(path_l1[il1]));

        string name = string.Empty;

        // search for the name.txt file
        // NOTE(review): name comes straight from ReadToEnd(), so it may carry a
        // trailing newline — confirm that downstream display/trim handles this.
        for (int il2 = 0; il2 < path_l2.Count; ++il2)
        {
            if (Path.GetFileName(path_l2[il2]) == "name.txt")
            {
                // put file content in the name
                using (StreamReader sr = new StreamReader(path_l2[il2]))
                {
                    name = sr.ReadToEnd();
                }
            }
        }

        // try to open each file as an image
        for (int il2 = 0; il2 < path_l2.Count; ++il2)
        {
            if (Path.GetFileName(path_l2[il2]) == "name.txt")
            {
                continue;
            }

            Console.WriteLine("processing '{0}' name: '{1}'", path_l2[il2], name);

            // read image with opencv; only 8-bit 3-channel (BGR) images are accepted
            OpenCvSharp.Mat readed_image = OpenCvSharp.Cv2.ImRead(path_l2[il2]);
            if (readed_image.Empty() || readed_image.Type() != OpenCvSharp.MatType.CV_8UC3)
            {
                Console.WriteLine("\n\nWARNING: can't read image '{0}'\n\n", path_l2[il2]);
                continue;
            }

            // copy the raw BGR pixel bytes out of the Mat for the SDK's RawImage
            byte[] data = new byte[readed_image.Total() * readed_image.Type().Channels];
            Marshal.Copy(readed_image.DataStart, data, 0, (int)data.Length);
            RawImage image = new RawImage(readed_image.Width, readed_image.Height, RawImage.Format.FORMAT_BGR, data);

            // capture the face; images with zero or multiple faces are skipped
            List <RawSample> capturedSamples = capturer.capture(image);
            if (capturedSamples.Count != 1)
            {
                Console.WriteLine("\n\nWARNING: detected {0} faces on '{1}' image instead of one, image ignored \n\n", capturedSamples.Count, path_l2[il2]);
                continue;
            }

            RawSample sample = capturedSamples[0];

            // make template
            Template templ = recognizer.processing(sample);

            // prepare data for VideoWorker: person_id is the directory index il1,
            // element ids are globally sequential
            VideoWorker.DatabaseElement vwElement = new VideoWorker.DatabaseElement(element_id_counter++, (ulong)il1, templ, distanceThreshold);

            vwElements.Add(vwElement);
            samples.Add(sample);
            thumbnails.Add(makeThumbnail(sample, name));
            names.Add(name);
        }
    }

    // the four lists must stay index-aligned
    MAssert.Check((int)element_id_counter == vwElements.Count);
    MAssert.Check((int)element_id_counter == samples.Count);
    MAssert.Check((int)element_id_counter == thumbnails.Count);
    MAssert.Check((int)element_id_counter == names.Count);
}
/// <summary>
/// Takes a snapshot of the primary screen into a timestamp-named .jpg.
/// </summary>
public virtual void CapturerSnapshot()
{
    Rectangle bounds = Screen.PrimaryScreen.Bounds;
    // FIX: read DateTime.Now once. The original called it twice, so the
    // formatted time and the milliseconds suffix could come from different
    // instants (e.g. straddling a second boundary).
    DateTime now = DateTime.Now;
    Capturer.SnapshotWithMessageBox(
        this.Name + "_" + now.ToString("yyyy-MM-ddTHH_mm_ss") + now.TimeOfDay.TotalMilliseconds.ToString() + ".jpg",
        bounds);
}
// Walks every configured registry hive in parallel, writing each key that
// survives the include/exclude filters to the database inside one transaction.
public override void ExecuteInternal()
{
    if (!this.CanRunOnPlatform())
    {
        return;
    }
    // Touch the transaction so all writes below share it; result intentionally discarded.
    _ = DatabaseManager.Transaction;
    Parallel.ForEach(Hives, (hive =>
    {
        Log.Debug("Starting " + hive.ToString());
        if (!Filter.IsFiltered(Helpers.GetPlatformString(), "Scan", "Registry", "Hive", "Include", hive.ToString()) && Filter.IsFiltered(Helpers.GetPlatformString(), "Scan", "Registry", "Hive", "Exclude", hive.ToString(), out Regex Capturer))
        {
            Log.Debug("{0} '{1}' {2} '{3}'.", Strings.Get("ExcludingHive"), hive.ToString(), Strings.Get("DueToFilter"), Capturer.ToString());
            // 'return' only exits this hive's lambda, i.e. skips the excluded hive.
            return;
        }
        Filter.IsFiltered(Helpers.GetPlatformString(), "Scan", "Registry", "Key", "Exclude", hive.ToString());
        var registryInfoEnumerable = RegistryWalker.WalkHive(hive);
        try
        {
            Parallel.ForEach(registryInfoEnumerable, (registryObject =>
            {
                try
                {
                    DatabaseManager.Write(registryObject, runId);
                }
                catch (InvalidOperationException e)
                {
                    // Some registry objects cannot be serialized; log and continue.
                    Logger.DebugException(e);
                    Log.Debug(JsonConvert.SerializeObject(registryObject) + " invalid op exept");
                }
            }));
        }
        catch (Exception e)
        {
            // A failure walking one hive must not abort the other hives.
            Logger.DebugException(e);
            Telemetry.TrackTrace(Microsoft.ApplicationInsights.DataContracts.SeverityLevel.Error, e);
        }
    }));
    DatabaseManager.Commit();
}
/// <summary>
/// Creates the captor and wires up its internal capturer.
/// </summary>
public ArgumentCaptor()
{
    this.capturer = new Capturer(this);
}
// Scans every configured registry hive in parallel and writes the surviving
// keys; wraps the run in Start()/Stop() bookkeeping and truncates any previous
// results for this runId first.
public override void Execute()
{
    Start();
    Log.Information(JsonConvert.SerializeObject(DefaultHives));
    if (!this.CanRunOnPlatform())
    {
        return;
    }
    // Remove results from a previous run with the same id.
    Truncate(this.runId);
    Parallel.ForEach(Hives, (hive =>
    {
        Log.Debug("Starting " + hive.ToString());
        // Explicit include wins; otherwise an exclude filter skips this hive.
        if (Filter.IsFiltered(Helpers.RuntimeString(), "Scan", "Registry", "Hive", "Include", hive.ToString()))
        {
        }
        else if (Filter.IsFiltered(Helpers.RuntimeString(), "Scan", "Registry", "Hive", "Exclude", hive.ToString(), out Regex Capturer))
        {
            Log.Information("{0} '{1}' {2} '{3}'.", Strings.Get("ExcludingHive"), hive.ToString(), Strings.Get("DueToFilter"), Capturer.ToString());
            // 'return' only exits this hive's lambda, i.e. skips the excluded hive.
            return;
        }
        var registryInfoEnumerable = RegistryWalker.WalkHive(hive);
        try
        {
            Parallel.ForEach(registryInfoEnumerable, (registryObject =>
            {
                try
                {
                    Write(registryObject);
                }
                // Some registry keys don't get along
                catch (InvalidOperationException e)
                {
                    Log.Debug(registryObject.Key + " " + e.GetType());
                }
            }));
        }
        catch (Exception e)
        {
            // A failure walking one hive must not abort the other hives.
            Log.Debug(e.GetType().ToString());
            Log.Debug(e.Message);
            Log.Debug(e.StackTrace);
            Telemetry.TrackTrace(Microsoft.ApplicationInsights.DataContracts.SeverityLevel.Error, e);
        }
    }));
    DatabaseManager.Commit();
    Stop();
}
/// <summary>
/// Bytescout Screen Capturing demo: records the whole screen to a WMV file
/// for 15 seconds.
/// </summary>
static void Main(string[] args)
{
    try
    {
        Capturer capturer = new Capturer();
        capturer.CapturingType = CaptureAreaType.catScreen; // record the entire screen
        capturer.OutputFileName = "EntireScreenCaptured.wmv"; // output must be .WMV or .AVI

        // Output frame size.
        capturer.OutputWidth = 640;
        capturer.OutputHeight = 480;

        // Draw a dashed border around the captured area when not recording the
        // whole screen (no-op for catScreen, kept for parity with the other demos).
        if (capturer.CapturingType != CaptureAreaType.catScreen &&
            capturer.CapturingType != CaptureAreaType.catWebcamFullScreen)
        {
            capturer.CaptureAreaBorderType = CaptureAreaBorderType.cabtDashed;
        }

        // Lossless compression (switch the output file to .avi first):
        // capturer.CurrentVideoCodecName = "Bytescout Lossless";

        // Record semitransparent/layered windows (may cause cursor flickering):
        // capturer.CaptureTransparentControls = true;

        // Start recording. IMPORTANT: if you poll some condition to stop the
        // recording, keep a Thread.Sleep(1) inside the polling loop.
        capturer.Run();

        Console.WriteLine("Capturing entire screen for 15 seconds...");
        Thread.Sleep(15000);
        capturer.Stop();

        // Release the underlying COM object.
        System.Runtime.InteropServices.Marshal.ReleaseComObject(capturer);
        capturer = null;

        Console.WriteLine("Done");
        Process.Start("EntireScreenCaptured.wmv");
    }
    finally
    {
        Console.WriteLine("Press any key to exit");
        Console.ReadKey();
    }
}
/// <summary>
/// Starts the global capture hooks as soon as the application launches.
/// </summary>
protected override void OnStartup(StartupEventArgs e)
{
    base.OnStartup(e);
    Capturer.StartListening();
}
/// <summary>
/// Releases the RTC session and the capturer; either may be null if never created.
/// </summary>
public void Dispose()
{
    RtcSession?.Dispose();
    Capturer?.Dispose();
}
// Worker-thread entry point: configures a screen capturer from the shared
// CapturingThreadData, then services its start/pause/stop events until the
// stop event fires. Errors are reported back through data.ErrorText/Success.
public static void ThreadProc(Object obj)
{
    CapturingThreadData data = (CapturingThreadData)obj;
    data.Success = true;

    // Prepare Capturer:
    Capturer capturer = new Capturer(); // create new screen capturer object
    capturer.RegistrationName = "demo";
    capturer.RegistrationKey = "demo";
    capturer.CaptureRectLeft = data.CaptureRectangle.Left;
    capturer.CaptureRectTop = data.CaptureRectangle.Top;
    capturer.CaptureRectWidth = data.CaptureRectangle.Width;
    capturer.CaptureRectHeight = data.CaptureRectangle.Height;
    capturer.OutputWidth = 640;
    capturer.OutputHeight = 480;
    // WMV and WEBM output use WMVVideoBitrate property to control output video bitrate
    // so try to increase it by x2 or x3 times if you think the output video you are getting is laggy
    // capturer.WMVVideoBitrate = capturer.WMVVideoBitrate * 2;

    // NOTE(review): these two lines silently overwrite the capture rectangle
    // taken from data.CaptureRectangle above with a fixed 320x240 — looks like
    // leftover debug code; confirm whether the hard-coded size is intentional.
    capturer.CaptureRectWidth = 320;
    capturer.CaptureRectHeight = 240;

    // Record to a temp file with a .wmv extension; the path is handed back via data.
    data.TempFile = Path.GetTempFileName();
    data.TempFile = Path.ChangeExtension(data.TempFile, ".wmv");
    capturer.OutputFileName = data.TempFile;
    capturer.CapturingType = data.CaptureType;

    // set border around captured area if we are not capturing entire screen
    if (capturer.CapturingType != CaptureAreaType.catScreen && capturer.CapturingType != CaptureAreaType.catWebcamFullScreen)
    {
        capturer.CaptureAreaBorderType = CaptureAreaBorderType.cabtDashed;
        capturer.CaptureAreaBorderColor = (uint)ColorTranslator.ToOle(Color.Red);
    }

    // Wait for events: the controlling thread signals start/resume, pause, and stop.
    WaitHandle[] events = new WaitHandle[] { data.StartOrResumeEvent, data.PauseEvent, data.StopEvent };
    try
    {
        while (true)
        {
            int i = WaitHandle.WaitAny(events);
            if (events[i] == data.StartOrResumeEvent)
            {
                if (!capturer.IsRunning)
                {
                    capturer.Run();
                }
            }
            else if (events[i] == data.PauseEvent)
            {
                if (capturer.IsRunning)
                {
                    capturer.Pause();
                }
            }
            else if (events[i] == data.StopEvent)
            {
                capturer.Stop();
                break;
            }
        }
    }
    catch (Exception ex)
    {
        // Report the failure to the controlling thread instead of crashing it.
        data.ErrorText = ex.Message;
        data.Success = false;
    }
    finally
    {
        // Release resources (COM object held by the capturer).
        Marshal.ReleaseComObject(capturer);
    }
}
/// <summary>
/// Drops the capturer reference so every test starts from a clean state.
/// </summary>
public void TearDown()
{
    _capturer = null;
}
/// <summary>
/// Entry point: creates a capturer and triggers a single capture.
/// </summary>
public static void Main(string[] args)
{
    var capturer = new Capturer();
    capturer.CaptureIt();
}
/// <summary>
/// Creates a finisher over the collaborating mover and capturer.
/// </summary>
/// <param name="mover">Performs the piece movement.</param>
/// <param name="capturer">Handles captured pieces.</param>
public MoveFinisher(Mover mover, Capturer capturer)
{
    this.mover = mover;
    this.capturer = capturer;
}
/// <summary>
/// Bytescout Screen Capturing demo: records the whole screen at a very low
/// frame rate (0.5 fps, video only) with a timestamp overlay, for 5 seconds.
/// </summary>
static void Main(string[] args)
{
    Capturer capturer = new Capturer();
    capturer.CapturingType = CaptureAreaType.catScreen; // record the entire screen
    capturer.OutputFileName = "LowFPS.wmv"; // output must be .WMV or .AVI

    // WMV9 Screen gives the best quality/size ratio when recording a desktop.
    capturer.CurrentWMVVideoCodecName = "Windows Media Video 9 Screen";

    // Video only: disabling audio keeps the file tiny
    // (roughly 20 KB per second / ~70 MB per hour).
    capturer.AudioEnabled = false;

    // 0.5 fps = one frame every two seconds; raise to 1 fps or more if needed.
    capturer.FPS = 0.50f;

    // WMV/WEBM bitrate is controlled by WMVVideoBitrate; raise it x2/x3 if the
    // resulting video looks laggy:
    // capturer.WMVVideoBitrate = capturer.WMVVideoBitrate * 2;

    // Output frame size.
    capturer.OutputWidth = 640;
    capturer.OutputHeight = 480;

    // Text overlay with auto-updating running-time and wall-clock macros.
    capturer.OverlayingRedTextCaption = "Recording: {RUNNINGMIN}:{RUNNINGSEC}:{RUNNINGMSEC} on {CURRENTYEAR}-{CURRENTMONTH}-{CURRENTDAY} at {CURRENTHOUR}:{CURRENTMIN}:{CURRENTSEC}:{CURRENTMSEC}";

    // Record semitransparent/layered windows (may cause cursor flickering):
    // capturer.CaptureTransparentControls = true;

    // Draw a dashed red border around the captured area when not recording the
    // whole screen (no-op for catScreen, kept for parity with the other demos).
    if (capturer.CapturingType != CaptureAreaType.catScreen &&
        capturer.CapturingType != CaptureAreaType.catWebcamFullScreen)
    {
        capturer.CaptureAreaBorderType = CaptureAreaBorderType.cabtDashed;
        capturer.CaptureAreaBorderColor = (uint)ColorTranslator.ToOle(Color.Red);
    }

    // Start recording. IMPORTANT: if you poll some condition to stop the
    // recording, keep a Thread.Sleep(1) inside the polling loop.
    capturer.Run();

    Console.WriteLine("Capturing entire screen for 5 seconds...");
    Thread.Sleep(5000);
    capturer.Stop();

    // Release the underlying COM object.
    System.Runtime.InteropServices.Marshal.ReleaseComObject(capturer);
    capturer = null;

    Console.WriteLine("Done");
    Process.Start("LowFPS.wmv");
}
// FaceSDK video-recognition demo: builds a face database from disk, opens one
// VideoWorker over N OpenCV video sources, and displays annotated frames until
// Escape is pressed. Returns 0 on success, 1 on argument or runtime errors.
static int Main(string[] args)
{
    try
    {
        // print usage
        Console.WriteLine("Usage: dotnet csharp_video_recognition_demo.dll [OPTIONS] <video_source>...");
        Console.WriteLine("Examples:");
        Console.WriteLine(" Webcam: dotnet csharp_video_recognition_demo.dll --config_dir ../../../conf/facerec 0");
        Console.WriteLine(" RTSP stream: dotnet csharp_video_recognition_demo.dll --config_dir ../../../conf/facerec rtsp://localhost:8554/");
        Console.WriteLine("");

        // parse arguments
        bool error = false;
        Options options = new Options();
        CommandLine.Parser.Default.ParseArguments <Options>(args)
            .WithParsed <Options>(opts => options = opts)
            .WithNotParsed <Options>(errs => error = true);

        // exit if argument parsing failed
        if (error)
        {
            return(1);
        }

        // print values of arguments
        Console.WriteLine("Arguments:");
        foreach (var opt in options.GetType().GetProperties())
        {
            if (opt.Name == "video_sources")
            {
                Console.Write("video sources = ");
                foreach (string vs in options.video_sources)
                {
                    Console.Write(vs + " ");
                }
                Console.WriteLine();
            }
            else
            {
                Console.WriteLine("--{0} = {1}", opt.Name, opt.GetValue(options, null));
            }
        }
        Console.WriteLine("\n");

        // copy parsed parameters into locals
        string config_dir = options.config_dir;
        string license_dir = options.license_dir;
        string database_dir = options.database_dir;
        string method_config = options.method_config;
        float recognition_distance_threshold = options.recognition_distance_threshold;
        float frame_fps_limit = options.frame_fps_limit;
        List <string> video_sources = new List <string>(options.video_sources);

        // check params
        MAssert.Check(config_dir != string.Empty, "Error! config_dir is empty.");
        MAssert.Check(database_dir != string.Empty, "Error! database_dir is empty.");
        MAssert.Check(method_config != string.Empty, "Error! method_config is empty.");
        MAssert.Check(recognition_distance_threshold > 0, "Error! Failed recognition distance threshold.");

        // one OpenCV source (and display window) per command-line video source
        List <ImageAndDepthSource> sources = new List <ImageAndDepthSource>();
        List <string> sources_names = new List <string>();
        MAssert.Check(video_sources.Count > 0, "Error! video_sources is empty.");
        for (int i = 0; i < video_sources.Count; i++)
        {
            sources_names.Add(string.Format("OpenCvS source {0}", i));
            sources.Add(new OpencvSource(video_sources[i]));
        }
        MAssert.Check(sources_names.Count == sources.Count);

        // print sources
        Console.WriteLine("\n{0} sources: ", sources.Count);
        for (int i = 0; i < sources_names.Count; ++i)
        {
            Console.WriteLine(" {0}", sources_names[i]);
        }
        Console.WriteLine("");

        // create facerec service
        FacerecService service =
            FacerecService.createService(
                config_dir,
                license_dir);
        Console.WriteLine("Library version: {0}\n", service.getVersion());

        // create database; the recognizer and capturer are only needed while
        // the database is built and are disposed immediately afterwards
        Recognizer recognizer = service.createRecognizer(method_config, true, false, false);
        Capturer capturer = service.createCapturer("common_capturer4_lbf_singleface.xml");
        Database database = new Database(
            database_dir,
            recognizer,
            capturer,
            recognition_distance_threshold);
        recognizer.Dispose();
        capturer.Dispose();

        FacerecService.Config vw_config = new FacerecService.Config("video_worker_fdatracker_blf_fda.xml");
        // vw_config.overrideParameter("single_match_mode", 1);
        vw_config.overrideParameter("search_k", 10);
        vw_config.overrideParameter("not_found_match_found_callback", 1);
        vw_config.overrideParameter("downscale_rawsamples_to_preferred_size", 0);

        //ActiveLiveness.CheckType[] checks = new ActiveLiveness.CheckType[3]
        //{
        //	ActiveLiveness.CheckType.BLINK,
        //	ActiveLiveness.CheckType.TURN_RIGHT,
        //	ActiveLiveness.CheckType.SMILE
        //};

        // create one VideoWorker shared by all streams
        VideoWorker video_worker =
            service.createVideoWorker(
                new VideoWorker.Params()
                    .video_worker_config(vw_config)
                    .recognizer_ini_file(method_config)
                    .streams_count(sources.Count)
                    //.age_gender_estimation_threads_count(sources.Count)
                    //.emotions_estimation_threads_count(sources.Count)
                    //.active_liveness_checks_order(checks)
                    .processing_threads_count(sources.Count)
                    .matching_threads_count(sources.Count));

        // set database
        video_worker.setDatabase(database.vwElements, Recognizer.SearchAccelerationType.SEARCH_ACCELERATION_1);

        // create one display window per source, primed with a black placeholder
        for (int i = 0; i < sources_names.Count; ++i)
        {
            OpenCvSharp.Window window = new OpenCvSharp.Window(sources_names[i]);
            OpenCvSharp.Cv2.ImShow(sources_names[i], new OpenCvSharp.Mat(100, 100, OpenCvSharp.MatType.CV_8UC3, OpenCvSharp.Scalar.All(0)));
        }

        // prepare buffers to store drawn results; the mutex guards them against
        // the worker threads
        Mutex draw_images_mutex = new Mutex();
        List <OpenCvSharp.Mat> draw_images = new List <OpenCvSharp.Mat>(sources.Count);

        // create one worker per source
        List <Worker> workers = new List <Worker>();
        for (int i = 0; i < sources.Count; ++i)
        {
            draw_images.Add(new OpenCvSharp.Mat(100, 100, OpenCvSharp.MatType.CV_8UC3, OpenCvSharp.Scalar.All(0)));
            workers.Add(new Worker(
                database,
                video_worker,
                sources[i],
                i, // stream_id
                draw_images_mutex,
                draw_images[i],
                frame_fps_limit
                ));
        }

        // draw results until Escape is pressed
        for (;;)
        {
            {
                draw_images_mutex.WaitOne();
                for (int i = 0; i < draw_images.Count; ++i)
                {
                    OpenCvSharp.Mat drawed_im = workers[i]._draw_image;
                    if (!drawed_im.Empty())
                    {
                        OpenCvSharp.Cv2.ImShow(sources_names[i], drawed_im);
                        draw_images[i] = new OpenCvSharp.Mat();
                    }
                }
                draw_images_mutex.ReleaseMutex();
            }

            // keyboard controls: Esc quits, Space/Enter toggle processing on
            // stream 0, 'r' resets the tracker on stream 0
            int key = OpenCvSharp.Cv2.WaitKey(20);
            if (27 == key)
            {
                foreach (Worker w in workers)
                {
                    w.Dispose();
                }
                break;
            }
            if (' ' == key)
            {
                Console.WriteLine("enable processing 0");
                video_worker.enableProcessingOnStream(0);
            }
            if (13 == key)
            {
                Console.WriteLine("disable processing 0");
                video_worker.disableProcessingOnStream(0);
            }
            if ('r' == key)
            {
                Console.WriteLine("reset trackerOnStream");
                video_worker.resetTrackerOnStream(0);
            }

            // check exceptions in callbacks
            video_worker.checkExceptions();
        }

        // force free resources
        // otherwise licence error may occur
        // when creating an sdk object the next time
        service.Dispose();
        video_worker.Dispose();
    }
    catch (Exception e)
    {
        Console.WriteLine("video_recognition_show exception catched: '{0}'", e.ToString());
        return(1);
    }
    return(0);
}
// Demo: record the entire screen to EntireScreenCaptured.wmv for 15 seconds,
// then release the capturer and play the resulting file back.
static void Main(string[] args)
{
    Capturer capturer = new Capturer(); // create new screen capturer object

    capturer.CapturingType = CaptureAreaType.catScreen; // set capturing area type to catScreen to capture whole screen
    capturer.OutputFileName = "EntireScreenCaptured.wmv"; // set output video filename to .WMV or .AVI file

    // set output video width and height
    capturer.OutputWidth = 640;
    capturer.OutputHeight = 480;

    // set the text overlay with autochanging time stamp macros to indicate playing time and current date time
    capturer.OverlayingRedTextCaption = "Recording: {RUNNINGMIN}:{RUNNINGSEC}:{RUNNINGMSEC} on {CURRENTYEAR}-{CURRENTMONTH}-{CURRENTDAY} at {CURRENTHOUR}:{CURRENTMIN}:{CURRENTSEC}:{CURRENTMSEC}";

    // uncomment to enable recording of semitransparent or layered windows (Warning: may cause mouse cursor flickering)
    // capturer.CaptureTransparentControls = true;

    // WMV and WEBM output use WMVVideoBitrate property to control output video bitrate
    // so try to increase it by x2 or x3 times if you think the output video you are getting is laggy
    // capturer.WMVVideoBitrate = capturer.WMVVideoBitrate * 2;

    // set border around captured area if we are not capturing entire screen
    if (capturer.CapturingType != CaptureAreaType.catScreen &&
        capturer.CapturingType != CaptureAreaType.catWebcamFullScreen)
    {
        // set border style
        capturer.CaptureAreaBorderType = CaptureAreaBorderType.cabtDashed;
        capturer.CaptureAreaBorderColor = (uint)ColorTranslator.ToOle(Color.Red);
    }

    // uncomment to set Bytescout Lossless Video format output video compression method
    // do not forget to set file to .avi format if you use Video Codec Name
    // capturer.CurrentVideoCodecName = "Bytescout Lossless";

    capturer.Run(); // run screen video capturing

    // IMPORTANT: if you want to check for some code if need to stop the recording then make sure you are
    // using Thread.Sleep(1) inside the checking loop, so you have the loop like
    // Do {
    //   Thread.Sleep(1)
    // }
    // While(StopButtonNotClicked);

    // BUGFIX: the message previously said "20 seconds" while the sleep
    // below is only 15000 ms; keep the message in sync with the sleep.
    Console.WriteLine("Capturing entire screen for 15 seconds...");

    Thread.Sleep(15000); // wait for 15 seconds

    capturer.Stop(); // stop video capturing

    // Release the underlying COM object before exiting
    System.Runtime.InteropServices.Marshal.ReleaseComObject(capturer);
    capturer = null;

    Console.WriteLine("Done");

    Process.Start("EntireScreenCaptured.wmv");
}
/// <summary>
/// Entry point for a recording session run on its own thread.
/// <paramref name="obj"/> must be a <c>CapturingThreadData</c>; the capture
/// rectangle, capture type and webcam flag are read from it, and the results
/// (temp file path, error text, exit code in <c>Result</c>) are written back
/// into the same object. The thread records until it is interrupted via
/// <c>Thread.Interrupt</c>, which stops the capture and sets Result to 0.
/// </summary>
public static void ThreadProc(Object obj) {
    Capturer capturer = new Capturer(); // create new screen capturer object
    CapturingThreadData data = (CapturingThreadData)obj;

    // optional diagnostics log in the user's temp folder
    if (Program.Cfg.WriteLog) {
        capturer.SetLogFile(Path.GetTempPath() + Application.ProductName + " log.txt");
    }

    capturer.RegistrationName = "demo";
    capturer.RegistrationKey = "demo";

    // audio device and line come from configuration when set; empty string
    // means "leave the capturer's default"
    if (Program.Cfg.AudioDevice != "") {
        capturer.CurrentAudioDeviceName = Program.Cfg.AudioDevice;
    }
    if (Program.Cfg.AudioLine != "") {
        capturer.CurrentAudioDeviceLineName = Program.Cfg.AudioLine;
    }

    // codec tab 0 selects the WMV settings, any other tab the AVI settings.
    // NOTE: the WMV branch also reads the properties back into the config —
    // presumably the capturer normalizes/validates the names (TODO confirm).
    if (Program.Cfg.SelectedVideoCodecTab == 0) {
        capturer.CurrentWMVAudioCodecName = Program.Cfg.WmvAudioCodec;
        capturer.CurrentWMVAudioFormat = Program.Cfg.WmvAudioFormat;
        capturer.CurrentWMVVideoCodecName = Program.Cfg.WmvVideoCodec;
        Program.Cfg.WmvAudioCodec = capturer.CurrentWMVAudioCodecName;
        Program.Cfg.WmvAudioFormat = capturer.CurrentWMVAudioFormat;
        Program.Cfg.WmvVideoCodec = capturer.CurrentWMVVideoCodecName;
    } else {
        capturer.CurrentAudioCodecName = Program.Cfg.AviAudioCodec;
        capturer.CurrentVideoCodecName = Program.Cfg.AviVideoCodec;
    }

    capturer.AudioEnabled = Program.Cfg.EnableAudio;

    // this option tells to use captured area dimensions as output video width/height
    // or use user defined video dimensions
    capturer.MatchOutputSizeToTheSourceSize = !Program.Cfg.ResizeOutputVideo;
    capturer.FPS = Program.Cfg.FPS;

    // mouse cursor / click visualization options, all from configuration
    capturer.ShowMouseHotSpot = Program.Cfg.ShowMouseHotSpot;
    capturer.CaptureMouseCursor = Program.Cfg.CaptureMouseCursor;
    capturer.AnimateMouseClicks = Program.Cfg.AnimateMouseClicks;
    capturer.AnimateMouseButtons = Program.Cfg.AnimateMouseButtons;
    capturer.MouseAnimationDuration = Program.Cfg.MouseAnimationDuration;
    capturer.MouseSpotRadius = Program.Cfg.MouseSpotRadius;
    capturer.MouseHotSpotColor = (uint)ColorTranslator.ToOle(Program.Cfg.MouseHotSpotColor);
    capturer.MouseCursorLeftClickAnimationColor = (uint)ColorTranslator.ToOle(Program.Cfg.MouseCursorLeftClickAnimationColor);
    capturer.MouseCursorRightClickAnimationColor = (uint)ColorTranslator.ToOle(Program.Cfg.MouseCursorRightClickAnimationColor);

    // capture rectangle requested by the caller
    capturer.CaptureRectLeft = data.CaptureRectangle.Left;
    capturer.CaptureRectTop = data.CaptureRectangle.Top;
    capturer.CaptureRectWidth = data.CaptureRectangle.Width;
    capturer.CaptureRectHeight = data.CaptureRectangle.Height;
    capturer.KeepAspectRatio = Program.Cfg.KeepAspectRatio;

    // show recording time stamp
    capturer.OverlayingRedTextCaption = "Recording: {RUNNINGMIN}:{RUNNINGSEC}:{RUNNINGMSEC} on {CURRENTYEAR}-{CURRENTMONTH}-{CURRENTDAY} at {CURRENTHOUR}:{CURRENTMIN}:{CURRENTSEC}:{CURRENTMSEC}";

    capturer.OutputWidth = Program.Cfg.OutputWidth;
    capturer.OutputHeight = Program.Cfg.OutputHeight;

    // add webcam video when a webcam exists and the caller asked for it;
    // position/size of the webcam rectangle come from configuration
    if ((capturer.WebCamCount > 0) && (data.ShowWebCamStream)) {
        capturer.AddWebCamVideo = true;
        if (!String.IsNullOrEmpty(Program.Cfg.WebCameraDevice)) {
            capturer.CurrentWebCamName = Program.Cfg.WebCameraDevice;
        }
        capturer.SetWebCamVideoRectangle(Program.Cfg.WebCameraWindowX, Program.Cfg.WebCameraWindowY, Program.Cfg.WebCameraWindowWidth, Program.Cfg.WebCameraWindowHeight);
    }

    // record into a temp file whose extension matches the selected container
    data.TempFile = Path.GetTempFileName();
    data.TempFile = Path.ChangeExtension(data.TempFile, (Program.Cfg.SelectedVideoCodecTab == 0) ? ".wmv" : ".avi");
    capturer.OutputFileName = data.TempFile;
    capturer.CapturingType = data.CaptureType;

    // set border around captured area if we are not capturing entire screen
    if (capturer.CapturingType != CaptureAreaType.catScreen && capturer.CapturingType != CaptureAreaType.catWebcamFullScreen) {
        // set border style
        capturer.CaptureAreaBorderType = Program.Cfg.CaptureAreaBorderType;
        capturer.CaptureAreaBorderColor = (uint)ColorTranslator.ToOle(Program.Cfg.CaptureAreaBorderColor);
        capturer.CaptureAreaBorderWidth = Program.Cfg.CaptureAreaBorderWidth;
    }

    try {
        capturer.Run();
        // IMPORTANT: if you want to check for some code if need to stop the recording then make sure you are
        // using Thread.Sleep(1) inside the checking loop, so you have the loop like
        // Do
        //   Thread.Sleep(1)
        // While StopButtonNotClicked
    } catch (COMException ex) {
        // capture failed to start: report the error, release the COM
        // object and bail out without entering the wait phase
        data.ErrorText = ex.Message;
        data.Result = 1;
        Marshal.ReleaseComObject(capturer);
        return;
    }

    try {
        // sleep until the controlling thread interrupts us to stop recording
        Thread.Sleep(Timeout.Infinite);
    } catch (ThreadInterruptedException) {
        // normal stop path: interruption means "stop recording now"
        capturer.Stop();
        data.Result = 0;
    } catch (Exception ex) {
        data.ErrorText = ex.Message;
        data.Result = 1;
    } finally {
        // always release the COM object, whatever way we stopped
        Marshal.ReleaseComObject(capturer);
    }
}
// Window-close handler: releases the screen capturer.
// Guarded so a capturer that was never created (or already disposed and
// nulled elsewhere) does not cause a NullReferenceException on shutdown.
private void MainWindow_Closing(object sender, System.ComponentModel.CancelEventArgs e)
{
    if (capturer != null)
    {
        capturer.Dispose();
        capturer = null;
    }
}
// Demo: record the whole screen for 25 seconds total, adding blackout
// (masked) regions after the first second and removing them again after 15
// more seconds, then play the captured file back.
static void Main(string[] args)
{
    // Full-screen capturer writing a 640x480 WMV file.
    var screenCapturer = new Capturer();
    screenCapturer.CapturingType = CaptureAreaType.catScreen;
    screenCapturer.OutputFileName = "EntireScreenCaptured.wmv";
    screenCapturer.OutputWidth = 640;
    screenCapturer.OutputHeight = 480;

    // Optional tweaks kept for reference:
    // - record semitransparent/layered windows (may cause cursor flickering):
    //   screenCapturer.CaptureTransparentControls = true;
    // - WMV/WEBM bitrate, raise x2-x3 if the output looks laggy:
    //   screenCapturer.WMVVideoBitrate = screenCapturer.WMVVideoBitrate * 2;
    // - Bytescout Lossless compression (requires .avi output):
    //   screenCapturer.CurrentVideoCodecName = "Bytescout Lossless";

    // A border is only drawn when capturing less than the full screen.
    bool isFullScreenCapture =
        screenCapturer.CapturingType == CaptureAreaType.catScreen ||
        screenCapturer.CapturingType == CaptureAreaType.catWebcamFullScreen;
    if (!isFullScreenCapture)
    {
        screenCapturer.CaptureAreaBorderType = CaptureAreaBorderType.cabtDashed;
        screenCapturer.CaptureAreaBorderColor = (uint)ColorTranslator.ToOle(Color.Red);
    }

    // Start recording. (If polling for a stop condition in a loop, put a
    // Thread.Sleep(1) inside that loop.)
    screenCapturer.Run();

    // After one second of recording, mask three screen areas.
    Thread.Sleep(1000);
    Console.WriteLine("Adding blackout regions after 1 sec of recording...");
    screenCapturer.BlackoutAddArea(20, 100, 500, 80);
    screenCapturer.BlackoutAddArea(10, 200, 550, 80);
    screenCapturer.BlackoutAddArea(20, 400, 550, 120);

    // Keep the masks up for 15 seconds, then clear them all.
    Thread.Sleep(15000);
    Console.WriteLine("Remove blackout regions...");
    screenCapturer.BlackoutReset();

    // Record unmasked for another 9 seconds, then stop.
    Thread.Sleep(9000);
    screenCapturer.Stop();

    // Release the underlying COM object.
    System.Runtime.InteropServices.Marshal.ReleaseComObject(screenCapturer);
    screenCapturer = null;

    Console.WriteLine("Done");

    Process.Start("EntireScreenCaptured.wmv");
}
// Demo: record a screen region until a key is pressed.
// args[0] is the output file name; remaining arguments are applied by setParams.
// Prints the codec/audio line actually in use and a spinner while encoding.
static void Main(string[] args)
{
    Capturer capturer = new Capturer(); // create new screen capturer object

    if (args.Length < 1)
    {
        usage(capturer);
        return;
    }

    capturer.OutputFileName = args[0];
    capturer.CapturingType = CaptureAreaType.catRegion;
    setParams(args, capturer);

    // set border around captured area if we are not capturing entire screen
    if (capturer.CapturingType != CaptureAreaType.catScreen &&
        capturer.CapturingType != CaptureAreaType.catWebcamFullScreen)
    {
        // set border style
        capturer.CaptureAreaBorderType = CaptureAreaBorderType.cabtDashed;
        capturer.CaptureAreaBorderColor = (uint)ColorTranslator.ToOle(Color.Red);
    }

    // uncomment to enable recording of semitransparent or layered windows (Warning: may cause mouse cursor flickering)
    // capturer.CaptureTransparentControls = true;

    // WMV and WEBM output use WMVVideoBitrate property to control output video bitrate
    // so try to increase it by x2 or x3 times if you think the output video you are getting is laggy
    // capturer.WMVVideoBitrate = capturer.WMVVideoBitrate * 2;

    try
    {
        capturer.Run();

        // IMPORTANT: if you want to check for some code if need to stop the recording then make sure you are
        // using Thread.Sleep(1) inside the checking loop, so you have the loop like
        // Do {
        //   Thread.Sleep(1)
        // }
        // While(StopButtonNotClicked);

        Console.WriteLine("Starting capture - Hit a key to stop ...");

        string s = capturer.CurrentVideoCodecName;
        Console.WriteLine(string.Format("Using video compressor - {0}", s));
        s = capturer.CurrentAudioCodecName;
        Console.WriteLine(string.Format("Using audio compressor - {0}", s));
        s = capturer.CurrentAudioDeviceLineName;
        Console.WriteLine(string.Format("Using audio input line - {0}", s));

        // animate a 4-phase spinner until a key press is detected
        int i = 0;
        string spin = "|/-\\";
        while (Win32Interop._kbhit() == 0)
        {
            Console.Write(string.Format("\rEncoding {0}", spin[i++]));
            i %= 4;
            Thread.Sleep(50);
        }

        capturer.Stop();

        // Release resources
        System.Runtime.InteropServices.Marshal.ReleaseComObject(capturer);
        capturer = null;

        Console.Write("\nDone");
        Console.Read();
    }
    catch (Exception ex)
    {
        // BUGFIX: capturer is set to null near the end of the try block, so
        // unconditionally reading capturer.LastError here could itself throw
        // NullReferenceException; fall back to the exception message.
        Console.WriteLine(capturer != null ? capturer.LastError : ex.Message);
    }
}