/// <summary>
/// Entry point for all screenshot captures: resolves default task settings when
/// none are given, then dispatches to the capture routine matching
/// <paramref name="captureType"/>.
/// </summary>
/// <param name="captureType">Which kind of capture to perform.</param>
/// <param name="taskSettings">Per-task settings; defaults are loaded when null.</param>
/// <param name="autoHideForm">Whether the main form hides itself before capturing.</param>
public void CaptureScreenshot(CaptureType captureType, TaskSettings taskSettings = null, bool autoHideForm = true)
{
    if (taskSettings == null) taskSettings = TaskSettings.GetDefaultTaskSettings();

    switch (captureType)
    {
        case CaptureType.Screen:
            DoCapture(Screenshot.CaptureFullscreen, CaptureType.Screen, taskSettings, autoHideForm);
            break;
        case CaptureType.ActiveWindow:
            CaptureActiveWindow(taskSettings, autoHideForm);
            break;
        case CaptureType.ActiveMonitor:
            DoCapture(Screenshot.CaptureActiveMonitor, CaptureType.ActiveMonitor, taskSettings, autoHideForm);
            break;
        // All shaped-region captures share the same region-capture routine;
        // the shape is carried through via captureType.
        case CaptureType.Rectangle:
        case CaptureType.RectangleWindow:
        case CaptureType.RoundedRectangle:
        case CaptureType.Ellipse:
        case CaptureType.Triangle:
        case CaptureType.Diamond:
        case CaptureType.Polygon:
        case CaptureType.Freehand:
            CaptureRegion(captureType, taskSettings, autoHideForm);
            break;
        case CaptureType.LastRegion:
            CaptureLastRegion(taskSettings, autoHideForm);
            break;
    }
}
/// <summary>
/// Factory mapping a <see cref="CaptureType"/> to the strategy object that
/// implements that capture. Returns null for types with no mapping.
/// </summary>
/// <param name="captureType">The requested capture kind.</param>
/// <param name="mainForm">Owning form passed to every strategy.</param>
public CaptureStrategy GetStrategy(CaptureType captureType, MainForm mainForm)
{
    switch (captureType)
    {
        case CaptureType.Screen:
            return new CaptureScreen(mainForm);
        case CaptureType.ActiveWindow:
            return new CaptureActiveWindow(mainForm);
        case CaptureType.ActiveMonitor:
            return new CaptureActiveMonitor(mainForm);
        // All shaped-region types share one region strategy.
        case CaptureType.Rectangle:
        case CaptureType.RectangleWindow:
        case CaptureType.Polygon:
        case CaptureType.Freehand:
            return new CaptureRegion(mainForm);
        case CaptureType.CustomRegion:
            return new CaptureCustomRegion(mainForm);
        case CaptureType.LastRegion:
            return new CaptureLastRegion(mainForm);
        default:
            return null;
    }
}
// Handles the "Capture" button: hides this window and the floating target-text
// window, records that the capture was triggered by the button, then starts OCR.
private void Button_Click_Capture(object sender, RoutedEventArgs e)
{
    HideWnd();
    mTargetTextWindow.HideWnd();
    mCapType = CaptureType.button;
    // NOTE(review): a click handler already runs on the UI thread, so the
    // Dispatcher.Invoke wrapper looks redundant here — confirm before removing.
    this.Dispatcher.Invoke(() => DoCaptureOCR());
}
// Parses a single template hole ("{name}", "{name,align}", "{name:format}")
// starting just after the opening brace, then emits a LiteralHole that pairs the
// pending literal run with the parsed hole. Resets the literal-length counter.
private void ParseHole(CaptureType type)
{
    int start = _position;
    string name = ParseName(out var position);
    int alignment = 0;
    string format = null;
    // Optional ",alignment" then ":format" sections, each introduced by its own
    // delimiter; a bare "{name}" skips straight past the closing brace.
    if (Peek() != '}')
    {
        alignment = Peek() == ',' ? ParseAlignment() : 0;
        format = Peek() == ':' ? ParseFormat() : null;
        Skip('}');
    }
    else
    {
        _position++;
    }
    int literalSkip = _position - start + (type == CaptureType.Normal ? 1 : 2); // Account for skipped '{', '{$' or '{@'
    _current = new LiteralHole(new Literal { Print = _literalLength, Skip = literalSkip }, new Hole(
        name,
        format,
        type,
        (short)position,
        (short)alignment
    ));
    _literalLength = 0;
}
/// <summary>
/// Constructs a single message template parameter.
/// </summary>
/// <param name="name">Parameter name; must not be null.</param>
/// <param name="value">Parameter value.</param>
/// <param name="format">Parameter format.</param>
/// <param name="captureType">Parameter capture type.</param>
public MessageTemplateParameter([NotNull] string name, object value, string format, CaptureType captureType)
{
    if (name == null)
    {
        throw new ArgumentNullException(nameof(name));
    }

    Name = name;
    Value = value;
    Format = format;
    CaptureType = captureType;
}
/// <summary>
/// Static screenshot entry point: resolves default task settings when none are
/// given, then dispatches to the routine matching <paramref name="captureType"/>.
/// The screenshot helper itself is obtained per-call via TaskHelpers.GetScreenshot.
/// </summary>
/// <param name="captureType">Which kind of capture to perform.</param>
/// <param name="taskSettings">Per-task settings; defaults are loaded when null.</param>
/// <param name="autoHideForm">Whether the main form hides itself before capturing.</param>
public static void CaptureScreenshot(CaptureType captureType, TaskSettings taskSettings = null, bool autoHideForm = true)
{
    if (taskSettings == null) taskSettings = TaskSettings.GetDefaultTaskSettings();

    switch (captureType)
    {
        case CaptureType.Fullscreen:
            DoCapture(TaskHelpers.GetScreenshot(taskSettings).CaptureFullscreen, CaptureType.Fullscreen, taskSettings, autoHideForm);
            break;
        case CaptureType.ActiveWindow:
            CaptureActiveWindow(taskSettings, autoHideForm);
            break;
        case CaptureType.ActiveMonitor:
            DoCapture(TaskHelpers.GetScreenshot(taskSettings).CaptureActiveMonitor, CaptureType.ActiveMonitor, taskSettings, autoHideForm);
            break;
        case CaptureType.Region:
            CaptureRegion(taskSettings, autoHideForm);
            break;
        case CaptureType.CustomRegion:
            CaptureCustomRegion(taskSettings, autoHideForm);
            break;
        case CaptureType.LastRegion:
            CaptureLastRegion(taskSettings, autoHideForm);
            break;
    }
}
/// <summary>
/// Stores the remote connection and capture mode, then begins listening for
/// incoming data on the connection.
/// </summary>
private void InitializeConnection(IConnection connection, CaptureType captureType)
{
    // Record state first, then subscribe so the handler sees consistent fields.
    _remoteConnection = connection;
    CaptureType = captureType;
    connection.DataReceived += ConnectionOnDataReceived;
}
/// <summary>
/// Sends the remote-desktop initialization command and (re)creates the local
/// codecs that decode the incoming image and cursor streams. (Two stale
/// commented-out variants of the send call were removed.)
/// </summary>
/// <param name="captureType">How the remote side should capture the screen.</param>
/// <param name="compressionType">Image compression to use for the stream.</param>
private async Task InitializeRemoteComponents(CaptureType captureType, ImageCompressionType compressionType)
{
    // Wire format: command byte, then capture/monitor/quality/cursor/compression bytes.
    await ConnectionInfo.SendCommand(this, new[]
    {
        (byte)RemoteDesktopCommunication.Initialize, (byte)captureType, (byte)CurrentScreen.Number,
        (byte)ImageQuality, ShowCursor ? (byte)1 : (byte)0, (byte)compressionType
    });

    CaptureType = captureType;
    _currentlyStreamedMonitor = CurrentScreen.Number;

    // Dispose codecs from any previous session before creating fresh ones.
    _streamCodec?.Dispose();
    _cursorStreamCodec?.Dispose();
    _streamCodec = new UnsafeStreamCodec(GetImageCompression(compressionType), UnsafeStreamCodecParameters.DontDisposeImageCompressor);
    _cursorStreamCodec = new CursorStreamCodec();
}
/// <summary>
/// Captures the screen area selected by <paramref name="typeOfCapture"/>. For
/// <c>AllScreens</c> one bitmap per screen's working area is produced; otherwise a
/// single bitmap is returned.
/// </summary>
/// <param name="typeOfCapture">Which portion of the screen to capture.</param>
/// <returns>The captured bitmaps; on failure the error is logged and the field's
/// previous contents (possibly null) are returned.</returns>
public virtual Bitmap[] Capture(CaptureType typeOfCapture)
{
    Bitmap memoryImage;
    int count = 1;
    try
    {
        Screen[] screens = Screen.AllScreens;
        Rectangle rc;
        switch (typeOfCapture)
        {
            case CaptureType.PrimaryScreen:
                rc = Screen.PrimaryScreen.Bounds;
                break;
            case CaptureType.VirtualScreen:
                rc = SystemInformation.VirtualScreen;
                break;
            case CaptureType.WorkingArea:
                rc = Screen.PrimaryScreen.WorkingArea;
                break;
            case CaptureType.AllScreens:
                // One image per screen, starting from the first screen's working
                // area. (Removed the dead re-assignment of typeOfCapture — the
                // parameter is never read after this switch.)
                count = screens.Length;
                rc = screens[0].WorkingArea;
                break;
            default:
                rc = SystemInformation.VirtualScreen;
                break;
        }

        this.images = new Bitmap[count];
        for (int index = 0; index < count; index++)
        {
            if (index > 0)
            {
                rc = screens[index].WorkingArea;
            }

            memoryImage = new Bitmap(rc.Width, rc.Height, PixelFormat.Format32bppArgb);
            using (Graphics memoryGrahics = Graphics.FromImage(memoryImage))
            {
                memoryGrahics.CopyFromScreen(rc.X, rc.Y, 0, 0, rc.Size, CopyPixelOperation.SourceCopy);
            }
            this.images[index] = memoryImage;
        }
    }
    catch (Exception ex)
    {
        Log.Error("Screen capture failed.", ex);
    }
    return this.images;
}
// Runs a capture delegate with the form optionally hidden first, pushing the
// task's capture options into the static Screenshot helper before capturing.
// The form is always restored and the result (null on failure) is forwarded to
// AfterCapture via the finally block.
private void DoCaptureWork(ScreenCaptureDelegate capture, CaptureType captureType, TaskSettings taskSettings, bool autoHideForm = true)
{
    if (autoHideForm)
    {
        Hide();
        // Give the window manager time to actually hide the form before capturing.
        Thread.Sleep(250);
    }

    Image img = null;

    try
    {
        // Per-task capture options are global state on the Screenshot helper.
        Screenshot.CaptureCursor = taskSettings.CaptureSettings.ShowCursor;
        Screenshot.CaptureShadow = taskSettings.CaptureSettings.CaptureShadow;
        Screenshot.ShadowOffset = taskSettings.CaptureSettings.CaptureShadowOffset;
        Screenshot.CaptureClientArea = taskSettings.CaptureSettings.CaptureClientArea;
        Screenshot.AutoHideTaskbar = taskSettings.CaptureSettings.CaptureAutoHideTaskbar;

        img = capture();
    }
    catch (Exception ex)
    {
        DebugHelper.WriteException(ex);
    }
    finally
    {
        if (autoHideForm)
        {
            this.ShowActivate();
        }

        AfterCapture(img, captureType, taskSettings);
    }
}
// Opens the capture-area options dialog. Any running capture is stopped first,
// and the log window's TopMost flag is temporarily cleared so the dialog is not
// obscured, then restored afterwards.
private void _miCaptureAreaOptions_Click(object sender, EventArgs e)
{
    bool bTopMost = logWindow.TopMost;
    logWindow.TopMost = false;

    if (_captureType != CaptureType.None)
    {
        _captureType = CaptureType.None;
        _engine.StopCapture();
        UpdateScreenCaptureItems();
    }

    try
    {
        _areaOptions = _engine.ShowCaptureAreaOptionsDialog(this, ScreenCaptureDialogFlags.None, _areaOptions, false, null);
    }
    catch (Exception ex)
    {
        // These message texts signal a user cancel; only surface real errors.
        if (ex.Message != "UserAbort" && ex.Message != "User has aborted operation")
        {
            Messager.ShowError(this, ex);
        }
    }

    logWindow.TopMost = bTopMost;
}
/// <summary>
/// Verifies that parsing <paramref name="input"/> yields exactly one hole whose
/// capture type matches <paramref name="holeType"/>.
/// </summary>
public void ParseHoleType(string input, CaptureType holeType)
{
    var template = TemplateParser.Parse(input);

    // Assert.Single gives a clearer failure message than Assert.Equal(1, length)
    // and is what the xUnit analyzers (xUnit2013) recommend.
    Assert.Single(template.Holes);
    Assert.Equal(holeType, template.Holes[0].CaptureType);
}
// Captures the currently active window, recording the window title and owning
// process name in the image's Tag so later pipeline steps (naming, upload) can
// use them. Transparent capture is used only when enabled AND the client-area
// option is off.
public override void Capture(CaptureType captureType, TaskSettings taskSettings, bool autoHideForm = true)
{
    DoCapture(() =>
    {
        Image img;
        string activeWindowTitle = NativeMethods.GetForegroundWindowText();
        string activeProcessName = null;

        // Process implements IDisposable; dispose promptly after reading the name.
        using (Process process = NativeMethods.GetForegroundWindowProcess())
        {
            if (process != null)
            {
                activeProcessName = process.ProcessName;
            }
        }

        if (taskSettings.CaptureSettings.CaptureTransparent && !taskSettings.CaptureSettings.CaptureClientArea)
        {
            img = Screenshot.CaptureActiveWindowTransparent();
        }
        else
        {
            img = Screenshot.CaptureActiveWindow();
        }

        img.Tag = new ImageTag
        {
            ActiveWindowTitle = activeWindowTitle,
            ActiveProcessName = activeProcessName
        };

        return img;
    }, CaptureType.ActiveWindow, taskSettings, autoHideForm);
}
/// <summary>
/// Hides the main form (when requested), runs the capture delegate, then
/// restores the form and forwards the result to AfterCapture — even when the
/// capture threw.
/// </summary>
private static void DoCaptureWork(ScreenCaptureDelegate capture, CaptureType captureType, TaskSettings taskSettings, bool autoHideForm = true)
{
    if (autoHideForm)
    {
        Program.MainForm.Hide();
        // Give the window manager a moment to hide the form before grabbing pixels.
        Thread.Sleep(250);
    }

    Image capturedImage = null;

    try
    {
        capturedImage = capture();
    }
    catch (Exception ex)
    {
        DebugHelper.WriteException(ex);
    }
    finally
    {
        if (autoHideForm)
        {
            Program.MainForm.ForceActivate();
        }

        AfterCapture(capturedImage, captureType, taskSettings);
    }
}
/// <summary>
/// Dispatches a screenshot request to the capture routine matching
/// <paramref name="captureType"/>, loading default task settings when none are
/// supplied by the caller.
/// </summary>
public static void CaptureScreenshot(CaptureType captureType, TaskSettings taskSettings = null, bool autoHideForm = true)
{
    // Fall back to the global defaults when the caller passed nothing.
    if (taskSettings == null)
    {
        taskSettings = TaskSettings.GetDefaultTaskSettings();
    }

    switch (captureType)
    {
        case CaptureType.Fullscreen:
            DoCapture(TaskHelpers.GetScreenshot(taskSettings).CaptureFullscreen,
                CaptureType.Fullscreen, taskSettings, autoHideForm);
            break;
        case CaptureType.ActiveWindow:
            CaptureActiveWindow(taskSettings, autoHideForm);
            break;
        case CaptureType.ActiveMonitor:
            DoCapture(TaskHelpers.GetScreenshot(taskSettings).CaptureActiveMonitor,
                CaptureType.ActiveMonitor, taskSettings, autoHideForm);
            break;
        case CaptureType.Region:
            CaptureRegion(taskSettings, autoHideForm);
            break;
        case CaptureType.CustomRegion:
            CaptureCustomRegion(taskSettings, autoHideForm);
            break;
        case CaptureType.LastRegion:
            CaptureLastRegion(taskSettings, autoHideForm);
            break;
    }
}
/// <summary>
/// Captures the screen portion selected by <paramref name="typeOfCapture"/> and
/// saves the result to <paramref name="filename"/>; the file's extension is
/// replaced to match <paramref name="format"/>.
/// </summary>
/// <param name="typeOfCapture">Selects what is actually captured, see <see cref="CaptureType"/>.</param>
/// <param name="filename">Target file name; its extension is ignored and rewritten.</param>
/// <param name="format">The file format to save as.</param>
/// <returns>An array of images captured.</returns>
public virtual Bitmap[] Capture(CaptureType typeOfCapture, String filename, ImageFormatHandler.ImageFormatTypes format)
{
    // Capture first, then persist; the result is also cached in `images`.
    Capture(typeOfCapture);
    Save(filename, format);
    return images;
}
/// <summary>
/// Dumps the raw HID report when a capture has been requested, then resets the
/// capture request to its default state. Reports arriving with no pending
/// capture are ignored.
/// </summary>
protected override void ParseHidReport(byte[] report)
{
    if (Capture == CaptureType.Default)
    {
        return; // no capture pending
    }

    _dumper.DumpArray(Capture.ToString(), report, report.Length);
    Capture = CaptureType.Default;
}
/// <summary>
/// Formatting a null value must succeed and produce the expected placeholder text.
/// </summary>
public void TestSerializationWillBeSuccessfulForNull(CaptureType captureType, string expected)
{
    var sb = new StringBuilder();

    var succeeded = CreateValueFormatter().FormatValue(null, string.Empty, captureType, null, sb);

    Assert.True(succeeded);
    Assert.Equal(expected, sb.ToString());
}
/// <summary>
/// Creates a hole descriptor for a message-template placeholder.
/// </summary>
/// <param name="name">Placeholder name.</param>
/// <param name="format">Optional format string (may be null).</param>
/// <param name="captureType">How the value is captured.</param>
/// <param name="position">Positional index of the hole.</param>
/// <param name="alignment">Alignment width.</param>
public Hole(string name, string format, CaptureType captureType, short position, short alignment)
{
    Name = name;
    CaptureType = captureType;
    Index = position;
    Alignment = alignment;
    Format = format;
}
/// <summary>
/// Captures the user-configured fixed rectangle
/// (CaptureSettings.CaptureCustomRegion) from the screen.
/// </summary>
public override void Capture(CaptureType captureType, TaskSettings taskSettings, bool autoHideForm = true)
{
    DoCapture(() => Screenshot.CaptureRectangle(taskSettings.CaptureSettings.CaptureCustomRegion),
        CaptureType.CustomRegion, taskSettings, autoHideForm);
}
// Switches the remote desktop session to a direct connection: asks the remote
// side to prepare, wires up a fresh ServerConnection, then initializes the
// streaming components with the requested capture and compression settings.
// Finally logs the localized "initialize remote desktop" message.
public async Task InitializeRemoteDesktopDirect(CaptureType captureType, ImageCompressionType compressionType)
{
    await ConnectionInfo.SendCommand(this, (byte)RemoteDesktopCommunication.InitializeDirectConnection);
    InitializeConnection(new ServerConnection(), captureType);
    await InitializeRemoteComponents(captureType, compressionType);
    LogService.Send((string)Application.Current.Resources["InitializeRemoteDesktop"]);
}
/// <summary>
/// When the main form regains focus while a capture is running, stops the
/// capture and refreshes the UI to reflect the idle state.
/// </summary>
private void MainForm_Activated(object sender, EventArgs e)
{
    if (_captureType == CaptureType.None)
    {
        return; // nothing in progress
    }

    _captureType = CaptureType.None;
    _engine.StopCapture();
    UpdateMyControls();
    UpdateStatusBarText();
}
/// <summary>
/// Capture the screen, which portion of the screen is captured
/// is defined by <paramref name="typeOfCapture"/>. For
/// <see cref="CaptureType.AllScreens"/> one bitmap per screen's working area is
/// produced; other types yield a single bitmap.
/// </summary>
/// <param name="typeOfCapture">Selects, what is actually captured, see <see cref="CaptureType"/>.</param>
/// <returns>An array of images captured. On failure an error dialog is shown and
/// the field's previous contents (possibly null) are returned.</returns>
public virtual Bitmap[] Capture(CaptureType typeOfCapture)
{
    int count = 1;
    try
    {
        Screen[] screens = Screen.AllScreens;
        Rectangle rc;
        switch (typeOfCapture)
        {
            case CaptureType.PrimaryScreen:
                rc = Screen.PrimaryScreen.Bounds;
                break;
            case CaptureType.VirtualScreen:
                rc = SystemInformation.VirtualScreen;
                break;
            case CaptureType.WorkingArea:
                rc = Screen.PrimaryScreen.WorkingArea;
                break;
            case CaptureType.AllScreens:
                // One image per screen; start from the first screen's working area.
                count = screens.Length;
                rc = screens[0].WorkingArea;
                break;
            default:
                rc = SystemInformation.VirtualScreen;
                break;
        }
        images = new Bitmap[count];
        for (int index = 0; index < count; index++)
        {
            if (index > 0)
            {
                rc = screens[index].WorkingArea;
            }
            Bitmap memoryImage = new Bitmap(rc.Width, rc.Height, PixelFormat.Format32bppArgb);
            using (Graphics memoryGrahics = Graphics.FromImage(memoryImage))
            {
                memoryGrahics.CopyFromScreen(rc.X, rc.Y, 0, 0, rc.Size, CopyPixelOperation.SourceCopy);
            }
            images[index] = memoryImage;
        }
    }
    catch (Exception ex)
    {
        System.Windows.Forms.MessageBox.Show(ex.ToString(), "Capture failed", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    return (images);
}
/// <summary>
/// Captures the requested target (rectangular area, desktop, or active window)
/// and saves the resulting image according to <paramref name="type"/>. The
/// keyboard hook is paused for the duration so hotkeys cannot re-enter the
/// capture, and is always resumed afterwards. (A dead commented-out
/// GetWindowHandles helper and commented-out window-enumeration debug code were
/// removed.)
/// </summary>
/// <param name="target">What to capture.</param>
/// <param name="type">How to save/process the captured image.</param>
private void CaptureImage(CaptureTarget target, CaptureType type)
{
    try
    {
        // Pause the keyboard hook while capturing.
        KeyboardHook.Pause();

        if (target == CaptureTarget.RectArea)
        {
            // Rectangular region: let the user select it on a frozen screen image.
            using (Bitmap screenBitmap = this.CaptureScreen())
            using (CaptureForm captureForm = new CaptureForm(screenBitmap))
            {
                captureForm.ShowDialog();
                using (Bitmap captureBitmap = captureForm.CaptureBitmap)
                {
                    this.SaveImage(captureBitmap, type);
                }
            }
        }
        else if (target == CaptureTarget.Desktop)
        {
            // Whole desktop.
            using (Bitmap captureBitmap = this.CaptureScreen())
            {
                this.SaveImage(captureBitmap, type);
            }
        }
        else if (target == CaptureTarget.ActiveWindow)
        {
            // Active window only.
            using (Bitmap captureBitmap = this.CaptureActiveWindow())
            {
                this.SaveImage(captureBitmap, type);
            }
        }
    }
    finally
    {
        // Resume the keyboard hook even if the capture failed.
        KeyboardHook.Start();
    }
}
/// <summary>
/// Records which kind of encoder run has begun: video captures (VOD or LIVE)
/// set the capture flag, screenshots set the screenshot flag.
/// </summary>
private void OnEncoderStarted(CaptureType captureType)
{
    switch (captureType)
    {
        case CaptureType.VOD:
        case CaptureType.LIVE:
            captureStarted = true;
            break;
        case CaptureType.SCREENSHOT:
            screenshotStarted = true;
            break;
    }
}
/// <summary>
/// Captures the screen area selected by <paramref name="typeOfCapture"/>; for
/// AllScreens one bitmap per screen's working area is produced.
/// </summary>
/// <param name="typeOfCapture">Which portion of the screen to capture.</param>
/// <returns>The captured bitmaps, or the field's previous contents (possibly
/// null) if the capture failed.</returns>
public virtual Bitmap[] Capture(CaptureType typeOfCapture)
{
    int length = 1;
    try
    {
        Rectangle bounds;
        Screen[] allScreens = Screen.AllScreens;
        switch (typeOfCapture)
        {
            case CaptureType.VirtualScreen:
                bounds = SystemInformation.VirtualScreen;
                break;
            case CaptureType.PrimaryScreen:
                bounds = Screen.PrimaryScreen.Bounds;
                break;
            case CaptureType.WorkingArea:
                bounds = Screen.PrimaryScreen.WorkingArea;
                break;
            case CaptureType.AllScreens:
                // One image per screen. (Removed the dead re-assignment of
                // typeOfCapture — the parameter is never read after this switch.)
                length = allScreens.Length;
                bounds = allScreens[0].WorkingArea;
                break;
            default:
                bounds = SystemInformation.VirtualScreen;
                break;
        }

        this.bitmap_1 = new Bitmap[length];
        for (int i = 0; i < length; i++)
        {
            if (i > 0)
            {
                bounds = allScreens[i].WorkingArea;
            }

            Bitmap image = new Bitmap(bounds.Width, bounds.Height, PixelFormat.Format32bppArgb);
            using (Graphics graphics = Graphics.FromImage(image))
            {
                graphics.CopyFromScreen(bounds.X, bounds.Y, 0, 0, bounds.Size, CopyPixelOperation.SourceCopy);
            }
            this.bitmap_1[i] = image;
        }
    }
    catch (Exception)
    {
        // FIXME(review): failures are silently swallowed and a possibly-null or
        // stale array is returned — callers get no indication the capture failed.
    }
    return (this.bitmap_1);
}
/// <summary>
/// POST handler that persists a new CaptureType entity and redirects to the
/// index on success; re-renders the form when model validation fails.
/// </summary>
/// <param name="captureType">The entity bound from the request.</param>
public async Task<ActionResult> Create(CaptureType captureType)
{
    if (ModelState.IsValid)
    {
        // Dispose the context deterministically instead of leaking it
        // (the original never disposed the Entities instance).
        using (var _context = new Entities())
        {
            _context.CaptureTypes.Add(captureType);
            await _context.SaveChangesAsync();
        }

        TempData["Message"] = "create";
        return RedirectToAction(nameof(Index));
    }

    return View(captureType);
}
/// <summary>
/// Opens this capture for reading a video stream from the camera at
/// <paramref name="index"/>.
/// Currently two camera interfaces can be used on Windows: Video for Windows (VFW)
/// and Matrox Imaging Library (MIL); and two on Linux: V4L and FireWire (IEEE1394).
/// </summary>
/// <param name="index">Index of the camera to be used; -1 may be passed when any
/// camera will do.</param>
public void Open(int index)
{
    ThrowIfDisposed();

    try
    {
        NativeMethods.videoio_VideoCapture_open2(ptr, index);
    }
    catch (Exception ex)
    {
        throw new OpenCvSharpException("Failed to create CvCapture", ex);
    }

    // A camera source is now attached.
    captureType = CaptureType.Camera;
}
// Main window constructor: loads settings, builds the tray icon and its context
// menu, hooks window-closing so the app minimizes to the tray, and creates the
// floating target-text window whose buttons trigger OCR capture/translation.
public MainWindow()
{
    InitializeComponent();
    mCapType = CaptureType.button;
    mOptions = Settings.LoadSetting();
    RestoreLanguageTypeMap();

    // Set up the tray icon (menu item labels are user-facing Chinese UI text:
    // Open / Capture / Translate / Hide / Exit — do not change at runtime).
    mNotifyClose = false;
    var notifyIconCms = new ContextMenuStrip();
    notifyIconCms.Items.Add(new ToolStripMenuItem("打开", null, new EventHandler(NotifyIcon_Open)));
    notifyIconCms.Items.Add(new ToolStripMenuItem("截取", null, new EventHandler(NotifyIcon_Capture)));
    notifyIconCms.Items.Add(new ToolStripMenuItem("翻译", null, new EventHandler(NotifyIcon_Trans)));
    notifyIconCms.Items.Add("-");
    notifyIconCms.Items.Add(new ToolStripMenuItem("隐藏", null, new EventHandler(NotifyIcon_Hide)));
    notifyIconCms.Items.Add("-");
    notifyIconCms.Items.Add(new ToolStripMenuItem("退出", null, new EventHandler(NotifyIcon_Close)));
    mNotifyIcon = new NotifyIcon
    {
        Visible = true,
        Icon = System.Drawing.Icon.ExtractAssociatedIcon(System.Windows.Forms.Application.ExecutablePath),
        ContextMenuStrip = notifyIconCms
    };
    mNotifyIcon.Click += new EventHandler(NotifyIcon_Open);

    // Hook the close event so closing minimizes to the tray instead of exiting.
    this.Closing += new CancelEventHandler(Window_Closing);

    // Floating window: its Capture button re-runs OCR on the UI thread; its
    // Translate button runs OCR in translate mode.
    mTargetTextWindow = new FloatTextWindow((FloatTextWindow.ButtonType btntype) =>
    {
        switch (btntype)
        {
            case FloatTextWindow.ButtonType.Capture:
                mTargetTextWindow.HideWnd();
                mCapType = CaptureType.hotkey;
                this.Dispatcher.Invoke(() => DoCaptureOCR());
                break;
            case FloatTextWindow.ButtonType.Trans:
                mCapType = CaptureType.hotkey;
                DoCaptureOCR(true);
                break;
            default:
                break;
        }
    });
}
// Blocks roping/chaining of sleeping players: when a player captor targets a
// sleeper using Rope or Chain, the capture event is cancelled and the captor is
// notified (and the action is logged when logging is on). Permitted admins are
// exempt when AdminCanRope is set.
private void OnPlayerCapture(PlayerCaptureEvent Event)
{
    // Defensive guards: require a valid player captor targeting another entity.
    if (Event == null)
    {
        return;
    }
    if (Event.Captor == null)
    {
        return;
    }
    if (Event.TargetEntity == null)
    {
        return;
    }
    if (Event.Captor == Event.TargetEntity)
    {
        return;
    }
    if (!Event.Captor.IsPlayer)
    {
        return;
    }

    var sleeper = Event.TargetEntity.GetComponentInChildren <PlayerSleeperObject>();
    if (sleeper == null)
    {
        return;
    }

    Player player = Event.Captor.Owner;
    // NOTE(review): playerId is never used below — confirm it can be removed.
    string playerId = player.Id.ToString();

    if (hasPermission(player) && AdminCanRope)
    {
        return;
    }

    // NOTE(review): sleeper is already non-null here, but this is the Unity-style
    // implicit bool (also false for destroyed objects), so it is not necessarily
    // redundant — confirm before simplifying.
    if (sleeper)
    {
        CaptureType currentType = Event.Captor.Get <PlayerCaptureManager>().CurrentType;
        if (currentType == CaptureType.Rope || currentType == CaptureType.Chain)
        {
            Event.Cancel(Message("logNoRope"), player);
            SendReply(player, Message("noRope"));
            if (LoggingOn)
            {
                Puts(Message("logNoRope"), player);
            }
        }
    }
}
/// <summary>
/// 空の状態で初期化. 後でOpenが必要.
/// </summary>
/// <returns></returns>
#else
/// <summary>
/// Initializes empty capture.
/// To use this, you should call Open.
/// </summary>
/// <returns></returns>
#endif
public VideoCapture()
{
    try
    {
        ptr = NativeMethods.videoio_VideoCapture_new1();
    }
    catch (AccessViolationException e)
    {
        throw new OpenCvSharpException("Failed to create VideoCapture", e);
    }
    if (ptr == IntPtr.Zero)
        throw new OpenCvSharpException("Failed to create VideoCapture");
    // No source attached yet; the caller must Open() a camera or file.
    captureType = CaptureType.NotSpecified;
}
/// <summary>
/// Registers the capture with the capture service and, on success, starts the
/// watchdog timer that drives the capture loop.
/// </summary>
/// <param name="captureName">Unique name to register the capture under.</param>
/// <param name="handle">Window handle to capture.</param>
/// <param name="type">Capture mechanism; defaults to CreateDibSection.</param>
/// <returns>True when registration succeeded and the timer was started.</returns>
public bool Start(string captureName, IntPtr handle, CaptureType type = CaptureType.CreateDibSection)
{
    bool registered = CaptureService.Instance.RegisterCapture(captureName, handle, type);
    if (!registered)
    {
        return false;
    }

    _captureName = captureName;

    // Watchdog timer: fire immediately, no periodic repeat.
    _timer = new Timer(CaptureFunc, null, TimeSpan.FromMilliseconds(0), Timeout.InfiniteTimeSpan);
    return true;
}
// **************************
// PROGRAM SPECIFIC FUNCTIONS
// **************************

/// <summary>
/// One-time form initialization: configures UI defaults, starts the screen
/// capture engine, and wires its event handlers. (A duplicate
/// <c>_isBeepOn = false;</c> assignment was removed.)
/// </summary>
private void MainForm_Load(object sender, EventArgs e)
{
    // setup our caption
    Messager.Caption = "LEADTOOLS C# Screen Capture Demo";
    Text = Messager.Caption;

    // what to capture
    _captureType = CaptureType.None;

    // set the current window state
    _previousWindowState = this.WindowState;

    // capture behaviour defaults
    _isBeepOn = false;            // do not beep when capturing
    _minimizeOnCapture = true;    // minimize window on capturing
    _activateAfterCapture = true; // activate window after capturing

    // no cut is active
    _cutImage = false;

    // initialize the codecs object
    _codecs = new RasterCodecs();

    // no opened images for now
    _countOfOpenedImages = 0;

    // startup the engine
    ScreenCaptureEngine.Startup();

    // initialize screen capture variables
    _engine = new ScreenCaptureEngine();
    _engine.CaptureInformation += new EventHandler<ScreenCaptureInformationEventArgs>(_engine_CaptureInformation);
    _areaOptions = ScreenCaptureEngine.DefaultCaptureAreaOptions;
    _objectOptions = ScreenCaptureEngine.DefaultCaptureObjectOptions;
    _options = _engine.CaptureOptions;
    _captureInformation = null;
    _isHotKeyEnabled = true;

    UpdateMyControls();
    UpdateStatusBarText();
}
// Shows the captured texture in the preview panel on success, or hides the
// preview and logs the error on failure.
// NOTE(review): lastCapturedImage is assigned even when error is non-empty,
// while lastCapturedType is only updated on success — confirm this asymmetry
// is intentional.
public void UpdateImagePreview(string error, Texture2D texture, CaptureType captureType)
{
    lastCapturedImage = texture;
    if (string.IsNullOrEmpty(error))
    {
        imagePreviewContainer.SetActive(true);
        imagePreviewTexture.texture = texture;
        lastCapturedType = captureType;
    }
    else
    {
        imagePreviewContainer.SetActive(false);
        Logger.Instance.LogError(error);
    }
}
/// <summary>
/// カメラからのビデオキャプチャを初期化する.
/// Windows では,次の二つのカメラインタフェースが利用できる:Video for Windows(VFW),Matrox Imaging Library(MIL).
/// Linux では,次の二つカメラインタフェースが利用できる:Video for Linux(V4L),FireWire(IEEE1394).
/// </summary>
/// <param name="index">使われるカメラのインデックス.使用するカメラが1台のとき,あるいは,何台のカメラを使うかが重要でないときは,-1 でも問題ない場合もある.</param>
/// <returns></returns>
#else
/// <summary>
/// Allocates and initialized the CvCapture structure for reading a video stream from the camera.
/// Currently two camera interfaces can be used on Windows: Video for Windows (VFW) and Matrox Imaging Library (MIL); and two on Linux: V4L and FireWire (IEEE1394).
/// </summary>
/// <param name="index">Index of the camera to be used. If there is only one camera or it does not matter what camera to use -1 may be passed. </param>
/// <returns></returns>
#endif
public CvCapture(int index)
{
    try
    {
        ptr = NativeMethods.cvCreateCameraCapture(index);
    }
    catch (AccessViolationException e)
    {
        throw new OpenCvSharpException("Failed to create CvCapture", e);
    }
    if (ptr == IntPtr.Zero)
        throw new OpenCvSharpException("Failed to create CvCapture");
    // A camera source is now attached.
    captureType = CaptureType.Camera;
}
/// <summary>
/// カメラからのビデオキャプチャを初期化する.
/// Windows では,次の二つのカメラインタフェースが利用できる:Video for Windows(VFW),Matrox Imaging Library(MIL).
/// Linux では,次の二つカメラインタフェースが利用できる:Video for Linux(V4L),FireWire(IEEE1394).
/// </summary>
/// <param name="index">使われるカメラのインデックス.使用するカメラが1台のとき,あるいは,何台のカメラを使うかが重要でないときは,-1 でも問題ない場合もある.</param>
/// <returns></returns>
#else
/// <summary>
/// Allocates and initialized the CvCapture structure for reading a video stream from the camera.
/// Currently two camera interfaces can be used on Windows: Video for Windows (VFW) and Matrox Imaging Library (MIL); and two on Linux: V4L and FireWire (IEEE1394).
/// </summary>
/// <param name="index">Index of the camera to be used. If there is only one camera or it does not matter what camera to use -1 may be passed. </param>
/// <returns></returns>
#endif
public CvCapture(int index)
{
    try
    {
        this._ptr = CvInvoke.cvCreateCameraCapture(index);
    }
    catch (AccessViolationException e)
    {
        throw new OpenCvSharpException("Failed to create CvCapture", e);
    }
    if (this._ptr == IntPtr.Zero)
    {
        throw new OpenCvSharpException("Failed to create CvCapture");
    }
    // A camera source is now attached.
    this._captureType = CaptureType.Camera;
}
/// <summary>
/// Allocates and initializes a capture for reading a video stream from the
/// camera at <paramref name="index"/>.
/// Currently two camera interfaces can be used on Windows: Video for Windows (VFW)
/// and Matrox Imaging Library (MIL); and two on Linux: V4L and FireWire (IEEE1394).
/// </summary>
/// <param name="index">Index of the camera to be used; -1 may be passed when any
/// camera will do.</param>
public VideoCapture(int index)
{
    try
    {
        ptr = NativeMethods.videoio_VideoCapture_new3(index);
    }
    catch (Exception ex)
    {
        throw new OpenCvSharpException("Failed to create VideoCapture", ex);
    }

    if (ptr == IntPtr.Zero)
    {
        throw new OpenCvSharpException("Failed to create VideoCapture");
    }

    // A camera source is now attached.
    captureType = CaptureType.Camera;
}
// Swaps in a new screen-capture service for the requested capture type and
// (re)creates the codecs used to stream the screen. On initialization failure
// the previous service is restored and a failure response (with the exception
// message) is sent back; on success the old service and codecs are disposed,
// new ones are created, and a success response is sent.
private void InitializeStreamingComponents(CaptureType captureType, int monitor, int quality, IConnectionInfo connectionInfo, bool drawCursor, ImageCompressionType compressionType)
{
    var oldScreenCaptureService = _screenCaptureService;
    _screenCaptureService = _screenCaptureServices[captureType]();
    try
    {
        _screenCaptureService.Initialize(monitor);
    }
    catch (Exception ex)
    {
        // Roll back to the previous service and report the failure reason.
        _screenCaptureService = oldScreenCaptureService;
        ResponseBytes((byte)RemoteDesktopCommunication.ResponseInitializationFailed, Encoding.UTF8.GetBytes(ex.Message), connectionInfo);
        return;
    }

    Program.WriteLine($"InitializeStreamingComponents: oldScreenCaptureService == null: {oldScreenCaptureService == null} (else dispose)");
    oldScreenCaptureService?.Dispose();
    Program.WriteLine("Dispose other stuff in InitializeStreamingComponents");
    _unsafeCodec?.Dispose();
    _cursorStreamCodec?.Dispose();

    _unsafeCodec = new UnsafeStreamCodec(GetImageCompression(compressionType), UnsafeStreamCodecParameters.DontDisposeImageCompressor | UnsafeStreamCodecParameters.UpdateImageEveryTwoSeconds);
    _currentImageCompression.Quality = quality;

    // The cursor codec is only recreated when cursor drawing is requested.
    if (drawCursor)
    {
        _cursorStreamCodec = new CursorStreamCodec();
    }

    _compressionType = compressionType;
    _currentMonitor = monitor;
    _drawCursor = drawCursor;

    ResponseByte((byte)RemoteDesktopCommunication.ResponseInitializationSucceeded, connectionInfo);
    Debug.Print("Initialized");
}
/// <summary>
/// Captures a screen image according to <paramref name="type"/> (a rectangular
/// range, a specific window, or the full screen as fallback), copies it to the
/// clipboard, and returns it.
/// </summary>
/// <param name="type">What to capture.</param>
/// <param name="origin">Top-left corner, used for range capture.</param>
/// <param name="size">Extent, used for range capture.</param>
/// <param name="handle">Window handle, used for window capture.</param>
public static Image Capture(CaptureType type, Point origin = default(Point), Size size = default(Size), IntPtr handle = default(IntPtr))
{
    Image captured;
    switch (type)
    {
        case CaptureType.Range:
            captured = CaptureRange(origin, size);
            break;
        case CaptureType.Window:
            captured = CaptureWindow(handle);
            break;
        default:
            captured = CaptureFullScreen();
            break;
    }

    Clipboard.SetImage(captured);
    return captured;
}
// Runs the capture job selected by captureType, optionally waiting 250 ms first
// (e.g. to let a menu or window close before the screen is grabbed).
// NOTE(review): async void means exceptions here are unobservable by callers;
// acceptable only as a top-level fire-and-forget entry point — confirm.
private async void ExecuteJob(CaptureType captureType, bool withDelay)
{
    if (withDelay) await Task.Delay(250);

    switch (captureType)
    {
        case CaptureType.PrintScreen:
            DoCaptureWork(Screenshot.CaptureFullscreen);
            break;
        case CaptureType.ActiveWindow:
            DoCaptureWork(Screenshot.CaptureActiveWindow);
            break;
        case CaptureType.Rectangle:
            DoCaptureWork(CaptureRectangle);
            break;
    }
}
// Central capture dispatcher: when a screenshot delay is configured, sleeps for
// that delay on a background task before handing off to DoCaptureWork;
// otherwise captures immediately.
public static void DoCapture(ScreenCaptureDelegate capture, CaptureType captureType, TaskSettings taskSettings = null, bool autoHideForm = true)
{
    if (taskSettings == null) taskSettings = TaskSettings.GetDefaultTaskSettings();

    if (taskSettings.CaptureSettings.IsDelayScreenshot && taskSettings.CaptureSettings.DelayScreenshot > 0)
    {
        // First lambda runs on a worker thread (the delay, in seconds converted
        // to ms); the second runs after it completes.
        TaskEx.Run(() =>
        {
            int sleep = (int)(taskSettings.CaptureSettings.DelayScreenshot * 1000);
            Thread.Sleep(sleep);
        },
        () =>
        {
            DoCaptureWork(capture, captureType, taskSettings, autoHideForm);
        });
    }
    else
    {
        DoCaptureWork(capture, captureType, taskSettings, autoHideForm);
    }
}
/// <summary>
/// Post-capture pipeline: plays the capture sound when enabled, strips the
/// image-effects job for non-region captures when configured, then shows the
/// after-capture form and, if confirmed, queues the image for upload. Null
/// images (failed captures) are ignored.
/// </summary>
protected void AfterCapture(Image img, CaptureType captureType, TaskSettings taskSettings)
{
    if (img == null)
    {
        return; // capture failed upstream; nothing to process
    }

    if (taskSettings.GeneralSettings.PlaySoundAfterCapture)
    {
        TaskHelpers.PlayCaptureSound(taskSettings);
    }

    if (taskSettings.ImageSettings.ImageEffectOnlyRegionCapture && !IsRegionCapture(captureType))
    {
        taskSettings.AfterCaptureJob = taskSettings.AfterCaptureJob.Remove(AfterCaptureTasks.AddImageEffects);
    }

    string customFileName;
    if (TaskHelpers.ShowAfterCaptureForm(taskSettings, out customFileName, img))
    {
        UploadManager.RunImageTask(img, taskSettings, customFileName);
    }
}
/// <summary>
/// Post-capture pipeline: plays the capture sound, strips the image-effects job
/// for non-region captures when configured, optionally shows the after-capture
/// dialog (which can adjust the job, force copy-to-clipboard, or cancel and
/// dispose the image), then hands the image to the upload task.
/// </summary>
private void AfterCapture(Image img, CaptureType captureType, TaskSettings taskSettings)
{
    if (img == null)
    {
        return; // capture failed upstream; nothing to process
    }

    if (taskSettings.GeneralSettings.PlaySoundAfterCapture)
    {
        Helpers.PlaySoundAsync(Resources.CameraSound);
    }

    if (taskSettings.ImageSettings.ImageEffectOnlyRegionCapture && !IsRegionCapture(captureType))
    {
        taskSettings.AfterCaptureJob = taskSettings.AfterCaptureJob.Remove(AfterCaptureTasks.AddImageEffects);
    }

    if (taskSettings.GeneralSettings.ShowAfterCaptureTasksForm)
    {
        using (AfterCaptureForm afterCaptureForm = new AfterCaptureForm(img, taskSettings))
        {
            afterCaptureForm.ShowDialog();

            switch (afterCaptureForm.Result)
            {
                case AfterCaptureFormResult.Continue:
                    taskSettings.AfterCaptureJob = afterCaptureForm.AfterCaptureTasks;
                    break;
                case AfterCaptureFormResult.Copy:
                    taskSettings.AfterCaptureJob = AfterCaptureTasks.CopyImageToClipboard;
                    break;
                case AfterCaptureFormResult.Cancel:
                    // img is known non-null here; the redundant inner null check
                    // from the original was removed.
                    img.Dispose();
                    return;
            }
        }
    }

    UploadManager.RunImageTask(img, taskSettings);
}
/// <summary>
/// Captures the screen area selected by <paramref name="typeOfCapture"/>. For
/// <c>AllScreens</c> one bitmap per screen's working area is produced; otherwise
/// a single bitmap is returned.
/// </summary>
/// <param name="typeOfCapture">Which portion of the screen to capture.</param>
/// <returns>The captured bitmaps; on failure the error is logged and the field's
/// previous contents (possibly null) are returned.</returns>
public virtual Bitmap[] Capture(CaptureType typeOfCapture)
{
    Bitmap memoryImage;
    int count = 1;
    try
    {
        Screen[] screens = Screen.AllScreens;
        Rectangle rc;
        switch (typeOfCapture)
        {
            case CaptureType.PrimaryScreen:
                rc = Screen.PrimaryScreen.Bounds;
                break;
            case CaptureType.VirtualScreen:
                rc = SystemInformation.VirtualScreen;
                break;
            case CaptureType.WorkingArea:
                rc = Screen.PrimaryScreen.WorkingArea;
                break;
            case CaptureType.AllScreens:
                // One image per screen. (Removed the dead re-assignment of
                // typeOfCapture — the parameter is never read after this switch.)
                count = screens.Length;
                rc = screens[0].WorkingArea;
                break;
            default:
                rc = SystemInformation.VirtualScreen;
                break;
        }

        this.images = new Bitmap[count];
        for (int index = 0; index < count; index++)
        {
            if (index > 0)
                rc = screens[index].WorkingArea;

            memoryImage = new Bitmap(rc.Width, rc.Height, PixelFormat.Format32bppArgb);
            using (Graphics memoryGrahics = Graphics.FromImage(memoryImage))
            {
                memoryGrahics.CopyFromScreen(rc.X, rc.Y, 0, 0, rc.Size, CopyPixelOperation.SourceCopy);
            }
            this.images[index] = memoryImage;
        }
    }
    catch (Exception ex)
    {
        Log.Error("Screen capture failed.", ex);
    }
    return this.images;
}
/// <summary>
/// Capture the screen, which portion of the screen is captured is defined by
/// <paramref name="typeOfCapture"/>. <see cref="CaptureType.ActiveWindow"/> is
/// handled by the native helper; all other types copy pixels from the screen.
/// </summary>
/// <param name="typeOfCapture">Selects, what is actually captured, see <see cref="CaptureType"/>.</param>
/// <returns>An array of images captured; on failure an error dialog is shown and
/// the field's previous contents (possibly null) are returned.</returns>
public virtual Bitmap[] Capture(CaptureType typeOfCapture)
{
    Bitmap memoryImage;
    int count = 1;
    try
    {
        // Everything except active-window capture.
        if (typeOfCapture != CaptureType.ActiveWindow)
        {
            Screen[] screens = Screen.AllScreens;
            Rectangle rc;
            switch (typeOfCapture)
            {
                case CaptureType.PrimaryScreen:
                    rc = Screen.PrimaryScreen.Bounds;
                    break;
                case CaptureType.VirtualScreen:
                    rc = SystemInformation.VirtualScreen;
                    break;
                case CaptureType.WorkingArea:
                    rc = Screen.PrimaryScreen.WorkingArea;
                    break;
                case CaptureType.AllScreens:
                    // One image per screen. (Removed the dead re-assignment of
                    // typeOfCapture — it is never read after this switch.)
                    count = screens.Length;
                    rc = screens[0].WorkingArea;
                    break;
                default:
                    rc = SystemInformation.VirtualScreen;
                    break;
            }

            images = new Bitmap[count];
            for (int index = 0; index < count; index++)
            {
                if (index > 0)
                    rc = screens[index].WorkingArea;

                memoryImage = new Bitmap(rc.Width, rc.Height, PixelFormat.Format32bppArgb);
                using (Graphics memoryGrahics = Graphics.FromImage(memoryImage))
                {
                    memoryGrahics.CopyFromScreen(rc.X, rc.Y, 0, 0, rc.Size, CopyPixelOperation.SourceCopy);
                }
                images[index] = memoryImage;
            }
        }
        else
        {
            // Active-window capture is delegated to the native helper.
            images = new Bitmap[1];
            images[0] = NativeMethods.CaptureActiveWindow();
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.ToString(), "Capture failed", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    return images;
}
/// <summary>
/// Capture the screen and immediately persist the result to disk. Which portion of the
/// screen is captured is defined by <paramref name="typeOfCapture"/>.
/// </summary>
/// <param name="typeOfCapture">Selects, what is actually captured, see <see cref="CaptureType"/>.</param>
/// <param name="filename">The name of the target file. The extension in there is ignored,
/// it will replaced by an extension derived from the desired file format.</param>
/// <param name="format">The format of the file.</param>
/// <returns>An array of images captured.</returns>
public virtual Bitmap[] Capture( CaptureType typeOfCapture, String filename, ImageFormatHandler.ImageFormatTypes format )
{
    // Capture first, then save; the captured bitmaps remain cached in 'images'.
    this.Capture( typeOfCapture );
    this.Save( filename, format );
    return this.images;
}
/// <summary>
/// カメラからのビデオキャプチャを初期化する.
/// Windows では,次の二つのカメラインタフェースが利用できる:Video for Windows(VFW),Matrox Imaging Library(MIL).
/// Linux では,次の二つカメラインタフェースが利用できる:Video for Linux(V4L),FireWire(IEEE1394).
/// </summary>
/// <param name="index">使われるカメラのインデックス.使用するカメラが1台のとき,あるいは,何台のカメラを使うかが重要でないときは,-1 でも問題ない場合もある.</param>
/// <returns></returns>
#else
/// <summary>
/// Allocates and initialized the CvCapture structure for reading a video stream from the camera.
/// Currently two camera interfaces can be used on Windows: Video for Windows (VFW) and Matrox Imaging Library (MIL); and two on Linux: V4L and FireWire (IEEE1394).
/// </summary>
/// <param name="index">Index of the camera to be used. If there is only one camera or it does not matter what camera to use -1 may be passed. </param>
/// <returns></returns>
#endif
public VideoCapture(int index)
{
    // The native factory can fault instead of failing gracefully; translate both
    // failure modes into the library's exception type.
    try
    {
        ptr = NativeMethods.videoio_VideoCapture_new3(index);
    }
    catch (AccessViolationException ex)
    {
        throw new OpenCvSharpException("Failed to create VideoCapture", ex);
    }

    if (ptr == IntPtr.Zero)
    {
        throw new OpenCvSharpException("Failed to create VideoCapture");
    }

    captureType = CaptureType.Camera;
}
/// <summary>
/// ファイルからのビデオキャプチャを初期化する
/// </summary>
/// <param name="fileName">ビデオファイル名</param>
/// <returns></returns>
#else
/// <summary>
/// Allocates and initialized the CvCapture structure for reading the video stream from the specified file.
/// After the allocated structure is not used any more it should be released by cvReleaseCapture function.
/// </summary>
/// <param name="fileName">Name of the video file. </param>
/// <returns></returns>
#endif
public VideoCapture(string fileName)
{
    if (string.IsNullOrEmpty(fileName))
        throw new ArgumentNullException(nameof(fileName));
    // Existence is deliberately NOT checked here: the native layer accepts
    // non-file sources (e.g. URLs/pipelines), so let it decide validity.
    ptr = NativeMethods.videoio_VideoCapture_new2(fileName);
    if (ptr == IntPtr.Zero)
        throw new OpenCvSharpException("Failed to create VideoCapture");
    captureType = CaptureType.File;
}
/// <summary>
/// ファイルからのビデオキャプチャを初期化する
/// </summary>
/// <param name="filename">ビデオファイル名</param>
/// <returns></returns>
#else
/// <summary>
/// Allocates and initialized the CvCapture structure for reading the video stream from the specified file.
/// After the allocated structure is not used any more it should be released by cvReleaseCapture function.
/// </summary>
/// <param name="filename">Name of the video file. </param>
/// <returns></returns>
#endif
public CvCapture(string filename)
{
    if (string.IsNullOrEmpty(filename))
        throw new ArgumentNullException(nameof(filename));
    if (!File.Exists(filename))
        throw new FileNotFoundException("File not found", filename);
    this._ptr = CvInvoke.cvCreateFileCapture(filename);
    if (this._ptr == IntPtr.Zero)
    {
        throw new OpenCvSharpException("Failed to create CvCapture");
    }
    this._captureType = CaptureType.File;
}
/// <summary>
/// Captures the screen area selected by <paramref name="typeOfCapture"/> and
/// writes the result to <paramref name="filename"/> in the given file format.
/// </summary>
/// <param name="typeOfCapture">Which portion of the screen to capture.</param>
/// <param name="filename">Target file name for the saved capture.</param>
/// <param name="format">File format to save as.</param>
/// <returns>The captured bitmaps (also cached in <c>images</c>).</returns>
public virtual Bitmap[] Capture(CaptureType typeOfCapture, string filename, ImageFormatTypes format)
{
    Capture(typeOfCapture);
    Save(filename, format);
    return images;
}
/// <summary>
/// Opens the registered camera capture that matches the given capture type.
/// (The previous summary wrongly described this as a disconnect.)
/// </summary>
/// <param name="CaptureType">Identifies which registered camera capture to open.</param>
/// <returns>True when a matching capture was found and opened; otherwise false.</returns>
public static bool Open(CaptureType CaptureType)
{
    // Single-pass lookup instead of Where(...).FirstOrDefault().
    CameraCapture closeCameraCapture = cameraCaptures.FirstOrDefault(x => x.CaptureType == CaptureType);
    return closeCameraCapture != null && closeCameraCapture.Open();
}
/// <summary>
/// Disconnects the registered camera capture that matches the given capture type
/// and removes it from the registry on success.
/// </summary>
/// <param name="CaptureType">Identifies which registered camera capture to disconnect.</param>
/// <returns>True when the camera was found and disconnected; otherwise false.</returns>
public static bool Disconnect(CaptureType CaptureType)
{
    // Single-pass lookup instead of Where(...).FirstOrDefault().
    CameraCapture closeCameraCapture = cameraCaptures.FirstOrDefault(x => x.CaptureType == CaptureType);
    if (closeCameraCapture != null && closeCameraCapture.Disconnect())
    {
        // Only unregister once the hardware has actually been released.
        UnRegisterCameraCapture(closeCameraCapture);
        return true;
    }
    return false;
}
/// <summary>
/// Creates and registers a camera capture for the given type, then connects and opens it.
/// (The previous summary wrongly described this as connecting the current camera.)
/// </summary>
/// <param name="CaptureType">The capture type to create a camera for.</param>
/// <param name="NewCameraCapture">The fully opened camera on success; null otherwise.</param>
/// <returns>True when the camera was created, connected and opened; otherwise false.</returns>
public static bool CreateCameraAndOpen(CaptureType CaptureType, out CameraCapture NewCameraCapture)
{
    NewCameraCapture = null;
    CameraCapture cameraCapture = CreateCamera(CaptureType);
    // Short-circuits exactly like the original nested ifs: a failed Connect skips Open.
    // NOTE(review): on partial failure the capture stays registered — TODO confirm intended.
    if (cameraCapture != null && cameraCapture.Connect() && cameraCapture.Open())
    {
        NewCameraCapture = cameraCapture;
        return true;
    }
    return false;
}
/// <summary>
/// Creates a new camera capture for the given capture type and registers it.
/// (The previous summary wrongly described this as connecting the current camera.)
/// </summary>
/// <param name="CaptureType">The capture type to create a camera for.</param>
/// <returns>The newly created and registered camera capture.</returns>
public static CameraCapture CreateCamera(CaptureType CaptureType)
{
    CameraCapture cameraCapture = new CameraCapture(CaptureType);
    RegisterCameraCapture(cameraCapture);
    return cameraCapture;
}
/// <summary>
/// 指定されたビデオファイルをオープンします.
/// </summary>
/// <param name="fileName"></param>
/// <returns></returns>
#else
/// <summary>
/// Opens the specified video file
/// </summary>
/// <param name="fileName"></param>
/// <returns></returns>
#endif
public void Open(string fileName)
{
    ThrowIfDisposed();
    // Re-targets the existing native capture handle to a file source.
    // NOTE(review): the native return value is not checked here, so a failed
    // open is silent — callers presumably probe IsOpened(); TODO confirm.
    NativeMethods.videoio_VideoCapture_open1(ptr, fileName);
    captureType = CaptureType.File;
}
/// <summary>
/// カメラからのビデオキャプチャを初期化する.
/// Windows では,次の二つのカメラインタフェースが利用できる:Video for Windows(VFW),Matrox Imaging Library(MIL).
/// Linux では,次の二つカメラインタフェースが利用できる:Video for Linux(V4L),FireWire(IEEE1394).
/// </summary>
/// <param name="index">使われるカメラのインデックス.使用するカメラが1台のとき,あるいは,何台のカメラを使うかが重要でないときは,-1 でも問題ない場合もある.</param>
/// <returns></returns>
#else
/// <summary>
/// Allocates and initialized the CvCapture structure for reading a video stream from the camera.
/// Currently two camera interfaces can be used on Windows: Video for Windows (VFW) and Matrox Imaging Library (MIL); and two on Linux: V4L and FireWire (IEEE1394).
/// </summary>
/// <param name="index">Index of the camera to be used. If there is only one camera or it does not matter what camera to use -1 may be passed. </param>
/// <returns></returns>
#endif
public void Open(int index)
{
    ThrowIfDisposed();
    try
    {
        NativeMethods.videoio_VideoCapture_open2(ptr, index);
    }
    catch (AccessViolationException e)
    {
        // Message fixed: this class wraps VideoCapture, not the legacy CvCapture
        // (consistent with the VideoCapture(int) constructor's message).
        throw new OpenCvSharpException("Failed to create VideoCapture", e);
    }
    captureType = CaptureType.Camera;
}
/// <summary>
/// ファイルからのビデオキャプチャを初期化する
/// </summary>
/// <param name="filename">ビデオファイル名</param>
/// <returns></returns>
#else
/// <summary>
/// Allocates and initialized the CvCapture structure for reading the video stream from the specified file.
/// After the allocated structure is not used any more it should be released by cvReleaseCapture function.
/// </summary>
/// <param name="filename">Name of the video file. </param>
/// <returns></returns>
#endif
public CvCapture(string filename)
{
    if (string.IsNullOrEmpty(filename))
        throw new ArgumentNullException(nameof(filename));
    if (!File.Exists(filename))
        throw new FileNotFoundException("File not found", filename);
    ptr = NativeMethods.cvCreateFileCapture(filename);
    if (ptr == IntPtr.Zero)
        throw new OpenCvSharpException("Failed to create CvCapture");
    captureType = CaptureType.File;
}
/// <summary>
/// Returns true when <paramref name="captureType"/> is one of the region-style
/// capture modes (any rectangle/shape selection, polygon, freehand, or a repeat
/// of the last region).
/// </summary>
/// <param name="captureType">The capture mode to classify.</param>
/// <returns>True for region-style capture modes; otherwise false.</returns>
// NOTE(review): HasFlagAny is a project extension — presumably "matches any of the
// listed values"; confirm its semantics if CaptureType is a [Flags] enum, since
// combined values would then also match.
private bool IsRegionCapture(CaptureType captureType) { return captureType.HasFlagAny(CaptureType.RectangleWindow, CaptureType.Rectangle, CaptureType.RoundedRectangle, CaptureType.Ellipse, CaptureType.Triangle, CaptureType.Diamond, CaptureType.Polygon, CaptureType.Freehand, CaptureType.LastRegion); }