/// <summary>
/// With a 0.25 zoom and a null bitmap argument, the grabber must allocate a new bitmap
/// whose dimensions match its (half-size) reported screenshot dimensions.
/// </summary>
public void CaptureScreenshot_WhenBitmapIsNullAndZoomed_CapturesScaledScreenshotIntoNewBitmap()
{
    var parameters = new CaptureParameters() { Zoom = 0.25 };
    using (var grabber = new ScreenGrabber(parameters))
    {
        // When screenshots are unavailable (e.g. headless session) the call must fail loudly.
        if (!ScreenGrabber.CanCaptureScreenshot())
        {
            Assert.Throws<ScreenshotNotAvailableException>(() => grabber.CaptureScreenshot(null),
                "CanCaptureScreenshot returned false so expected an exception to be thrown.");
            return;
        }

        using (Bitmap screenshot = grabber.CaptureScreenshot(null))
        {
            TestLog.EmbedImage("Screenshot with 0.25x zoom", screenshot);

            Assert.Multiple(() =>
            {
                // 0.25x zoom (by area) halves each linear dimension.
                Assert.AreApproximatelyEqual(ScreenGrabber.GetScreenSize().Width / 2, grabber.ScreenshotWidth, 1);
                Assert.AreApproximatelyEqual(ScreenGrabber.GetScreenSize().Height / 2, grabber.ScreenshotHeight, 1);
                Assert.AreEqual(grabber.ScreenshotWidth, screenshot.Width);
                Assert.AreEqual(grabber.ScreenshotHeight, screenshot.Height);
            });
        }
    }
}
/// <summary>
/// Button handler: captures the screen at 2x scale via the Win32 path, logs the
/// dimensions and byte count, and shows the result in the picture box.
/// </summary>
private void btnScreen_Click(object sender, EventArgs e)
{
    var shot = ScreenGrabber.Win32ScreenShot(2.0);

    Debug.WriteLine($"Size: {shot.Width} x {shot.Height} Length: {shot.Bytes.Length}");

    var image = CreateBitmapFromScreenShot(shot);
    pbScreen.Image = image;
}
/// <summary>
/// When a correctly-sized bitmap is supplied, the grabber must capture into it
/// and return the very same instance rather than allocating a new one.
/// </summary>
public void CaptureScreenshot_WhenBitmapIsNotNull_CapturesScreenshotIntoProvidedBitmap()
{
    var parameters = new CaptureParameters() { Zoom = 0.25 };
    using (var grabber = new ScreenGrabber(parameters))
    using (var target = new Bitmap(grabber.ScreenshotWidth, grabber.ScreenshotHeight))
    {
        if (ScreenGrabber.CanCaptureScreenshot())
        {
            var result = grabber.CaptureScreenshot(target);
            TestLog.EmbedImage("Screenshot with 0.25x zoom", target);

            Assert.AreSame(target, result);
        }
        else
        {
            // Screenshots unavailable in this session: the call must fail loudly instead.
            Assert.Throws<ScreenshotNotAvailableException>(() => grabber.CaptureScreenshot(target),
                "CanCaptureScreenshot returned false so expected an exception to be thrown.");
        }
    }
}
//public override bool IsBody(Collider collider)
//{
//    VRLog.Info("Is Body? {0} {1}", collider.name, LayerMask.LayerToName(collider.gameObject.layer));
//    return collider.gameObject.layer > 0;
//}

/// <summary>
/// Start hook: caches reflection handles for CameraPosition's private fields, builds a
/// 1280x720 background grabber over the listed cameras, and creates the (initially
/// hidden) GUI quad that displays its output.
/// </summary>
protected override void OnStart()
{
    base.OnStart();

    // Resolve the private CameraPosition fields by name once, up front, so later
    // per-frame code does not pay the reflection lookup cost.
    var camPosType = typeof(CameraPosition);
    _interestingFields = _interestingFieldNames.Select(name => camPosType.GetField(name, BindingFlags.Instance | BindingFlags.NonPublic)).ToArray();

    // Grab the output of these cameras into a 1280x720 texture for the background display.
    var bgGrabber = new ScreenGrabber(1280, 720, ScreenGrabber.FromList(
        "Camera_BG",     // backgrounds
        "Camera_Main",   // no idea
        "Camera_Effect", // effects (e.g. vignette?)
        "Camera"         // cinematics
    ));

    _BGDisplay = GUIQuad.Create(bgGrabber);
    _BGDisplay.transform.localScale = Vector3.one * 15;
    // Keep the quad alive across scene loads; it starts hidden until a level activates it.
    DontDestroyOnLoad(_BGDisplay.gameObject);
    _BGDisplay.gameObject.SetActive(false);

    //VR.GUI.AddGrabber(new CameraConsumer());
    VR.GUI.AddGrabber(bgGrabber);

    // Defer the initial level notification slightly — presumably so scene state is
    // fully initialized before OnLevel runs; TODO confirm why 0.1s specifically.
    Invoke(() => OnLevel(SceneManager.GetActiveScene().buildIndex), 0.1f);
}
/// <summary>
/// Applies the screen mirror effect to the lights.
/// This method is called 10x per second by <see cref="ScreenMirrorEffect"/>.
/// Updates at most <c>numberOfPanelsPerIteration</c> panels per call and rotates
/// updated panels to the back of <c>_panels</c> so all panels get refreshed over time.
/// NOTE(review): declared async but contains no await, so it runs synchronously and
/// returns a completed Task — confirm whether SetPanelColor should be awaited.
/// </summary>
public async Task ApplyEffect()
{
    const int numberOfPanelsPerIteration = 5; //5x10 = 50hz. Note: 50hz seems to work good, higher values can make Canvas stop its external control
    const int minimumColorDifference = 30;

    var panelsToUpdate = _panels.Take(numberOfPanelsPerIteration * 2).ToList(); //Take 2 times the number of panels, in case any color differences are not large enough

    // One averaged color per candidate panel, index-aligned with panelsToUpdate.
    var colors = ScreenGrabber.CalculateAverageColor(panelsToUpdate.Select(panel => panel.ScreenshotArea), 0);

    var numberOfPanelsChanged = 0;
    for (var i = 0; i < panelsToUpdate.Count; i++)
    {
        //Only update the color of a panel that has a large enough color difference
        if (ColorDistance(panelsToUpdate[i].CurrentColor, colors[i]) > minimumColorDifference)
        {
            numberOfPanelsChanged++;
            panelsToUpdate[i].CurrentColor = colors[i];
            _externalControlEndpoint.SetPanelColor(_deviceType, panelsToUpdate[i].PanelId, colors[i].R, colors[i].G, colors[i].B);
            // Safe while iterating: we enumerate panelsToUpdate (a copy), not _panels itself.
            _panels.Remove(panelsToUpdate[i]); //Remove the current panel and place it at the back of the list
            _panels.Add(panelsToUpdate[i]);
        }

        // Cap the work per call so the overall update rate stays at ~50 panel updates/sec.
        if (numberOfPanelsChanged >= numberOfPanelsPerIteration)
        {
            break;
        }
    }
}
/// <summary>
/// Capturing after Dispose must fail with ObjectDisposedException.
/// </summary>
public void CaptureScreenshot_WhenDisposed_Throws()
{
    var parameters = new CaptureParameters();
    var disposedGrabber = new ScreenGrabber(parameters);
    disposedGrabber.Dispose();

    Assert.Throws<ObjectDisposedException>(() => disposedGrabber.CaptureScreenshot(null));
}
/// <summary>
/// Applies the ambilight effect to the lights. Ambilight is the average color of the whole screen.
/// Sets the color of the nanoleaf with the logging disabled.
/// Seeing as a maximum of 10 requests per second can be set this will generate a lot of unwanted log data.
/// See https://github.com/StijnOostdam/Winleafs/issues/40.
/// </summary>
public async Task ApplyEffect()
{
    //Index 0 is safe since we always have exactly 1 area.
    var averageColor = ScreenGrabber.CalculateAverageColor(_screenBounds)[0];

    var hue = (int)averageColor.GetHue();
    var saturation = (int)(averageColor.GetSaturation() * 100);

    await _nanoleafClient.StateEndpoint.SetHueAndSaturationAsync(hue, saturation, disableLogging: true);
}
/// <summary>
/// The Parameters property must expose the exact instance passed to the constructor.
/// </summary>
public void Parameters_ReturnsParameters()
{
    var captureParameters = new CaptureParameters();

    using (var grabber = new ScreenGrabber(captureParameters))
    {
        Assert.AreSame(captureParameters, grabber.Parameters);
    }
}
/// <summary>
/// The Video property must expose the exact video instance passed to the constructor.
/// </summary>
public void Video_ReturnsVideo()
{
    // ScreenGrabber is IDisposable; dispose it like the other tests in this fixture do
    // so the capture resources are not leaked (the original leaked the grabber).
    using (var grabber = new ScreenGrabber(new CaptureParameters()))
    {
        var video = new FlashScreenVideo(new FlashScreenVideoParameters(grabber.ScreenshotWidth, grabber.ScreenshotHeight, 5));
        using (var recorder = new ScreenRecorder(grabber, video))
        {
            Assert.AreSame(video, recorder.Video);
        }
    }
}
/// <summary>
/// The reported screen size must have strictly positive width and height.
/// </summary>
public void ScreenSize_ReturnsSensibleResult()
{
    var size = ScreenGrabber.GetScreenSize();

    Assert.Multiple(() =>
    {
        Assert.GreaterThan(size.Width, 0);
        Assert.GreaterThan(size.Height, 0);
    });
}
/// <summary>
/// Calling Stop on a disposed recorder must fail with ObjectDisposedException.
/// </summary>
public void Stop_WhenDisposed_Throws()
{
    var captureGrabber = new ScreenGrabber(new CaptureParameters());
    var flashVideo = new FlashScreenVideo(new FlashScreenVideoParameters(captureGrabber.ScreenshotWidth, captureGrabber.ScreenshotHeight, 5));
    var recorder = new ScreenRecorder(captureGrabber, flashVideo);
    recorder.Dispose();

    Assert.Throws<ObjectDisposedException>(() => recorder.Stop());
}
/// <summary>
/// Reading ScreenshotWidth on a disposed grabber must fail with ObjectDisposedException.
/// </summary>
public void ScreenshotWidth_WhenDisposed_Throws()
{
    var parameters = new CaptureParameters();
    var grabber = new ScreenGrabber(parameters);
    grabber.Dispose();

    // The property getter itself must throw; the assignment target is never written.
    int width;
    Assert.Throws<ObjectDisposedException>(() => width = grabber.ScreenshotWidth);
}
/// <summary>
/// Stops the timer and gives the last command 1 second to complete. Also stops the
/// screen grabber if no other screen mirror effects are active.
/// </summary>
public async Task Deactivate()
{
    _timer.Stop();

    //Give the last command the time to complete, 1000 is based on testing and a high value (better safe than sorry).
    //Use Task.Delay instead of Thread.Sleep: this method is async, and Thread.Sleep would
    //block the calling thread (UI thread / thread-pool thread) for the whole second.
    await Task.Delay(1000);

    //Check if any other screen mirror effects are active, if not, stop the screen grabber
    if (OrchestratorCollection.CountOrchestratorsWithActiveScreenMirrorEffect() <= 0)
    {
        ScreenGrabber.Stop();
    }
}
/// <summary>
/// Applies the screen mirror effect to the lights.
/// This method is called X times per second by <see cref="ScreenMirrorEffect"/>.
/// </summary>
public async Task ApplyEffect()
{
    var averageColors = ScreenGrabber.CalculateAverageColor(_panelAreas, 0);

    //Null means no screenshot has been taken yet since the effect was enabled.
    if (averageColors == null)
    {
        return;
    }

    _externalControlEndpoint.SetPanelsColors(_deviceType, _panelIds, averageColors);
}
/// <summary>
/// With 0.25 zoom (by area) the reported screenshot height must be half the screen height.
/// </summary>
public void ScreenshotHeight_ReturnsScaledHeight()
{
    using (var grabber = new ScreenGrabber(new CaptureParameters() { Zoom = 0.25 }))
    {
        var expectedHeight = ScreenGrabber.GetScreenSize().Height / 2;
        Assert.AreApproximatelyEqual(expectedHeight, grabber.ScreenshotHeight, 1);
    }
}
/// <summary>
/// The recorder constructor must reject a video whose width or height differs
/// from the grabber's screenshot dimensions.
/// </summary>
public void Constructor_WhenVideoSizeDoesNotMatchGrabberScreenshotsSize_Throws()
{
    const string expectedMessage = "The video dimensions must be exactly the same as the screenshots obtained by the grabber.";
    var grabber = new ScreenGrabber(new CaptureParameters());

    // Width mismatch.
    var narrowVideo = new FlashScreenVideo(new FlashScreenVideoParameters(1, grabber.ScreenshotHeight, 5));
    var widthEx = Assert.Throws<ArgumentException>(() => new ScreenRecorder(grabber, narrowVideo));
    Assert.Contains(widthEx.Message, expectedMessage);

    // Height mismatch.
    var shortVideo = new FlashScreenVideo(new FlashScreenVideoParameters(grabber.ScreenshotWidth, 1, 5));
    var heightEx = Assert.Throws<ArgumentException>(() => new ScreenRecorder(grabber, shortVideo));
    Assert.Contains(heightEx.Message, expectedMessage);
}
/// <summary>
/// Captures an image of the entire desktop.
/// </summary>
/// <param name="parameters">The capture parameters.</param>
/// <returns>The screenshot.</returns>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="parameters"/> is null.</exception>
/// <exception cref="ScreenshotNotAvailableException">Thrown if a screenshot cannot be captured at this time.</exception>
public static Bitmap Screenshot(CaptureParameters parameters)
{
    if (parameters == null)
    {
        throw new ArgumentNullException("parameters");
    }

    using (var grabber = new ScreenGrabber(parameters))
    {
        // Stamp the configured overlays onto the captured image.
        grabber.OverlayManager.AddOverlay(GetOverlayManager().ToOverlay());

        // Null bitmap: let the grabber allocate one of the right size.
        return grabber.CaptureScreenshot(null);
    }
}
/// <inheritdoc />
public async Task Activate()
{
    //For screen mirror (fit/stretch), we need to enable external control first.
    var needsExternalControl =
        _screenMirrorAlgorithm == ScreenMirrorAlgorithm.ScreenMirrorFit ||
        _screenMirrorAlgorithm == ScreenMirrorAlgorithm.ScreenMirrorStretch;

    if (needsExternalControl)
    {
        await _nanoleafClient.ExternalControlEndpoint.PrepareForExternalControl(_orchestrator.PanelLayout.DeviceType, _orchestrator.Device.IPAddress);
    }

    //Start the screengrabber, then begin the periodic effect updates.
    ScreenGrabber.Start();

    _timer.Start();
}
/// <summary>
/// Applies the ambilight effect to the lights. Ambilight is the average color of the whole screen.
/// Sets the color of the nanoleaf with the logging disabled.
/// Seeing as a maximum of 10 requests per second can be set this will generate a lot of unwanted log data.
/// See https://github.com/StijnOostdam/Winleafs/issues/40.
/// </summary>
public async Task ApplyEffect()
{
    var averageColors = ScreenGrabber.CalculateAverageColor(_screenBounds);

    //Null means no screenshot has been taken yet since the effect was enabled.
    if (averageColors == null)
    {
        return;
    }

    //Index 0 is safe since we always have exactly 1 area.
    var screenColor = averageColors[0];
    var hue = (int)screenColor.GetHue();
    var sat = (int)(screenColor.GetSaturation() * 100);

    await _nanoleafClient.StateEndpoint.SetHueAndSaturationAsync(hue, sat, disableLogging: true);
}
/// <summary>
/// CaptureScreenshot must reject a bitmap whose width or height does not match
/// the grabber's screenshot dimensions.
/// </summary>
public void CaptureScreenshot_WhenBitmapIsNotTheRightSize_Throws()
{
    const string expectedMessage = "The bitmap dimensions must exactly match the screenshot dimensions.";

    using (var grabber = new ScreenGrabber(new CaptureParameters() { Zoom = 0.25 }))
    {
        // Width mismatch.
        var widthEx = Assert.Throws<ArgumentException>(() => grabber.CaptureScreenshot(new Bitmap(1, grabber.ScreenshotHeight)));
        Assert.Contains(widthEx.Message, expectedMessage);

        // Height mismatch.
        var heightEx = Assert.Throws<ArgumentException>(() => grabber.CaptureScreenshot(new Bitmap(grabber.ScreenshotWidth, 1)));
        Assert.Contains(heightEx.Message, expectedMessage);
    }
}
/// <summary>
/// Records for two seconds, stops, and embeds the resulting video in the test log.
/// </summary>
public void Start_CapturesVideoUntilStopped()
{
    // ScreenGrabber is IDisposable; dispose it like the other tests in this fixture do
    // so the capture resources are not leaked (the original leaked the grabber).
    using (var grabber = new ScreenGrabber(new CaptureParameters() { Zoom = 0.25 }))
    {
        var video = new FlashScreenVideo(new FlashScreenVideoParameters(grabber.ScreenshotWidth, grabber.ScreenshotHeight, 5));
        using (var recorder = new ScreenRecorder(grabber, video))
        {
            recorder.Start();
            // Let the recorder capture a couple of seconds of frames.
            Thread.Sleep(2000);
            recorder.Stop();

            TestLog.EmbedVideo("Video", recorder.Video);
        }
    }
}
/// <summary>
/// Applies the screen mirror effect to the lights.
/// Screen mirror takes the average color of each triangle and applies it to that triangle.
/// </summary>
public async Task ApplyEffect()
{
    foreach (var panel in _panels)
    {
        //For each panel, draw a rectangle around its midpoint, according to the set rectangle size
        //Then get the average color of that rectangle and apply the color to the panel
        var startX = (int)Math.Floor(panel.MidPoint.X - (_rectangleSize / 2));
        var startY = (int)Math.Floor(panel.MidPoint.Y - (_rectangleSize / 2));

        // In multi monitor setup, all screens are joined in one larger pixel area. For example, if you want to take a screenshot of the second from left monitor,
        // you need to start at the right of the first left monitor. Hence, we need to add _screenBounds X and Y here to the location of the rectangle we want to capture
        var bounds = new Rectangle(_screenBounds.X + startX, _screenBounds.Y + startY, _rectangleSize, _rectangleSize);

        // Dispose the captured bitmap: it wraps GDI resources and this loop runs repeatedly,
        // so relying on the finalizer leaks handles under sustained use (the original leaked it).
        using (var bitmap = ScreenGrabber.CaptureScreen(bounds))
        {
            var color = ScreenGrabber.CalculateAverageColor(bitmap, _capturedBounds, 0);
            await _externalControlEndpoint.SetPanelColorAsync(panel.PanelId, color.R, color.G, color.B);
        }
    }
}
/// <summary>
/// Create Screen Shots.
/// Background-thread loop: repeatedly grabs the screen at 2x scale, publishes the raw
/// pixel bytes into the shared <c>buffer</c>, and sleeps <c>sleepTime</c> between frames.
/// Runs forever; the loop has no exit condition.
/// </summary>
static void ScreenProc()
{
    Console.WriteLine("Screen Thread running...");

    while (true)
    {
        // Lazily start the FPS stopwatch on the first iteration.
        if (stopwatch.IsRunning == false)
        {
            stopwatch.Start();
        }

        ScreenShot screenShotData = ScreenGrabber.Win32ScreenShot(2.0);

        // Publish the frame bytes under the lock for consumers on other threads.
        lock (locker)
        {
            buffer = screenShotData.Bytes;
        }
        // NOTE(review): width/height are written outside the lock while buffer is written
        // inside it — a reader could observe a new buffer with stale dimensions (or vice
        // versa). Confirm whether these should be published under the same lock.
        width = screenShotData.Width;
        height = screenShotData.Height;

        PrintFps();

        Thread.Sleep(sleepTime);
    }
}
/// <summary>
/// Renders one frame: caches the swap chain's current render-target and depth-stencil
/// views, updates the animation, renders the scene and cursor, saves a screenshot if one
/// was requested, and presents with vsync (interval 1).
/// </summary>
void render()
{
    // ConsoleLogger.logDebug( "ContentBase.render" );

    // Fetch and cache the current back-buffer RTV/DSV; the asserts document the
    // expectation, the explicit null check below is the release-mode guard.
    var rtv = swapChain?.GetCurrentBackBufferRTV();
    Debug.Assert(null != rtv);
    ComUtils.assign(ref cachedRtv, rtv);

    var dsv = swapChain?.GetDepthBufferDSV();
    Debug.Assert(null != dsv);
    ComUtils.assign(ref cachedDsv, dsv);

    if (null == rtv || null == dsv)
    {
        return;
    }

    animation?.update();
    // ConsoleLogger.logDebug( "ContentBase.render 2" );

    // Expose the back buffer only for the duration of the render; cleared in finally
    // even if scene.render throws.
    backBufferTexture = rtv;
    try
    {
        // MicroProfiler.start();
        scene.render(this, rtv, dsv);
        m_cursor?.render();

        // Atomically consume a pending screenshot request (set from another thread).
        string screenshot = System.Threading.Interlocked.Exchange(ref screenshotLocation, null);
        if (null != screenshot)
        {
            // NOTE(review): `using (var dev = device)` disposes the device reference
            // after saving — confirm `device` hands out an addref'd/owned reference,
            // otherwise this would tear down a shared device.
            using (var dev = device)
            using (var tx = rtv.GetTexture())
                ScreenGrabber.saveTexture(dev, context, tx, screenshot);
        }
        // MicroProfiler.key( "done rendering" );
    }
    finally
    {
        backBufferTexture = null;
    }

    // Present with a sync interval of 1 (vsync).
    swapChain.Present(1);
    // MicroProfiler.finish();
}
//public override bool IsBody(Collider collider)
//{
//    VRLog.Info("Is Body? {0} {1}", collider.name, LayerMask.LayerToName(collider.gameObject.layer));
//    return collider.gameObject.layer > 0;
//}

/// <summary>
/// Start hook: builds a 1280x720 background grabber over the listed cameras and creates
/// the (initially hidden) GUI quad that displays its output.
/// </summary>
protected override void OnStart()
{
    base.OnStart();

    // Grab the output of these cameras into a 1280x720 texture for the background display.
    var bgGrabber = new ScreenGrabber(1280, 720, ScreenGrabber.FromList(
        "Camera_BG",     // backgrounds
        "Camera_Main",   // no idea
        "Camera_Effect", // effects (e.g. vignette?)
        "Camera"         // cinematics
    ));

    _BGDisplay = GUIQuad.Create(bgGrabber);
    _BGDisplay.transform.localScale = Vector3.one * 15;
    // Keep the quad alive across scene loads; it starts hidden until a level activates it.
    DontDestroyOnLoad(_BGDisplay.gameObject);
    _BGDisplay.gameObject.SetActive(false);

    //VR.GUI.AddGrabber(new CameraConsumer());
    VR.GUI.AddGrabber(bgGrabber);

    // Defer the initial level notification slightly — presumably so scene state is
    // fully initialized before OnLevel runs; TODO confirm why 0.1s specifically.
    Invoke(() => OnLevel(SceneManager.GetActiveScene().buildIndex), 0.1f);
}
/// <summary>
/// Applies the ambilight effect to the lights. Ambilight is the average color of the whole screen.
/// Sets the color of the nanoleaf with the logging disabled.
/// Seeing as a maximum of 10 requests per second can be set this will generate a lot of unwanted log data.
/// See https://github.com/StijnOostdam/Winleafs/issues/40.
/// </summary>
public async Task ApplyEffect()
{
    // Dispose the captured bitmap: it wraps GDI resources and this method runs many times
    // per second, so relying on the finalizer leaks handles (the original leaked it).
    using (var bitmap = ScreenGrabber.CaptureScreen(_screenBounds))
    {
        var color = ScreenGrabber.CalculateAverageColor(bitmap, _screenBounds);

        var hue = (int)color.GetHue();
        var sat = (int)(color.GetSaturation() * 100);

        if (_controlBrightness)
        {
            //For brightness calculation see: https://stackoverflow.com/a/596243 and https://www.w3.org/TR/AERT/#color-contrast
            //We do not use Color.GetBrightness() since that value is always null because we use Color.FromArgb in the screengrabber.
            //Brightness can be maximum 100
            var brightness = Math.Min(100, (int)(0.299 * color.R + 0.587 * color.G + 0.114 * color.B));

            await _nanoleafClient.StateEndpoint.SetHueSaturationAndBrightnessAsync(hue, sat, brightness, disableLogging: true);
        }
        else
        {
            await _nanoleafClient.StateEndpoint.SetHueAndSaturationAsync(hue, sat, disableLogging: true);
        }
    }
}
/// <summary>
/// Starts recording a screen capture video of the entire desktop.
/// </summary>
/// <remarks>
/// <para>
/// Recording a screen capture video can be very CPU and space intensive particularly
/// when running tests on a single-core CPU. We recommend calling
/// <see cref="StartRecording(CaptureParameters, double)" /> with
/// a <see cref="CaptureParameters.Zoom" /> factor of 0.25 or less and a frame rate
/// of no more than 5 to 10 frames per second.
/// </para>
/// </remarks>
/// <param name="parameters">The capture parameters.</param>
/// <param name="framesPerSecond">The number of frames per second to capture.</param>
/// <returns>The recorder.</returns>
/// <exception cref="ArgumentNullException">Thrown if <paramref name="parameters"/> is null.</exception>
/// <exception cref="ScreenshotNotAvailableException">Thrown if a screenshot cannot be captured at this time.</exception>
public static ScreenRecorder StartRecording(CaptureParameters parameters, double framesPerSecond)
{
    if (parameters == null)
    {
        throw new ArgumentNullException("parameters");
    }

    // Fail fast before allocating anything if screenshots are unavailable.
    ScreenGrabber.ThrowIfScreenshotNotAvailable();

    // Ownership handoff chain: until the recorder is successfully returned, this method
    // owns the grabber (and then the recorder) and must dispose them on any failure.
    ScreenGrabber grabber = new ScreenGrabber(parameters);
    try
    {
        FlashScreenVideo video = new FlashScreenVideo(new FlashScreenVideoParameters(
            grabber.ScreenshotWidth, grabber.ScreenshotHeight, framesPerSecond));

        ScreenRecorder recorder = new ScreenRecorder(grabber, video);
        try
        {
            recorder.OverlayManager.AddOverlay(GetOverlayManager().ToOverlay());
            recorder.Start();
            // Success: the caller now owns (and must dispose) the recorder.
            return(recorder);
        }
        catch
        {
            // Start/overlay setup failed: release the recorder before rethrowing.
            recorder.Dispose();
            throw;
        }
    }
    catch
    {
        // Anything after grabber creation failed: release the grabber before rethrowing.
        grabber.Dispose();
        throw;
    }
}
/// <summary>
/// The recorder constructor must reject a null video argument.
/// </summary>
public void Constructor_WhenVideoIsNull_Throws()
{
    var screenGrabber = new ScreenGrabber(new CaptureParameters());

    Assert.Throws<ArgumentNullException>(() => new ScreenRecorder(screenGrabber, null));
}
/// <summary>
/// Creates the screen grabber over the given source rectangle, wired to the current
/// display and sprite painter, and stores it in <c>ScreenG</c>.
/// </summary>
/// <param name="SourceRectangle">The region of the source to capture.</param>
public void InitScreenGrabber(Rectangle SourceRectangle)
{
    ScreenG = new ScreenGrabber(display, SpritePainter, SourceRectangle);
}
/// <summary>
/// Builds the single render graph for the configured frame source: applies the global
/// resize/flip/idle/color settings, selects and configures the source according to
/// <c>_options.Source</c>, and registers the graph in <paramref name="graphs"/>.
/// </summary>
/// <param name="graphs">Collection the finished graph is added to.</param>
/// <param name="reportingTags">Receives an "In:*" tag identifying the chosen source.</param>
protected override void CreateRenderGraphs(RenderGraphCollection graphs, HashSet <string> reportingTags)
{
    // create graph with renderers
    _graph = new RenderGraph
    {
        Destinations = GetRenderers(_config, reportingTags),
        Resize = _config.Global.Resize,
        FlipHorizontally = _config.Global.FlipHorizontally,
        FlipVertically = _config.Global.FlipVertically,
        IdleAfter = _options.IdleAfter,
        IdlePlay = _options.IdlePlay
    };
    _graph.SetColor(_config.Global.DmdColor);

    // setup source and additional processors
    switch (_options.Source)
    {
        case SourceType.PinballFX2:
        {
            _graph.Source = new PinballFX2Grabber { FramesPerSecond = _options.FramesPerSecond };
            reportingTags.Add("In:PinballFX2");
            break;
        }
        case SourceType.PinballFX3:
        {
            // FX3 has two capture paths: legacy screen grabbing vs. reading frames from memory.
            if (_options.Fx3GrabScreen)
            {
                _graph.Source = new PinballFX3Grabber { FramesPerSecond = _options.FramesPerSecond };
                reportingTags.Add("In:PinballFX3Legacy");
            }
            else
            {
                _graph.Source = new PinballFX3MemoryGrabber { FramesPerSecond = _options.FramesPerSecond };
                reportingTags.Add("In:PinballFX3");
            }
            break;
        }
        case SourceType.PinballArcade:
        {
            _graph.Source = new TPAGrabber { FramesPerSecond = _options.FramesPerSecond };
            reportingTags.Add("In:PinballArcade");
            break;
        }
        case SourceType.ProPinball:
        {
            _graph.Source = new ProPinballSlave(_options.ProPinballArgs);
            reportingTags.Add("In:ProPinball");
            break;
        }
        case SourceType.Screen:
            // Capture region from _options.Position (left, top, width, height),
            // scaled to the _options.ResizeTo destination size.
            var grabber = new ScreenGrabber
            {
                FramesPerSecond = _options.FramesPerSecond,
                Left = _options.Position[0],
                Top = _options.Position[1],
                Width = _options.Position[2],
                Height = _options.Position[3],
                DestinationWidth = _options.ResizeTo[0],
                DestinationHeight = _options.ResizeTo[1]
            };
            // Optionally overlay a dot-matrix grid on the resized output.
            if (_options.GridSpacing > 0)
            {
                grabber.Processors.Add(new GridProcessor
                {
                    Width = _options.ResizeTo[0],
                    Height = _options.ResizeTo[1],
                    Spacing = _options.GridSpacing
                });
            }
            _graph.Source = grabber;
            reportingTags.Add("In:ScreenGrab");
            break;

        default:
            throw new ArgumentOutOfRangeException();
    }

    graphs.Add(_graph);
}