Code example #1
        private async Task StartCaptureInternal(GraphicsCaptureItem item)
        {
            // Stop the previous capture if we had one.
            StopCapture();

            _item     = item;
            _lastSize = _item.Size;

            _canvasDevice = new CanvasDevice();

            _framePool = Direct3D11CaptureFramePool.Create(
                _canvasDevice,                             // D3D device
                DirectXPixelFormat.B8G8R8A8UIntNormalized, // Pixel format
                2,                                         // Number of frames
                _item.Size);                               // Size of the buffers

            _item.Closed += (s, a) =>
            {
                StopCapture();
            };

            _session = _framePool.CreateCaptureSession(_item);
            _session.StartCapture();
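            // Wait briefly so the session can produce at least one frame, then dequeue and process a single frame below.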

            await Task.Delay(500);

            var frame = _framePool.TryGetNextFrame();

            await ProcessFrame(frame);

            StopCapture();
        }
Code example #2
        public void SetGraphicItem(GraphicsCaptureItem item)
        {
            this.ResetState();
            this.item = item;

            if (this.item != null)
            {
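                // Shared BGRA render target matching the capture item's size; the Shared
                // option flag lets the texture be opened by another Direct3D device.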
                this.renderTargetDescription = new Texture2DDescription
                {
                    CpuAccessFlags    = CpuAccessFlags.None,
                    Width             = item.Size.Width,
                    Height            = item.Size.Height,
                    Usage             = ResourceUsage.Default,
                    Format            = SharpDX.DXGI.Format.B8G8R8A8_UNorm,
                    ArraySize         = 1,
                    BindFlags         = BindFlags.RenderTarget | BindFlags.ShaderResource,
                    OptionFlags       = ResourceOptionFlags.Shared,
                    MipLevels         = 1,
                    SampleDescription = new SampleDescription(1, 0),
                };
                this.renderTarget = new Texture2D(d3dDevice, this.renderTargetDescription);

                framePool = Direct3D11CaptureFramePool.Create(device, DirectXPixelFormat.B8G8R8A8UIntNormalized, 2, this.item.Size);
                session   = framePool.CreateCaptureSession(this.item);
                lastSize  = this.item.Size;

                framePool.FrameArrived += this.OnFrameArrived;
                session.StartCapture();
            }
        }
Code example #3
        public static async Task<IList<MediaDevice>> GetVideoCaptureDevices()
        {
            var devices = await Windows.Devices.Enumeration.DeviceInformation.FindAllAsync(
                              Windows.Media.Devices.MediaDevice.GetVideoCaptureSelector());

            IList<MediaDevice> list = new List<MediaDevice>();

            foreach (var deviceInfo in devices)
            {
                list.Add(new MediaDevice()
                {
                    Id   = deviceInfo.Id,
                    Name = deviceInfo.Name,
                    //Location = deviceInfo.EnclosureLocation
                });
            }

            if (GraphicsCaptureSession.IsSupported())
            {
                list.Add(new MediaDevice()
                {
                    Id   = Screen.DeviceId,
                    Name = Screen.DeviceName
                });
            }

            return list;
        }
Code example #4
        public MainPage()
        {
            InitializeComponent();

            if (!GraphicsCaptureSession.IsSupported())
            {
                IsEnabled = false;

                var dialog = new MessageDialog(
                    "Screen capture is not supported on this device for this release of Windows!",
                    "Screen capture unsupported");

                var ignored = dialog.ShowAsync();
                return;
            }

            // initialize screen recording
            screenDevice = Direct3D11Helpers.CreateDevice();

            // connect to the powerpoint app service
            App.AppServiceConnected += MainPage_AppServiceConnected;

            _timer.Interval = new TimeSpan(0, 0, 1);
            _timer.Tick    += _timer_Tick;
        }
Code example #5
        public void StartCaptureInternal(GraphicsCaptureItem item)
        {
            StopCapture();
            _item      = item;
            _lastSize  = _item.Size;
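            // Win2D swap chain at 96 DPI sized to the capture item; swapChain below is
            // assumed to be a CanvasSwapChainPanel control that presents it.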
            _swapChain = new CanvasSwapChain(_canvasDevice, _item.Size.Width, _item.Size.Height, 96);

            swapChain.SwapChain = _swapChain;

            _framePool = Direct3D11CaptureFramePool.Create(
                _canvasDevice,                             // D3D device
                DirectXPixelFormat.B8G8R8A8UIntNormalized, // Pixel format
                60,                                        // Number of frames
                _item.Size);                               // Size of the buffers
            _session = _framePool.CreateCaptureSession(_item);
            _framePool.FrameArrived += (s, a) =>
            {
                using (var frame = _framePool.TryGetNextFrame())
                {
                    ProcessFrame(frame);
                }
            };
            _item.Closed += (s, a) =>
            {
                StopCapture();
            };
            _session.StartCapture();
        }
Code example #6
        private void CreateCaptureItemDependendStuff()
        {
            _framePool = Direct3D11CaptureFramePool.Create(_device, PixelFormat, 2, _item.Size);
            _framePool.FrameArrived += OnFrameArrived;
            _session = _framePool.CreateCaptureSession(_item);
            _session.IsCursorCaptureEnabled = !PresentationToNDIAddIn.Properties.Settings.Default.HideMouse;

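            // Two-buffer flip-model swap chain description matching the capture item's size and pixel format.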
            var description = new SwapChainDescription1
            {
                Width             = _item.Size.Width,
                Height            = _item.Size.Height,
                Format            = SharpDxFormat,
                Stereo            = false,
                SampleDescription = new SampleDescription {
                    Count = 1, Quality = 0
                },
                Usage       = Usage.RenderTargetOutput,
                BufferCount = 2,
                Scaling     = Scaling.Stretch,
                SwapEffect  = SwapEffect.FlipSequential,
                AlphaMode   = AlphaMode.Premultiplied,
                Flags       = SwapChainFlags.None
            };

            _swapChain = new SwapChain1(_factory, _d3dDevice, ref description);
            _session.StartCapture();
        }
Code example #7
        /// <summary>
        /// Invoked when application execution is being suspended.  Application state is saved
        /// without knowing whether the application will be terminated or resumed with the contents
        /// of memory still intact.
        /// </summary>
        /// <param name="sender">The source of the suspend request.</param>
        /// <param name="e">Details about the suspend request.</param>
        private void OnSuspending(object sender, SuspendingEventArgs e)
        {
            var deferral = e.SuspendingOperation.GetDeferral();

            // Don't bother if capture isn't supported
            if (GraphicsCaptureSession.IsSupported())
            {
                // Save our state
                var rootFrame = Window.Current.Content as Frame;
                if (rootFrame != null)
                {
                    rootFrame = ((RootView)rootFrame.Content).GetRootFrame();
                    if (rootFrame.Content is MainPage mainPage)
                    {
                        mainPage.CacheCurrentSettings();
                    }
                    else if (rootFrame.Content is RecordingPage recordingPage)
                    {
                        recordingPage.EndCurrentRecording();
                    }
                }
            }

            deferral.Complete();
        }
Code example #8
 public MainPageViewModel()
 {
     _suaveKeysService      = App.Current?.Container?.Resolve<ISuaveKeysService>();
     CommandLog             = "";
     SuaveKeysSignInCommand = new AsyncCommand(async() =>
     {
         var signInResult          = await _suaveKeysService.StartSignInAsync();
         SuaveKeysSignInVisibility = signInResult?.ResultType == ResultType.Ok ? Visibility.Visible : Visibility.Collapsed;
     });
     ToggleStartCommand = new AsyncCommand(async() =>
     {
         if (!GraphicsCaptureSession.IsSupported())
         {
             Error = "This device does not support the capture requirements";
             return;
         }
         if (!_isRecording)
         {
             await SetupEncoding();
         }
         else
         {
             Stop();
             Cleanup();
         }
     });
 }
Code example #9
        public void StartCapture(IntPtr hWnd, Device device, Factory factory)
        {
            var capturePicker = new GraphicsCapturePicker();

            // ReSharper disable once PossibleInvalidCastException
            // ReSharper disable once SuspiciousTypeConversion.Global
            var initializer = (IInitializeWithWindow)(object)capturePicker;

            initializer.Initialize(hWnd);

            _captureItem = capturePicker.PickSingleItemAsync().AsTask().Result;
            if (_captureItem == null)
            {
                return;
            }

            _captureItem.Closed += CaptureItemOnClosed;

            var hr = NativeMethods.CreateDirect3D11DeviceFromDXGIDevice(device.NativePointer, out var pUnknown);

            if (hr != 0)
            {
                StopCapture();
                return;
            }

            var winrtDevice = (IDirect3DDevice)Marshal.GetObjectForIUnknown(pUnknown);

            Marshal.Release(pUnknown);

            _captureFramePool = Direct3D11CaptureFramePool.Create(winrtDevice, DirectXPixelFormat.B8G8R8A8UIntNormalized, 2, _captureItem.Size);
            _captureSession   = _captureFramePool.CreateCaptureSession(_captureItem);
            _captureSession.StartCapture();
            IsCapturing = true;
        }
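The cast to IInitializeWithWindow and the NativeMethods.CreateDirect3D11DeviceFromDXGIDevice call above rely on interop declarations that the excerpt does not show. A minimal sketch of what they typically look like follows; the interface IID is the documented one for IInitializeWithWindow, while the NativeMethods class name and the exact P/Invoke shape shown here are assumptions chosen to match the call site.

using System;
using System.Runtime.InteropServices;

[ComImport]
[Guid("3E68D4BD-7135-4D10-8018-9FB6D9F33FA1")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
internal interface IInitializeWithWindow
{
    // Ties a WinRT picker (here, the GraphicsCapturePicker) to a Win32 window handle.
    void Initialize(IntPtr hwnd);
}

internal static class NativeMethods
{
    // Exported by d3d11.dll; wraps a DXGI device in a WinRT IDirect3DDevice.
    // Returns an HRESULT (0 on success), which matches the "hr != 0" check above.
    [DllImport("d3d11.dll", ExactSpelling = true)]
    internal static extern uint CreateDirect3D11DeviceFromDXGIDevice(
        IntPtr dxgiDevice, out IntPtr graphicsDevice);
}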
Code example #10
        public int StartRecording()
        {
            if (framePool == null)
            {
                return 1;
            }

            if (gcitem == null)
            {
                return 2;
            }

            counter         = 0;
            threadcounter   = 0;
            totalMemoryUsed = 0;

            framePool.FrameArrived += OnFrameArrived;
            isRecording             = true;

            //parent.msg("Started");

            session = framePool.CreateCaptureSession(gcitem);
            session.StartCapture();

            return 0;
        }
Code example #11
 public void StopCapture()
 {
     _session?.Dispose();
     _framePool?.Dispose();
     _item      = null;
     _session   = null;
     _framePool = null;
 }
Code example #12
 void SetupCapture()
 {
     IsCaptureSupported = GraphicsCaptureSession.IsSupported();
     if (!IsCaptureSupported)
     {
         return;
     }
 }
Code example #13
File: Recorder.cs Project: pr8x/ScreenRecorder
 public void Stop()
 {
     _captureItem = null;
     _framePool.Dispose();
     _framePool = null;
     _session.Dispose();
     _session = null;
 }
Code example #14
        public async Task StartRecording()
        {
            if (framePool == null)
            {
                if (parent != null)
                {
                    parent.StartWritingOutput("FramePool is null", 1);
                }

                return;
            }

            if (gcitem == null)
            {
                if (parent != null)
                {
                    parent.StartWritingOutput("Graphics Capture Item is null", 1);
                }

                return;
            }

            counter         = 0;
            threadcounter   = 0;
            totalMemoryUsed = 0;

            framePool.FrameArrived += OnFrameArrived;
            isRecording             = true;

            if (parent != null)
            {
                parent.StartWritingOutput("Recording Starts...", 2);
            }


            //if (bRecordWebcam)
            //{
            //    if (tempWebcamFile != null)
            //    {
            //        MediaEncodingProfile webcamProfile = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Qvga);
            //        await mediaCapture.StartRecordToStorageFileAsync(webcamProfile, tempWebcamFile);
            //    }
            //}
            //else if (bRecordAudio)
            //{
            //    if (memoryAudioStream != null)
            //    {
            //        await mediaCapture.StartRecordToStreamAsync(
            //                        MediaEncodingProfile.CreateMp3(AudioEncodingQuality.Auto), memoryAudioStream);
            //    }
            //}


            session = framePool.CreateCaptureSession(gcitem);
            session.StartCapture();

            return;
        }
Code example #15
 public void StopCapture() // ...or release resources
 {
     _captureSession?.Dispose();
     _captureFramePool?.Dispose();
     _captureSession   = null;
     _captureFramePool = null;
     _captureItem      = null;
     IsCapturing       = false;
 }
Code example #16
 public ModernCaptureMonitorSession(IDirect3DDevice device, ModernCaptureMonitorDescription description) : base(description)
 {
     this.CaptureItem = WinRTCaptureHelper.CreateItemForMonitor(description.MonitorInfo.Hmon);
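      // Use a 16-bit float pixel format when HDR processing is enabled so HDR content is not clipped to 8-bit BGRA.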
     this.FramePool   = Direct3D11CaptureFramePool.Create(device,
                                                          description.HdrMetadata.EnableHdrProcessing ? DirectXPixelFormat.R16G16B16A16Float : DirectXPixelFormat.B8G8R8A8UIntNormalized,
                                                          2, CaptureItem.Size);
     this.Session = FramePool.CreateCaptureSession(CaptureItem);
     this.Session.IsCursorCaptureEnabled = description.CaptureCursor;
 }
Code example #17
        public ScreenGrabberWindowsCapture(MonitorInfo screen) : base(screen)
        {
            Application.Current.Dispatcher.Invoke(() =>
            {
                var device = WindowsCaptureHelper.CreateDirect3DDeviceFromSharpDXDevice(new SharpDX.Direct3D11.Device(DriverType.Hardware, DeviceCreationFlags.BgraSupport));
                _d3dDevice = WindowsCaptureHelper.CreateSharpDXDevice(device);
                var item   = WindowsCaptureHelper.CreateItemForMonitor(Screen.HMon);

                var factory = new Factory2();

                var description = new SwapChainDescription1
                {
                    Width             = item.Size.Width,
                    Height            = item.Size.Height,
                    Format            = Format.B8G8R8A8_UNorm,
                    Stereo            = false,
                    SampleDescription = new SampleDescription
                    {
                        Count   = 1,
                        Quality = 0
                    },
                    Usage       = Usage.RenderTargetOutput,
                    BufferCount = 2,
                    Scaling     = Scaling.Stretch,
                    SwapEffect  = SwapEffect.FlipSequential,
                    AlphaMode   = AlphaMode.Premultiplied,
                    Flags       = SwapChainFlags.None
                };

                _swapChain = new SwapChain1(factory, _d3dDevice, ref description);
                _framePool = Direct3D11CaptureFramePool.Create(device, DirectXPixelFormat.B8G8R8A8UIntNormalized, 2, item.Size);
                _session   = _framePool.CreateCaptureSession(item);
                _session.IsCursorCaptureEnabled = false;

                _swapChain.ResizeBuffers(2, item.Size.Width, item.Size.Height, Format.B8G8R8A8_UNorm, SwapChainFlags.None);

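                // Staging texture with CPU read access; captured frames are copied into it
                // (presumably in OnFrameArrived) so the pixels can be read back on the CPU.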
                _screenTexture = new Texture2D(_d3dDevice, new Texture2DDescription
                {
                    CpuAccessFlags    = CpuAccessFlags.Read,
                    BindFlags         = BindFlags.None,
                    Format            = Format.B8G8R8A8_UNorm,
                    Width             = item.Size.Width,
                    Height            = item.Size.Height,
                    OptionFlags       = ResourceOptionFlags.None,
                    MipLevels         = 1,
                    ArraySize         = 1,
                    SampleDescription = { Count = 1, Quality = 0 },
                    Usage             = ResourceUsage.Staging
                });

                _framePool.FrameArrived += OnFrameArrived;

                _session.StartCapture();
            });
        }
Code example #18
 public async Task Record(GraphicsCaptureItem _field)
 {
     canvasDevice = new CanvasDevice();
     field        = _field;
     framePool    = Direct3D11CaptureFramePool.Create(
         canvasDevice,            // D3D device
         DirectXPixelFormat.A8P8, // Pixel format
         2,                       // Number of frames
         field.Size);             // Size of the buffers
     session = framePool.CreateCaptureSession(field);
 }
Code example #19
        private void Button_Click(object sender, RoutedEventArgs e)
        {
            bool isSupport = GraphicsCaptureSession.IsSupported();

            if (isSupport)
            {
                caputurebutton.Visibility = Visibility.Visible;
            }
            //MessageDialog messageDialog = new MessageDialog(isSupport.ToString());
            //await messageDialog.ShowAsync();
        }
Code example #20
File: MainPage.xaml.cs Project: arxalex/VidCapApp
        public void OnInitialization()
        {
            //LND
            lnd = new LEDandDisplay(50, 30, 16, "COM6", 1650, 1050, false, 50);
            Update();

            //WriteSittings();
            ReadSittings();
            ShowSittings();
            //rec
            _canvasDevice = new CanvasDevice();
            _compositionGraphicsDevice = CanvasComposition.CreateCompositionGraphicsDevice(
                Window.Current.Compositor,
                _canvasDevice);

            //framerate
            dt = DateTime.Now;

            //Arduino
            ArduinoInit();

            //background
            bgndSession        = new ExtendedExecutionSession();
            bgndSession.Reason = ExtendedExecutionReason.Unspecified;

            //drawing
            _compositor = Window.Current.Compositor;

            _surface = _compositionGraphicsDevice.CreateDrawingSurface(
                new Size(100, 100),
                DirectXPixelFormat.B8G8R8A8UIntNormalized,
                DirectXAlphaMode.Premultiplied);
            var visual = _compositor.CreateSpriteVisual();

            visual.RelativeSizeAdjustment = Vector2.One;
            var brush = _compositor.CreateSurfaceBrush(_surface);

            brush.HorizontalAlignmentRatio = 0.5f;
            brush.VerticalAlignmentRatio   = 0.5f;
            brush.Stretch = CompositionStretch.Uniform;
            visual.Brush  = brush;
            ElementCompositionPreview.SetElementChildVisual(this, visual);
            if (!GraphicsCaptureSession.IsSupported())
            {
                CaptureButton.Visibility = Visibility.Collapsed;
            }
            //autostart
            if (lnd.autostart == true)
            {
                _item = lnd.item;
                //StartCaptureInternal(lnd.item);
                StartCaptureAsync();
            }
        }
Code example #21
        public void Dispose()
        {
            _session?.Dispose();
            _framePool?.Dispose();
            _swapChain?.Dispose();

            _swapChain = null;
            _framePool = null;
            _session   = null;
            _item      = null;
        }
Code example #22
File: CallsService.cs Project: UnigramDev/Unigram
        public async Task<VoipVideoCapture> ToggleCapturingAsync(VoipCaptureType type)
        {
            void Disable()
            {
                if (_capturer != null)
                {
                    _capturer.SetOutput(null);
                    _manager.SetVideoCapture(null);

                    _capturer.Dispose();
                    _capturer = null;
                }
            }

            if (type == VoipCaptureType.None)
            {
                Disable();
            }
            else if (type == VoipCaptureType.Video && _capturer is not VoipVideoCapture)
            {
                Disable();

                if (_manager == null)
                {
                    return null;
                }

                _capturer = new VoipVideoCapture(await _videoWatcher.GetAndUpdateAsync());
                _manager?.SetVideoCapture(_capturer);
            }
            else if (type == VoipCaptureType.Screencast && _capturer is not VoipScreenCapture)
            {
                Disable();

                if (_manager == null || !GraphicsCaptureSession.IsSupported())
                {
                    return null;
                }

                var picker = new GraphicsCapturePicker();
                var item   = await picker.PickSingleItemAsync();

                if (item == null || _manager == null)
                {
                    return null;
                }

                _capturer = new VoipScreenCapture(item);
                _manager?.SetVideoCapture(_capturer);
            }

            return _capturer;
        }
Code example #23
        public CaptureEngine(IDirect3DDevice device, GraphicsCaptureItem item)
        {
            _device    = device;
            _d3dDevice = Direct3D11Helper.CreateSharpDXDevice(_device);

            _framePool = Direct3D11CaptureFramePool.Create(
                _device, DirectXPixelFormat.B8G8R8A8UIntNormalized, 2, item.Size);
            _session  = _framePool.CreateCaptureSession(item);
            _lastSize = item.Size;

            _framePool.FrameArrived += OnFrameArrived;
        }
Code example #24
        public void Run()
        {
            _compositor = new Compositor();
            _target     = _compositor.CreateTargetForCurrentView();
            _root       = _compositor.CreateSpriteVisual();
            _content    = _compositor.CreateSpriteVisual();
            _brush      = _compositor.CreateSurfaceBrush();

            _root.Brush = _compositor.CreateColorBrush(Colors.White);
            _root.RelativeSizeAdjustment = Vector2.One;
            _target.Root = _root;

            if (GraphicsCaptureSession.IsSupported())
            {
                _content.AnchorPoint = new Vector2(0.5f, 0.5f);
                _content.RelativeOffsetAdjustment = new Vector3(0.5f, 0.5f, 0);
                _content.RelativeSizeAdjustment   = Vector2.One;
                _content.Size  = new Vector2(-80, -80);
                _content.Brush = _brush;
                _brush.HorizontalAlignmentRatio = 0.5f;
                _brush.VerticalAlignmentRatio   = 0.5f;
                _brush.Stretch = CompositionStretch.Uniform;
                var shadow = _compositor.CreateDropShadow();
                shadow.Mask     = _brush;
                _content.Shadow = shadow;
                _root.Children.InsertAtTop(_content);

                _device = new CanvasDevice();

                // We can't just call the picker here, because no one is pumping messages yet.
                // By asking the dispatcher for our UI thread to run this, we ensure that the
                // message pump is pumping messages by the time this runs.
                var ignored = _window.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                {
                    var ignoredTask = StartCaptureAsync();
                });

                _doubleTapHelper = new DoubleTapHelper(_window);
                _doubleTapHelper.DoubleTapped += OnDoubleTapped;
            }
            else
            {
                var ignored = _window.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async() =>
                {
                    var dialog = new MessageDialog("Screen capture is not supported on this device for this release of Windows!");

                    await dialog.ShowAsync();
                });
            }

            _window.Activate();
            _window.Dispatcher.ProcessEvents(CoreProcessEventsOption.ProcessUntilQuit);
        }
Code example #25
        public MainPage()
        {
            this.InitializeComponent();
            if (!GraphicsCaptureSession.IsSupported())
            {
                // Hide the capture UI if screen capture is not supported.
                CaptureBtn.Visibility = Visibility.Collapsed;
            }


            Setup();
        }
Code example #26
File: CaptureFrameWait.cs Project: zzcandor/ailab
 private void InitializeCapture()
 {
     this.initializationThread = Thread.CurrentThread.ManagedThreadId;
     this.CurrentItem.Closed  += this.OnClosed;
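      // CreateFreeThreaded raises FrameArrived on a dedicated worker thread,
      // so no DispatcherQueue is needed on the thread that calls this method.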
     this.framePool            = Direct3D11CaptureFramePool.CreateFreeThreaded(
         this.device,
         DirectXPixelFormat.B8G8R8A8UIntNormalized,
         1,
         this.CurrentItem.Size);
     this.framePool.FrameArrived += this.OnFrameArrived;
     this.session = this.framePool.CreateCaptureSession(this.CurrentItem);
     this.session.StartCapture();
 }
Code example #27
        public void cleanUpCam()
        {
            session?.Dispose();
            framePool?.Dispose();
            currentFrame?.Dispose();
            canvasDevice?.Dispose();

            session      = null;
            framePool    = null;
            currentFrame = null;

            canvasDevice = null;
            //gcitem = null; //cannot set to null until app has finished encoding
        }
Code example #28
        private async void StartCaptureInternal(GraphicsCaptureItem item)
        {
            // Stop the previous capture if we had one.
            StopCapture();

            var  scale  = DisplayInformation.GetForCurrentView().RawPixelsPerViewPixel;
            var  height = item.Size.Height - 32;
            bool result = Windows.UI.ViewManagement.ApplicationView.GetForCurrentView().TryResizeView(new Size {
                Width = item.Size.Width / scale, Height = height / scale
            });

            if (!result)
            {
                bool result_full = Windows.UI.ViewManagement.ApplicationView.GetForCurrentView().TryEnterFullScreenMode();
            }

            _item     = item;
            _lastSize = _item.Size;

            _framePool = Direct3D11CaptureFramePool.Create(
                _canvasDevice,                             // D3D device
                DirectXPixelFormat.B8G8R8A8UIntNormalized, // Pixel format
                2,                                         // Number of frames
                _item.Size);                               // Size of the buffers

            _framePool.FrameArrived += (s, a) =>
            {
                // The FrameArrived event is raised for every frame on the thread
                // that created the Direct3D11CaptureFramePool. This means we
                // don't have to do a null-check here, as we know we're the only
                // one dequeueing frames in our application.

                // NOTE: Disposing the frame retires it and returns
                // the buffer to the pool.

                using (var frame = _framePool.TryGetNextFrame())
                {
                    ProcessFrame(frame);
                }
            };

            _item.Closed += (s, a) =>
            {
                StopCapture();
            };

            _session = _framePool.CreateCaptureSession(_item);
            _session.StartCapture();
            await SaveRecordingAsync("a.mp4", 5000);
        }
Code example #29
 public void StopCapture()
 {
     foreach (var clip in ClipImages.Values)
     {
         clip.Stop();
     }
     renderTarget?.Dispose();
     _session?.Dispose();
     _framePool?.Dispose();
     _item        = null;
     _session     = null;
     _framePool   = null;
     renderTarget = null;
 }
Code example #30
        public void cleanUpCam()
        {
            session?.Dispose();
            framePool?.Dispose();
            currentFrame?.Dispose();
            canvasDevice?.Dispose();

            session      = null;
            framePool    = null;
            currentFrame = null;

            canvasDevice = null;
            gcitem       = null;
        }