Example #1
0
 // Demonstrates creating uncompressed BGRA8 video encoding properties for a
 // 720x480 frame. The <Snippet…> markers are doc-extraction anchors — keep them.
 void VideoSubType()
 {
     // <SnippetVideoPropertiesSubType>
     VideoEncodingProperties videoProps =
         VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, 720, 480);
     // </SnippetVideoPropertiesSubType>
 }
        /// <summary>
        /// Create a live, non-seekable I420 (IYUV) video stream source.
        /// </summary>
        /// <param name="width">Frame width in pixels; must be non-zero.</param>
        /// <param name="height">Frame height in pixels; must be non-zero.</param>
        /// <param name="framerate">Frame rate in frames per second; must be positive.</param>
        /// <returns>The newly created <see cref="MediaStreamSource"/>.</returns>
        /// <exception cref="ArgumentException">Thrown when width or height is zero.</exception>
        /// <exception cref="ArgumentOutOfRangeException">Thrown when framerate is not positive.</exception>
        private MediaStreamSource CreateI420VideoStreamSource(
            uint width, uint height, int framerate)
        {
            if (width == 0)
            {
                throw new ArgumentException("Invalid zero width for video.", nameof(width));
            }
            if (height == 0)
            {
                throw new ArgumentException("Invalid zero height for video.", nameof(height));
            }
            // BUGFIX: a zero or negative framerate was previously cast straight to uint,
            // silently producing 0 or a huge wrapped value; reject it explicitly instead.
            if (framerate <= 0)
            {
                throw new ArgumentOutOfRangeException(nameof(framerate), "Framerate must be positive.");
            }

            // Note: IYUV and I420 have same memory layout (though different FOURCC)
            // https://docs.microsoft.com/en-us/windows/desktop/medfound/video-subtype-guids
            var videoProperties = VideoEncodingProperties.CreateUncompressed(
                MediaEncodingSubtypes.Iyuv, width, height);
            var videoStreamDesc = new VideoStreamDescriptor(videoProperties);

            videoStreamDesc.EncodingProperties.FrameRate.Numerator   = (uint)framerate;
            videoStreamDesc.EncodingProperties.FrameRate.Denominator = 1;
            // Bitrate in bits per second : framerate * frame pixel size * I420=12bpp
            videoStreamDesc.EncodingProperties.Bitrate = ((uint)framerate * width * height * 12);
            var videoStreamSource = new MediaStreamSource(videoStreamDesc);

            videoStreamSource.BufferTime       = TimeSpan.Zero;
            videoStreamSource.SampleRequested += OnMediaStreamSourceRequested;
            videoStreamSource.IsLive           = true;  // Enables optimizations for live sources
            videoStreamSource.CanSeek          = false; // Cannot seek live WebRTC video stream
            return videoStreamSource;
        }
Example #3
0
        /// <summary>
        /// Create the video descriptor, MediaStreamSource, graphics device and
        /// transcoder used to encode frames captured from <c>_captureItem</c>.
        /// </summary>
        private void CreateMediaObjects()
        {
            // Create our encoding profile based on the size of the item
            // TODO: This only really makes sense for monitors, we need
            //       to change this to make sense in all cases.
            int width  = _captureItem.Size.Width;
            int height = _captureItem.Size.Height;

            // Describe our input: uncompressed BGRA8 buffers coming in at the monitor's refresh rate
            // TODO: We pick 60Hz here because it applies to most monitors. However this should be
            //       more robust.
            var videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, (uint)width, (uint)height);

            _videoDescriptor = new VideoStreamDescriptor(videoProperties);
            _videoDescriptor.EncodingProperties.FrameRate.Numerator   = c_frameRateN;
            _videoDescriptor.EncodingProperties.FrameRate.Denominator = c_frameRateD;
            // BUGFIX: the frame rate is c_frameRateN / c_frameRateD, so the denominator must
            // divide the bitrate, not multiply it (the old expression was off by a factor
            // of c_frameRateD squared whenever the denominator is not 1).
            // Bitrate hint: frames/sec * pixels per frame * 4 (bytes per BGRA8 pixel).
            _videoDescriptor.EncodingProperties.Bitrate = (uint)((long)c_frameRateN * width * height * 4 / c_frameRateD);

            // Create our MediaStreamSource
            _mediaStreamSource                  = new MediaStreamSource(_videoDescriptor);
            _mediaStreamSource.BufferTime       = TimeSpan.FromSeconds(0);
            _mediaStreamSource.Starting        += OnMediaStreamSourceStarting;
            _mediaStreamSource.SampleRequested += OnMediaStreamSourceSampleRequested;

            // Create our device manager
            _mediaGraphicsDevice = MediaGraphicsDevice.CreateFromMediaStreamSource(_mediaStreamSource);
            _mediaGraphicsDevice.RenderingDevice = _device;

            // Create our transcoder
            _transcoder = new MediaTranscoder();
            _transcoder.HardwareAccelerationEnabled = true;
        }
        /// <summary>
        /// Build a live, non-seekable video stream source encoded as IYUV
        /// (I420-compatible) for the specified frame size and frame rate.
        /// </summary>
        /// <param name="width">The width of the video in pixels; must be non-zero.</param>
        /// <param name="height">The height of the video in pixels; must be non-zero.</param>
        /// <param name="framerate">The frame rate in frames per second.</param>
        /// <returns>The newly created video source.</returns>
        private MediaStreamSource CreateVideoStreamSource(uint width, uint height, uint framerate)
        {
            if (width == 0)
            {
                throw new ArgumentException("Invalid zero width for video stream source.", "width");
            }
            if (height == 0)
            {
                throw new ArgumentException("Invalid zero height for video stream source.", "height");
            }

            // IYUV and I420 share the same memory layout; only the FOURCC differs.
            // https://docs.microsoft.com/en-us/windows/desktop/medfound/video-subtype-guids
            var encodingProps = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Iyuv, width, height);
            var descriptor    = new VideoStreamDescriptor(encodingProps);

            descriptor.EncodingProperties.FrameRate.Numerator   = framerate;
            descriptor.EncodingProperties.FrameRate.Denominator = 1;
            // Bitrate hint in bits/second: frames/sec * pixels per frame * 12 bpp (I420).
            descriptor.EncodingProperties.Bitrate = framerate * width * height * 12;

            var source = new MediaStreamSource(descriptor)
            {
                // TODO : playback breaks if buffering, need to investigate
                BufferTime = TimeSpan.Zero,
                IsLive     = true,  // Enables optimizations for live sources
                CanSeek    = false, // Cannot seek live WebRTC video stream
            };

            source.Starting        += OnMediaStreamSourceStarting;
            source.Closed          += OnMediaStreamSourceClosed;
            source.Paused          += OnMediaStreamSourcePaused;
            source.SampleRequested += OnMediaStreamSourceRequested;

            return source;
        }
        // Builds the MediaStreamSource used to surface incoming I420 video frames
        // from the video device.
        private MediaStreamSource CreateI420VideoStreamSource(uint width, uint height, int framerate)
        {
            if (width == 0)
            {
                throw new ArgumentException("Invalid zero width for video", "width");
            }
            if (height == 0)
            {
                throw new ArgumentException("Invalid zero height for video", "height");
            }

            // IYUV and I420 share a memory layout; only the FOURCC differs.
            var props      = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Iyuv, width, height);
            var descriptor = new VideoStreamDescriptor(props);

            descriptor.EncodingProperties.FrameRate.Numerator   = (uint)framerate;
            descriptor.EncodingProperties.FrameRate.Denominator = 1;
            // Bitrate in bits per second: framerate * pixels per frame * 12 bpp (I420).
            descriptor.EncodingProperties.Bitrate = (uint)framerate * width * height * 12;

            var source = new MediaStreamSource(descriptor);

            source.BufferTime = TimeSpan.Zero;
            source.IsLive     = true;  // Enables optimizations for live sources
            source.CanSeek    = false; // Cannot seek live WebRTC video stream

            // Raised whenever the pipeline requests the next frame.
            source.SampleRequested += OnMediaStreamSourceRequested;

            return source;
        }
Example #6
0
        // Configure the MediaStreamSource-backed player using a window-sized,
        // even-dimension 16:9 BGRA8 frame fed by the DX sample generator.
        void InitializeMediaPlayer()
        {
            int frameWidth  = (int)Window.Current.Bounds.Width;
            int frameHeight = (int)Window.Current.Bounds.Height;

            // Clamp to a 16:9 ratio and force both dimensions to be even.
            // Note: the second clamp intentionally uses the already-clamped width.
            frameWidth  = Math.Min(frameWidth, ((frameHeight * 16 / 9) >> 1) * 2);
            frameHeight = Math.Min(frameHeight, ((frameWidth * 9 / 16) >> 1) * 2);

            var videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, (uint)frameWidth, (uint)frameHeight);

            _videoDesc = new VideoStreamDescriptor(videoProperties);
            _videoDesc.EncodingProperties.FrameRate.Numerator   = c_frameRateN;
            _videoDesc.EncodingProperties.FrameRate.Denominator = c_frameRateD;
            _videoDesc.EncodingProperties.Bitrate = (uint)(c_frameRateN * c_frameRateD * frameWidth * frameHeight * 4);

            _mss = new Windows.Media.Core.MediaStreamSource(_videoDesc);

            // Allow the pipeline to buffer a quarter second of video before playback.
            _mss.BufferTime       = TimeSpan.FromMilliseconds(250);
            _mss.Starting        += _mss_Starting;
            _mss.SampleRequested += _mss_SampleRequested;

            _sampleGenerator = new DXSurfaceGenerator.SampleGenerator();

            mediaPlayer.AutoPlay             = false;
            mediaPlayer.CurrentStateChanged += mediaPlayer_CurrentStateChanged;
            mediaPlayer.SetMediaStreamSource(_mss);
            _hasSetMediaSource = true;
        }
        // Wire up a 1280x638 NV12 MediaStreamSource and start playback on the MediaElement.
        async void Init()
        {
            uint frameWidth  = 1280;
            uint frameHeight = 638;

            var videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Nv12, frameWidth, frameHeight);
            var videoDesc       = new VideoStreamDescriptor(videoProperties);

            videoDesc.EncodingProperties.FrameRate.Numerator   = c_frameRateN;
            videoDesc.EncodingProperties.FrameRate.Denominator = c_frameRateD;
            // NOTE(review): NV12 is 12 bits per pixel, yet this hint multiplies by 4 —
            // confirm the intended units before relying on this value.
            videoDesc.EncodingProperties.Bitrate = (uint)(c_frameRateN * c_frameRateD * frameWidth * frameHeight * 4);

            videoDesc.EncodingProperties.Width  = frameWidth;
            videoDesc.EncodingProperties.Height = frameHeight;

            var mss = new MediaStreamSource(videoDesc);

            // Buffer a quarter second before playback begins.
            mss.BufferTime       = TimeSpan.FromMilliseconds(250);
            mss.Starting        += mss_Starting;
            mss.SampleRequested += mss_SampleRequested;

            MediaElement.SetMediaStreamSource(mss);
            MediaElement.Play();
        }
        /// <summary>
        /// Prepare a transcode from an RGB32 MediaStreamSource into a temporary MP4
        /// file and kick it off, reporting progress/completion through the handlers.
        /// </summary>
        public async Task InitStartTranscoder()
        {
            parent?.StartWritingOutput("Initialize Transcoder", 1);

            tempFile = await GetTempOutputFile();

            parent?.StartWritingOutputExtended("Temporary Output : " + tempFile.Path, 0);

            IRandomAccessStream destStream = await tempFile.OpenAsync(FileAccessMode.ReadWrite);

            // Fall back to a small default size when no capture item is available.
            int width  = gcitem != null ? gcitem.Size.Width  : 320;
            int height = gcitem != null ? gcitem.Size.Height : 200;

            frameCounter = 0;
            Timestamp    = TimeSpan.Zero;

            var sourceProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Rgb32, (uint)width, (uint)height);
            var sourceDescriptor = new VideoStreamDescriptor(sourceProperties);

            var mediaStreamSource = new MediaStreamSource(sourceDescriptor);

            mediaStreamSource.BufferTime       = TimeSpan.Zero;
            mediaStreamSource.Starting        += OnMSSStarting;
            mediaStreamSource.SampleRequested += OnMSSSampleRequested;
            mediaStreamSource.SampleRendered  += OnMSSSampleRendered;
            //mediaStreamSource.CanSeek = false;

            var mediaTranscoder = new MediaTranscoder { HardwareAccelerationEnabled = true };

            // Prepare and start transcoding to a 720p MP4.
            var destProfile        = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.HD720p);
            var transcodeOperation = await mediaTranscoder.PrepareMediaStreamSourceTranscodeAsync(mediaStreamSource, destStream, destProfile);

            var rendering = transcodeOperation.TranscodeAsync();

            rendering.Progress  += progressHandler;
            rendering.Completed += completedHandler;
        }
Example #9
0
        /// <summary>
        /// Start previewing uncompressed RGB32 video into the custom media sink.
        /// </summary>
        public async Task StartAsync()
        {
            var videoProps = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Rgb32, m_width, m_height);

            // Video-only profile: no audio stream and no container.
            var profile = new MediaEncodingProfile();
            profile.Audio     = null;
            profile.Video     = videoProps;
            profile.Container = null;

            await m_capture.StartPreviewToCustomSinkAsync(profile, (IMediaExtension)m_preview.MediaSink);
        }
Example #10
0
        // Smoke test: capture one video frame from the default camera, present it to
        // both a SurfaceImageSource and a SwapChainPanel, then save it as a JPEG in
        // the Pictures library. Runs entirely on the UI thread.
        public void CS_WP_N_Basic()
        {
            ExecuteOnUIThread(async() =>
            {
                // Video-only capture initialization.
                var capture = new MediaCapture();
                await capture.InitializeAsync(new MediaCaptureInitializationSettings
                {
                    StreamingCaptureMode = StreamingCaptureMode.Video
                });

                var graphicsDevice = MediaGraphicsDevice.CreateFromMediaCapture(capture);

                // Use the preview stream's native resolution for all render targets below.
                var previewProps = (VideoEncodingProperties)capture.VideoDeviceController.GetMediaStreamProperties(MediaStreamType.VideoPreview);

                var image = new SurfaceImageSource((int)previewProps.Width, (int)previewProps.Height);

                var imagePresenter = ImagePresenter.CreateFromSurfaceImageSource(
                    image,
                    graphicsDevice,
                    (int)previewProps.Width,
                    (int)previewProps.Height
                    );

                var panel = new SwapChainPanel();
                var swapChainPresenter = ImagePresenter.CreateFromSwapChainPanel(
                    panel,
                    graphicsDevice,
                    (int)previewProps.Width,
                    (int)previewProps.Height
                    );

                // Read frames as uncompressed BGRA8 at the preview's size and frame rate.
                var readerProps = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, previewProps.Width, previewProps.Height);
                readerProps.FrameRate.Numerator   = previewProps.FrameRate.Numerator;
                readerProps.FrameRate.Denominator = previewProps.FrameRate.Denominator;

                var captureReader = await CaptureReader.CreateAsync(
                    capture, new MediaEncodingProfile
                {
                    Video = readerProps
                });

                // Grab a single 2D sample, present it, and persist it as a JPEG.
                using (MediaSample2D sample = (MediaSample2D)await captureReader.GetVideoSampleAsync())
                {
                    swapChainPresenter.Present(sample);
                    imagePresenter.Present(sample);

                    var folder = await KnownFolders.PicturesLibrary.CreateFolderAsync("MediaCaptureReaderTests", CreationCollisionOption.OpenIfExists);
                    var file   = await folder.CreateFileAsync("CS_WP_N_Basic.jpg", CreationCollisionOption.ReplaceExisting);
                    await sample.SaveToFileAsync(file, ImageCompression.Jpeg);
                }
            });
        }
Example #11
0
        // Build a named BGRA8 video stream descriptor for a desktop stream of the given size.
        private static VideoStreamDescriptor GetStreamDescriptor(uint width, uint height)
        {
            var encoding = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, width, height);

            var descriptor = new VideoStreamDescriptor(encoding);
            descriptor.Name  = "Desktop video stream";
            descriptor.Label = "Desktop video stream";

            return descriptor;
        }
Example #12
0
        // Create the 1920x1080 BGRA8 stream source and the transcoder that will consume it.
        private void CreateMediaObjects()
        {
            var videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, 1920, 1080);

            _videoDescriptor = new VideoStreamDescriptor(videoProperties);

            _mediaStreamSource            = new MediaStreamSource(_videoDescriptor);
            _mediaStreamSource.BufferTime = TimeSpan.Zero;

            _mediaStreamSource.Starting        += OnMediaStreamSourceStarting;
            _mediaStreamSource.SampleRequested += OnMediaStreamSourceSampleRequested;

            _transcoder = new MediaTranscoder { HardwareAccelerationEnabled = IsHardwareAcc };
        }
        // Build the playback timeline (current video plus the next two, with a
        // transition between each) and hand a combined audio/video
        // MediaStreamSource to the player.
        void InitializeMediaPlayer()
        {
            m_hasSetMediaSource = false;

            if (CurrentEffect == null || CurrentVideo == null)
            {
                return;
            }

            // Pick the two videos that follow the current one, wrapping to the
            // start of the list when the end is reached.
            var currentIndex = VideoList.IndexOf(CurrentVideo);
            SecondVideo = currentIndex == VideoList.Count - 1 ? VideoList[0] : VideoList[currentIndex + 1];

            var secondIndex = VideoList.IndexOf(SecondVideo);
            ThirdVideo = secondIndex == VideoList.Count - 1 ? VideoList[0] : VideoList[secondIndex + 1];

            advanced_media_source.ResetTimeline();
            advanced_media_source.AddVideo(CurrentVideo);
            advanced_media_source.AddTransitionEffect(CurrentEffect.EffectType, 1);
            advanced_media_source.AddVideo(SecondVideo);
            advanced_media_source.AddTransitionEffect(CurrentEffect.EffectType, 1);
            advanced_media_source.AddVideo(ThirdVideo);

            // Describe the uncompressed BGRA8 video stream.
            var videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, c_frameWidth, c_frameHeight);

            videoDesc = new VideoStreamDescriptor(videoProperties);
            videoDesc.EncodingProperties.FrameRate.Numerator   = c_frameRateN;
            videoDesc.EncodingProperties.FrameRate.Denominator = c_frameRateD;
            videoDesc.EncodingProperties.Bitrate = (uint)(c_frameRateN * c_frameRateD * c_frameWidth * c_frameHeight * 4);

            // Describe the PCM audio stream.
            var audioProperties = AudioEncodingProperties.CreatePcm(c_sampleRate, c_channelCount, c_bitsPerSample);
            audioDesc = new AudioStreamDescriptor(audioProperties);

            media_stream_source = new Windows.Media.Core.MediaStreamSource(videoDesc, audioDesc);

            media_stream_source.BufferTime       = TimeSpan.Zero;
            media_stream_source.Starting        += MSS_Starting;
            media_stream_source.Closed          += MSS_Closed;
            media_stream_source.SampleRequested += MSS_SampleRequested;

            Video.SetMediaStreamSource(media_stream_source);
            m_hasSetMediaSource = true;
        }
Example #14
0
        // Create the BGRA8 stream source sized to the capture item, plus the transcoder.
        private void CreateMediaObjects()
        {
            // Encode at the capture item's native size.
            var size = _captureItem.Size;

            // Describe our input: uncompressed BGRA8 buffers.
            var videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, (uint)size.Width, (uint)size.Height);

            _videoDescriptor = new VideoStreamDescriptor(videoProperties);

            // Create our MediaStreamSource.
            _mediaStreamSource            = new MediaStreamSource(_videoDescriptor);
            _mediaStreamSource.BufferTime = TimeSpan.Zero;

            _mediaStreamSource.Starting        += OnMediaStreamSourceStarting;
            _mediaStreamSource.SampleRequested += OnMediaStreamSourceSampleRequested;

            // Create our transcoder.
            _transcoder = new MediaTranscoder { HardwareAccelerationEnabled = true };
        }
        /// <summary>
        /// Page constructor: sets up an H.264 MediaStreamSource fed by an MPEG-TS
        /// demuxer reading from a file on a background task.
        /// </summary>
        public MainPage()
        {
            this.InitializeComponent();

            // H.264 descriptor; frame rate is 29.97 fps expressed as 29970/1000.
            // BUGFIX: removed the unused local `vd` that built a second property set
            // via CreateUncompressed(MediaEncodingSubtypes.H264, ...) — H264 is a
            // compressed subtype, and the object was never used.
            var videoProperties = VideoEncodingProperties.CreateH264();

            videoDesc = new VideoStreamDescriptor(videoProperties);
            videoDesc.EncodingProperties.FrameRate.Numerator   = 29970;
            videoDesc.EncodingProperties.FrameRate.Denominator = 1000;
            videoDesc.EncodingProperties.Width  = 720;
            videoDesc.EncodingProperties.Height = 480;

            mss         = new MediaStreamSource(videoDesc);
            mss.CanSeek = false;
            //mss.BufferTime = new TimeSpan(0, 0, 0, 0, 250);
            mss.Starting        += mss_Starting;
            mss.SampleRequested += Mss_SampleRequested;
            mss.SampleRendered  += Mss_SampleRendered;

            // Initialize some buffers: a 4 KiB scratch buffer exposed as a stream.
            buff    = new Windows.Storage.Streams.Buffer(1024 * 4);
            bStream = buff.AsStream();

            // This seems needed for start-up: gates the reader against the consumer.
            threadSync = new System.Threading.AutoResetEvent(false);

            // Get the frame time in ms from the frame rate ratio.
            double ms = 1000.0 * videoDesc.EncodingProperties.FrameRate.Denominator / videoDesc.EncodingProperties.FrameRate.Numerator;

            // Get the frame time in ticks.
            T0 = System.TimeSpan.FromTicks((long)(ms * System.TimeSpan.TicksPerMillisecond));

            // Our demuxer.
            extractor = new MpegTS.BufferExtractor();
            running   = true;

            // Give the file IO a head start.
            Task.Run(() => RunreadFromFile());
        }
        // Set up a BGRA8 MediaStreamSource driven by the sample maker and start playback.
        public void InitialiseRovingSquareSampleMedia()
        {
            var videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, (uint)WIDTH, (uint)HEIGHT);

            _videoDesc = new VideoStreamDescriptor(videoProperties);
            _videoDesc.EncodingProperties.FrameRate.Numerator   = FRAME_RATE;
            _videoDesc.EncodingProperties.FrameRate.Denominator = 1;
            // Bitrate hint: frames/sec * pixels per frame * 4 bytes per BGRA8 pixel.
            _videoDesc.EncodingProperties.Bitrate = (uint)(1 * FRAME_RATE * WIDTH * HEIGHT * 4);

            _mss = new Windows.Media.Core.MediaStreamSource(_videoDesc);
            _mss.BufferTime = TimeSpan.Zero;

            _mss.Starting        += mss_Starting;
            _mss.SampleRequested += mss_SampleRequested;

            _sampleMaker = new SurfaceGenerator.SampleMaker();

            _remoteVideo.MediaFailed += _remoteVideo_MediaFailed;
            _remoteVideo.SetMediaStreamSource(_mss);
            _remoteVideo.Play();
        }
Example #17
0
        /// <summary>
        /// Initialize the media element for low-latency H.264/PCM stream playback.
        /// </summary>
        /// <param name="streamConfig">Object containing stream configuration details</param>
        /// <param name="streamSource">Source supplying the audio/video samples.</param>
        void InitializeMediaPlayer(LimelightStreamConfiguration streamConfig, AvStreamSource streamSource)
        {
            this._streamSource = streamSource;

            // 48 kHz stereo 16-bit PCM audio.
            var audioProperties = AudioEncodingProperties.CreatePcm(48000, 2, 16);

            // H.264 elementary stream at the configured resolution, High profile.
            var videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.H264Es,
                                                                             (uint)streamConfig.GetWidth(), (uint)streamConfig.GetHeight());
            videoProperties.ProfileId = H264ProfileIds.High;

            _videoDesc = new VideoStreamDescriptor(videoProperties);
            _audioDesc = new AudioStreamDescriptor(audioProperties);

            _mss = new MediaStreamSource(_videoDesc, _audioDesc);
            _mss.BufferTime = TimeSpan.Zero;
            _mss.CanSeek    = false;
            _mss.Duration   = TimeSpan.Zero;

            _mss.SampleRequested += _mss_SampleRequested;

            // Set for low latency playback.
            StreamDisplay.RealTimePlayback = true;
            // Take advantage of hardware audio offload.
            StreamDisplay.AudioCategory = AudioCategory.ForegroundOnlyMedia;
            // Render on the full window to avoid extra compositing.
            StreamDisplay.IsFullWindow = true;
            // Disable built-in transport controls.
            StreamDisplay.AreTransportControlsEnabled = false;
            // Start playing right away.
            StreamDisplay.AutoPlay = true;

            StreamDisplay.SetMediaStreamSource(_mss);
        }
Example #18
0
        /// <summary>
        /// Transcode frames from an uncompressed 1024x768 BGRA8 stream source into a
        /// temporary 720p MP4 file ("out2.mp4").
        /// </summary>
        private async void CreateFile()
        {
            var videoProps      = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, 1024, 768);
            var videoDescriptor = new VideoStreamDescriptor(videoProps);

            //videoDescriptor.EncodingProperties.FrameRate.Numerator = frn;
            //videoDescriptor.EncodingProperties.FrameRate.Denominator = frd;
            //videoDescriptor.EncodingProperties.Bitrate = (frn / frd) * w * h * 4 * 8;
            var streamSource = new MediaStreamSource(videoDescriptor);

            var tc         = new MediaTranscoder();
            var prof       = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.HD720p);
            var tempFolder = ApplicationData.Current.TemporaryFolder;
            var file       = await tempFolder.CreateFileAsync("out2.mp4", CreationCollisionOption.ReplaceExisting);

            var outputStream = await file.OpenAsync(FileAccessMode.ReadWrite);

            try
            {
                var result = await tc.PrepareMediaStreamSourceTranscodeAsync(streamSource, outputStream, prof);

                if (result.CanTranscode)
                {
                    // Fire-and-forget transcode; progress/completion handlers could be
                    // attached to `op` if needed.
                    var op = result.TranscodeAsync();
                }
            }
            catch (Exception ex)
            {
                // BUGFIX: failures were previously swallowed by an empty catch; keep the
                // best-effort behavior (no rethrow from async void) but surface the error
                // to the debugger so it is diagnosable.
                System.Diagnostics.Debug.WriteLine("CreateFile transcode failed: " + ex);
            }
        }
Example #19
0
        // Build a window-sized, even-dimension 16:9 BGRA8 MediaStreamSource.
        // NOTE(review): `source` below is only held in a local variable and is never
        // stored in a field or returned, so it becomes unreachable when the constructor
        // exits — confirm whether it should be retained (e.g. assigned to a member).
        public CustomMediaStreamSource()
        {
            int iWidth  = (int)Window.Current.Bounds.Width;
            int iHeight = (int)Window.Current.Bounds.Height;

            // Even frame size with a 16:9 ratio
            // (the second clamp intentionally uses the already-clamped width).
            iWidth  = Math.Min(iWidth, ((iHeight * 16 / 9) >> 1) * 2);
            iHeight = Math.Min(iHeight, ((iWidth * 9 / 16) >> 1) * 2);

            VideoEncodingProperties videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, (uint)iWidth, (uint)iHeight);

            _videoDesc = new VideoStreamDescriptor(videoProperties);
            _videoDesc.EncodingProperties.FrameRate.Numerator   = c_frameRateN;
            _videoDesc.EncodingProperties.FrameRate.Denominator = c_frameRateD;
            _videoDesc.EncodingProperties.Bitrate = (uint)(c_frameRateN * c_frameRateD * iWidth * iHeight * 4);

            MediaStreamSource source = new MediaStreamSource(_videoDesc);

            // Quarter-second buffer before playback starts.
            TimeSpan spanBuffer = new TimeSpan(0, 0, 0, 0, 250);

            source.BufferTime       = spanBuffer;
            source.Starting        += Starting;
            source.SampleRequested += SampleRequested;
        }
        // Capture the monitor named in the options and wire a live MediaStreamSource
        // into an MP4 transcoding pipeline.
        private ScreenRecorder(IDirect3DDevice device, ScreenRecorderOptions options)
        {
            var captureItem = Monitor.CreateCaptureItem(options.MonitorDeviceName);

            m_device    = device;
            m_generator = new MediaSampleGenerator(device, captureItem);

            var size = captureItem.Size;
            var videoProperties = VideoEncodingProperties.CreateUncompressed(
                MediaEncodingSubtypes.Bgra8, (uint)size.Width, (uint)size.Height);

            Profile = MediaEncodingProfile.CreateMp4(options.Quality);

            Transcoder = new MediaTranscoder();
            Transcoder.HardwareAccelerationEnabled = options.HardwareAccelerationEnabled;

            var descriptor = new VideoStreamDescriptor(videoProperties);
            Source = new MediaStreamSource(descriptor);
            Source.IsLive = true; // Live capture: enables low-latency optimizations.

            Source.Starting        += OnStarting;
            Source.SampleRequested += OnSampleRequested;
        }
        /// <summary>
        /// Let the user pick a capture item, build an H.264/MP4 encoding profile and a
        /// BGRA8 MediaStreamSource sized to the item, then encode into an in-memory stream.
        /// </summary>
        private async Task SetupEncoding()
        {
            if (!GraphicsCaptureSession.IsSupported())
            {
                // Show message to user that screen capture is unsupported
                return;
            }

            // Create the D3D device and SharpDX device
            if (_device == null)
            {
                _device = Direct3D11Helpers.CreateD3DDevice();
            }
            if (_sharpDxD3dDevice == null)
            {
                _sharpDxD3dDevice = Direct3D11Helpers.CreateSharpDXDevice(_device);
            }

            try
            {
                // Let the user pick an item to capture
                var picker = new GraphicsCapturePicker();
                _captureItem = await picker.PickSingleItemAsync();

                if (_captureItem == null)
                {
                    return;
                }

                // Initialize a blank texture and render target view for copying frames, using the same size as the capture item
                _composeTexture          = Direct3D11Helpers.InitializeComposeTexture(_sharpDxD3dDevice, _captureItem.Size);
                _composeRenderTargetView = new SharpDX.Direct3D11.RenderTargetView(_sharpDxD3dDevice, _composeTexture);

                // This example encodes video using the item's actual size.
                var width  = (uint)_captureItem.Size.Width;
                var height = (uint)_captureItem.Size.Height;

                // Make sure the dimensions are even. Required by some encoders.
                width  = (width % 2 == 0) ? width : width + 1;
                height = (height % 2 == 0) ? height : height + 1;

                // Borrow the default 1080p bitrate from a stock profile.
                var  temp      = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.HD1080p);
                var  bitrate   = temp.Video.Bitrate;
                uint framerate = 30;

                _encodingProfile = new MediaEncodingProfile();
                _encodingProfile.Container.Subtype                  = "MPEG4";
                _encodingProfile.Video.Subtype                      = "H264";
                _encodingProfile.Video.Width                        = width;
                _encodingProfile.Video.Height                       = height;
                _encodingProfile.Video.Bitrate                      = bitrate;
                _encodingProfile.Video.FrameRate.Numerator          = framerate;
                _encodingProfile.Video.FrameRate.Denominator        = 1;
                _encodingProfile.Video.PixelAspectRatio.Numerator   = 1;
                _encodingProfile.Video.PixelAspectRatio.Denominator = 1;

                var videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, width, height);
                _videoDescriptor = new VideoStreamDescriptor(videoProperties);

                // Create our MediaStreamSource
                _mediaStreamSource                  = new MediaStreamSource(_videoDescriptor);
                _mediaStreamSource.BufferTime       = TimeSpan.FromSeconds(0);
                _mediaStreamSource.Starting        += OnMediaStreamSourceStarting;
                _mediaStreamSource.SampleRequested += OnMediaStreamSourceSampleRequested;

                // Create our transcoder
                _transcoder = new MediaTranscoder();
                _transcoder.HardwareAccelerationEnabled = true;

                using (var stream = new InMemoryRandomAccessStream())
                    await EncodeAsync(stream);
            }
            catch (Exception ex)
            {
                // BUGFIX: `ex` was previously captured but never used and the failure was
                // silently swallowed; keep the best-effort behavior but surface the error
                // to the debugger so setup failures are diagnosable.
                System.Diagnostics.Debug.WriteLine("SetupEncoding failed: " + ex);
                return;
            }
        }