/// <summary>
        /// Prepares the media element and XAudio2 graph for stream playback.
        /// </summary>
        /// <param name="streamConfig">Stream configuration details (not read here).</param>
        /// <param name="streamSource">Source that fulfills video sample requests.</param>
        void InitializeMediaPlayer(MoonlightStreamConfiguration streamConfig, AvStreamSource streamSource)
        {
            this._streamSource = streamSource;

            // Video-only MediaStreamSource over H.264: a live feed, so it is
            // unseekable with zero duration and no buffering.
            _videoMss = new MediaStreamSource(new VideoStreamDescriptor(VideoEncodingProperties.CreateH264()));
            _videoMss.CanSeek = false;
            _videoMss.Duration = TimeSpan.Zero;
            _videoMss.BufferTime = TimeSpan.Zero;
            _videoMss.SampleRequested += _videoMss_SampleRequested;

            // Audio goes through XAudio2 at 48 kHz stereo 16-bit PCM. The mastering
            // voice local is never referenced again — presumably kept for its side
            // effect of creating the engine's output voice; confirm before removing.
            XAudio2 audioEngine = new XAudio2();
            MasteringVoice outputVoice = new MasteringVoice(audioEngine, 2, 48000);
            WaveFormat pcmFormat = new WaveFormat(48000, 16, 2);

            // Low-latency, full-window, auto-starting playback with no
            // built-in transport controls.
            StreamDisplay.RealTimePlayback = true;
            StreamDisplay.IsFullWindow = true;
            StreamDisplay.AreTransportControlsEnabled = false;
            StreamDisplay.AutoPlay = true;

            StreamDisplay.SetMediaStreamSource(_videoMss);

            AvStream.SetSourceVoice(new SourceVoice(audioEngine, pcmFormat));
        }
        /// <summary>
        /// Releases managed resources. Nothing is done on the finalizer path.
        /// </summary>
        /// <param name="isDisposing">True when called from Dispose(), false from a finalizer.</param>
        private void Dispose(bool isDisposing)
        {
            if (!isDisposing)
            {
                return;
            }

            // Dispose the decoder if present, then drop all references.
            _mediaDecoder?.Dispose();
            _mediaDecoder = null;
            _buffersQueue = null;
            _mediaSource = null;
        }
Beispiel #3
0
 /// <summary>
 /// Routes a MediaStreamSource sample request to the matching A/V callback.
 /// </summary>
 private void _mss_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
 {
     // Anything that is not the video descriptor is treated as audio.
     bool isVideoRequest = args.Request.StreamDescriptor == _videoDesc;

     if (isVideoRequest)
     {
         _streamSource.VideoSampleRequested(args);
     }
     else
     {
         _streamSource.AudioSampleRequested(args);
     }
 }
        /// <summary>
        /// Primes the decoder with the file and builds a PCM MediaStreamSource
        /// matching the decoded stream's format.
        /// </summary>
        /// <param name="fileStream">Random-access stream over the media file.</param>
        private void Initialize(IRandomAccessStream fileStream)
        {
            _mediaDecoder.Initialize(fileStream);
            var info = _mediaDecoder.GetStreamInfo();

            // Expose decoded audio as raw PCM in the source's native format.
            var pcmProperties = AudioEncodingProperties.CreatePcm(
                info.SampleRate, info.ChannelCount, info.BitsPerSample);
            _mediaSource = new MediaStreamSource(new AudioStreamDescriptor(pcmProperties));

            // Seekable, no buffering, with the full track duration reported.
            _mediaSource.CanSeek = true;
            _mediaSource.BufferTime = TimeSpan.Zero;
            _mediaSource.Duration = TimeSpan.FromSeconds(info.Duration);

            _mediaSource.Starting += OnMediaSourceStarting;
            _mediaSource.SampleRequested += OnMediaSourceSampleRequested;
            _mediaSource.Closed += OnMediaSourceClosed;
        }
        /// <summary>
        /// Plays a single buffer of 16-bit mono PCM through the media element.
        /// </summary>
        /// <param name="samplingRate">Sample rate of the PCM data, in Hz.</param>
        /// <param name="pcmData">Raw 16-bit mono PCM bytes.</param>
        public void PlaySound(int samplingRate, byte[] pcmData)
        {
            var descriptor = new AudioStreamDescriptor(
                AudioEncodingProperties.CreatePcm((uint)samplingRate, 1, 16));
            var source = new MediaStreamSource(descriptor);

            var delivered = false;
            source.SampleRequested += (s, e) =>
            {
                // Serve the buffer exactly once; later requests are left unfilled.
                if (delivered)
                    return;
                delivered = true;

                var sample = MediaStreamSample.CreateFromBuffer(pcmData.AsBuffer(), TimeSpan.Zero);
                // Two bytes per 16-bit mono frame.
                sample.Duration = TimeSpan.FromSeconds(pcmData.Length / 2.0 / samplingRate);
                e.Request.Sample = sample;
            };

            mediaElement.SetMediaStreamSource(source);
        }
Beispiel #6
0
        /// <summary>
        /// Prepares the media element to render the incoming A/V stream.
        /// </summary>
        /// <param name="streamConfig">Stream configuration (supplies frame dimensions).</param>
        /// <param name="streamSource">Source that fulfills audio/video sample requests.</param>
        void InitializeMediaPlayer(LimelightStreamConfiguration streamConfig, AvStreamSource streamSource)
        {
            this._streamSource = streamSource;

            // Audio arrives as 48 kHz stereo 16-bit PCM.
            var audioProperties = AudioEncodingProperties.CreatePcm(48000, 2, 16);

            // Video arrives as a raw H.264 elementary stream at the configured size,
            // encoded with the High profile.
            var videoProperties = VideoEncodingProperties.CreateUncompressed(
                MediaEncodingSubtypes.H264Es,
                (uint)streamConfig.GetWidth(), (uint)streamConfig.GetHeight());
            videoProperties.ProfileId = H264ProfileIds.High;

            _videoDesc = new VideoStreamDescriptor(videoProperties);
            _audioDesc = new AudioStreamDescriptor(audioProperties);

            // Live stream: unseekable, zero duration, no buffering.
            _mss = new MediaStreamSource(_videoDesc, _audioDesc);
            _mss.CanSeek = false;
            _mss.Duration = TimeSpan.Zero;
            _mss.BufferTime = TimeSpan.Zero;
            _mss.SampleRequested += _mss_SampleRequested;

            // Tune the element for low-latency, full-window playback with no
            // user-facing controls, starting as soon as the source is set.
            // ForegroundOnlyMedia lets the platform use hardware audio offload.
            StreamDisplay.RealTimePlayback = true;
            StreamDisplay.AudioCategory = AudioCategory.ForegroundOnlyMedia;
            StreamDisplay.IsFullWindow = true;
            StreamDisplay.AreTransportControlsEnabled = false;
            StreamDisplay.AutoPlay = true;

            StreamDisplay.SetMediaStreamSource(_mss);
        }
Beispiel #7
0
 /// <summary>
 /// Callback from the Media Foundation pipeline when a new video frame is needed.
 /// </summary>
 /// <param name="sender">The stream source requesting a new sample.</param>
 /// <param name="args">The sample request to fulfill.</param>
 private void OnMediaStreamSourceRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
 {
     // Delegate to the video bridge; presumably it fills the request when a
     // frame is available ("Try" naming) — confirm in TryServeVideoFrame.
     _videoBridge.TryServeVideoFrame(args);
 }
Beispiel #8
0
 /// <summary>
 /// Start handler for the video playback stream source: always starts at time zero.
 /// </summary>
 private void OnMediaStreamSourceStarting(MediaStreamSource sender, MediaStreamSourceStartingEventArgs args)
 {
     //Debug.Assert(Dispatcher.HasThreadAccess == false);
     Logger.Log("Video playback stream source starting...");

     // Report a fixed start position of zero regardless of what was requested.
     args.Request.SetActualStartPosition(TimeSpan.Zero);
 }
Beispiel #9
0
        /// <summary>
        /// Closed handler: cancels work tied to this source's lifetime and detaches
        /// every handler so the closed source can be collected.
        /// </summary>
        protected virtual void MssOnClosed(MediaStreamSource sender, MediaStreamSourceClosedEventArgs args)
        {
            Debug.WriteLine("StreamSourceBase.MssOnClosed()");

            // Signal cancellation first so in-flight sample work stops promptly.
            _closedCancellationTokenSource.Cancel();

            // Unhook all three life-cycle handlers from the sender.
            sender.SampleRequested -= MssOnSampleRequested;
            sender.Starting -= MssOnStarting;
            sender.Closed -= MssOnClosed;
        }
Beispiel #10
0
        /// <summary>
        /// Converts the video files in <paramref name="files"/> one by one and
        /// returns the successfully converted results. Best-effort: individual
        /// failures are skipped rather than aborting the batch.
        /// </summary>
        /// <param name="files">Candidate files; non-videos are ignored.</param>
        /// <param name="story">True to cap at ~15s (story), false at ~59s.</param>
        /// <param name="size">Optional target size for conversion.</param>
        /// <param name="rectSize">Optional crop rectangle for conversion.</param>
        /// <returns>The list of converted files (possibly empty).</returns>
        public async Task <List <StorageFile> > ConvertFiles(List <StorageFile> files, bool story, Size?size, Rect?rectSize)
        {
            try
            {
                IsStoryVideo = story;
                // Stories are capped just under 15s, regular videos just under 60s.
                StopTime = story ? TimeSpan.FromSeconds(14.8) : new TimeSpan(0, 0, 59);

                QueueList.Clear();
                ConvertedList.Clear();

                // Queue only the files that look like videos.
                foreach (var item in files)
                {
                    if (item.IsVideo())
                    {
                        try
                        {
                            QueueList.Add(item);
                        }
                        catch { } // best-effort: a bad entry must not abort the batch
                    }
                }

                if (QueueList.Any())
                {
                    int          ix   = 1;
                    const string text = "Some of your file(s) needs to be converted first. Please wait...\r\n";
                    foreach (var item in QueueList)
                    {
                        try
                        {
                            if (item.IsVideo())
                            {
                                Output(text + $"{ix} of {QueueList.Count}");
                                IsConverting = true;
                                var vid = await ConvertVideo(item, size, rectSize);

                                // BUGFIX: the original wrote ("vid null: " + vid == null),
                                // which compares the concatenated string to null (always
                                // false) instead of reporting whether vid is null.
                                ("vid null: " + (vid == null)).PrintDebug();
                                if (vid != null)
                                {
                                    ConvertedList.Add(vid);
                                }
                            }
                        }
                        catch { } // best-effort: skip files that fail to convert
                        ix++;
                    }
                }

                // Release the stream-source references; failures here are harmless.
                try
                {
                    Mss       = null;
                    FFmpegMSS = null;
                }
                catch { }
            }
            catch (Exception ex)
            {
                ex.PrintException("ConvertFiles");
            }
            IsConverting = false;
            return(ConvertedList);
        }
Beispiel #11
0
        /// <summary>
        /// Occurs when the MediaStreamSource requests a MediaStreamSample.
        /// Serves fixed-size chunks of the downloading audio stream until the next
        /// chunk would pass the download's total size.
        /// NOTE(review): async void event handler — exceptions thrown here are
        /// unobservable by callers.
        /// </summary>
        /// <param name="sender">Represents a media source that delivers media samples directly to the media pipeline.</param>
        /// <param name="args">Provides the data for the SampleRequested event.</param>
        private async void OnStreamSourceSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
        {
            MediaStreamSourceSampleRequest request = args.Request;
            // Only serve if a full sample fits inside the total download size; a
            // trailing partial chunk is never served, and leaving the request
            // unfilled presumably signals end of stream — confirm against MSS docs.
            if (this.m_byteOffset + sampleSize <= (ulong)this.m_audioStreamDownloader.TotalBytesToReceive)
            {
                // Track position = fraction of bytes consumed, scaled to the track duration.
                double ratio = (double)this.m_byteOffset / (double)this.m_audioStreamDownloader.TotalBytesToReceive;
                this.m_playerPosition = new TimeSpan((long)(this.CurrentTrack.Duration.Ticks * ratio));

                // Defer completion across the asynchronous stream read below.
                MediaStreamSourceSampleRequestDeferral deferal = request.GetDeferral();

                var inputStream = m_mediaStream.GetInputStreamAt(this.m_byteOffset);
                if (inputStream != null)
                {
                    // Create the MediaStreamSample directly from the stream at the
                    // current offset (CreateFromBuffer(...) would also work).
                    MediaStreamSample sample = await MediaStreamSample.CreateFromStreamAsync(inputStream, sampleSize, m_timeOffset);
                    sample.Duration = sampleDuration;
                    sample.KeyFrame = true;

                    // Advance the byte and time cursors past this sample.
                    this.m_byteOffset += sampleSize;
                    this.m_timeOffset = this.m_timeOffset.Add(sampleDuration);
                    request.Sample = sample;
                }
                deferal.Complete();
            }
        }
Beispiel #12
0
 // Intentionally serves no sample; only traces that a request arrived.
 static void OnSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
     => Debug.WriteLine("NullMediaSource.OnSampleRequested()");
Beispiel #13
0
 /// <summary>
 /// Paused handler for the media stream source; intentionally a no-op.
 /// </summary>
 private void Paused(MediaStreamSource sender, object args)
 {
     //Debug.WriteLine(args == null);
 }
        /// <summary>
        /// Lets the user pick an .mp4, stops current playback, and plays the file
        /// through an FFmpegInterop MediaStreamSource with full transport controls.
        /// </summary>
        /// <param name="sender">Tapped element (unused).</param>
        /// <param name="e">Tap event data (unused).</param>
        private async void LoadVideoFile(object sender, TappedRoutedEventArgs e)
        {
            var picker = new FileOpenPicker();
            picker.ViewMode = PickerViewMode.Thumbnail;
            picker.SuggestedStartLocation = PickerLocationId.VideosLibrary;
            picker.FileTypeFilter.Add(".mp4");
            StorageFile file = await picker.PickSingleFileAsync();

            if (file == null)
            {
                return; // user cancelled the picker
            }

            // Halt any current playback before swapping sources.
            // CLEANUP: the original wrapped Pause/Stop/MSS creation in
            // catch (Exception exception) { throw; } blocks — pure no-ops that only
            // produced unused-variable warnings; removed with identical behavior.
            if (mediaPlayer.CanPause)
            {
                mediaPlayer.Pause();
            }
            else
            {
                mediaPlayer.Stop();
            }

            // NOTE: the stream must stay open while FFmpegInteropMSS reads from it,
            // so it is intentionally not disposed here.
            IRandomAccessStream stream = await file.OpenAsync(FileAccessMode.Read);

            FFmpegMSS = FFmpegInteropMSS.CreateFFmpegInteropMSSFromStream(stream, true, true);
            MediaStreamSource mss = FFmpegMSS.GetMediaStreamSource();

            if (mss != null)
            {
                // Enable the transport controls relevant to local playback.
                mediaPlayer.AreTransportControlsEnabled = true;

                mediaPlayer.TransportControls.IsStopButtonVisible = true;
                mediaPlayer.TransportControls.IsStopEnabled       = true;

                mediaPlayer.TransportControls.IsFastForwardButtonVisible = true;
                mediaPlayer.TransportControls.IsFastForwardEnabled       = true;

                mediaPlayer.TransportControls.IsFastRewindButtonVisible = true;
                mediaPlayer.TransportControls.IsFastRewindEnabled       = true;
                mediaPlayer.SetMediaStreamSource(mss);

                mediaPlayer.Play();
                VideoName.Text = "Now playing: " + file.Name;
            }
            else
            {
                var msg = new MessageDialog("errs");
                await msg.ShowAsync();
            }
        }
Beispiel #15
0
 /// <summary>
 /// Closed handler for the video playback stream source; only logs the event.
 /// </summary>
 private void OnMediaStreamSourceClosed(MediaStreamSource sender, MediaStreamSourceClosedEventArgs args)
 {
     //Debug.Assert(Dispatcher.HasThreadAccess == false);
     Logger.Log("Video playback stream source closed.");
 }
        /// <summary>
        /// Sets up screen capture: creates the D3D devices, lets the user pick a
        /// capture item, builds a Bgra8 MediaStreamSource plus an MP4/H.264
        /// encoding profile, and runs the transcode into an in-memory stream.
        /// </summary>
        private async Task SetupEncoding()
        {
            if (!GraphicsCaptureSession.IsSupported())
            {
                // Show message to user that screen capture is unsupported
                return;
            }

            // Create the D3D device and SharpDX device (lazily, only once).
            if (_device == null)
            {
                _device = Direct3D11Helpers.CreateD3DDevice();
            }
            if (_sharpDxD3dDevice == null)
            {
                _sharpDxD3dDevice = Direct3D11Helpers.CreateSharpDXDevice(_device);
            }

            try
            {
                // Let the user pick an item to capture
                var picker = new GraphicsCapturePicker();
                _captureItem = await picker.PickSingleItemAsync();

                if (_captureItem == null)
                {
                    return; // user cancelled the picker
                }

                // Initialize a blank texture and render target view for copying frames,
                // using the same size as the capture item
                _composeTexture          = Direct3D11Helpers.InitializeComposeTexture(_sharpDxD3dDevice, _captureItem.Size);
                _composeRenderTargetView = new SharpDX.Direct3D11.RenderTargetView(_sharpDxD3dDevice, _composeTexture);

                // This example encodes video using the item's actual size.
                var width  = (uint)_captureItem.Size.Width;
                var height = (uint)_captureItem.Size.Height;

                // Make sure the dimensions are even. Required by some encoders.
                width  = (width % 2 == 0) ? width : width + 1;
                height = (height % 2 == 0) ? height : height + 1;

                // Borrow the bitrate from the stock 1080p profile; encode at 30 fps.
                var  temp      = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.HD1080p);
                var  bitrate   = temp.Video.Bitrate;
                uint framerate = 30;

                _encodingProfile = new MediaEncodingProfile();
                _encodingProfile.Container.Subtype                  = "MPEG4";
                _encodingProfile.Video.Subtype                      = "H264";
                _encodingProfile.Video.Width                        = width;
                _encodingProfile.Video.Height                       = height;
                _encodingProfile.Video.Bitrate                      = bitrate;
                _encodingProfile.Video.FrameRate.Numerator          = framerate;
                _encodingProfile.Video.FrameRate.Denominator        = 1;
                _encodingProfile.Video.PixelAspectRatio.Numerator   = 1;
                _encodingProfile.Video.PixelAspectRatio.Denominator = 1;

                var videoProperties = VideoEncodingProperties.CreateUncompressed(MediaEncodingSubtypes.Bgra8, width, height);
                _videoDescriptor = new VideoStreamDescriptor(videoProperties);

                // Create our MediaStreamSource with no buffering (live capture).
                _mediaStreamSource                  = new MediaStreamSource(_videoDescriptor);
                _mediaStreamSource.BufferTime       = TimeSpan.Zero;
                _mediaStreamSource.Starting        += OnMediaStreamSourceStarting;
                _mediaStreamSource.SampleRequested += OnMediaStreamSourceSampleRequested;

                // Create our transcoder
                _transcoder = new MediaTranscoder();
                _transcoder.HardwareAccelerationEnabled = true;

                using (var stream = new InMemoryRandomAccessStream())
                    await EncodeAsync(stream);
            }
            catch (Exception)
            {
                // FIXME(review): every failure (picker, D3D init, encoding) is
                // swallowed silently here; surface or log the error.
                return;
            }
        }
Beispiel #17
0
        //</SnippetUpdateMediaElementSource>

        //<SnippetOnNavigatedFrom>
        /// <summary>
        /// Releases the media source references when navigating away from the page.
        /// </summary>
        protected override void OnNavigatedFrom(NavigationEventArgs e)
        {
            // Drop the element's source first, then the stream source itself.
            mediaPlayerElement.Source = null;
            mediaStreamSource = null;

            base.OnNavigatedFrom(e);
        }
 // Traces each rendered sample for diagnostics.
 private void Source_SampleRendered(MediaStreamSource sender, MediaStreamSourceSampleRenderedEventArgs args)
     => Debug.WriteLine("Rendered");
		/// <summary>
		/// Builds the stream source for the given file and frame count, marks
		/// playback as paused, then kicks off media loading.
		/// </summary>
		/// <param name="sFile">Path or name of the media file.</param>
		/// <param name="nFramesQty">Total number of frames in the file.</param>
		public void Init(string sFile, ulong nFramesQty)
		{
			_bPaused = true;
			_cMSS = new MediaStreamSource(sFile, nFramesQty);
			MediaLoad();
		}
Beispiel #20
0
 /// <summary>
 /// Sample-request handler; intentionally serves nothing.
 /// </summary>
 private void SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
 {
 }
 /// <summary>
 /// Video stream source sample requested callback: forwards the request
 /// straight to the stream source for fulfillment.
 /// </summary>
 private void _videoMss_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
 {
     _streamSource.VideoSampleRequested(args);
 }
Beispiel #22
0
 // Start-up handling has not been written yet.
 private void Starting(MediaStreamSource sender, MediaStreamSourceStartingEventArgs args)
     => throw new NotImplementedException();
Beispiel #23
0
        /// <summary>
        /// Rebuilds the MP3 MediaStreamSource once the download has started and
        /// its total size is known, deriving the format from the first MPEG frame.
        /// NOTE(review): "Progess" is a typo in the handler name, kept so existing
        /// event subscriptions keep compiling.
        /// </summary>
        private void OnDownloadProgessStarted(object sender, EventArgs e)
        {
            // Reset the parse cursors to the start of the stream.
            this.m_byteOffset = 0;
            this.m_timeOffset = new TimeSpan(0);

            if (this.m_audioStreamDownloader.TotalBytesToReceive <= 0)
            {
                return;
            }

            // Skip any ID3v2 tags and read the first MPEG frame to learn the
            // stream's sample rate and bitrate.
            var audioStream = this.m_audioStreamDownloader.Stream;
            MpegFrame firstFrame = audioStream.ReadPastId3V2Tags();

            var mp3Properties = AudioEncodingProperties.CreateMp3((uint)firstFrame.SamplingRate, 2, (uint)firstFrame.Bitrate);
            var descriptor = new AudioStreamDescriptor(mp3Properties);

            // Tear down the previous source (and its handlers) before replacing it.
            CloseMediaStreamSource(this.m_mediaStreamSource);

            this.m_mediaStreamSource = new Windows.Media.Core.MediaStreamSource(descriptor);
            this.m_mediaStreamSource.CanSeek = true;
            this.m_mediaStreamSource.Duration = this.m_currentTrack.Duration;

            // Hook up the life-cycle handlers, then hand the source to the element.
            this.m_mediaStreamSource.Starting += OnStreamSourceStarting;
            this.m_mediaStreamSource.SampleRequested += OnStreamSourceSampleRequested;
            this.m_mediaStreamSource.Closed += OnStreamSourceClosed;

            this.m_mediaElement.SetMediaStreamSource(this.m_mediaStreamSource);
        }
Beispiel #24
0
 public Test()
 {
     // NOTE(review): null descriptor — presumably exercises the constructor's
     // argument validation; confirm this is the intended test behavior.
     _source = new MediaStreamSource(null);
 }
Beispiel #25
0
 /// <summary>
 /// Disposes the backing media stream and detaches all handlers from the given
 /// MediaStreamSource before clearing the field reference.
 /// </summary>
 /// <param name="mediaStreamSource">Source to shut down; may be null.</param>
 private void CloseMediaStreamSource(MediaStreamSource mediaStreamSource)
 {
     // Dispose the underlying stream first, if any.
     if (this.m_mediaStream != null)
     {
         this.m_mediaStream.Dispose();
         this.m_mediaStream = null;
     }

     if (mediaStreamSource == null)
     {
         return;
     }

     // Detach the handlers and drop the field so the source can be collected.
     mediaStreamSource.SampleRequested -= OnStreamSourceSampleRequested;
     mediaStreamSource.Starting -= OnStreamSourceStarting;
     mediaStreamSource.Closed -= OnStreamSourceClosed;
     m_mediaStreamSource = null;
 }
Beispiel #26
0
        //private long _lagTest = 0;
        //private void Mss_SampleRendered(MediaStreamSource sender, MediaStreamSourceSampleRenderedEventArgs args)
        //{
        //    ++_lagTest;
        //    if (_lagTest % 30 == 0)
        //    {
        //        Debug.WriteLine($"sample lag: {args.SampleLag}");
        //    }
        //}

        //private void Mss_SwitchStreamsRequested(MediaStreamSource sender, MediaStreamSourceSwitchStreamsRequestedEventArgs args)
        //{
        //    //Debug.WriteLine("Mss_SwitchStreamsRequested");
        //}

        // Traces that the media stream source was closed.
        private void Mss_Closed(MediaStreamSource sender, MediaStreamSourceClosedEventArgs args)
            => Debug.WriteLine("Mss_Closed");
Beispiel #27
0
        /// <summary>
        /// Builds an audio-only MediaStreamSource over this source's encoding
        /// properties with the life-cycle handlers attached.
        /// </summary>
        /// <returns>The configured, ready-to-use stream source.</returns>
        public virtual MediaStreamSource OpenSource()
        {
            var source = new MediaStreamSource(new AudioStreamDescriptor(EncodingProperties));

            source.Starting += MssOnStarting;
            source.SampleRequested += MssOnSampleRequested;
            source.Closed += MssOnClosed;

            return source;
        }
 /// <summary>
 /// Closed handler: rewinds the playback clock and lets the decoder finish.
 /// </summary>
 private void OnMediaSourceClosed(MediaStreamSource sender, MediaStreamSourceClosedEventArgs e)
 {
     _currentTime = 0.0;
     _mediaDecoder?.Finish();
 }
Beispiel #29
0
 /// <summary>
 /// Supplies the next media sample when the pipeline requests one;
 /// implemented by derived stream sources.
 /// </summary>
 protected abstract void MssOnSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args);
 /// <summary>
 /// Closed handler: tears down the connection once the source is gone.
 /// </summary>
 private void MediaStreamSource_Closed(MediaStreamSource sender, MediaStreamSourceClosedEventArgs args)
 {
     // TODO: fire an event so listeners learn the source closed?
     Disconnect();
 }
 /// <summary>
 /// Start handler: seeks the decoder to any requested position and reports
 /// the position playback will actually begin from.
 /// </summary>
 private void OnMediaSourceStarting(MediaStreamSource sender, MediaStreamSourceStartingEventArgs e)
 {
     var requested = e.Request.StartPosition;
     if (requested.HasValue)
     {
         _currentTime = requested.Value.TotalSeconds;
         _mediaDecoder.Seek(requested.Value);
     }

     e.Request.SetActualStartPosition(TimeSpan.FromSeconds(_currentTime));
 }
 /// <summary>
 /// Starting handler; currently a no-op (position handling left disabled below).
 /// </summary>
 private void MediaStreamSource_Starting(MediaStreamSource sender, MediaStreamSourceStartingEventArgs args)
 {
     //args.Request.SetActualStartPosition(timeOffSet);
     //args.Request.
 }
Beispiel #33
0
 // Traces that the media stream source is starting.
 private void Mss_Starting(MediaStreamSource sender, MediaStreamSourceStartingEventArgs args)
     => Debug.WriteLine("Mss_Starting");
Beispiel #34
0
 /// <summary>
 /// Paused handler for the video playback stream source; only logs the event.
 /// </summary>
 private void OnMediaStreamSourcePaused(MediaStreamSource sender, object args)
 {
     //Debug.Assert(Dispatcher.HasThreadAccess == false);
     Logger.Log("Video playback stream source paused.");
 }
        /// <summary>
        /// Serves one decoded audio sample per pipeline request; when the decoder
        /// runs out of data it rewinds so the next request restarts the track.
        /// </summary>
        private void OnMediaSourceSampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs e)
        {
            // Decode the next chunk into a buffer (presumably recycled via
            // _buffersQueue — TODO confirm GetBuffer/OnSampleProcessed pairing).
            var instantBuffer = GetBuffer();
            var buffer = _mediaDecoder.ReadSample(instantBuffer, instantBuffer.Capacity);

            MediaStreamSample sample = null;

            if (buffer.Length > 0)
            {
                // Timestamp the sample at the running playback position.
                sample = MediaStreamSample.CreateFromBuffer(buffer, TimeSpan.FromSeconds(_currentTime));
                sample.Processed += OnSampleProcessed;

                // Duration is derived from the number of bytes actually decoded.
                var duration = _mediaDecoder.GetDurationFromBufferSize(buffer.Length);
                sample.Duration = TimeSpan.FromSeconds(duration);

                _currentTime += duration;
            }
            else
            {
                // End of data: reset the clock and rewind the decoder.
                // NOTE(review): Seek(0) — elsewhere Seek is called with a TimeSpan;
                // confirm an integer overload exists on this decoder.
                _currentTime = 0.0;
                _mediaDecoder.Seek(0);
            }

            // sample may still be null here (no data decoded for this request).
            e.Request.Sample = sample;
        }
Beispiel #36
0
        /// <summary>
        /// Feeds the next frame-server sample to the MediaStreamSource and, every
        /// 32 requests, syncs the media element position and dumps throughput stats.
        /// NOTE(review): async void with an await after the sample is assigned and
        /// no deferral taken — the position update races the pipeline; it only
        /// touches UI state, but confirm this is intentional.
        /// </summary>
        private async void Mss_SampleRequested(MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args)
        {
            // Start the wall clock on the first request so RQ/s can be computed.
            if (!_sampleWatch.IsRunning)
            {
                _sampleWatch.Start();
            }

            // (Earlier commented-out experiments — flushing all frames, batching
            // multiple frames per request, and single-frame TryReadFrame serving —
            // were removed; single-sample GetSample serving is the live path.)

            var sample = _frameServer.GetSample(_frameTimeout);

            if (sample != null && sample.Count > 0)
            {
                args.Request.Sample          = MediaStreamSample.CreateFromBuffer(sample.Content.AsBuffer(), sample.TimeIndex);
                args.Request.Sample.Duration = sample.Duration;
                // Every 32nd request: move the element position to the sample time
                // (on the UI thread) and log diagnostics.
                if (_sampleRequestCount % 32 == 0)
                {
                    await Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, () =>
                    {
                        _mediaPosition         = _mediaElement.Position;
                        _mediaElement.Position = sample.TimeIndex;
                    });

                    Debug.WriteLine($"\nRQT: {_sampleWatch.Elapsed} - STI: {sample.TimeIndex} - MEP: {_mediaPosition}, RQCNT: {_sampleRequestCount}, SCNT: {sample.Count}, SDUR: {sample.Duration}, {(uint)(_sampleRequestCount / _sampleWatch.Elapsed.TotalSeconds)}RQ/s");
                }
            }
            ++_sampleRequestCount;
        }
 /// <summary>
 /// Closed handler: detaches every handler so the closed source can be collected.
 /// </summary>
 private void MediaStreamSource_Closed(MediaStreamSource sender, MediaStreamSourceClosedEventArgs args)
 {
     MediaStreamSource.SampleRequested -= MediaStreamSource_SampleRequested;
     MediaStreamSource.Starting -= MediaStreamSource_Starting;
     MediaStreamSource.Closed -= MediaStreamSource_Closed;
 }
Beispiel #38
0
        /// <summary>
        /// Generates 5 seconds of PCM audio via a MediaStreamSource and transcodes
        /// it to AAC/ADTS, writing the result to <paramref name="output"/>.
        /// The output stream is always closed, even on failure.
        /// </summary>
        /// <param name="output">Destination stream for the encoded ADTS data.</param>
        /// <param name="content">HTTP content being written (unused).</param>
        /// <param name="context">Transport context (unused).</param>
        public async Task WriteToStream(Stream output, HttpContent content, TransportContext context)
        {
            try
            {
                //
                // Create a PCM audio source
                //

                var generator = new AudioSampleGenerator();

                var source = new MediaStreamSource(
                    new AudioStreamDescriptor(
                        generator.EncodingProperties
                        )
                    );
                source.CanSeek = false;
                source.MusicProperties.Title = "CS_D_MediaStreamSource_EncodeAudio";

                source.SampleRequested += (MediaStreamSource sender, MediaStreamSourceSampleRequestedEventArgs args) =>
                {
                    try
                    {
                        Console.WriteLine("SampleRequested Time: {0}", generator.Time);

                        // Generate 5s of data; after that, leaving the request
                        // unfilled ends the stream.
                        if (generator.Time.TotalSeconds < 5)
                        {
                            args.Request.Sample = generator.GenerateSample();
                        }
                    }
                    catch (Exception e)
                    {
                        // BUGFIX: include the exception detail (previously the
                        // variable was unused and only a bare marker was logged).
                        Debug.WriteLine("Sample exception: " + e);
                    }
                };

                //
                // Encode PCM to ADTS
                //

                var profile = MediaEncodingProfile.CreateM4a(AudioEncodingQuality.Medium);
                profile.Container.Subtype = "ADTS";

                var output2 = new PseudoSeekableStream(output); // Pretend the stream is seekable so .AsRandomAccessStream() works

                var transcoder = new MediaTranscoder();
                var result     = await transcoder.PrepareMediaStreamSourceTranscodeAsync(
                    source,
                    output2.AsRandomAccessStream(),
                    profile
                    );

                await result.TranscodeAsync();
            }
            catch (Exception e)
            {
                // BUGFIX: log the actual failure instead of a bare marker so
                // transcode problems are diagnosable.
                Debug.WriteLine("Transcode exception: " + e);
            }

            output.Close();
        }
        /// <summary>
        /// Connects to the remote stream and builds a MediaStreamSource whose
        /// descriptor matches the negotiated content type (MP3 or AAC, stereo).
        /// </summary>
        /// <param name="sampleRate">Sample rate to assume for the stream, in Hz.</param>
        /// <param name="relativePath">Relative path used when establishing the connection.</param>
        /// <exception cref="NotSupportedException">The negotiated format is neither MP3 nor AAC.</exception>
        public async Task ConnectAsync(uint sampleRate = 44100, string relativePath = ";")
        {
            await HandleConnection(relativePath);
            // NOTE(review): probing the server for its encoding properties is
            // reportedly broken, so the format is derived from contentType instead.
            //AudioEncodingProperties obtainedProperties = await GetEncodingPropertiesAsync();

            switch (contentType)
            {
                case StreamAudioFormat.MP3:
                    {
                        MediaStreamSource = new Windows.Media.Core.MediaStreamSource(new AudioStreamDescriptor(AudioEncodingProperties.CreateMp3(sampleRate, 2, (uint)bitRate)));
                    }
                    break;
                case StreamAudioFormat.AAC:
                    {
                        MediaStreamSource = new MediaStreamSource(new AudioStreamDescriptor(AudioEncodingProperties.CreateAac(sampleRate, 2, (uint)bitRate)));
                    }
                    break;
                default:
                    // BUGFIX: an unhandled format previously left MediaStreamSource
                    // null and crashed below with a NullReferenceException; fail
                    // with a clear error instead.
                    throw new NotSupportedException("Unsupported stream audio format: " + contentType);
            }

            // Live stream: no seeking; wire up the source life-cycle handlers.
            MediaStreamSource.SampleRequested += MediaStreamSource_SampleRequested;
            MediaStreamSource.CanSeek = false;
            MediaStreamSource.Starting += MediaStreamSource_Starting;
            MediaStreamSource.Closed += MediaStreamSource_Closed;
        }
Beispiel #40
0
 /// <summary>
 /// Handler for <c>MediaStreamSource.Closed</c>. Currently a stub: no teardown
 /// (e.g. unsubscribing handlers or releasing the connection) is performed yet.
 /// </summary>
 /// <param name="sender">The media stream source that closed.</param>
 /// <param name="args">Close event data (includes the close reason).</param>
 private void MediaStreamSource_Closed(MediaStreamSource sender, MediaStreamSourceClosedEventArgs args)
 {
     //todo needs to be handled.
 }
Beispiel #41
0
 /// <summary>
 /// Handler for <c>MediaStreamSource.Starting</c>: clears the recorded pause
 /// timestamp so a (re)started stream is no longer considered paused.
 /// </summary>
 /// <param name="sender">The media stream source that is starting.</param>
 /// <param name="args">Starting event data.</param>
 private void MediaStreamSource_Starting(MediaStreamSource sender, MediaStreamSourceStartingEventArgs args)
 {
     lastPauseTime = null;
 }
Beispiel #42
0
 /// <summary>
 /// Handler for <c>MediaStreamSource.Paused</c>: records when playback was paused.
 /// </summary>
 /// <param name="sender">The media stream source that paused.</param>
 /// <param name="args">Unused event data.</param>
 private void MediaStreamSource_Paused(MediaStreamSource sender, object args)
 {
     // NOTE(review): records local wall-clock time. Fine if only compared against
     // other DateTime.Now values; DateTime.UtcNow would be safer — confirm usage.
     lastPauseTime = DateTime.Now;
 }
        /// <summary>
        /// Concatenates the two previously picked video files into a single
        /// composition and previews the result in the MediaElement.
        /// </summary>
        private async void AppendVideos_Click(object sender, RoutedEventArgs e)
        {
            // Build a composition whose clip list holds the two files back to back.
            var clipA = await MediaClip.CreateFromFileAsync(firstVideoFile);
            var clipB = await MediaClip.CreateFromFileAsync(secondVideoFile);

            composition = new MediaComposition();
            composition.Clips.Add(clipA);
            composition.Clips.Add(clipB);

            // Rewind the player and hand it a preview stream source sized to the
            // element's current layout.
            mediaElement.Position = TimeSpan.Zero;
            mediaStreamSource = composition.GeneratePreviewMediaStreamSource(
                (int)mediaElement.ActualWidth, (int)mediaElement.ActualHeight);
            mediaElement.SetMediaStreamSource(mediaStreamSource);
            rootPage.NotifyUser("Clips appended", NotifyType.StatusMessage);
        }
Beispiel #44
0
        /// <summary>
        /// Builds a seekable MediaStreamSource that serves audio/video samples parsed
        /// from an FLV stream.
        /// (IInputStream does not support Seek; Stream and IRandomAccessStream do.)
        /// </summary>
        /// <param name="stream">Seekable stream containing FLV data.</param>
        /// <returns>A MediaStreamSource backed by the parsed FLV tags.</returns>
        MediaStreamSource createMediaStream(Stream stream)
        {
            var flv       = new FlvParse(stream);
            var audiodesc = flv.Audios[0].CreateAudioDesc(flv.MediaData);
            var videodesc = flv.Videos[0].CreateVideoDesc();
            var c         = new MediaStreamSource(audiodesc, videodesc);

            c.Duration = TimeSpan.FromSeconds(flv.MediaData.Duration);
            c.CanSeek  = true;

            Debug.WriteLine("-----------------------");
            Debug.WriteLine("flv.tags.Count:" + flv.tags.Count);

            // Cursors for the next audio/video tag to serve and the timestamps of the
            // most recently delivered samples. They start at 1 (index 0 appears to hold
            // the stream-configuration tags — see CreateVideoSample below; confirm).
            var ai = 1;
            var at = new TimeSpan(0);
            var vi = 1;
            var vt = new TimeSpan(0);

            c.Starting += (s, e) => {
                var req = e.Request;

                Debug.WriteLine("==Starting==");
                if ((req.StartPosition != null) && req.StartPosition.Value <= c.Duration)
                {
                    // Seek: rewind both cursors, then advance each to the first tag at
                    // or past the requested position.
                    vi = ai = 1;
                    var time = req.StartPosition.GetValueOrDefault();
                    foreach (var item in flv.Audios)
                    {
                        if (item.TimeSpan >= time)
                        {
                            Debug.WriteLine(time + "    " + ai);
                            break;
                        }
                        ai++;
                    }
                    foreach (var item in flv.Videos)
                    {
                        if (item.TimeSpan >= time)
                        {
                            // BUGFIX: this trace previously printed the audio index (ai)
                            // inside the video loop; it now prints the video index.
                            Debug.WriteLine(time + "    " + vi);
                            break;
                        }
                        vi++;
                    }
                    Debug.WriteLine(time + "    " + ai);
                    Debug.WriteLine(time + "    " + vi);
                }
            };
            c.SampleRequested += async(s, e) => {
                var req     = e.Request;
                var deferal = req.GetDeferral();
                if (req.StreamDescriptor is AudioStreamDescriptor)
                {
                    if (flv.Audios.Count > ai)
                    {
                        var flvTag = flv.Audios[ai];
                        req.Sample = await flvTag.CreateAudioSample();

                        at = flvTag.TimeSpan;
                        ai++;
                    }
                }
                if (req.StreamDescriptor is VideoStreamDescriptor)
                {
                    if (flv.Videos.Count > vi)
                    {
                        // Videos[0] supplies the codec configuration for sample creation.
                        var flvTag = flv.Videos[vi];
                        req.Sample = await flvTag.CreateVideoSample(flv.Videos[0], true); // vi == 1);

                        vt = flvTag.TimeSpan;
                        vi++;
                    }
                }
                deferal.Complete();
            };

            return(c);
        }
        /// <summary>
        /// Composites the overlay video (one-third player height, native aspect ratio,
        /// 75% opacity, top-left corner) on top of the base video and previews the
        /// result in the MediaElement.
        /// </summary>
        private async void CreateOverlays()
        {
            var baseVideoClip = await MediaClip.CreateFromFileAsync(baseVideoFile);
            composition = new MediaComposition();
            composition.Clips.Add(baseVideoClip);

            var overlayVideoClip = await MediaClip.CreateFromFileAsync(overlayVideoFile);

            // Size the overlay to a third of the player height, deriving the width
            // from the clip's native aspect ratio; anchor it at the upper-left corner.
            var encodingProperties = overlayVideoClip.GetVideoEncodingProperties();
            var overlayHeight = mediaElement.ActualHeight / 3;
            var overlayPosition = new Rect
            {
                X      = 0,
                Y      = 0,
                Height = overlayHeight,
                Width  = (double)encodingProperties.Width / (double)encodingProperties.Height * overlayHeight,
            };

            var videoOverlay = new MediaOverlay(overlayVideoClip)
            {
                Position = overlayPosition,
                Opacity  = 0.75,
            };

            var overlayLayer = new MediaOverlayLayer();
            overlayLayer.Overlays.Add(videoOverlay);
            composition.OverlayLayers.Add(overlayLayer);

            // Rewind and render the composition into the MediaElement.
            mediaElement.Position = TimeSpan.Zero;
            mediaStreamSource = composition.GeneratePreviewMediaStreamSource(
                (int)mediaElement.ActualWidth, (int)mediaElement.ActualHeight);
            mediaElement.SetMediaStreamSource(mediaStreamSource);
            rootPage.NotifyUser("Overlays created", NotifyType.StatusMessage);
        }
Beispiel #46
0
        /// <summary>
        /// Builds a seekable MediaStreamSource for a packaged MP3 asset by slicing the
        /// file into fixed-size chunks (300 bytes, advertised as 70 ms each) and serving
        /// them as samples. NOTE(review): the chunk size/duration pair is a hard-coded
        /// approximation, not derived from the file's actual bitrate — confirm this is
        /// acceptable for the target asset.
        /// </summary>
        /// <param name="url">ms-appx URI of the MP3 file to stream.</param>
        /// <returns>A configured MediaStreamSource ready to attach to a player.</returns>
        async Task <MediaStreamSource> createMediaStream2(string url = "ms-appx:///Assets/test.mp3")
        {
            var file = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(new Uri(url));

            var stream = await file.OpenReadAsync();

            MusicProperties mp3FileProperties = await file.Properties.GetMusicPropertiesAsync();

            // Pull the real encoding parameters from the file's Windows property store.
            List <string> encodingPropertiesToRetrieve = new List <string>();

            encodingPropertiesToRetrieve.Add("System.Audio.SampleRate");
            encodingPropertiesToRetrieve.Add("System.Audio.ChannelCount");
            encodingPropertiesToRetrieve.Add("System.Audio.EncodingBitrate");
            var encodingProperties = await file.Properties.RetrievePropertiesAsync(encodingPropertiesToRetrieve);

            uint sampleRate   = (uint)encodingProperties["System.Audio.SampleRate"];
            uint channelCount = (uint)encodingProperties["System.Audio.ChannelCount"];
            uint bitRate      = (uint)encodingProperties["System.Audio.EncodingBitrate"];

            /*
             * 44100 2 128000
             * 44100 2 171896   music: 00:03:33.2114285
             * 44100 2 130000   music: 00:00:30.0930000
             * SampleRate/SamplesPerSec
             *    Stereo/Channels
             *       SR*SO*SS/BA
             *
             * 130.3 16  2
             * DataRate
             *   SampleSize/BitsPerSample
             *        BlockAlign
             */
            // var audiodesc = new AudioStreamDescriptor(AudioEncodingProperties.CreateMp3(44100, 2, 128000));
            var audiodesc = new AudioStreamDescriptor(AudioEncodingProperties.CreateMp3(sampleRate, channelCount, bitRate));
            var c         = new MediaStreamSource(audiodesc);

            c.Duration = mp3FileProperties.Duration;
            c.CanSeek  = true;

            Debug.WriteLine("music: " + c.Duration);
            Debug.WriteLine(mp3FileProperties.Title + "  " + sampleRate + " " + channelCount + " " + bitRate);

            // Fixed slicing parameters: how many bytes each sample carries and how far
            // the playback position advances per sample.
            UInt32   sampleSize     = 300;                          // bytes per chunk
            TimeSpan sampleDuration = new TimeSpan(0, 0, 0, 0, 70); // playback time advanced per chunk
            ulong    byteOffset     = 0;
            TimeSpan timeOffset     = new TimeSpan(0);

            c.Starting += (s, e) => {
                Debug.WriteLine("==Starting==");
                MediaStreamSourceStartingRequest request = e.Request;
                if ((request.StartPosition != null) && request.StartPosition.Value <= c.Duration)
                {
                    // Map the requested start time to a whole sample index, then derive
                    // the matching time and byte offsets.
                    UInt64 sampleOffset = (UInt64)request.StartPosition.Value.Ticks / (UInt64)sampleDuration.Ticks;
                    timeOffset = new TimeSpan((long)sampleOffset * sampleDuration.Ticks);
                    byteOffset = sampleOffset * sampleSize;
                }
                request.SetActualStartPosition(timeOffset);
            };
            c.SampleRequested += async(s, e) => {
                //Debug.WriteLine(timeOffset);
                var deferal = e.Request.GetDeferral();
                if (byteOffset + sampleSize <= stream.Size)
                {
                    Debug.WriteLine(sampleSize + "    " + timeOffset);
                    var sample = await MediaStreamSample.CreateFromStreamAsync(stream.GetInputStreamAt(byteOffset), sampleSize, timeOffset); // bytes read per sample

                    sample.Duration  = sampleDuration;                                                                                       // playback time advanced per sample
                    sample.KeyFrame  = true;
                    e.Request.Sample = sample;
                    byteOffset      += sampleSize;
                    timeOffset       = timeOffset.Add(sampleDuration);
                }
                deferal.Complete();
            };
            return(c);
        }
Beispiel #47
0
        /// <summary>
        /// Occurs when the MediaStreamSource is ready to start requesting MediaStreamSample objects.
        /// Translates the requested start position into time/byte offsets and, on first
        /// start, attaches the downloader's stream under a deferral.
        /// </summary>
        /// <param name="sender">Represents a media source that delivers media samples directly to the media pipeline.</param>
        /// <param name="args">Provides data for the MediaStreamSource.Starting event.</param>
        private void OnStreamSourceStarting(MediaStreamSource sender, MediaStreamSourceStartingEventArgs args)
        {
            MediaStreamSourceStartingRequest request = args.Request;

            // Map the requested start time to a whole-sample boundary.
            if ((request.StartPosition != null) && request.StartPosition.Value <= m_mediaStreamSource.Duration)
            {
                UInt64 sampleOffset = (UInt64)request.StartPosition.Value.Ticks / (UInt64)sampleDuration.Ticks;
                this.m_timeOffset = new TimeSpan((long)sampleOffset * sampleDuration.Ticks);
                this.m_byteOffset = sampleOffset * sampleSize;
            }

            if (this.m_mediaStream == null)
            {
                // First start: take a deferral while attaching the downloader's stream.
                MediaStreamSourceStartingRequestDeferral deferal = request.GetDeferral();
                try
                {
                    this.m_mediaStream = this.m_audioStreamDownloader.Stream;
                    request.SetActualStartPosition(new TimeSpan());
                }
                finally
                {
                    // Complete the deferral even if attaching the stream throws.
                    // (The previous `catch (Exception) { throw; }` was redundant and
                    // has been removed; exceptions still propagate unchanged.)
                    deferal.Complete();
                }
            }
            else
            {
                request.SetActualStartPosition(m_timeOffset);
            }
        }
Beispiel #48
0
 /// <summary>
 /// Attaches the given MediaStreamSource to the underlying media player,
 /// doing nothing when no player has been created yet.
 /// </summary>
 /// <param name="streamSource">The stream source to play.</param>
 public void SetMediaStreamSource(MediaStreamSource streamSource)
 {
     // Snapshot the field once so the null check and the call observe the same value.
     var player = mediaPlayer;
     if (player != null)
     {
         player.SetMediaSource(streamSource);
     }
 }
Beispiel #49
0
 /// <summary>
 /// Occurs when the MediaStreamSource is shutting down.
 /// </summary>
 /// <param name="sender">Represents a media source that delivers media samples directly to the media pipeline.</param>
 /// <param name="args">Provides data for the MediaStreamSource.Closed event.</param>
 private void OnStreamSourceClosed(MediaStreamSource sender, MediaStreamSourceClosedEventArgs args)
 {
     // Ignore close notifications from sources we no longer own.
     if (sender != m_mediaStreamSource)
     {
         return;
     }

     CloseMediaStreamSource(sender);
 }
Beispiel #50
0
 // Native entry point that hands a MediaStreamSource to the native media pipeline.
 // NOTE(review): the [DllImport] attribute is not visible in this chunk — confirm it
 // exists on the full declaration. The parameter name "IMediaSourceHandler" breaks
 // camelCase convention, but renaming it would affect callers using named arguments.
 internal static extern void LoadMediaStreamSource(MediaStreamSource IMediaSourceHandler);
        /// <summary>
        /// Lets the user pick an audio file and mixes it into the composition as a
        /// background track, then previews the result in the MediaElement.
        /// </summary>
        private async void AddAudio_Click(object sender, RoutedEventArgs e)
        {
            // Rebuild the composition from the originally picked video clip.
            var clip = await MediaClip.CreateFromFileAsync(pickedFile);
            composition = new MediaComposition();
            composition.Clips.Add(clip);

            // Ask the user for an audio file to serve as background audio.
            var picker = new Windows.Storage.Pickers.FileOpenPicker
            {
                SuggestedStartLocation = Windows.Storage.Pickers.PickerLocationId.MusicLibrary,
            };
            picker.FileTypeFilter.Add(".mp3");
            picker.FileTypeFilter.Add(".wav");
            picker.FileTypeFilter.Add(".flac");

            var audioFile = await picker.PickSingleFileAsync();
            if (audioFile == null)
            {
                rootPage.NotifyUser("File picking cancelled", NotifyType.ErrorMessage);
                return;
            }

            var backgroundTrack = await BackgroundAudioTrack.CreateFromFileAsync(audioFile);
            composition.BackgroundAudioTracks.Add(backgroundTrack);

            // Rewind and render the updated composition into the MediaElement.
            mediaElement.Position = TimeSpan.Zero;
            mediaStreamSource = composition.GeneratePreviewMediaStreamSource(
                (int)mediaElement.ActualWidth, (int)mediaElement.ActualHeight);
            mediaElement.SetMediaStreamSource(mediaStreamSource);

            rootPage.NotifyUser("Background audio added", NotifyType.StatusMessage);
        }
Beispiel #52
0
        /// <summary>
        /// MediaStreamSource.Starting handler: moves the stream state to the requested
        /// start position (when one is supplied) and reports the actual start time.
        /// </summary>
        /// <param name="sender">The media stream source that is starting.</param>
        /// <param name="args">Starting event data carrying the requested position.</param>
        protected virtual void MssOnStarting(MediaStreamSource sender, MediaStreamSourceStartingEventArgs args)
        {
            Debug.WriteLine("StreamSourceBase.MssOnStarting()");

            var startPosition = args.Request.StartPosition;
            if (startPosition.HasValue)
            {
                // Convert the requested time into a sample-frame position.
                Time = startPosition.Value;
                Position = (ulong)Math.Round(_encodingProperties.SampleRate * Time.TotalSeconds);
            }

            args.Request.SetActualStartPosition(Time);
        }
        /// <summary>
        /// Removes the first and last quarter of the picked clip, previews the trimmed
        /// result in the MediaElement, and enables the save button.
        /// </summary>
        private async void TrimClip_Click(object sender, RoutedEventArgs e)
        {
            var clip = await MediaClip.CreateFromFileAsync(pickedFile);

            // Trim 25% of the original duration off each end of the clip.
            var quarter = new TimeSpan((long)(clip.OriginalDuration.Ticks * 0.25));
            clip.TrimTimeFromStart = quarter;
            clip.TrimTimeFromEnd = quarter;

            // Wrap the trimmed clip in a composition and preview it.
            composition = new MediaComposition();
            composition.Clips.Add(clip);
            mediaElement.Position = TimeSpan.Zero;
            mediaStreamSource = composition.GeneratePreviewMediaStreamSource(
                (int)mediaElement.ActualWidth, (int)mediaElement.ActualHeight);
            mediaElement.SetMediaStreamSource(mediaStreamSource);

            rootPage.NotifyUser("Clip trimmed", NotifyType.StatusMessage);
            save.IsEnabled = true;
        }
 /// <summary>
 /// Detaches the media source and drops the stream-source reference when
 /// navigating away, so the MediaElement does not keep the preview alive.
 /// </summary>
 /// <param name="e">Navigation event data forwarded to the base class.</param>
 protected override void OnNavigatedFrom(NavigationEventArgs e)
 {
     mediaElement.Source = null;
     mediaStreamSource = null;
     base.OnNavigatedFrom(e);
 }
Beispiel #55
0
        /// <summary>
        /// Transcodes <paramref name="inputFile"/> to an MP4 file, optionally resizing
        /// to <paramref name="imageSize"/> and cropping to <paramref name="rectSize"/>,
        /// and trimming long videos to the configured start/stop times.
        /// </summary>
        /// <param name="inputFile">Source video file.</param>
        /// <param name="imageSize">Target output size; when null the size is derived from the source.</param>
        /// <param name="rectSize">Crop rectangle; when null no cropping is applied.</param>
        /// <returns>The converted file, or null when conversion failed.</returns>
        async Task <StorageFile> ConvertVideo(StorageFile inputFile, Size?imageSize, Rect?rectSize)
        {
            try
            {
                var outputFile = await GenerateRandomOutputFile();

                if (inputFile != null && outputFile != null)
                {
                    var    mediaProfile = MediaEncodingProfile.CreateMp4(VideoEncodingQuality.Auto);
                    int    height = 0, width = 0;
                    double duration = 0;

                    // Probe dimensions and duration. Mobile uses the lightweight
                    // video-info API; desktop goes through FFmpegInterop, whose
                    // MediaStreamSource is also used for the transcode below.
                    if (DeviceUtil.IsMobile)
                    {
                        var videoInfo = await inputFile.GetVideoInfoAsync();

                        height   = (int)videoInfo.Height;
                        width    = (int)videoInfo.Width;
                        duration = videoInfo.Duration.TotalSeconds;
                    }
                    else
                    {
                        FFmpegMSS = await FFmpegInteropMSS
                                    .CreateFromStreamAsync(await inputFile.OpenReadAsync(), Helper.FFmpegConfig);

                        Mss      = FFmpegMSS.GetMediaStreamSource();
                        height   = FFmpegMSS.VideoStream.PixelHeight;
                        width    = FFmpegMSS.VideoStream.PixelWidth;
                        duration = Mss.Duration.TotalSeconds;
                    }

                    // Carry over bitrate/audio settings from the source where available.
                    var fileProfile = await Uploads.VideoConverterX.GetEncodingProfileFromFileAsync(inputFile);

                    if (fileProfile != null)
                    {
                        mediaProfile.Video.Bitrate = fileProfile.Video.Bitrate;
                        if (mediaProfile.Audio != null)
                        {
                            mediaProfile.Audio.Bitrate       = fileProfile.Audio.Bitrate;
                            mediaProfile.Audio.BitsPerSample = fileProfile.Audio.BitsPerSample;
                            mediaProfile.Audio.ChannelCount  = fileProfile.Audio.ChannelCount;
                            mediaProfile.Audio.SampleRate    = fileProfile.Audio.SampleRate;
                        }
                        "Media profile copied from original video".PrintDebug();
                    }

                    if (!IsStoryVideo)
                    {
                        // Regular videos longer than 59 s are trimmed to the selection.
                        if (duration > 59)
                        {
                            Transcoder.TrimStartTime = StartTime;
                            Transcoder.TrimStopTime  = StopTime;
                        }

                        // Square output, capped at 1920 px on the long edge.
                        var max = Math.Max(height, width);
                        if (max > 1920)
                        {
                            max = 1920;
                        }
                        if (imageSize == null)
                        {
                            mediaProfile.Video.Height = (uint)max;
                            mediaProfile.Video.Width  = (uint)max;
                        }
                        else
                        {
                            mediaProfile.Video.Height = (uint)imageSize.Value.Height;
                            mediaProfile.Video.Width  = (uint)imageSize.Value.Width;
                        }
                    }
                    else
                    {
                        // Story videos longer than ~15 s are trimmed to the selection.
                        if (duration > 14.9)
                        {
                            Transcoder.TrimStartTime = StartTime;
                            Transcoder.TrimStopTime  = StopTime;
                        }
                        var size = Helpers.AspectRatioHelper.GetAspectRatioX(width, height);
                        mediaProfile.Video.Height = (uint)size.Height;
                        mediaProfile.Video.Width  = (uint)size.Width;
                    }

                    var transform = new VideoTransformEffectDefinition
                    {
                        Rotation      = MediaRotation.None,
                        // BUGFIX: `imageSize.Value` threw InvalidOperationException when
                        // imageSize was null even though the null case is supported above;
                        // fall back to the output dimensions chosen for the profile.
                        OutputSize    = imageSize ?? new Size(mediaProfile.Video.Width, mediaProfile.Video.Height),
                        Mirror        = MediaMirroringOptions.None,
                        CropRectangle = rectSize ?? Rect.Empty
                    };

                    Transcoder.AddVideoEffect(transform.ActivatableClassId, true, transform.Properties);

                    PrepareTranscodeResult preparedTranscodeResult;
                    if (DeviceUtil.IsMobile)
                    {
                        preparedTranscodeResult = await Transcoder.PrepareFileTranscodeAsync(inputFile, outputFile, mediaProfile);
                    }
                    else
                    {
                        preparedTranscodeResult = await Transcoder.PrepareMediaStreamSourceTranscodeAsync(Mss,
                                                                                                          await outputFile.OpenAsync(FileAccessMode.ReadWrite), mediaProfile);
                    }

                    Transcoder.VideoProcessingAlgorithm = MediaVideoProcessingAlgorithm.Default;
                    if (preparedTranscodeResult.CanTranscode)
                    {
                        // Run the transcode with progress reporting and cancellation.
                        var progress = new Progress <double>(ConvertProgress);
                        await preparedTranscodeResult.TranscodeAsync().AsTask(Cts.Token, progress);

                        ConvertComplete(outputFile);
                        return(outputFile);
                    }
                    else
                    {
                        preparedTranscodeResult.FailureReason.ToString().ShowMsg();
                    }
                }
            }
            catch (Exception ex) { ex.PrintException().ShowMsg("ConvertVideo"); }
            return(null);
        }
Beispiel #56
-1
        /// <summary>
        /// Initialize the media element for playback
        /// </summary>
        /// <param name="streamConfig">Object containing stream configuration details</param>
        /// <param name="streamSource">Source that supplies audio/video samples to the pipeline</param>
        void InitializeMediaPlayer(MoonlightStreamConfiguration streamConfig, AvStreamSource streamSource)
        {
            this._streamSource = streamSource;

            // This code is based upon the MS FFmpegInterop project on GitHub
            // Describe the incoming H.264 stream using the negotiated dimensions/bitrate.
            VideoEncodingProperties videoProps = VideoEncodingProperties.CreateH264();
            videoProps.ProfileId = H264ProfileIds.High;
            videoProps.Width = (uint)streamConfig.GetWidth();
            videoProps.Height = (uint)streamConfig.GetHeight();
            videoProps.Bitrate = (uint)streamConfig.GetBitrate();

            // Zero buffer/duration: samples are pushed live and never seeked.
            _videoMss = new MediaStreamSource(new VideoStreamDescriptor(videoProps));
            _videoMss.BufferTime = TimeSpan.Zero;
            _videoMss.CanSeek = false;
            _videoMss.Duration = TimeSpan.Zero;
            _videoMss.SampleRequested += _videoMss_SampleRequested;

            // XAudio2 output graph: stereo, 48 kHz, 16-bit PCM.
            // NOTE(review): masteringVoice is a local that is never stored or disposed;
            // presumably XAudio2 keeps the voice alive internally — confirm lifetime,
            // otherwise this leaks (or risks premature release of) the audio device.
            XAudio2 xaudio = new XAudio2();
            MasteringVoice masteringVoice = new MasteringVoice(xaudio, 2, 48000);
            WaveFormat format = new WaveFormat(48000, 16, 2);

            // Set for low latency playback
            StreamDisplay.RealTimePlayback = true;

            // Render on the full window to avoid extra compositing
            StreamDisplay.IsFullWindow = true;

            // Disable built-in transport controls
            StreamDisplay.AreTransportControlsEnabled = false;

            StreamDisplay.SetMediaStreamSource(_videoMss);
            AvStream.SetSourceVoice(new SourceVoice(xaudio, format));
        }