Play() public method

public Play ( ) : void
return void
        public NowPlayingPageViewModel()
        {
            // Track app lifecycle so playback state can be handled on suspend/resume.
            App.Current.Suspending += Current_Suspending;
            App.Current.Resuming += Current_Resuming;
            SongHistory = new ObservableCollection<ShoutcastSongHistoryItem>();

            // Reuse the single app-wide MediaElement instance.
            mediaElement = ((App)App.Current).MediaElement;

            // Start playback only when the player exists and is not already playing.
            PlayCommand = CommandManager.CreateCommand(() =>
            {
                if (mediaElement != null && mediaElement.CurrentState != MediaElementState.Playing)
                {
                    mediaElement.Play();
                    MediaControl.IsPlaying = true;
                }
            });

            // Pause playback only when the player exists and is not already paused.
            PauseCommand = CommandManager.CreateCommand(() =>
            {
                if (mediaElement != null && mediaElement.CurrentState != MediaElementState.Paused)
                {
                    mediaElement.Pause();
                    MediaControl.IsPlaying = false;
                }
            });
        }
Example #2
0
        /// <summary>
        /// Synthesizes <paramref name="text"/> (HTML-decoded) with the configured
        /// voice and starts playing it. No-ops on empty text or when no voice is
        /// available; any failure is logged rather than thrown.
        /// </summary>
        public static async Task StartTextToSpeech(string text)
        {
            // Nothing to speak.
            if (string.IsNullOrEmpty(text))
                return;

            try
            {
                // Tear down any synthesizer left over from a previous call.
                if (_speech != null)
                {
                    StopTextToSpeech();
                }

                var voice = GetSpeechVoice();
                if (voice == null)
                    return;

                _speech = new SpeechSynthesizer();
                _speech.Voice = voice;

                // Synthesize the decoded text and hand the stream to a player.
                SpeechSynthesisStream speechStream = await _speech.SynthesizeTextToStreamAsync(Utility.DecodeHtml(text));
                _soundPlayer = new MediaElement();
                _soundPlayer.SetSource(speechStream, speechStream.ContentType);
                _soundPlayer.Play();
            }
            catch (Exception ex)
            {
                AppLogs.WriteError("SpeechServices", ex);
            }
        }
Example #3
0
        /// <summary>
        /// Wraps the given VideoPlayer in a native MediaElement and forwards
        /// its playback events to the element.
        /// </summary>
        public UWPVideoViewer(VideoPlayer view)
        {
            View = view;

            // Create the native element BEFORE subscribing handlers: the original
            // wired lambdas that dereference Result first, so any event dispatched
            // between subscription and assignment would hit a null Result.
            Result = new controls.MediaElement {
                Stretch = media.Stretch.UniformToFill
            };
            Result.MediaEnded += (e, args) => View.FinishedPlaying.RaiseOn(Thread.UI);

            // Forward player events to the native element on the UI thread.
            // (Consistently use View — it is the same object as the parameter.)
            View.Buffered.HandleOn(Thread.UI, BufferVideo);
            View.PathChanged.HandleOn(Thread.UI, LoadVideo);
            View.Started.HandleOn(Thread.UI, () => Result.Play());
            View.Paused.HandleOn(Thread.UI, () => Result.Pause());
            View.Resumed.HandleOn(Thread.UI, () => Result.Play());
            View.Stopped.HandleOn(Thread.UI, () => Result.Stop());
            View.SoughtBeginning.HandleOn(Thread.UI, () => Result.Position = 0.Milliseconds());
            View.Muted.HandleOn(Thread.UI, () => Result.IsMuted = View.IsMuted);
        }
Example #4
0
 /// <summary>
 /// Plays the song currently selected in the songs list.
 /// </summary>
 private async void SongsList_Tapped(object sender, TappedRoutedEventArgs e)
 {
     // Guard: the tap can land with no selection, and the original's hard cast
     // of SelectedItem would then throw. Bail out instead.
     var list = SongsList as ListView;
     var song = list?.SelectedItem as StorageFile;
     if (song == null)
         return;

     // Open the selected file and play it.
     var stream = await song.OpenAsync(FileAccessMode.Read);
     MediaElement playMusic = new MediaElement();
     playMusic.SetSource(stream, song.ContentType);
     playMusic.Play();
 }
        /// <summary>
        /// Synthesizes the given text to audio and plays it.
        /// </summary>
        public async void Speak(string text)
        {
            var synthesizer = new Windows.Media.SpeechSynthesis.SpeechSynthesizer();
            var speechStream = await synthesizer.SynthesizeTextToStreamAsync(text);

            var player = new Windows.UI.Xaml.Controls.MediaElement();
            player.SetSource(speechStream, speechStream.ContentType);
            player.Play();
        }
        /// <summary>
        /// Converts text to speech audio and plays it immediately.
        /// </summary>
        public async void Speak(string text)
        {
            var speechEngine = new SpeechSynthesizer();
            var audio = await speechEngine.SynthesizeTextToStreamAsync(text);

            var player = new MediaElement();
            player.SetSource(audio, audio.ContentType);
            player.Play();
        }
Example #7
0
 /// <summary>
 /// Reads the displayed fortune aloud (looping) with an appended sign-off.
 /// </summary>
 private async void Fortune_Text_Click(object sender, TappedRoutedEventArgs e)
 {
     string fortune = textBlockFortune.Text;
     SpeechSynthesizer synthesizer = new SpeechSynthesizer();
     SpeechSynthesisStream stream = await synthesizer.SynthesizeTextToStreamAsync(fortune + ", God bless America");

     MediaElement player = new MediaElement();
     player.SetSource(stream, stream.ContentType);
     player.IsLooping = true;
     player.Play();
 }
 /// <summary>
 /// Best-effort playback of the given element: failures are logged, not thrown.
 /// </summary>
 private void PlaySound(MediaElement SoundElement)
 {
     try
     {
         SoundElement.Play();
     }
     catch (Exception ex)
     {
         // The original bare catch discarded the exception entirely, making
         // failures undiagnosable; log the cause alongside the element.
         System.Diagnostics.Debug.WriteLine("Problem playing sound: " + SoundElement.ToString() + " - " + ex.Message);
     }
 }
Example #9
0
        /// <summary>
        /// Plays the given audio stream with the specified MIME content format.
        /// </summary>
        /// <param name="speechStream">The audio data to play.</param>
        /// <param name="contentFormat">The MIME type of the stream.</param>
        /// <exception cref="ArgumentNullException">Either argument is null.</exception>
        public async Task PlayAsync(Stream speechStream, string contentFormat)
        {
            if (speechStream == null) throw new ArgumentNullException(nameof(speechStream));
            // Fixed: the original reported nameof(speechStream) for a null contentFormat.
            if (contentFormat == null) throw new ArgumentNullException(nameof(contentFormat));

            // Wrap the managed stream for WinRT and start playback.
            var media = new MediaElement();
            media.SetSource(speechStream.AsRandomAccessStream(), contentFormat);
            media.Play();

            await Task.CompletedTask;
        }
		/// <summary>
		/// Synthesizes the text and auto-plays the resulting audio stream.
		/// </summary>
		public async void Speak(string text)
		{
			var synthesizer = new SpeechSynthesizer();
			var audioStream = await synthesizer.SynthesizeTextToStreamAsync(text);

			var player = new MediaElement { AutoPlay = true };
			player.SetSource(audioStream, audioStream.ContentType);
			player.Play();
		}
Example #11
0
        /// <summary>
        /// Plays a file from the packaged "Music" folder. Always returns true
        /// (failures surface as exceptions from the storage APIs).
        /// </summary>
        public async Task<bool> Play(string fileName)
        {
            var musicFolder = await Package.Current.InstalledLocation.GetFolderAsync("Music");
            var audioFile = await musicFolder.GetFileAsync(fileName);
            var audioStream = await audioFile.OpenAsync(FileAccessMode.Read);

            var player = new MediaElement();
            player.SetSource(audioStream, audioFile.ContentType);
            player.Play();

            return true;
        }
        /// <summary>
        /// Synthesizes the caller's text and plays it.
        /// </summary>
        public async void Speak(string text)
        {
            MediaElement mediaElement = new MediaElement();

            var synth = new Windows.Media.SpeechSynthesis.SpeechSynthesizer();

            // Fixed: the original synthesized the literal "Hello World", played
            // that, and then synthesized `text` only to discard the result.
            SpeechSynthesisStream stream = await synth.SynthesizeTextToStreamAsync(text);

            mediaElement.SetSource(stream, stream.ContentType);
            mediaElement.Play();
        }
Example #13
0
        /// <summary>
        /// Plays assets/sounds/&lt;fileName&gt; from the installed package.
        /// </summary>
        public async Task Play(string fileName)
        {
            var packageLocation = Windows.ApplicationModel.Package.Current.InstalledLocation;
            var assetsFolder = await packageLocation.GetFolderAsync("assets");
            var soundsFolder = await assetsFolder.GetFolderAsync("sounds");
            StorageFile audioFile = await soundsFolder.GetFileAsync(fileName);

            // The element is kept in a field (same as the original).
            _mediaElement = new MediaElement();
            var audioStream = await audioFile.OpenAsync(FileAccessMode.Read);
            _mediaElement.SetSource(audioStream, audioFile.ContentType);
            _mediaElement.Play();
        }
        /// <summary>
        /// Plays Assets/&lt;fileName&gt; from the installed package, if found.
        /// </summary>
        public async Task Play(string fileName)
        {
            var packageLocation = Windows.ApplicationModel.Package.Current.InstalledLocation;
            var assetsFolder = await packageLocation.GetFolderAsync("Assets");
            var audioFile = await assetsFolder.GetFileAsync(fileName);

            // Guard clause instead of the original nested-if.
            if (audioFile == null)
                return;

            var audioStream = await audioFile.OpenAsync(Windows.Storage.FileAccessMode.Read);
            var player = new MediaElement();
            player.SetSource(audioStream, audioFile.ContentType);
            player.Play();
        }
Example #15
0
        /// <summary>
        /// Loads the bundled track and starts playback; any failure is logged
        /// and leaves the player flagged as not playing.
        /// </summary>
        private async Task LoadMediaAsync()
        {
            try
            {
                var musicFile = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///data/Shopen.mp3"));
                var musicStream = await musicFile.OpenReadAsync();

                music.SetSource(musicStream, "");
                music.Play();
                isPlaying = true;
            }
            catch (Exception ex)
            {
                Debug.WriteLine(ex.Message);
                isPlaying = false;
            }
        }
Example #16
0
 // Fire-and-forget: resolves CountDown/Assets/Ding.wav from the installed
 // package on a thread-pool thread and attempts to play it. The returned
 // Task is intentionally not awaited.
 public void Play()
 {
     Task task = Task.Run(async () => {
         StorageFolder location = Windows.ApplicationModel.Package.Current.InstalledLocation;
         StorageFolder folder = await location.GetFolderAsync("CountDown");
         StorageFolder subfolder = await folder.GetFolderAsync("Assets");
         StorageFile file = await subfolder.GetFileAsync("Ding.wav");
         Uri path = new Uri(file.Path, UriKind.Absolute);
         // NOTE(review): MediaElement is a XAML control; constructing and driving
         // it from a thread-pool thread looks unsafe — confirm this works where
         // it is deployed (it may need to run via the UI dispatcher).
         MediaElement mediaElement = new MediaElement()
         {
             AutoPlay = false,
             Source = path,
         };
         // NOTE(review): with AutoPlay = false, Play() is called immediately after
         // Source is assigned, before the media is necessarily opened — verify
         // that the sound is actually audible.
         mediaElement.Play();
     });
 }
		// http://msdn.microsoft.com/en-us/library/windowsphone/develop/jj207057(v=vs.105).aspx
		/// <summary>
		/// Synthesizes the text and plays it; synthesis/playback errors are logged.
		/// </summary>
		public async void Speak(string text)
		{
			var synthesizer = new SpeechSynthesizer();

            try
            {
                var audio = await synthesizer.SynthesizeTextToStreamAsync(text);

                var player = new MediaElement();
                player.SetSource(audio, audio.ContentType);
                player.Play();
            }
            catch (Exception pe)
            {
                Debug.WriteLine("couldn't play voice " + pe.Message);
            }
		}
       // SpeechRecognitionEngine sRecognize = new SpeechRecognitionEngine();


      
        /// <summary>
        /// Speaks the text box contents through a new media element.
        /// </summary>
        private async void SpeakButton_Click(object sender, RoutedEventArgs e)
        {
            // The object for controlling the speech-synthesis engine (voice).
            SpeechSynthesizer synth = new Windows.Media.SpeechSynthesis.SpeechSynthesizer();

            // Generate the audio stream from the plain text in the text box.
            SpeechSynthesisStream stream = await synth.SynthesizeTextToStreamAsync(txtText.Text);

            // The media object for controlling and playing audio.
            MediaElement mediaElement = new MediaElement();
            mediaElement.SetSource(stream, stream.ContentType);
            mediaElement.Play();
        }
Example #19
0
 /// <summary>
 /// Synthesizes a text into speech and pronounces it.
 /// </summary>
 /// <param name="message">The message to be pronounced.</param>
 public async Task TextToSpeech(string message)
 {
     try
     {
         if (!string.IsNullOrEmpty(message))
         {
             // Speak the string through a fresh media element.
             var result = await Speech.SynthesizeTextToStreamAsync(message);
             _mediaElement = new MediaElement();
             _mediaElement.SetSource(result, result.ContentType);
             _mediaElement.Play();
         }
     }
     catch (Exception ex)
     {
         // Fixed: Debug.WriteLine("SpeechServices", ex) bound the
         // (string format, params object[]) overload with no placeholders,
         // so the exception was silently dropped from the output.
         Debug.WriteLine("SpeechServices: " + ex);
     }
 }
 // Page constructor: wires the shared stream player for the track selected
 // on the start page, (re)starts the progress timer, and populates the page
 // chrome from the track's metadata.
 public TrackDetails()
 {
     selectedTrack = StartPage.SelectedTrack;
     InitializeComponent();
     SystemNavigationManager.GetForCurrentView().AppViewBackButtonVisibility =
         AppViewBackButtonVisibility.Visible;
     // Bail out silently when the hosting StartPage or its player is unavailable.
     _startPage = Window.Current.Content as StartPage;
     if (_startPage == null) return;
     _streamPlayer = _startPage.MediaElement;
     if (selectedTrack == null || _streamPlayer == null) return;
     if (!selectedTrack.id.Equals(StartPage.CurrentTrack.id))
     {
         // A different track was picked: make it current, point the player at
         // its stream, and (re)create the progress timer before playing.
         StartPage.CurrentTrack = selectedTrack.DeepcopyTrack();
         _streamPlayer.Source = new Uri(selectedTrack.stream_url);
         if (!_loaded)
         {
             CreateTimer();
             _loaded = true;
         }
         else
         {
             // Timer already exists from a previous visit — stop it first.
             StartPage.AudioTimer?.Stop();
             CreateTimer();
         }
         _streamPlayer.Play();
         _startPage?.SetPlayButton(_play);
         TrackButtonPlayImage.Source = StartPage.ImageFromRelativePath(this, "Assets/pause27.png");
     }
     else
     {
         // Same track is already current: just restart the timer.
         StartPage.AudioTimer?.Stop();
         CreateTimer();
     }
     // Populate the page UI from the selected track's metadata.
     _startPage.TrackInfo.Text = selectedTrack.title;
     BorderBrushDetails.ImageSource = new BitmapImage(new Uri(selectedTrack.background_url));
     TrackDetailsMain.DataContext = selectedTrack;
     ArtworkImage.Source = new BitmapImage(new Uri(selectedTrack.artwork_url));
     ArtistImage.Source = new BitmapImage(new Uri(selectedTrack.user.avatar_url));
     BitmapTransform(selectedTrack.waveform_url);
     _totalTime = selectedTrack.duration;
     AudioSlider.Maximum = _totalTime;
     AudioSlider.DataContext = _streamPlayer.Source;
     FollowArtist.Text = (selectedTrack.user.following ?  "follow" : "unfollow");
 }
Example #21
0
        /// <summary>
        /// Synthesizes the text and plays it, logging the installed voices.
        /// </summary>
        private static async void _Speak(string text)
        {
            MediaElement mediaElement = new MediaElement();
            SpeechSynthesizer synth = new SpeechSynthesizer();

            // Diagnostic: list the installed voices.
            foreach (VoiceInformation voice in SpeechSynthesizer.AllVoices)
            {
                Debug.WriteLine(voice.DisplayName + ", " + voice.Description);
            }

            // Generate the audio stream from plain text.
            SpeechSynthesisStream stream = await synth.SynthesizeTextToStreamAsync(text);

            // Send the stream to the media object and play it. Fixed: the original
            // called Stop() immediately after Play(), cancelling playback before
            // any audio could be heard.
            mediaElement.SetSource(stream, stream.ContentType);
            mediaElement.Play();

            // The synthesizer is no longer needed once the stream is produced.
            synth.Dispose();
        }
        /// <summary>
        /// Speaks the text box contents, or a notice when the box is empty.
        /// </summary>
        private async void talkItButtonTap(object sender, RoutedEventArgs e)
        {
            SpeechSynthesizer synth = new SpeechSynthesizer();

            // TextBox.Text is never null, so the original `message != null` check
            // could never select the fallback; treat an empty box as the fallback
            // case instead, and synthesize/play exactly once (both branches were
            // identical).
            string message = string.IsNullOrEmpty(textBox.Text)
                ? "Text box is empty"
                : textBox.Text;

            var stream = await synth.SynthesizeTextToStreamAsync(message);
            var media = new MediaElement();
            media.SetSource(stream, stream.ContentType);
            media.Play();
        }
Example #23
0
        /// <summary>
        /// Plays the packaged kick and snare samples in sequence.
        /// </summary>
        public async void PlayLaserSound()
        {
            var package = Windows.ApplicationModel.Package.Current;
            var installedLocation = package.InstalledLocation;

            // The original duplicated the open/set/play code per file; iterate
            // the sample paths instead (same order, same behavior).
            string[] samplePaths =
            {
                "Assets\\Samples\\Kick 5.wav",
                "Assets\\Samples\\Snare 5.wav",
            };

            foreach (var samplePath in samplePaths)
            {
                var storageFile = await installedLocation.GetFileAsync(samplePath);
                if (storageFile != null)
                {
                    var stream = await storageFile.OpenAsync(Windows.Storage.FileAccessMode.Read);
                    MediaElement snd = new MediaElement();
                    snd.SetSource(stream, storageFile.ContentType);
                    snd.Play();
                }
            }
        }
        /// <summary>
        /// Captures an MP4 with the camera UI and prepares it for playback.
        /// </summary>
        private async void TakeVideo()
        {
            ShowPicture = false;
            ShowVideo = true;

            var cameraCaptureUI = new CameraCaptureUI();
            cameraCaptureUI.VideoSettings.Format = CameraCaptureUIVideoFormat.Mp4;

            var video = await cameraCaptureUI.CaptureFileAsync(CameraCaptureUIMode.Video);
            if (video == null)
                return;

            _videoStream = await video.OpenAsync(FileAccessMode.Read);

            // Defer wiring the stream until the element has been loaded into the
            // visual tree, then auto-play.
            CapturedMedia = new MediaElement { AutoPlay = true };
            CapturedMedia.Loaded += (sender, args) =>
            {
                CapturedMedia.SetSource(_videoStream, "video/mp4");
                CapturedMedia.Play();
            };
        }
Example #25
0
 /// <summary>
 /// Speaks the current line through the given element, records its words in
 /// the user's vocabulary, logs it, and advances the line UI.
 /// </summary>
 public async void HandleSpeakEvent(MediaElement mediaElement)
 {
     // Nothing to do when both line parts are empty.
     string currentLine = lineSection.line + lineSection.lineComplete;
     if (currentLine == "")
         return;

     SpeechSynthesisStream stream = await synth.SynthesizeTextToStreamAsync(currentLine);
     if (stream == null)
     {
         // Synthesis failed — surface an error dialog to the user and stop.
         MessageDialog dialog = new MessageDialog("unable to synthesize text");
         await dialog.ShowAsync();
         return;
     }

     // Start this audio stream playing.
     mediaElement.AutoPlay = true;
     mediaElement.SetSource(stream, stream.ContentType);
     mediaElement.Play();

     // Record the spoken words and advance the line UI.
     foreach (string word in currentLine.Split(' '))
     {
         loggedInUser.UpdateVocbulary(word);
     }
     logSection.AddLineToLog(currentLine);
     lineSection.EnterLine();
 }
Example #26
0
        /// <summary>
        /// Speaks an age/gender-dependent message, varying with whether the
        /// subject was smiling.
        /// </summary>
        private async void readAge(string age, string gender)
        {
            // The media object for controlling and playing audio.
            mediaElement = new MediaElement();

            // The object for controlling the speech synthesis engine (voice).
            var synth = new Windows.Media.SpeechSynthesis.SpeechSynthesizer();

            // Parse once instead of re-parsing `age` on every comparison
            // (the original called Int16.Parse five times).
            short parsedAge = Int16.Parse(age);

            // Honorific by detected gender.
            string adjetivo = (gender == "male") ? "sir" : "miss";

            // Age-band descriptor (empty for the 25..50 middle band).
            string faixaEtaria;
            if (parsedAge < 25)
            {
                faixaEtaria = "a young person";
            }
            else if (parsedAge > 50)
            {
                faixaEtaria = "a growth " + gender;
            }
            else
            {
                faixaEtaria = "";
            }

            // Generate the audio stream from plain text, picking the variant
            // from the smile flag and age band.
            SpeechSynthesisStream stream;

            if (isSmiling && parsedAge < 25)
            {
                stream = await synth.SynthesizeTextToStreamAsync("Hello " + adjetivo + "! Today you're looking " + faixaEtaria + " with " + age + " years old. Now I understand your smile.");
            }
            else if (!isSmiling && parsedAge > 25)
            {
                stream = await synth.SynthesizeTextToStreamAsync("Hello " + adjetivo + "! Before I tell you your age, let me tell to you to try to smile to the photo next time. Maybe you can look younger. Today you're looking " + faixaEtaria + " with " + age + " years old.");
            }
            else if (!isSmiling)
            {
                stream = await synth.SynthesizeTextToStreamAsync("Hello " + adjetivo + "! Really? No smiles? OK. Today you're looking " + faixaEtaria + " with " + age + " years old.");
            }
            else
            {
                stream = await synth.SynthesizeTextToStreamAsync("Hello " + adjetivo + "! Today you're looking " + faixaEtaria + " with " + age + " years old. Before I forget: beautiful smile!");
            }

            // Send the stream to the media object.
            mediaElement.SetSource(stream, stream.ContentType);
            mediaElement.Play();
        }
Example #27
0
        /// <summary>
        /// Speaks a greeting for the recognized person; "Hara" gets a
        /// personalized message based on the stored age.
        /// </summary>
        private async void ReadVoiceName(string name)
        {
            // Player and speech engine for this utterance.
            mediaElement = new MediaElement();
            var synth = new Windows.Media.SpeechSynthesis.SpeechSynthesizer();

            // Choose the greeting text first, then synthesize once.
            string greeting = (name == "Hara")
                ? "Hello " + name + "! You have 18 years old plus " + (Int16.Parse(age) - 18).ToString() + " years of experience. But, let me check something for you."
                : "Hello " + name + "! Let me check some products for you.";

            SpeechSynthesisStream stream = await synth.SynthesizeTextToStreamAsync(greeting);

            // Send the stream to the media object and play it.
            mediaElement.SetSource(stream, stream.ContentType);
            mediaElement.Play();
        }
Example #28
0
        /// <summary>
        /// Speaks the message corresponding to the given error condition.
        /// </summary>
        private async void ReadVoice(Error name)
        {
            // Player and speech engine for this utterance.
            mediaElement = new MediaElement();
            var synth = new Windows.Media.SpeechSynthesis.SpeechSynthesizer();

            // Pick the spoken message for the error condition, then synthesize once.
            string message;
            switch (name)
            {
                case Error.Not_Recognized:
                    message = "Oops! Someone was do not recognized. Please, show me someone that I met before!";
                    break;
                case Error.No_Face:
                    message = "I can't find a face. Do you really show me someone? Please, try again.";
                    break;
                case Error.Not_Found:
                    message = "I can't find another product for you.";
                    break;
                case Error.Expensive:
                    message = "You need to order your boss to raise your paycheck. Let me check another product for you, for now.";
                    break;
                default:
                    message = "Hello " + name + "! Let me check some products for you.";
                    break;
            }

            SpeechSynthesisStream stream = await synth.SynthesizeTextToStreamAsync(message);

            // Send the stream to the media object and play it.
            mediaElement.SetSource(stream, stream.ContentType);
            mediaElement.Play();
        }
        /// <summary>
        /// Downloads the manifest from the given source, parses the manifest
        /// and sets the source on the media element.
        /// </summary>
        /// <param name="source">The URL of the source MPD</param>
        /// <param name="mediaElement">The MediaElement to start playback</param>
        /// <returns>A task that completes when playback has been initialized
        /// (or a failure has been logged).</returns>
        public async Task Initialize(Uri source, MediaElement mediaElement)
        {
            //1) Download manifest
            var sourceUrlText = source.AbsoluteUri;
            try
            {
                var manifest = new Manifest(sourceUrlText);
                var document = await manifest.LoadManifestAsync(sourceUrlText);

                //2) Parse manifest
                DashManifestParser mpdParser = new DashManifestParser(document, ref manifest);
                if (mpdParser.Parse())
                {
                    // Unsupported profiles are logged but not fatal.
                    if (!manifest.IsSupportedProfile)
                    {
#if DEBUG
                        Logger.Log("The profiles attribute does not contain the \"urn:mpeg:dash:profile:isoff-live:2011\" profile, so it may not work as expected.");
#endif
                    }
                    if (manifest.IsLive)
                    {
                        //3) Play using MSE if it is live
                        MseStreamSource mseSource = new MseStreamSource();
                        player = new Player(mediaElement, mseSource, manifest);
                        // Apply the caller-configured live offset, if one was set.
                        if (haveSetLiveOffset && manifest.IsLive)
                        {
                            player.HasLiveOffsetValue = true;
                            player.LiveOffset = liveOffset;
                        }
                        player.Initialize();
                    }
                    else
                    {
                        // Otherwise, use our Adaptive Media Source for on demand content
                        var result = await AdaptiveMediaSource.CreateFromUriAsync(source);
                        if (result.Status != AdaptiveMediaSourceCreationStatus.Success)
                        {
                            throw new Exception("Unable to create media source because: " + result.Status);
                        }
                        var adaptiveSource = result.MediaSource;

                        mediaElement.SetMediaStreamSource(adaptiveSource);
                        mediaElement.Play();
                    }


                }

                else
                {
#if DEBUG
                    Logger.Log("The Parser failed to parse this mpd");
#endif
                    return;
                }
            }
            catch (Exception e)
            {
                // All failures (download, parse, source creation) are logged in
                // DEBUG builds and otherwise swallowed — callers see no error.
#if DEBUG
                Logger.Log("Exception when initializing player: " + e.Message + " " + Logger.Display(e));
#endif
            }
        }
Example #30
0
 /// <summary>
 /// Configures a PIR motion sensor on GPIO pin 5; on a rising edge, reports
 /// a reading and speaks a random message on the UI thread.
 /// </summary>
 private void InitPirsensorPin()
 {
     MediaElement mediaElement = new MediaElement();
     SpeechSynthesizer speech = new SpeechSynthesizer();
     if (gpio == null)
         return;

     pirPin = gpio.OpenPin(5);

     // Prefer an input with pull-down when the hardware supports it. Fixed:
     // the original unconditionally reset the mode to plain Input right after
     // this if/else, defeating the pull-down configuration.
     if (pirPin.IsDriveModeSupported(GpioPinDriveMode.InputPullDown))
         pirPin.SetDriveMode(GpioPinDriveMode.InputPullDown);
     else
         pirPin.SetDriveMode(GpioPinDriveMode.Input);

     pirPin.ValueChanged += (GpioPin p, GpioPinValueChangedEventArgs args) =>
     {
         if (args.Edge == GpioPinEdge.RisingEdge)
         {
             // Marshal to the UI thread: update the status text and play speech.
             var task = Dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () => {
                 IoTHelper.SendReading(DateTime.Now);
                 var msg = messages.OrderBy(x => Guid.NewGuid()).Take(1).First();
                 PirStatus.Text = msg;
                 SpeechSynthesisStream stream = await speech.SynthesizeTextToStreamAsync(msg);
                 mediaElement.SetSource(stream, stream.ContentType);
                 mediaElement.Play();
             });
         }
     };
 }
 /// <summary>
 /// Resolves a file from the installed package and plays it through the
 /// shared sound element.
 /// </summary>
 private async void PlaySound(string filePath)
 {
     var installedLocation = Windows.ApplicationModel.Package.Current.InstalledLocation;
     var storageFile = await installedLocation.GetFileAsync(filePath);
     if (storageFile == null)
         return;

     var stream = await storageFile.OpenAsync(Windows.Storage.FileAccessMode.Read);
     _sound = new MediaElement();
     _sound.SetSource(stream, storageFile.ContentType);
     _sound.Play();
 }
Example #32
0
        /// <summary>
        /// Plays Sounds/modem.wav from the installed package through a
        /// media-category audio element.
        /// </summary>
        private async void PlayMusic()
        {
            // Renamed the local: the original variable shadowed the method name.
            MediaElement player = new MediaElement();
            player.AudioCategory = Windows.UI.Xaml.Media.AudioCategory.Media;

            StorageFolder soundsFolder = await Windows.ApplicationModel.Package.Current.InstalledLocation.GetFolderAsync("Sounds");
            StorageFile soundFile = await soundsFolder.GetFileAsync("modem.wav");
            player.SetSource(await soundFile.OpenAsync(FileAccessMode.Read), soundFile.ContentType);
            player.Play();
        }
        /// <summary>
        /// Called when the flip view content should be prepared: resolves a
        /// playable video URL for the post (imgur, falling back to GfyCat) and
        /// starts looping playback in a new MediaElement.
        /// </summary>
        /// <param name="post">The post whose media should be shown.</param>
        public async void OnPrepareContent(Post post)
        {
            m_shouldBePlaying = false;
            m_loadingHidden = false;

            // Show loading
            m_host.ShowLoading();

            // Try to get the imgur url
            string gifUrl = GetImgurUrl(post.Url);

            // If that failed try to get a url from GfyCat
            if(gifUrl.Equals(String.Empty))
            {
                // We have to get it from gyfcat
                gifUrl = await GetGfyCatGifUrl(GetGfyCatApiUrl(post.Url));
            }

            // If we didn't get anything something went wrong.
            if(gifUrl.Equals(String.Empty))
            {
                m_host.ShowError();
                return;
            }

            // Since some of this can be costly, delay the work load until we aren't animating.
            await Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(CoreDispatcherPriority.Low, () =>
            {
                // Make sure we aren't destroyed.
                if(m_isDestoryed)
                {
                    return;
                }

                // Create the media element, wire its events, and start looping
                // playback, then attach it to the content root.
                m_gifVideo = new MediaElement();
                m_gifVideo.HorizontalAlignment = HorizontalAlignment.Stretch;
                m_gifVideo.Tapped += OnVideoTapped;
                m_gifVideo.CurrentStateChanged += OnVideoCurrentStateChanged;
                m_gifVideo.Source = new Uri(gifUrl, UriKind.Absolute);
                m_gifVideo.Play();
                m_gifVideo.IsLooping = true;
                ui_contentRoot.Children.Add(m_gifVideo);
            });
        }