/// <summary>
/// Fetches a joke via <c>JokeHelper.GetJoke</c> and shows its text in <c>TxtJoke</c>.
/// Declared <c>async void</c> for fire-and-forget use from UI code; exceptions thrown
/// here are unobservable, so the body must not dereference possibly-null results.
/// </summary>
async void generateJoke()
{
    var joke = await JokeHelper.GetJoke();
    // BUG FIX: the original checked only `joke != null` and then read joke.value.joke,
    // which throws when the service returns a payload with a null `value`.
    if (joke != null && joke.value != null && !string.IsNullOrEmpty(joke.value.joke))
    {
        TxtJoke.Text = joke.value.joke;
    }
}
/// <summary>
/// Reads the currently cached joke aloud (as SSML) and then refreshes the cache with a
/// new joke for the next invocation.
/// </summary>
/// <returns>A task that completes once the joke has been spoken; the cache refresh is
/// deliberately fire-and-forget.</returns>
public static async Task SayJoke()
{
    Joke joke = DataAccess.DeserializeModuleData(typeof(Joke), await DataAccess.GetModuleData(Modules.JOKE));

    // BUG FIX: no cached joke (first run / failed refresh) previously caused a
    // NullReferenceException at joke.Title. Nothing to say in that case.
    if (joke == null)
    {
        return;
    }

    // The title's trailing character is stripped before speaking (presumably a plural
    // 's' or punctuation — TODO confirm against the feed). Guard against an empty
    // title so Remove() cannot throw ArgumentOutOfRangeException.
    string title = string.IsNullOrEmpty(joke.Title)
        ? string.Empty
        : joke.Title.Remove(joke.Title.Length - 1);

    StringBuilder jokeString = new StringBuilder();
    jokeString.Append("Lückenfüller Lückenfüller <break time='300ms'/>");
    jokeString.AppendLine($"Einen {title} gefällig: <break time='300ms'/><prosody rate=\"-15%\">{joke.Description}</prosody>");
    await sayAsync(jokeString.ToString());

    // Load a new joke for next time; intentionally not awaited (CS4014 suppressed)
    // so speech output is not delayed by the cache update.
#pragma warning disable 4014
    DataAccess.AddOrReplaceModuleData(Modules.JOKE, await JokeHelper.GetJoke());
#pragma warning restore 4014
}
/// <summary>
/// Loads a fresh joke from the service and stores it under the given module key.
/// </summary>
/// <param name="modules">Module key the joke data is stored under.</param>
private static async Task jokeModul(Modules modules)
{
    var freshJoke = await JokeHelper.GetJoke();
    await DataAccess.AddOrReplaceModuleData(modules, freshJoke);
    Debug.WriteLine("Joke Module geladen");
}
/// <summary>
/// Handle events fired when a result is generated. This may include a garbage rule that fires when general room noise
/// or side-talk is captured (this will have a confidence of Rejected typically, but may occasionally match a rule with
/// low confidence).
/// </summary>
/// <param name="sender">The Recognition session that generated this result</param>
/// <param name="args">Details about the recognized speech</param>
private async void ContinuousRecognitionSession_ResultGenerated(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionResultGeneratedEventArgs args)
{
    // The garbage rule will not have a tag associated with it, the other rules will return a string matching the tag provided
    // when generating the grammar.
    string tag = "unknown";
    if (args.Result.Constraint != null)
    {
        tag = args.Result.Constraint.Tag;
    }

    // Developers may decide to use per-phrase confidence levels in order to tune the behavior of their
    // grammar based on testing.
    if (args.Result.Confidence == SpeechRecognitionConfidence.Low
        || args.Result.Confidence == SpeechRecognitionConfidence.Medium
        || args.Result.Confidence == SpeechRecognitionConfidence.High)
    {
        await dispatcher.RunAsync(CoreDispatcherPriority.Normal, async () =>
        {
            heardYouSayTextBlock.Visibility = Visibility.Visible;
            resultTextBlock.Visibility = Visibility.Visible;
            resultTextBlock.Text = string.Format("Heard: '{0}', (Tag: '{1}', Confidence: {2})", args.Result.Text, tag, args.Result.Confidence.ToString());
            switch (tag)
            {
                case TagCommands.GetJoke:
                    {
                        var res = await JokeHelper.GetJoke();
                        // BUG FIX: res / res.value may be null when the service fails;
                        // the original dereferenced res.value unconditionally.
                        if (res != null && res.value != null && !string.IsNullOrEmpty(res.value.joke))
                        {
                            await speech.Read(res.value.joke);
                            resultTextBlock.Text = res.value.joke;
                        }
                    }
                    break;
                case TagCommands.HowOld:
                    {
                        var photo = await TakePhoto();
                        // Call computer vision to detect faces, then estimate ages.
                        var faces = await ApiContainer.GetApi<FaceService>().UploadAndDetectFaceAttributes(photo);
                        var res = ApiContainer.GetApi<FaceService>().HowOld(faces);
                        if (!string.IsNullOrEmpty(res))
                        {
                            await speech.Read(res);
                            resultTextBlock.Text = res;
                        }
                    }
                    break;
                case TagCommands.Calling:
                    await speech.Read("Yes, what can I do Boss?");
                    break;
                case TagCommands.SeeMe:
                    {
                        var photo = await TakePhoto();
                        // Call computer vision to describe the image.
                        var res = await ApiContainer.GetApi<ComputerVisionService>().RecognizeImage(photo);
                        if (!string.IsNullOrEmpty(res))
                        {
                            await speech.Read(res);
                            resultTextBlock.Text = "I see " + res;
                        }
                    }
                    break;
                case TagCommands.ReadText:
                    {
                        var photo = await TakePhoto();
                        // Call computer vision OCR.
                        var res = await ApiContainer.GetApi<ComputerVisionService>().RecognizeText(photo);
                        if (!string.IsNullOrEmpty(res))
                        {
                            await speech.Read(res);
                            resultTextBlock.Text = "read: " + res;
                        }
                    }
                    break;
                case TagCommands.Stop:
                    Player1.MediaPlayer.Pause();
                    break;
                case TagCommands.PlayBlues:
                case TagCommands.PlaySlow:
                case TagCommands.PlayRock:
                case TagCommands.PlayJazz:
                case TagCommands.PlayElectro:
                    {
                        // Map the recognized tag onto a genre, then play a random song from it.
                        var genre = Genre.Slow;
                        switch (tag)
                        {
                            case TagCommands.PlayBlues:
                                genre = Genre.Blues;
                                break;
                            case TagCommands.PlayRock:
                                genre = Genre.Rock;
                                break;
                            case TagCommands.PlaySlow:
                                genre = Genre.Slow;
                                break;
                            case TagCommands.PlayJazz:
                                genre = Genre.Jazz;
                                break;
                            case TagCommands.PlayElectro:
                                genre = Genre.Electro;
                                break;
                        }
                        var rnd = new Random(Environment.TickCount);
                        var selIds = SongIDs[genre];
                        // BUG FIX: Random.Next's upper bound is exclusive; the original used
                        // selIds.Length - 1 and could never pick the last song of a genre.
                        var num = rnd.Next(0, selIds.Length);
                        var url = await YouTube.GetVideoUriAsync(selIds[num], YouTubeQuality.QualityLow);
                        MediaPlayerHelper.CleanUpMediaPlayerSource(Player1.MediaPlayer);
                        Player1.MediaPlayer.Source = new MediaItem(url.Uri.ToString()).MediaPlaybackItem;
                        Player1.MediaPlayer.Play();
                    }
                    break;
                case TagCommands.TakePhoto:
                    await speech.Read("I will take your picture boss");
                    //GetPhotoFromCam();
                    break;
                case TagCommands.Thanks:
                    await speech.Read("My pleasure boss");
                    break;
                case TagCommands.TurnOnLamp:
                    {
                        //await speech.Read("Turn on the light");
                        //var Pesan = Encoding.UTF8.GetBytes("LIGHT_ON");
                        //clientMqtt.PublishMessage(Pesan);
                    }
                    break;
                case TagCommands.TurnOffLamp:
                    {
                        //await speech.Read("Turn off the light");
                        //var Pesan = Encoding.UTF8.GetBytes("LIGHT_OFF");
                        //clientMqtt.Publish( Pesan);
                    }
                    break;
                case TagCommands.ReciteQuran:
                    {
                        try
                        {
                            Random rnd = new Random(Environment.TickCount);
                            // BUG FIX: surahs are numbered 1..114 inclusive, but Next(1, 114)
                            // excludes the exclusive upper bound and so could never pick 114.
                            var surah = rnd.Next(1, 115);
                            var rslt = await httpClient.GetAsync($"http://qurandataapi.azurewebsites.net/api/Ayah/GetAyahCountBySurah?Surah={surah}");
                            var ayah = int.Parse(await rslt.Content.ReadAsStringAsync());
                            // BUG FIX: likewise include the last ayah of the surah.
                            ayah = rnd.Next(1, ayah + 1);
                            rslt = await httpClient.GetAsync($"http://qurandataapi.azurewebsites.net/api/Ayah/GetMediaByAyah?Surah={surah}&Ayah={ayah}&ReciterId=11");
                            var media = JsonConvert.DeserializeObject<QuranMedia>(await rslt.Content.ReadAsStringAsync());
                            if (media != null)
                            {
                                MediaPlayerHelper.CleanUpMediaPlayerSource(Player1.MediaPlayer);
                                Player1.MediaPlayer.Source = new MediaItem(media.Url).MediaPlaybackItem;
                                Player1.MediaPlayer.Play();
                            }
                        }
                        catch
                        {
                            // Best-effort: any network/parse failure is reported verbally.
                            await speech.Read("there is problem on the service");
                        }
                    }
                    break;
                case TagCommands.WhatDate:
                    await speech.Read("Today is " + DateTime.Now.ToString("dd MMMM yyyy"));
                    break;
                case TagCommands.WhatTime:
                    await speech.Read("Current time is " + DateTime.Now.ToString("HH:mm"));
                    break;
                default:
                    // Dynamically generated per-device tags: TURNON{i} / TURNOFF{i}.
                    for (int x = 0; x < Devices.Count; x++)
                    {
                        if (tag == $"TURNON{x}")
                        {
                            SwitchDevice(true, Devices[x].IP);
                            break;
                        }
                        else if (tag == $"TURNOFF{x}")
                        {
                            SwitchDevice(false, Devices[x].IP);
                            break;
                        }
                    }
                    break;
            }
        });
    }
    else
    {
        // In some scenarios, a developer may choose to ignore giving the user feedback in this case, if speech
        // is not the primary input mechanism for the application.
        await dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
        {
            heardYouSayTextBlock.Visibility = Visibility.Collapsed;
            resultTextBlock.Visibility = Visibility.Visible;
            resultTextBlock.Text = string.Format("Sorry, I didn't catch that. (Heard: '{0}', Tag: {1}, Confidence: {2})", args.Result.Text, tag, args.Result.Confidence.ToString());
        });
    }
}