public async Task StartAsync(string fileName = null)
{
    var speechConfig = SpeechConfig.FromSubscription(this.settings.SubscriptionKey, this.settings.Region);
    speechConfig.SpeechRecognitionLanguage = "de-de";
    speechConfig.OutputFormat = OutputFormat.Detailed;

    using (var audioInput = fileName == null
        ? AudioConfig.FromDefaultMicrophoneInput()
        : AudioConfig.FromWavFileInput(fileName))
    {
        using (var intentRecognizer = new IntentRecognizer(speechConfig, audioInput))
        {
            stopRecognition = new TaskCompletionSource<int>();

            var model = LanguageUnderstandingModel.FromAppId(this.settings.LuisAppId);
            intentRecognizer.AddAllIntents(model);

            intentRecognizer.SessionStarted += IntentRecognizer_SessionStarted;
            intentRecognizer.Recognized += IntentRecognizer_Recognized;
            intentRecognizer.Recognizing += IntentRecognizer_Recognizing;
            intentRecognizer.SessionStopped += IntentRecognizer_SessionStopped;
            intentRecognizer.SpeechEndDetected += IntentRecognizer_SpeechEndDetected;
            intentRecognizer.SpeechStartDetected += IntentRecognizer_SpeechStartDetected;
            intentRecognizer.Canceled += IntentRecognizer_Canceled;

            await intentRecognizer.StartContinuousRecognitionAsync().ConfigureAwait(false);

            Task.WaitAny(stopRecognition.Task);

            await intentRecognizer.StopContinuousRecognitionAsync().ConfigureAwait(false);
        }
    }
}
public async Task RecognizeSpeech()
{
    var audioConfig = AudioConfig.FromDefaultMicrophoneInput();
    var speechConfig = SpeechConfig.FromSubscription(key, "westus2");

    // Creates a speech recognizer.
    using (var recognizer = new IntentRecognizer(speechConfig, audioConfig))
    {
        // Hide user secrets later
        var model = LanguageUnderstandingModel.FromAppId(Environment.GetEnvironmentVariable("LUIS_APP_ID"));
        recognizer.AddAllIntents(model);

        var stopRecognition = new TaskCompletionSource<int>();

        // Can add logic to exit using voice command, "Thanks see you at the window" etc.
        // Subscribe to appropriate events
        recognizer.Recognizing += (s, e) =>
        {
            // Use this to send partial responses
            Console.WriteLine($"Partial: {e.Result.Text}");
        };

        recognizer.Recognized += (s, e) =>
        {
            var exit = ProcessRecognizedText(s, e);
            if (exit)
            {
                recognizer.StopContinuousRecognitionAsync().Wait(); //ConfigureAwait(false);
            }
        };

        recognizer.SessionStarted += (s, e) =>
        {
            Console.WriteLine("Session started event.");
        };

        recognizer.SessionStopped += (s, e) =>
        {
            Console.WriteLine("Session stopped event.");
            stopRecognition.TrySetResult(0);
        };

        recognizer.Canceled += (s, e) =>
        {
            Console.WriteLine(e.ErrorDetails);
            stopRecognition.TrySetResult(0);
        };

        // Instantiate new Order object
        _order = new Order();

        Console.WriteLine("Say something to get started, or \"Exit\" to quit.");
        await recognizer.StartContinuousRecognitionAsync().ConfigureAwait(false);

        // Need more understanding about this part
        Task.WaitAny(new[] { stopRecognition.Task });
    }
}
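// Editorial sketch, not part of the original sample: the Task.WaitAny(...) call above
// (the part flagged "Need more understanding") blocks a thread until the session ends.
// Since the method is already async, an alternative is to await the TaskCompletionSource
// directly and then stop recognition explicitly. Assumes the same recognizer and
// stopRecognition objects as in the sample above.
private static async Task WaitForStopAsync(IntentRecognizer recognizer, TaskCompletionSource<int> stopRecognition)
{
    await recognizer.StartContinuousRecognitionAsync().ConfigureAwait(false);

    // Completed by the SessionStopped or Canceled handlers.
    await stopRecognition.Task.ConfigureAwait(false);

    await recognizer.StopContinuousRecognitionAsync().ConfigureAwait(false);
}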
public IntentRegonisor()
{
    var config = SpeechConfig.FromSubscription(Secret.LuisPredictionKey, Secret.Region);
    var model = LanguageUnderstandingModel.FromAppId(Secret.LuisAppId);

    recognizer = new IntentRecognizer(config);
    recognizer.AddAllIntents(model);
}
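// Editorial sketch, not part of the original class: one way the recognizer field wired up
// in the constructor above could be used for a single utterance. The method name is
// hypothetical.
public async Task<string> RecognizeTopIntentAsync()
{
    var result = await recognizer.RecognizeOnceAsync().ConfigureAwait(false);

    // IntentId is only populated when the result reason is RecognizedIntent.
    return result.Reason == ResultReason.RecognizedIntent ? result.IntentId : null;
}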
async static Task Main(string[] args)
{
    const string WAKE_WORD = "hey computer";

    var speechConfig = SpeechConfig.FromSubscription("e073d2855d604ddda74ba6518ab2e6b3", "westeurope");
    var Intentconfig = SpeechConfig.FromSubscription("9051c66d5ba949ac84e32b01c37eb9b4", "westus");
    var audioConfig = AudioConfig.FromDefaultMicrophoneInput();
    var model = LanguageUnderstandingModel.FromAppId("7f7a9344-69b6-4582-a01d-19ffa3c9bed8");

    var continuousRecognizer = new SpeechRecognizer(speechConfig, audioConfig);
    var intentRecognizer = new IntentRecognizer(Intentconfig, audioConfig);
    intentRecognizer.AddAllIntents(model);

    var synthesizer = new SpeechSynthesizer(speechConfig);

    bool _waitingForCommand = false;

    continuousRecognizer.Recognized += async (s, e) =>
    {
        if (!_waitingForCommand)
        {
            if (e.Result.Reason == ResultReason.RecognizedSpeech)
            {
                Console.WriteLine($"RECOGNIZED: Text={e.Result.Text}");
                if (e.Result.Text.Contains(WAKE_WORD, StringComparison.CurrentCultureIgnoreCase))
                {
                    Console.WriteLine($"RECOGNIZED: {WAKE_WORD}");
                    _waitingForCommand = true;
                    await ParseCommand(synthesizer, await awaitCommand(intentRecognizer, synthesizer));
                    _waitingForCommand = false;
                    Console.WriteLine("Listening for wake word.");
                }
            }
        }
    };

    await continuousRecognizer.StartContinuousRecognitionAsync();

    Console.Write("Press any key!");
    Console.Read();
}
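// Editorial sketch: awaitCommand and ParseCommand are referenced above but not shown in
// this sample. A plausible shape for awaitCommand, assuming it prompts the user and
// returns the single-shot intent result; this body is an assumption, not the original
// implementation.
static async Task<IntentRecognitionResult> awaitCommand(IntentRecognizer intentRecognizer, SpeechSynthesizer synthesizer)
{
    await synthesizer.SpeakTextAsync("Yes?");            // hypothetical prompt
    return await intentRecognizer.RecognizeOnceAsync();  // one command utterance
}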
static async Task RecognizeIntentAsync()
{
    // simple speech recognition with intent
    var config = SpeechConfig.FromSubscription(luisKey, luisRegion);

    using (var recognizer = new IntentRecognizer(config))
    {
        var model = LanguageUnderstandingModel.FromAppId(luisAppId);
        // add LUIS intents, you have the option to add only selected intents
        recognizer.AddAllIntents(model);

        Console.WriteLine("Say something...");
        var result = await recognizer.RecognizeOnceAsync().ConfigureAwait(false);

        if (result.Reason == ResultReason.RecognizedIntent)
        {
            Console.WriteLine($"Recognized: Text = {result.Text}");
            Console.WriteLine($"Language Understanding JSON: {result.Properties.GetProperty(PropertyId.LanguageUnderstandingServiceResponse_JsonResult)}");
        }
        else if (result.Reason == ResultReason.RecognizedSpeech)
        {
            Console.WriteLine($"Recognized: Text = {result.Text}");
            Console.WriteLine("Intent not recognized.");
        }
        else if (result.Reason == ResultReason.NoMatch)
        {
            Console.WriteLine("Speech could not be recognized.");
        }
        else if (result.Reason == ResultReason.Canceled)
        {
            var cancellation = CancellationDetails.FromResult(result);
            Console.WriteLine($"Canceled. Reason = {cancellation.Reason}");
        }
    }
}
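// Editorial sketch for the "add only selected intents" option mentioned in the comment
// above: register individual LUIS intents instead of the whole model. Assumes the same
// recognizer and luisAppId as in the sample; the intent names and ids are placeholders.
var model = LanguageUnderstandingModel.FromAppId(luisAppId);
recognizer.AddIntent(model, "TurnOn", "id-turn-on");
recognizer.AddIntent(model, "TurnOff", "id-turn-off");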
private async void ConfigureIntentRecognizer()
{
    var config = SpeechConfig.FromSubscription("e4c237841b7043b9b2c73a432a85416c", "westus");
    config.SpeechRecognitionLanguage = "es-es";

    var stopRecognition = new TaskCompletionSource<int>();

    using (recognizer = new IntentRecognizer(config))
    {
        var model = LanguageUnderstandingModel.FromAppId("ce892100-78a7-48b0-8e09-5dc18b16996d");
        recognizer.AddAllIntents(model, "Id1");

        recognizer.Recognized += async (s, e) =>
        {
            await recognizedText.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
            {
                //recognizedText.Text = e.Result.Text;
                //var prediction = predictionResult.Prediction;
                //MyMessageBox(prediction.TopIntent);
            });

            try
            {
                var predictionResult = GetPredictionAsync(e.Result.Text).Result;
                var prediction = predictionResult.Prediction;

                if (prediction.TopIntent == "None")
                {
                    await recognizedText.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                    {
                        recognizedText.Text = "No entiendo la acción que me pides.";
                    });
                    muestraHora = false;
                    muestraTexto = false;
                }
                else if (prediction.TopIntent == "MuestraHora")
                {
                    await recognizedText.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                    {
                        var now = DateTime.Now;
                        //recognizedText.Text = now.ToString("HH:mm");
                        recognizedText.Text = "Son las " + now.Hour + ":" + now.Minute + ".";
                    });
                    muestraHora = true;
                    muestraTexto = false;
                }
                else if (prediction.TopIntent == "EscribeTexto")
                {
                    await recognizedText.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                    {
                        recognizedText.Text = "Esto es un texto.";
                    });
                    muestraTexto = true;
                    muestraHora = false;
                }
                else if (prediction.TopIntent == "OcultaHora")
                {
                    await recognizedText.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                    {
                        if (muestraHora)
                        {
                            recognizedText.Text = "Hora oculta.";
                            muestraHora = false;
                            muestraTexto = false;
                        }
                    });
                }
                else if (prediction.TopIntent == "OcultaTexto")
                {
                    await recognizedText.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                    {
                        if (muestraTexto)
                        {
                            recognizedText.Text = "Texto ocultado.";
                            muestraHora = false;
                            muestraTexto = false;
                        }
                    });
                }
                else if (prediction.TopIntent == "CambioColor")
                {
                    string color = "";
                    await recognizedText.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                    {
                        //string item = prediction.Entities.First().Value.ToString();
                        color = prediction.Entities.First().Value.ToString().Replace(System.Environment.NewLine, "");
                        color = color.Replace("[", "");
                        color = color.Replace("]", "");
                        color = color.Replace("\"", "");
                        color = color.Replace(" ", "");
                        recognizedText.Text = "Estableciendo fondo de color " + color + ".";
                        muestraHora = false;
                        muestraTexto = false;
                    });

                    try
                    {
                        if (colors.ContainsKey(color))
                        {
                            await rectangle.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                            {
                                rectangle.Fill = new SolidColorBrush(colors[color]);
                            });
                        }
                    }
                    catch (NullReferenceException) { }
                }
            }
            catch (System.AggregateException) { }

            /*
            if (e.Result.Reason == ResultReason.RecognizedIntent)
            {
                var jsonResponse = JObject.Parse(e.Result.Properties.GetProperty(PropertyId.LanguageUnderstandingServiceResponse_JsonResult));
                await textJson.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                {
                    textJson.Text = jsonResponse.ToString();
                });
                var intent = jsonResponse.SelectToken("topScoringIntent.intent").ToString();

                if (intent.Equals("CambioColor"))
                {
                    try
                    {
                        string color = jsonResponse.SelectToken("$..entities[?(@.type=='Color')].entity").ToString();
                        if (colors.ContainsKey(color))
                            await rectangle.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                            {
                                rectangle.Fill = new SolidColorBrush(colors[color]);
                            });
                    }
                    catch (NullReferenceException) { }
                }
            }
            else if (e.Result.Reason == ResultReason.RecognizedSpeech)
            {
                await recognizedText.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                {
                    recognizedText.Text = e.Result.Text;
                });
            }
            else if (e.Result.Reason == ResultReason.NoMatch)
            {
                await recognizedText.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                {
                    recognizedText.Text = "Speech could not be recognized.";
                });
            }
            */
        };

        recognizer.Canceled += (s, e) =>
        {
            Console.WriteLine($"CANCELED: Reason={e.Reason}");

            if (e.Reason == CancellationReason.Error)
            {
                Console.WriteLine($"CANCELED: ErrorCode={e.ErrorCode}");
                Console.WriteLine($"CANCELED: ErrorDetails={e.ErrorDetails}");
                Console.WriteLine($"CANCELED: Did you update the subscription info?");
            }
        };

        recognizer.SessionStopped += (s, e) =>
        {
            Console.WriteLine("\n Session stopped event.");
            Console.WriteLine("\nStop recognition.");
            stopRecognition.TrySetResult(0);
        };

        Console.WriteLine("Say something...");
        await recognizer.StartContinuousRecognitionAsync().ConfigureAwait(false);

        // Waits for completion.
        // Use Task.WaitAny to keep the task rooted.
        Task.WaitAny(new[] { stopRecognition.Task });

        // Stops recognition.
        await recognizer.StopContinuousRecognitionAsync().ConfigureAwait(false);
    }
}
public static async Task RecognizeOnceSpeechAsync(SpeechTranslationConfig config)
{
    var allCultures = CultureInfo.GetCultures(CultureTypes.AllCultures);

    // Creates a speech recognizer.
    using (var recognizer = new IntentRecognizer(config))
    {
        Console.WriteLine("Say something...");

        var model = LanguageUnderstandingModel.FromAppId(ConfigurationManager.AppSettings.Get("LUISId"));
        recognizer.AddAllIntents(model);

        var result = await recognizer.RecognizeOnceAsync();

        // Checks result.
        if (result.Reason == ResultReason.RecognizedIntent)
        {
            Console.WriteLine($"RECOGNIZED: Text={result.Text}");
            Console.WriteLine($" Intent Id: {result.IntentId}.");
            Console.WriteLine($" Language Understanding JSON: {result.Properties.GetProperty(PropertyId.LanguageUnderstandingServiceResponse_JsonResult)}.");

            if (result.IntentId == "Translate")
            {
                var luisJson = JObject.Parse(result.Properties.GetProperty(PropertyId.LanguageUnderstandingServiceResponse_JsonResult));
                string targetLng = luisJson["entities"].First(x => x["type"].ToString() == "TargetLanguage")["entity"].ToString();
                string text = luisJson["entities"].First(x => x["type"].ToString() == "Text")["entity"].ToString();

                var lng = allCultures.FirstOrDefault(c => c.DisplayName.ToLower() == targetLng.ToLower())
                    ?? allCultures.FirstOrDefault(c => c.DisplayName.ToLower() == "english");

                // Note: the translation target is hard-coded to "de-DE" here even though the
                // requested culture (lng) is resolved above; lng is only used to pick the voice.
                var translated = Translate.TranslateText("de-DE", text);
                Console.WriteLine("Translation: " + translated);

                var synth = new System.Speech.Synthesis.SpeechSynthesizer();

                // Configure the audio output.
                synth.SetOutputToDefaultAudioDevice();

                // Speak a string.
                synth.SelectVoice(synth.GetInstalledVoices().First(x => x.VoiceInfo.Culture.TwoLetterISOLanguageName == lng.TwoLetterISOLanguageName).VoiceInfo.Name);
                synth.Speak(translated);
            }
        }
        else if (result.Reason == ResultReason.RecognizedSpeech)
        {
            Console.WriteLine($"RECOGNIZED: Text={result.Text}");
            Console.WriteLine($" Intent not recognized.");
        }
        else if (result.Reason == ResultReason.NoMatch)
        {
            Console.WriteLine($"NOMATCH: Speech could not be recognized.");
        }
        else if (result.Reason == ResultReason.Canceled)
        {
            var cancellation = CancellationDetails.FromResult(result);
            Console.WriteLine($"CANCELED: Reason={cancellation.Reason}");

            if (cancellation.Reason == CancellationReason.Error)
            {
                Console.WriteLine($"CANCELED: ErrorCode={cancellation.ErrorCode}");
                Console.WriteLine($"CANCELED: ErrorDetails={cancellation.ErrorDetails}");
                Console.WriteLine($"CANCELED: Did you update the subscription info?");
            }
        }
    }
}
public static async Task RecognizeSpeechAsync()
{
    initMqttClient(mqttBrokerAddress);

    // Creates an instance of a speech config with specified subscription key and service region.
    // Replace with your own subscription key and service region (e.g., "westus").
    var intentConfig = SpeechConfig.FromSubscription("", "westus2");

    // Creates a speech recognizer.
    using (var intentRecognizer = new IntentRecognizer(intentConfig))
    {
        // The TaskCompletionSource to stop recognition.
        var stopRecognition = new TaskCompletionSource<int>();

        var model = LanguageUnderstandingModel.FromAppId("");
        intentRecognizer.AddAllIntents(model);

        // Subscribes to events.
        intentRecognizer.Recognizing += (s, e) =>
        {
            Console.WriteLine($"RECOGNIZING: Text={e.Result.Text}");
        };

        intentRecognizer.Recognized += (s, e) =>
        {
            if (e.Result.Reason == ResultReason.RecognizedIntent)
            {
                Console.WriteLine($"RECOGNIZED: Text={e.Result.Text}");
                Console.WriteLine($" Intent Id: {e.Result.IntentId}.");
                Console.WriteLine($" Language Understanding JSON: {e.Result.Properties.GetProperty(PropertyId.LanguageUnderstandingServiceResponse_JsonResult)}.");

                if (e.Result.IntentId == "FollowPerson")
                {
                    var jsonResult = e.Result.Properties.GetProperty(PropertyId.LanguageUnderstandingServiceResponse_JsonResult);
                    dynamic stuff = JObject.Parse(jsonResult);
                    try
                    {
                        string name = stuff.entities[0].entity;
                        Console.WriteLine(name);
                        int id = nameToIdDict.GetValueOrDefault(name);
                        mqttClient.Publish("bcx19-seek-the-geek/tag/control", Encoding.UTF8.GetBytes($"target.{name}"), MqttMsgBase.QOS_LEVEL_EXACTLY_ONCE, false);
                        Console.WriteLine("MQTT Message sent");
                    }
                    catch
                    {
                        Console.WriteLine("Error");
                    }
                }
                else if (e.Result.IntentId == "Rover.Stop")
                {
                    mqttClient.Publish("bcx19-seek-the-geek/tag/control", Encoding.UTF8.GetBytes("rover.stop"), MqttMsgBase.QOS_LEVEL_EXACTLY_ONCE, false);
                    Console.WriteLine("MQTT Message sent");
                }
                else if (e.Result.IntentId == "Rover.Start")
                {
                    mqttClient.Publish("bcx19-seek-the-geek/tag/control", Encoding.UTF8.GetBytes("rover.start"), MqttMsgBase.QOS_LEVEL_EXACTLY_ONCE, false);
                    Console.WriteLine("MQTT Message sent");
                }
                else if (e.Result.IntentId == "Rover.Left")
                {
                    mqttClient.Publish("bcx19-seek-the-geek/tag/control", Encoding.UTF8.GetBytes("rover.left"), MqttMsgBase.QOS_LEVEL_EXACTLY_ONCE, false);
                    Console.WriteLine("MQTT Message sent");
                }
                else if (e.Result.IntentId == "Rover.Right")
                {
                    mqttClient.Publish("bcx19-seek-the-geek/tag/control", Encoding.UTF8.GetBytes("rover.right"), MqttMsgBase.QOS_LEVEL_EXACTLY_ONCE, false);
                    Console.WriteLine("MQTT Message sent");
                }
                else if (e.Result.IntentId == "Rover.Exit")
                {
                    mqttClient.Publish("bcx19-seek-the-geek/tag/control", Encoding.UTF8.GetBytes("rover.exit"), MqttMsgBase.QOS_LEVEL_EXACTLY_ONCE, false);
                    Console.WriteLine("MQTT Message sent");
                }
                else if (e.Result.IntentId == "Rover.Back")
                {
                    mqttClient.Publish("bcx19-seek-the-geek/tag/control", Encoding.UTF8.GetBytes("rover.back"), MqttMsgBase.QOS_LEVEL_EXACTLY_ONCE, false);
                    Console.WriteLine("MQTT Message sent");
                }
            }
            else if (e.Result.Reason == ResultReason.RecognizedSpeech)
            {
                Console.WriteLine($"RECOGNIZED: Text={e.Result.Text}");
                Console.WriteLine($" Intent not recognized.");
            }
            else if (e.Result.Reason == ResultReason.NoMatch)
            {
                Console.WriteLine($"NOMATCH: Speech could not be recognized.");
            }
        };

        intentRecognizer.Canceled += (s, e) =>
        {
            Console.WriteLine($"CANCELED: Reason={e.Reason}");

            if (e.Reason == CancellationReason.Error)
            {
                Console.WriteLine($"CANCELED: ErrorCode={e.ErrorCode}");
                Console.WriteLine($"CANCELED: ErrorDetails={e.ErrorDetails}");
                Console.WriteLine($"CANCELED: Did you update the subscription info?");
            }

            stopRecognition.TrySetResult(0);
        };

        intentRecognizer.SessionStarted += (s, e) =>
        {
            Console.WriteLine("\n Session started event.");
        };

        intentRecognizer.SessionStopped += (s, e) =>
        {
            Console.WriteLine("\n Session stopped event.");
            Console.WriteLine("\nStop recognition.");
            stopRecognition.TrySetResult(0);
        };

        // Starts continuous recognition. Uses StopContinuousRecognitionAsync() to stop recognition.
        Console.WriteLine("Say something...");
        await intentRecognizer.StartContinuousRecognitionAsync().ConfigureAwait(false);

        // Waits for completion.
        // Use Task.WaitAny to keep the task rooted.
        Task.WaitAny(new[] { stopRecognition.Task });

        // Stops recognition.
        await intentRecognizer.StopContinuousRecognitionAsync().ConfigureAwait(false);
    }
}
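// Editorial sketch, not part of the original sample: the repeated Rover.* branches in the
// Recognized handler above could be collapsed into a lookup from intent id to MQTT payload.
// Topic, payloads, and QoS are taken from the handler; the lookup table itself is an
// assumption about how one might refactor it.
static readonly Dictionary<string, string> RoverCommands = new Dictionary<string, string>
{
    { "Rover.Stop",  "rover.stop"  },
    { "Rover.Start", "rover.start" },
    { "Rover.Left",  "rover.left"  },
    { "Rover.Right", "rover.right" },
    { "Rover.Exit",  "rover.exit"  },
    { "Rover.Back",  "rover.back"  },
};

static void PublishRoverCommand(string intentId)
{
    if (RoverCommands.TryGetValue(intentId, out var payload))
    {
        mqttClient.Publish("bcx19-seek-the-geek/tag/control", Encoding.UTF8.GetBytes(payload), MqttMsgBase.QOS_LEVEL_EXACTLY_ONCE, false);
        Console.WriteLine("MQTT Message sent");
    }
}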
private async static Task RecognizeSpeechAndIntentAsync()
{
    var config = SpeechConfig.FromEndpoint(
        new Uri("https://eastus2.api.cognitive.microsoft.com/sts/v1.0/issuetoken"),
        "MySuscriptionKey");
    config.SpeechRecognitionLanguage = "es-ES";

    using var speechRecognition = new IntentRecognizer(config);

    var luisModel = LanguageUnderstandingModel.FromAppId("ba417c40-bb51-4704-966a-f9c58afaf1c8");
    speechRecognition.AddAllIntents(luisModel);
    speechRecognition.AddIntent("chao");

    var endRecognition = new TaskCompletionSource<int>();

    speechRecognition.Recognized += (s, e) =>
    {
        switch (e.Result.Reason)
        {
            case ResultReason.NoMatch:
                if (!endRecognition.Task.IsCompleted)
                {
                    Console.WriteLine($"No entendí na':{e.Result.Text}");
                }
                break;
            case ResultReason.Canceled:
                Console.WriteLine($"Se canceló la escucha:{e.Result.Text}");
                break;
            case ResultReason.RecognizingSpeech:
                Console.WriteLine($"Escuchando:{e.Result.Text}");
                break;
            case ResultReason.RecognizedSpeech:
                Console.WriteLine($"Entendí esto:{e.Result.Text}");
                break;
            case ResultReason.RecognizedIntent:
                Console.WriteLine($"Detectado comando de voz:{e.Result.Text}");
                Console.WriteLine($"Saliendo ....");
                endRecognition.TrySetResult(0);
                break;
            default:
                Console.WriteLine($"LLegué aquí porque:{e.Result.Reason}");
                break;
        }
    };

    speechRecognition.Canceled += (s, e) =>
    {
        if (e.Reason == CancellationReason.Error)
        {
            Console.WriteLine($"ocurrió un error:{e.ErrorCode} => {e.ErrorDetails}");
        }
        endRecognition.TrySetResult(0);
    };

    speechRecognition.SessionStopped += (s, e) =>
    {
        Console.WriteLine("Deteniendo");
        endRecognition.TrySetResult(0);
    };

    Console.WriteLine("Ahora empieza a hablar...");
    await speechRecognition.StartContinuousRecognitionAsync().ConfigureAwait(false);

    Task.WaitAny(new[] { endRecognition.Task });

    await speechRecognition.StopContinuousRecognitionAsync().ConfigureAwait(false);
}
static async Task ContinuousRecognizeIntentAsync()
{
    // continuous speech recognition and detect intent
    var config = SpeechConfig.FromSubscription(luisKey, luisRegion);

    using (var recognizer = new IntentRecognizer(config))
    {
        var stopRecognition = new TaskCompletionSource<int>();

        var model = LanguageUnderstandingModel.FromAppId(luisAppId);
        recognizer.AddAllIntents(model);

        recognizer.Recognizing += (s, e) =>
        {
            Console.WriteLine($"Recognizing: Text = {e.Result.Text}");
        };

        recognizer.Recognized += (s, e) =>
        {
            if (e.Result.Reason == ResultReason.RecognizedIntent)
            {
                Console.WriteLine($"Recognized: Text = {e.Result.Text}");
                LUIS.GetLuisResult(e.Result.Properties.GetProperty(PropertyId.LanguageUnderstandingServiceResponse_JsonResult));
                string intent = LUIS.GetLuisIntent(e.Result.Properties.GetProperty(PropertyId.LanguageUnderstandingServiceResponse_JsonResult));
                LUIS.ResponseToLuisIntent(intent, e.Result.Text);
            }
            else if (e.Result.Reason == ResultReason.RecognizedSpeech)
            {
                Console.WriteLine($"Recognized: Text = {e.Result.Text}");
                Console.WriteLine("Intent not recognized.");
            }
        };

        recognizer.Canceled += (s, e) =>
        {
            Console.WriteLine($"Canceled: Reason={e.Reason}");

            if (e.Reason == CancellationReason.Error)
            {
                Console.WriteLine($"CANCELED: ErrorCode={e.ErrorCode}");
                Console.WriteLine($"CANCELED: ErrorDetails={e.ErrorDetails}");
                Console.WriteLine($"CANCELED: Did you update the subscription info?");
            }

            stopRecognition.TrySetResult(0);
        };

        recognizer.SessionStarted += (s, e) =>
        {
            Console.WriteLine("\n Session started event.");
        };

        recognizer.SessionStopped += (s, e) =>
        {
            Console.WriteLine("\n Session stopped.");
            stopRecognition.TrySetResult(0);
        };

        Console.WriteLine("[Continuous Recognition] Say something...");
        await recognizer.StartContinuousRecognitionAsync().ConfigureAwait(false);

        do
        {
            Console.WriteLine("Press Enter to stop");
        } while (Console.ReadKey().Key != ConsoleKey.Enter);

        await recognizer.StopContinuousRecognitionAsync().ConfigureAwait(false);
    }
}