/// <summary>
/// Builds an <c>AIRequest</c> from a recognition result: the primary text plus up to
/// five alternates, with each confidence converted to a float via ConfidenceToFloat.
/// </summary>
/// <param name="recognitionResults">The recognition result to convert.</param>
/// <returns>An <c>AIRequest</c> whose Query/Confidence arrays are index-aligned.</returns>
private AIRequest CreateAIRequest(SpeechRecognitionResult recognitionResults)
{
    // Primary result goes first; alternates (if any) follow in recognizer order.
    var queryTexts = new List<string> { recognitionResults.Text };
    var queryConfidences = new List<float> { ConfidenceToFloat(recognitionResults.Confidence) };

    var aiRequest = new AIRequest();

    var alternates = recognitionResults.GetAlternates(5);
    if (alternates != null)
    {
        foreach (var alternate in alternates)
        {
            queryTexts.Add(alternate.Text);
            queryConfidences.Add(ConfidenceToFloat(alternate.Confidence));
        }
    }

    aiRequest.Query = queryTexts.ToArray();
    aiRequest.Confidence = queryConfidences.ToArray();
    return aiRequest;
}
/// <summary>
/// Records a single utterance from the default microphone, constrained to the
/// phrases in <c>acceptedUserInput</c>, and returns the recognized text.
/// A non-High-confidence result is accepted only when the top alternate's raw
/// confidence exceeds 0.5; otherwise an empty string is returned.
/// </summary>
/// <returns>The recognized text, or <see cref="string.Empty"/> when nothing usable was heard.</returns>
public async Task<string> RecordSpeechFromMicrophoneAsync()
{
    string recognizedText = string.Empty;

    using (SpeechRecognizer recognizer = new SpeechRecognizer(SpeechRecognizer.SystemSpeechLanguage))
    {
        // Restrict recognition to the app's known phrase list before compiling.
        recognizer.Constraints.Add(new SpeechRecognitionListConstraint(acceptedUserInput));
        await recognizer.CompileConstraintsAsync();

        SpeechRecognitionResult result = await recognizer.RecognizeAsync();

        if (result.Status == SpeechRecognitionResultStatus.Success)
        {
            if (result.Confidence == SpeechRecognitionConfidence.High)
            {
                recognizedText = result.Text;
            }
            else
            {
                // BUG FIX: the original called alternatives.First() without checking
                // that GetAlternates returned anything — an empty list would throw
                // InvalidOperationException. Guard before dereferencing.
                IReadOnlyList<SpeechRecognitionResult> alternatives = result.GetAlternates(1);
                if (alternatives != null && alternatives.Count > 0 && alternatives[0].RawConfidence > 0.5)
                {
                    recognizedText = alternatives[0].Text;
                }
            }
        }
    }

    return recognizedText;
}
/// <summary>
/// Handles events fired when a result is generated in continuous recognition mode.
/// Each non-rejected alternate has its trailing character stripped (presumably the
/// recognizer's appended punctuation — TODO confirm) and, if the remainder parses
/// as an integer, is forwarded as a temperature command; otherwise the user is
/// asked for a number.
/// </summary>
/// <param name="sender">The recognition session that generated this result.</param>
/// <param name="args">Details about the recognized speech.</param>
private static void OnContinuousRecognitionSessionResultGeneratedHandler(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionResultGeneratedEventArgs args)
{
    SpeechRecognitionResult result = args.Result;

    if (result.Confidence == SpeechRecognitionConfidence.Rejected)
    {
        UiUtils.ShowNotification("Sorry, could not get that. Can you repeat?");
        return;
    }

    foreach (var currentAltResult in result.GetAlternates(s_maxRecognitionResultAlternates))
    {
        // Alternates beyond the first rejected one are not worth processing.
        if (currentAltResult.Confidence == SpeechRecognitionConfidence.Rejected)
        {
            break;
        }

        // BUG FIX: Text.Remove(Text.Length - 1) threw ArgumentOutOfRangeException
        // when Text was empty; only strip the trailing character when one exists.
        string text = currentAltResult.Text;
        string speechResult = text.Length > 0 ? text.Remove(text.Length - 1) : text;

        if (Int32.TryParse(speechResult, out _))
        {
            MainPage.SetAudioTempCommand(speechResult);
        }
        else
        {
            UiUtils.ShowNotification("Your message could not be parsed as number. Please specify a number!");
        }
    }
}
/// <summary>
/// Performs a single speech recognition (optionally with the system UI) and
/// returns the text of the highest-confidence non-rejected alternate, or an
/// empty string when recognition failed or everything was rejected.
/// </summary>
/// <param name="withUI">When true, uses the system recognition UI; otherwise recognizes silently.</param>
/// <returns>The best alternate's text, or <see cref="string.Empty"/>.</returns>
public async Task<string> GetTextFromSpeechAsync(bool withUI = false)
{
    if (_recognizer == null)
    {
        await InitializeRecognizerAsync();
    }

    SpeechRecognitionResult recognition = withUI
        ? await _recognizer.RecognizeWithUIAsync()
        : await _recognizer.RecognizeAsync();

    if (recognition.Status == SpeechRecognitionResultStatus.Success &&
        recognition.Confidence != SpeechRecognitionConfidence.Rejected)
    {
        // Interpolation calls ToString() implicitly; the explicit calls were redundant.
        Debug.WriteLine($"[Speech to Text]: result: {recognition.Text}, {recognition.RawConfidence}, {recognition.Confidence}");

        var alternativeResults = recognition.GetAlternates(MaxRecognitionResultAlternates);
        foreach (var r in alternativeResults)
        {
            Debug.WriteLine($"[Speech to Text]: alternative: {r.Text}, {r.RawConfidence}, {r.Confidence}");
        }

        // SpeechRecognitionConfidence is ordered High=0, Medium=1, Low=2, Rejected=3,
        // so ordering by the enum value and taking the first non-rejected entry is
        // exactly the original High -> Medium -> Low cascade (OrderBy is stable, so
        // ties keep the recognizer's order, matching FirstOrDefault per tier).
        var topResult = alternativeResults
            .Where(r => r.Confidence != SpeechRecognitionConfidence.Rejected)
            .OrderBy(r => r.Confidence)
            .FirstOrDefault();

        if (topResult != null)
        {
            return topResult.Text;
        }
    }

    return string.Empty;
}
/// <summary>
/// Converts a speech recognition result into an <c>AIRequest</c>: the top result
/// followed by up to five alternates, with parallel confidence values.
/// </summary>
/// <param name="recognitionResults">The recognition result to convert.</param>
/// <returns>An <c>AIRequest</c> with index-aligned Query and Confidence arrays.</returns>
private AIRequest CreateAIRequest(SpeechRecognitionResult recognitionResults)
{
    var phrases = new List<string> { recognitionResults.Text };
    var scores = new List<float> { ConfidenceToFloat(recognitionResults.Confidence) };

    var aiRequest = new AIRequest();

    // Alternates may be null; append each alternate's text and converted confidence.
    var alternates = recognitionResults.GetAlternates(5);
    if (alternates != null)
    {
        foreach (var candidate in alternates)
        {
            phrases.Add(candidate.Text);
            scores.Add(ConfidenceToFloat(candidate.Confidence));
        }
    }

    aiRequest.Query = phrases.ToArray();
    aiRequest.Confidence = scores.ToArray();
    return aiRequest;
}