/// <summary>
/// Forwards a recognized speech phrase to the bot as a synthetic Skype
/// activity and dispatches it through the Bot Builder dialog stack.
/// </summary>
/// <param name="recognizedPhrase">The phrase produced by the speech recognizer.</param>
private async Task SendToBot(RecognizedPhrase recognizedPhrase)
{
    // Impersonate a Skype-channel message so the recognized text flows
    // through the standard bot pipeline.
    var activity = new Activity
    {
        From = new ChannelAccount { Id = conversationResult.Id },
        Conversation = new ConversationAccount { Id = conversationResult.Id },
        Recipient = new ChannelAccount { Id = "Bot" },
        ServiceUrl = "https://skype.botframework.com",
        ChannelId = "skype",
        Text = recognizedPhrase.DisplayText,
    };

    using (var scope = Microsoft.Bot.Builder.Dialogs.Conversation
        .Container.BeginLifetimeScope(DialogModule.LifetimeScopeTag, Configure))
    {
        scope.Resolve<IMessageActivity>(TypedParameter.From((IMessageActivity)activity));
        DialogModule_MakeRoot.Register(scope, () => new Dialogs.RentLuisDialog());

        var bot = scope.Resolve<IPostToBot>();
        await bot.PostAsync(activity, CancellationToken.None);
    }
}
/// <summary>
/// Appends diagnostic details about a recognition result (audio position,
/// confidence, grammar, and replacement text) to <paramref name="text"/>
/// and writes the assembled report to the console. Recognizer audio
/// positions are additionally shown on the UI label.
/// </summary>
/// <param name="text">Seed text the details are appended to; if null, nothing is done.</param>
/// <param name="result">The recognized phrase to describe; if null, nothing is done.</param>
/// <param name="recognizer">The recognizer used for position info; may be null.</param>
internal static void DisplayBasicPhraseInfo(string text, RecognizedPhrase result, SpeechRecognizer recognizer)
{
    if (result == null || text == null)
    {
        return;
    }

    // Blank
    if (recognizer != null)
    {
        // Clear
        // NOTE(review): positions go to the UI label, not into the local
        // report text — this matches the original behavior.
        label.Text += String.Format(" Recognizer currently at: {0} mSec\n" + " Audio Device currently at: {1} mSec\n", recognizer.RecognizerAudioPosition.TotalMilliseconds, recognizer.AudioPosition.TotalMilliseconds);
    }

    // Engine results expose the captured audio; emulated input has none.
    RecognitionResult recResult = result as RecognitionResult;
    if (recResult != null)
    {
        RecognizedAudio resultRecognizedAudio = recResult.Audio;
        if (resultRecognizedAudio == null)
        {
            text += String.Format(" Emulated input\n");
        }
        else
        {
            text += String.Format(
                " Candidate Phrase at: {0} mSec\n" +
                " Phrase Length: {1} mSec\n" +
                " Input State Time: {2}\n" +
                " Input Format: {3}\n",
                resultRecognizedAudio.AudioPosition.TotalMilliseconds,
                resultRecognizedAudio.Duration.TotalMilliseconds,
                resultRecognizedAudio.StartTime.ToShortTimeString(),
                resultRecognizedAudio.Format.EncodingFormat.ToString());
        }
    }

    text += String.Format(" Confidence Level: {0}\n", result.Confidence);

    if (result.Grammar != null)
    {
        text += String.Format(
            " Recognizing Grammar: {0}\n" + " Recognizing Rule: {1}\n",
            ((result.Grammar.Name != null) ? (result.Grammar.Name) : "None"),
            ((result.Grammar.RuleName != null) ? (result.Grammar.RuleName) : "None"));
    }

    if (result.ReplacementWordUnits.Count != 0)
    {
        text += String.Format(" Replacement text:\n");
        foreach (ReplacementText rep in result.ReplacementWordUnits)
        {
            text += String.Format(" At index {0} for {1} words. Text: {2}\n", rep.FirstWordIndex, rep.CountOfWords, rep.Text);
        }
        text += String.Format("\n\n");
    }

    // BUG FIX: the original only printed the report when replacement text
    // was present; write it for every recognized phrase.
    Console.WriteLine(text);
}
/// <summary>
/// Sends a recognized phrase to the bot, building the conversation root
/// from the supplied dialog factory.
/// </summary>
/// <param name="recognizedPhrase">The phrase produced by the speech recognizer.</param>
/// <param name="dialog">Factory that creates the root dialog for this turn.</param>
private async Task SendToBot(RecognizedPhrase recognizedPhrase, Func<IDialog<object>> dialog)
{
    // Fake a Skype-channel activity so locally recognized text flows
    // through the standard Bot Builder pipeline.
    var activity = new Activity();
    activity.From = new ChannelAccount { Id = _conversationResult.Id };
    activity.Conversation = new ConversationAccount { Id = _conversationResult.Id };
    activity.Recipient = new ChannelAccount { Id = "Bot" };
    activity.ServiceUrl = "https://skype.botframework.com";
    activity.ChannelId = "skype";
    activity.Locale = "pt-Br";
    activity.Text = recognizedPhrase.DisplayText;

    using (var lifetimeScope = Conversation.Container.BeginLifetimeScope(DialogModule.LifetimeScopeTag, Configure))
    {
        lifetimeScope.Resolve<IMessageActivity>(TypedParameter.From((IMessageActivity)activity));
        DialogModule_MakeRoot.Register(lifetimeScope, dialog);
        await lifetimeScope.Resolve<IPostToBot>().PostAsync(activity, CancellationToken.None);
    }
}
/// <summary>
/// Masks a detected PII entity inside the top n-best hypothesis of a
/// recognized phrase. Numeric quantities are masked digit-for-digit with
/// '#'; other entities are replaced by a "#Type#" or "#Type-SubType#" tag.
/// </summary>
/// <param name="recognizedPhrase">The phrase whose display text is redacted in place.</param>
/// <param name="entity">The entity (offset/length into the display text) to mask.</param>
/// <returns>The same phrase instance, with its top hypothesis redacted.</returns>
private static RecognizedPhrase RedactEntityInRecognizedPhrase(RecognizedPhrase recognizedPhrase, TextAnalyticsEntity entity)
{
    if (!IsMaskableEntityType(entity) || !recognizedPhrase.NBest.Any())
    {
        return recognizedPhrase;
    }

    var nBest = recognizedPhrase.NBest.FirstOrDefault();
    var displayForm = nBest.Display;
    var preMask = displayForm.Substring(0, entity.Offset);
    var postMask = displayForm.Substring(entity.Offset + entity.Length, displayForm.Length - (entity.Offset + entity.Length));

    // BUG FIX: the original called entity.SubType.Equals(...) directly and
    // threw a NullReferenceException for null SubType — the else-branch
    // below shows SubType is optional. string.Equals is null-safe.
    if (entity.Type == EntityType.Quantity
        && string.Equals(entity.SubType, "Number", StringComparison.OrdinalIgnoreCase))
    {
        // Preserve the original length so surrounding offsets stay valid.
        displayForm = preMask + new string('#', entity.Length) + postMask;
    }
    else if (!string.IsNullOrEmpty(entity.SubType))
    {
        displayForm = $"{preMask}#{entity.Type}-{entity.SubType}#{postMask}";
    }
    else
    {
        displayForm = $"{preMask}#{entity.Type}#{postMask}";
    }

    nBest.Display = displayForm;
    return recognizedPhrase;
}
/// <summary>
/// Routes a recognized music command to the processor; any failure is
/// reported through the invalid-command response.
/// </summary>
/// <param name="e">The recognized phrase carrying the spoken command.</param>
public void MusicCommand(RecognizedPhrase e)
{
    try
    {
        var response = Processor.MusicCommand(e.Text);
        SpeechHandler.SetResponse(response);
    }
    catch (Exception)
    {
        SpeechHandler.Invalid();
    }
}
/// <summary>
/// Handles a recognized conversion command; the last spoken word is
/// treated as the number to convert.
/// </summary>
/// <param name="e">The recognized phrase carrying the spoken command.</param>
public void Converter(RecognizedPhrase e)
{
    // The value to convert is the final word of the phrase.
    var lastWord = e.Words[e.Words.Count - 1].Text;
    try
    {
        var response = Processor.Converter(e.Text, lastWord);
        SpeechHandler.SetResponse(response);
    }
    catch (Exception)
    {
        SpeechHandler.Invalid();
    }
}
/// <summary>
/// Types out everything the user said after the word "this".
/// </summary>
/// <param name="e">The recognized phrase carrying the spoken command.</param>
public void Type(RecognizedPhrase e)
{
    try
    {
        // Everything after "this" is the payload to type; ordinal search
        // avoids culture-specific matching surprises (CA1310).
        // BUG FIX: computed inside the try so a malformed/short phrase
        // falls through to the invalid-command response instead of
        // throwing an unhandled ArgumentOutOfRangeException.
        string toBeTyped = e.Text.Substring(e.Text.IndexOf("this", StringComparison.Ordinal) + 4);
        Processor.Type(toBeTyped);
    }
    catch (Exception)
    {
        SpeechHandler.Invalid();
    }
}
/// <summary>
/// Runs a search using the text following the first ':' in the phrase.
/// </summary>
/// <param name="e">The recognized phrase carrying the spoken command.</param>
public void Search(RecognizedPhrase e)
{
    // FIX: removed the unused local "len"; use the char overload of IndexOf.
    // If no ':' is present, IndexOf returns -1 and the whole phrase is used.
    string searchPrompt = e.Text.Substring(e.Text.IndexOf(':') + 1);
    try
    {
        SpeechHandler.SetResponse(Processor.Search(searchPrompt));
    }
    catch (Exception)
    {
        SpeechHandler.Invalid();
    }
}
/// <summary>
/// Opens an application named by the final word of the command and echoes
/// the processor's response followed by the application name.
/// </summary>
/// <param name="e">The recognized phrase carrying the spoken command.</param>
public void Opener(RecognizedPhrase e)
{
    int len = e.Words.Count;
    // The application name is the last spoken word.
    string app = e.Words[len - 1].Text;
    try
    {
        var response = Processor.Opener(app) + app;
        SpeechHandler.SetResponse(response);
    }
    catch (Exception)
    {
        SpeechHandler.Invalid();
    }
}
/// <summary>
/// Handles a recognized random-number command; the lower and upper bounds
/// are taken from the words three-from-last and last in the phrase
/// (e.g. "... between &lt;low&gt; and &lt;high&gt;").
/// </summary>
/// <param name="e">The recognized phrase carrying the spoken command.</param>
public void RandomNum(RecognizedPhrase e)
{
    try
    {
        // BUG FIX: the word indexing was outside the try, so a phrase with
        // fewer than three words threw an unhandled
        // ArgumentOutOfRangeException instead of reaching Invalid().
        int len = e.Words.Count;
        string numOne = e.Words[len - 3].Text;
        string numTwo = e.Words[len - 1].Text;
        SpeechHandler.SetResponse(Processor.RandomNum(numOne, numTwo));
    }
    catch (Exception)
    {
        SpeechHandler.Invalid();
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="RecognitionResult"/> class,
/// copying the status and materializing each n-best entry as a
/// <see cref="RecognizedPhrase"/>.
/// </summary>
/// <param name="speechPhraseMessage">
/// The speech.phrase message returned by the service.
/// </param>
internal RecognitionResult(SpeechPhraseMessage speechPhraseMessage)
{
    this.RecognitionStatus = speechPhraseMessage.RecognitionStatus;

    var nBest = speechPhraseMessage.NBest;
    if (nBest != null)
    {
        var phrases = new RecognizedPhrase[nBest.Length];
        for (int index = 0; index < phrases.Length; index++)
        {
            phrases[index] = new RecognizedPhrase(nBest[index]);
        }

        this.Results = phrases;
    }
}
/// <summary>
/// Copies sentence-level sentiment scores onto the segment's top n-best
/// hypothesis, creating the Sentiment holder on demand.
/// </summary>
/// <param name="recognizedPhrase">The segment whose top hypothesis receives the scores.</param>
/// <param name="sentence">The analyzed sentence providing the scores.</param>
/// <returns>The same phrase instance, for chaining.</returns>
private static RecognizedPhrase AddSentimentToSegment(RecognizedPhrase recognizedPhrase, TextAnalyticsSentence sentence)
{
    var topHypothesis = recognizedPhrase.NBest.FirstOrDefault();
    if (topHypothesis == null)
    {
        // No hypotheses — nothing to annotate.
        return recognizedPhrase;
    }

    if (topHypothesis.Sentiment == null)
    {
        topHypothesis.Sentiment = new Sentiment();
    }

    var scores = sentence.SentenceScores;
    topHypothesis.Sentiment.Negative = scores.Negative;
    topHypothesis.Sentiment.Positive = scores.Positive;
    topHypothesis.Sentiment.Neutral = scores.Neutral;

    return recognizedPhrase;
}
/// <summary>
/// Builds a SpeechTranscript from the accumulated final recognition
/// results (per-utterance phrases plus combined lexical/ITN/masked/display
/// text and total duration) and serializes it to JSON.
/// </summary>
/// <returns>The transcript serialized as a JSON string.</returns>
private string CreateTranscriptJson()
{
    List<RecognizedPhrase> recognizedPhrases = new List<RecognizedPhrase>();
    int totalduration = 0;
    string totaldisplay = string.Empty;
    string totallexical = string.Empty;
    string totalitn = string.Empty;
    string totalmasked = string.Empty;

    foreach (var utt in FinalResultsCumulative)
    {
        // NOTE(review): PadRight(1, ' ') only pads the empty string up to a
        // single space — it does NOT insert a separator after non-empty
        // text, so consecutive utterances run together. It looks like
        // `+ " "` was intended; confirm before changing the output format.
        totaldisplay = totaldisplay + utt.DisplayText.PadRight(1, ' ');
        if (utt.NBest != null && utt.NBest.Count > 0)
        {
            totallexical = totallexical + utt.NBest[0].Lexical.PadRight(1, ' ');
            totalitn = totalitn + utt.NBest[0].ITN.PadRight(1, ' ');
            totalmasked = totalmasked + utt.NBest[0].MaskedITN.PadRight(1, ' ');
        }

        totalduration = totalduration + utt.Duration;

        // Offsets/durations arrive in milliseconds; the transcript schema
        // wants both a millisecond string and a tick count.
        var durationTicks = new TimeSpan(0, 0, 0, 0, utt.Duration).Ticks;
        var offsetTicks = new TimeSpan(0, 0, 0, 0, utt.Offset).Ticks;
        RecognizedPhrase recognizedPhrase = new RecognizedPhrase(
            utt.RecognitionStatus,
            this.channel,
            0,
            utt.Offset.ToString(CultureInfo.InvariantCulture.NumberFormat),
            utt.Duration.ToString(CultureInfo.InvariantCulture.NumberFormat),
            offsetTicks,
            durationTicks,
            utt.NBest);
        recognizedPhrases.Add(recognizedPhrase);
    }

    var totalDurationTicks = new TimeSpan(0, 0, 0, 0, totalduration).Ticks;
    CombinedRecognizedPhrase combined = new CombinedRecognizedPhrase(this.channel, totallexical, totalitn, totalmasked, totaldisplay, null);
    string timestamp = DateTime.UtcNow.ToString(CultureInfo.InvariantCulture.DateTimeFormat);
    SpeechTranscript transcript = new SpeechTranscript(
        this.fileSource,
        timestamp,
        totalDurationTicks,
        totalduration.ToString(CultureInfo.InvariantCulture.NumberFormat),
        new List<CombinedRecognizedPhrase> { combined },
        recognizedPhrases);

    if (log != null)
    {
        // BUG FIX: the original passed an interpolated string containing a
        // "{0}" placeholder, which logged a literal "0" and silently
        // ignored the timestamp argument. Use a structured-logging message
        // template instead.
        log.LogInformation("Speech transcript JSON created at: {Timestamp} UTC", timestamp);
    }

    return JsonConvert.SerializeObject(transcript);
}
/// <summary>
/// Converts a recognized phrase into a Hypothesis, carrying over the
/// text, confidence, semantics (when present), and per-word results.
/// </summary>
/// <param name="phrase">The recognizer result to convert.</param>
/// <returns>A populated Hypothesis.</returns>
private Hypothesis createHypothesis(RecognizedPhrase phrase)
{
    var hypothesis = new Hypothesis();
    hypothesis.text = phrase.Text;
    hypothesis.confidence = phrase.Confidence;

    if (phrase.Semantics != null)
    {
        hypothesis.semantics = createStruct(phrase.Semantics);
    }

    // Copy each recognized word with its individual confidence.
    foreach (RecognizedWordUnit unit in phrase.Words)
    {
        var hypothesisWord = new Word();
        hypothesisWord.text = unit.Text;
        hypothesisWord.confidence = unit.Confidence;
        hypothesis.Add(hypothesisWord);
    }

    return hypothesis;
}
/// <summary>
/// Initializes a new instance of the Speech RecognizedPhrase class,
/// copying text, confidence, grammar name, parsed semantics, and words
/// from the engine result.
/// </summary>
/// <param name="phrase">The engine result this wrapper is built from.</param>
public SpeechRecognizedPhrase(RecognizedPhrase phrase)
{
    SpeechSemanticValue semanticValue = null;
    try
    {
        semanticValue = new SpeechSemanticValue(phrase.Semantics);
    }
    catch (InvalidOperationException)
    {
        // Failed to parse semantics, leave it empty.
    }

    this.text = phrase.Text;
    this.confidence = phrase.Confidence;

    // BUG FIX: tolerate a null Grammar instead of throwing a
    // NullReferenceException — other code in this file null-checks
    // Grammar before reading its Name, so null is evidently possible.
    this.grammar = phrase.Grammar?.Name;

    this.semantics = semanticValue;
    this.words = SpeechWord.CreateArray(phrase.Words);
}
/// <summary>
/// Writes the final n-best recognition results to the output, then
/// forwards the highest-confidence phrase to LUIS for intent detection.
/// </summary>
/// <param name="e">Event args carrying the phrase response.</param>
private void WriteResponseResult(SpeechResponseEventArgs e)
{
    // BUG FIX: the method was declared "async void" but contained no
    // await (CS1998), and async-void hides exceptions from callers. It is
    // now a plain synchronous method; call sites are unaffected.
    RecognizedPhrase best = null;

    if (e.PhraseResponse.Results.Length == 0)
    {
        this.WriteLine("No phrase response is available.");
        return;
    }

    this.WriteLine("********* Final n-BEST Results *********");
    for (int i = 0; i < e.PhraseResponse.Results.Length; i++)
    {
        var candidate = e.PhraseResponse.Results[i];
        this.WriteLine(
            "[{0}] Confidence={1}, Text=\"{2}\"",
            i,
            candidate.Confidence,
            candidate.DisplayText);

        // Track the highest-confidence hypothesis (first entry wins ties,
        // matching the original strict "<" comparison).
        if (best == null || best.Confidence < candidate.Confidence)
        {
            best = candidate;
        }
    }

    if (best != null)
    {
        CheckLuisForAction(best.DisplayText);
    }

    this.WriteLine();
}