Example #1
        public static CortanaCommand ProcessCommand(SpeechRecognitionResult speechRecognitionResult, CommandDiagnostics diagnostics)
        {
            // Get the name of the voice command and the text spoken
            string voiceCommandName = speechRecognitionResult.RulePath[0];
            string textSpoken = speechRecognitionResult.Text;

            string argument = null;
            CortanaCommand processedCommand = null;

            //bool modelUsed = ModelHolder != null;
            //if (modelUsed) {
            //    UserCortanaCommand userCommand = ProcessUserCommand(voiceCommandName, speechRecognitionResult, diagnostics);
            //    bool wasUserCommand = userCommand != null;
            //    if (wasUserCommand) {
            //        return userCommand;
            //    }
            //}

            switch (voiceCommandName)
            {
                case CortanaCommand.Execute:
                    argument = GetPhraseArg(speechRecognitionResult, "filename"); // filename
                    //argument = CortanaCommand.StripOffCommandName(voiceCommandName, textSpoken);
                    processedCommand = new ExecuteCortanaCommand(argument, diagnostics);
                    break;

                case CortanaCommand.ToggleListening:
                    processedCommand = new ToggleListeningCortanaCommand(null, diagnostics); // no argument needed
                    break;

                case CortanaCommand.YouTube:
                    const string youtube = "YouTube";
                    argument = CortanaCommand.StripOffCommandName(youtube, textSpoken); // search text
                    processedCommand = new YoutubeCortanaCommand(argument, diagnostics);
                    break;

                case CortanaCommand.Notepad:
                    const string notepad = "Notepad";
                    argument = CortanaCommand.StripOffCommandName(notepad, textSpoken); // text
                    processedCommand = new NotepadCortanaCommand(argument, diagnostics);
                    break;

                case CortanaCommand.FeedMe:
                    processedCommand = new FeedMeCortanaCommand(null, diagnostics); // no argument needed
                    break;

                case CortanaCommand.Calibrate:
                    processedCommand = new CalibrateCortanaCommand(null, diagnostics); // no argument needed
                    break;

                case CortanaCommand.BriefMe:
                    processedCommand = new BriefMeCortanaCommand(null, diagnostics); // no argument needed
                    break;

                default:
                    Debug.WriteLine("Command Name Not Found: " + voiceCommandName);
                    break;
            }
            return processedCommand;
        }
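ProcessCommand returns null when no case matches, so call sites should null-check the result. A minimal, hypothetical call site (the Execute() dispatch method is an assumption, not shown in the example):

        // Hypothetical usage; Execute() is an assumed entry point on CortanaCommand.
        CortanaCommand command = ProcessCommand(speechRecognitionResult, diagnostics);
        command?.Execute();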
Example #2
        private void ProcessResult(SpeechRecognitionResult result)
        {
            var handler = Heard;
            if (handler == null)
                return;

            var possibleWord = new PossibleWord
                {
                    ProbablePercent = ToPercent(result.TextConfidence),
                    Word = result.Text
                };

            handler(this, new MvxValueEventArgs<PossibleWord>(possibleWord));
        }
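ToPercent is not part of the snippet. Assuming TextConfidence is the UWP SpeechRecognitionConfidence enum (High, Medium, Low, Rejected), a plausible sketch of the missing helper, with invented thresholds, is:

        // Sketch only: the real ToPercent may use different values.
        private static int ToPercent(SpeechRecognitionConfidence confidence)
        {
            switch (confidence)
            {
                case SpeechRecognitionConfidence.High:   return 90;
                case SpeechRecognitionConfidence.Medium: return 60;
                case SpeechRecognitionConfidence.Low:    return 30;
                default:                                 return 0; // Rejected
            }
        }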
Example #3
 public async void ProcessCommands(SpeechRecognitionResult result)
 {
     string voiceCommandName = result.RulePath.First();
     SpeechSynthesizer synthesizer = new SpeechSynthesizer();
     switch (voiceCommandName)
     {
         case "showSessionsByRoom":
             var roomNumber = result.SemanticInterpretation.Properties["room"][0];
             await ViewModel.FindSessionsByRoom(roomNumber);
             break;
         case "showSessions":
             var stream = await synthesizer.SynthesizeTextToStreamAsync("There are " + ViewModel.CodecampSessions.Count + " sessions!");
             AudioPlayer.SetSource(stream, string.Empty);
             break;
         case "findSessionsByKeyword":
             string tag = result.SemanticInterpretation.Properties["tag"][0];
             var sessionCount = ViewModel.CodecampSessions.Count(s => s.Tags.Contains(tag));
             var findStream = await synthesizer.SynthesizeTextToStreamAsync("There are " + sessionCount + " sessions related to " + tag + "!");
             AudioPlayer.SetSource(findStream, string.Empty);
             break;
     }
 }
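The "showSessions" and "findSessionsByKeyword" cases repeat the same synthesize-then-play steps. A small helper (a sketch reusing the page's AudioPlayer and the method's SpeechSynthesizer) would remove the duplication:

 // Sketch: shared synthesize-and-play step for the cases above.
 private async Task SpeakAsync(SpeechSynthesizer synthesizer, string text)
 {
     var stream = await synthesizer.SynthesizeTextToStreamAsync(text);
     AudioPlayer.SetSource(stream, string.Empty); // empty content type, as in the original
 }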
Example #4
        private async Task HandleVoiceCommand(SpeechRecognitionResult voiceResult)
        {
            if (voiceResult.Status == SpeechRecognitionResultStatus.Success)
            {
                this.SetBusyAnimation(true);
                this.StatusText.Text = "absorbing MixRadio's music knowledge...";

                var text = voiceResult.Text.ToLowerInvariant();

                App.LogAnalyticsEvent("Country", App.Country, null, 0);

                if (text.CompareTo("play me") == 0 || text.CompareTo("my favourites") == 0 || text.CompareTo("my favorites") == 0)
                {
                    this.SetBusyAnimation(false);
                    this.SetReadyToGoAndExample();

                    App.LogAnalyticsEvent("HandleVoiceCommand", voiceResult.Text + " => PlayMe", null, 0);

                    // Start Play Me...
                    await new PlayMeTask().Show();
                }
                else
                {
                    // strip "some"...
                    text = text.Replace("some ", string.Empty);

                    // strip "me"...
                    text = text.Replace("me ", string.Empty);

                    // look for a mix...
                    await App.LoadMixes();

                    var candidates = (from m in App.Mixes
                                      where m.Name.IndexOf(text, StringComparison.OrdinalIgnoreCase) > -1
                                      select m).ToArray();

                    if (candidates.Any())
                    {
                        // got one, choose one at random in case there are many (e.g. "rock" has loads)...
                        Random rnd = new Random();
                        int index = rnd.Next(candidates.Length); // Next(n) excludes n, so every candidate is reachable
                        var mix = candidates[index];

                        App.LogAnalyticsEvent("HandleVoiceCommand", voiceResult.Text + " => Curated " + mix.Id, null, 0);

                        this.SetBusyAnimation(false);
                        this.SetReadyToGoAndExample();
                        await mix.Play();
                    }
                    else
                    {
                        // try to find an artist...
                        var artist = await App.FindArtist(text);

                        this.SetBusyAnimation(false);
                        this.SetReadyToGoAndExample();

                        if (artist != null)
                        {
                            App.LogAnalyticsEvent("HandleVoiceCommand", voiceResult.Text + " => Artist " + artist.Id, null, 0);

                            // start the artist mix...
                            await artist.PlayMix();
                        }
                        else
                        {
                            // nothing found
                            this.StatusText.Text = "sorry, could not find any '" + text + "' to play.\r\ntry '" + this.GetExample() + "'";
                            App.LogAnalyticsEvent("HandleVoiceCommand", voiceResult.Text + " => No matches", null, 0);
                        }
                    }
                }
            }
            else
            {
                App.LogAnalyticsEvent("HandleVoiceCommand", "SpeechRecognitionResultStatus==" + voiceResult.Status.ToString(), null, 0);
            }
        }
Example #5
        private SpeechRecognitionEvent ParseRecognizeResponse(IDictionary resp)
        {
            if (resp == null)
            {
                return(null);
            }

            try
            {
                List <SpeechRecognitionResult> results = new List <SpeechRecognitionResult>();
                IList iresults = resp["results"] as IList;
                if (iresults == null)
                {
                    return(null);
                }

                foreach (var r in iresults)
                {
                    IDictionary iresult = r as IDictionary;
                    if (iresult == null)
                    {
                        continue;
                    }

                    SpeechRecognitionResult result = new SpeechRecognitionResult();
                    result.final = (bool)iresult["final"];

                    IList iwordAlternatives = iresult["word_alternatives"] as IList;
                    if (iwordAlternatives != null)
                    {
                        List <WordAlternativeResults> wordAlternatives = new List <WordAlternativeResults>();
                        foreach (var w in iwordAlternatives)
                        {
                            IDictionary iwordAlternative = w as IDictionary;
                            if (iwordAlternative == null)
                            {
                                continue;
                            }

                            WordAlternativeResults wordAlternativeResults = new WordAlternativeResults();
                            if (iwordAlternative.Contains("start_time"))
                            {
                                wordAlternativeResults.start_time = (double)iwordAlternative["start_time"];
                            }
                            if (iwordAlternative.Contains("end_time"))
                            {
                                wordAlternativeResults.end_time = (double)iwordAlternative["end_time"];
                            }
                            if (iwordAlternative.Contains("alternatives"))
                            {
                                List <WordAlternativeResult> wordAlternativeResultList = new List <WordAlternativeResult>();
                                IList iwordAlternativeResult = iwordAlternative["alternatives"] as IList;
                                if (iwordAlternativeResult == null)
                                {
                                    continue;
                                }

                                foreach (var a in iwordAlternativeResult)
                                {
                                    IDictionary ialternative = a as IDictionary;
                                    if (ialternative == null)
                                    {
                                        continue;
                                    }

                                    WordAlternativeResult wordAlternativeResult = new WordAlternativeResult();
                                    if (ialternative.Contains("word"))
                                    {
                                        wordAlternativeResult.word = (string)ialternative["word"];
                                    }
                                    if (ialternative.Contains("confidence"))
                                    {
                                        wordAlternativeResult.confidence = (double)ialternative["confidence"];
                                    }
                                    wordAlternativeResultList.Add(wordAlternativeResult);
                                }

                                wordAlternativeResults.alternatives = wordAlternativeResultList.ToArray();
                            }

                            wordAlternatives.Add(wordAlternativeResults);
                        }

                        result.word_alternatives = wordAlternatives.ToArray();
                    }

                    IList ialternatives = iresult["alternatives"] as IList;
                    if (ialternatives != null)
                    {
                        List <SpeechRecognitionAlternative> alternatives = new List <SpeechRecognitionAlternative>();
                        foreach (var a in ialternatives)
                        {
                            IDictionary ialternative = a as IDictionary;
                            if (ialternative == null)
                            {
                                continue;
                            }

                            SpeechRecognitionAlternative alternative = new SpeechRecognitionAlternative();
                            alternative.transcript = (string)ialternative["transcript"];
                            if (ialternative.Contains("confidence"))
                            {
                                alternative.confidence = (double)ialternative["confidence"];
                            }

                            if (ialternative.Contains("timestamps"))
                            {
                                IList itimestamps = ialternative["timestamps"] as IList;

                                TimeStamp[] timestamps = new TimeStamp[itimestamps.Count];
                                for (int i = 0; i < itimestamps.Count; ++i)
                                {
                                    IList itimestamp = itimestamps[i] as IList;
                                    if (itimestamp == null)
                                    {
                                        continue;
                                    }

                                    TimeStamp ts = new TimeStamp();
                                    ts.Word       = (string)itimestamp[0];
                                    ts.Start      = (double)itimestamp[1];
                                    ts.End        = (double)itimestamp[2];
                                    timestamps[i] = ts;
                                }

                                alternative.Timestamps = timestamps;
                            }
                            if (ialternative.Contains("word_confidence"))
                            {
                                IList iconfidence = ialternative["word_confidence"] as IList;

                                WordConfidence[] confidence = new WordConfidence[iconfidence.Count];
                                for (int i = 0; i < iconfidence.Count; ++i)
                                {
                                    IList iwordconf = iconfidence[i] as IList;
                                    if (iwordconf == null)
                                    {
                                        continue;
                                    }

                                    WordConfidence wc = new WordConfidence();
                                    wc.Word       = (string)iwordconf[0];
                                    wc.Confidence = (double)iwordconf[1];
                                    confidence[i] = wc;
                                }

                                alternative.WordConfidence = confidence;
                            }

                            alternatives.Add(alternative);
                        }

                        result.alternatives = alternatives.ToArray();
                    }

                    IDictionary iKeywords = iresult["keywords_result"] as IDictionary;
                    if (iKeywords != null)
                    {
                        result.keywords_result = new KeywordResults();
                        List <KeywordResult> keywordResults = new List <KeywordResult>();
                        foreach (string keyword in Keywords)
                        {
                            if (iKeywords[keyword] != null)
                            {
                                IList iKeywordList = iKeywords[keyword] as IList;
                                if (iKeywordList == null)
                                {
                                    continue;
                                }

                                foreach (var k in iKeywordList)
                                {
                                    IDictionary   iKeywordDictionary = k as IDictionary;
                                    KeywordResult keywordResult      = new KeywordResult();
                                    keywordResult.keyword         = keyword;
                                    keywordResult.confidence      = (double)iKeywordDictionary["confidence"];
                                    keywordResult.end_time        = (double)iKeywordDictionary["end_time"];
                                    keywordResult.start_time      = (double)iKeywordDictionary["start_time"];
                                    keywordResult.normalized_text = (string)iKeywordDictionary["normalized_text"];
                                    keywordResults.Add(keywordResult);
                                }
                            }
                        }
                        result.keywords_result.keyword = keywordResults.ToArray();
                    }

                    results.Add(result);
                }

                return(new SpeechRecognitionEvent(results.ToArray()));
            }
            catch (Exception e)
            {
                Log.Error("SpeechToText.ParseRecognizeResponse()", "ParseJsonResponse exception: {0}", e.ToString());
                return(null);
            }
        }
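One fragility worth noting: direct casts such as (double)iwordAlternative["start_time"] throw InvalidCastException if the JSON layer boxes whole numbers as long. A hedged defensive variant of those reads would be:

                            // Sketch: Convert.ToDouble unboxes long, int, or double alike,
                            // where a direct (double) cast throws on a boxed long.
                            if (iwordAlternative.Contains("start_time"))
                            {
                                wordAlternativeResults.start_time = Convert.ToDouble(iwordAlternative["start_time"]);
                            }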
Example #6
 public VoiceCommandInfo(SpeechRecognitionResult speechRecognitionResult)
 {
     this.Result = speechRecognitionResult;
     this.VoiceCommandName = speechRecognitionResult?.RulePath[0];
     this.TextSpoken = speechRecognitionResult?.Text;
 }
Example #8
 /// <summary>
 /// Retrieves the best match from phraseName's list of options (declared in VCD file)
 /// </summary>
 private static string GetPhraseArg(SpeechRecognitionResult speechRecognitionResult, string phraseName)
 {
     string phraseArg = speechRecognitionResult.SemanticInterpretation.Properties[phraseName][0];
     return phraseArg;
 }
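GetPhraseArg throws if phraseName was never declared or never matched. A defensive variant (hypothetical name; same IReadOnlyDictionary API, with System.Linq assumed for FirstOrDefault):

 // Defensive sketch: returns null instead of throwing when the phrase is absent.
 private static string GetPhraseArgSafe(SpeechRecognitionResult speechRecognitionResult, string phraseName)
 {
     IReadOnlyList<string> options;
     if (speechRecognitionResult.SemanticInterpretation.Properties.TryGetValue(phraseName, out options))
     {
         return options.FirstOrDefault();
     }
     return null;
 }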
 /// <summary>
 /// Creates a new instance of the SpeechRecognizedEventArgs class.
 /// </summary>
 public SpeechRecognizedEventArgs(SpeechRecognitionResult result)
 {
     Result = result;
     Canceled = false;
 }
 public bool HandleResultFromFile(SpeechRecognitionResult result)
 {
     return(HandleResult(result, MySpeechResponseFromFile));
 }
        private async Task ProcessRecognitionResultsAsync(SpeechRecognitionResult results, RequestExtras requestExtras)
        {
            if (!string.IsNullOrWhiteSpace(results.Text))
            {
                var request = new AIRequest();
                request.Query = new[] { results.Text };
                try
                {
                    request.Confidence = new[] { Convert.ToSingle(results.RawConfidence) };
                }
                catch
                {
                    // RawConfidence may be unavailable on some recognizers; proceed without a confidence value.
                }

                try
                {
                    if (requestExtras != null)
                    {
                        requestExtras.CopyTo(request);
                    }

                    var response = await dataService.RequestAsync(request);
                    FireOnResult(response);
                }
                catch (Exception e)
                {
                    FireOnError(new AIServiceException(e));
                }
            }
        }
Example #12
        protected async override void OnActivated(IActivatedEventArgs args)
        {
            base.OnActivated(args);

            if (args.Kind == ActivationKind.VoiceCommand)
            {
                // The arguments can represent many different activation types. Cast it so we can get the
                // parameters we care about out.
                var commandArgs = args as VoiceCommandActivatedEventArgs;

                SpeechRecognitionResult speechRecognitionResult = commandArgs.Result;

                // Get the name of the voice command and the text spoken. See AdventureWorksCommands.xml for
                // the <Command> tags this can be filled with.
                string voiceCommandName = speechRecognitionResult.RulePath[0];
                string textSpoken       = speechRecognitionResult.Text;

                // The commandMode is either "voice" or "text", and it indicates how the voice command
                // was entered by the user.
                // Apps should respect "text" mode by providing feedback in silent form.
                string commandMode = speechRecognitionResult.SemanticInterpretation.Properties["commandMode"].FirstOrDefault();

                switch (voiceCommandName)
                {
                case "switch":
                    // Access the value of the {destination} phrase in the voice command
                    string status = speechRecognitionResult.SemanticInterpretation.Properties["status"].FirstOrDefault();
                    System.Diagnostics.Debug.WriteLine(status);

                    if (status.Equals("打开")) // "打开" = "turn on"
                    {
                        SendMessage("[{\"port\": 6, \"status\": 0}]");
                        await Task.Delay(10000);

                        SendMessage("[{\"port\": 6, \"status\": 1}]");
                        await Task.Delay(100);

                        SendMessage("[{\"port\": 5, \"status\": 0}]");
                        await Task.Delay(1000);

                        SendMessage("[{\"port\": 5, \"status\": 1}]");
                        await Task.Delay(1000);

                        SendMessage("[{\"port\": 5, \"status\": 0}]");
                        await Task.Delay(1000);

                        SendMessage("[{\"port\": 5, \"status\": 1}]");
                        await Task.Delay(1000);

                        SendMessage("[{\"port\": 5, \"status\": 0}]");
                        await Task.Delay(1000);

                        SendMessage("[{\"port\": 5, \"status\": 1}]");
                        await Task.Delay(1000);

                        SendMessage("[{\"port\": 5, \"status\": 0}]");
                        await Task.Delay(1000);

                        SendMessage("[{\"port\": 5, \"status\": 1}]");
                        await Task.Delay(100);

                        SendMessage("[{\"port\": 13, \"status\": 0}]");
                        await Task.Delay(10000);

                        SendMessage("[{\"port\": 13, \"status\": 1}]");
                    }
                    else if (status.Equals("关闭")) // "关闭" = "turn off"
                    {
                        SendMessage("[{\"port\": 5, \"status\": 1}]");
                        SendMessage("[{\"port\": 6, \"status\": 1}]");
                        SendMessage("[{\"port\": 13, \"status\": 1}]");
                    }

                    break;

                default:
                    break;
                }
            }
        }
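commandMode is captured above but never consulted. Following the comment's own guidance, honoring "text" mode could look like this sketch (SpeakConfirmationAsync is a hypothetical helper, not part of the original):

                    // Sketch: stay silent when the command was typed rather than spoken.
                    bool silentMode = commandMode == "text";
                    if (!silentMode)
                    {
                        // await SpeakConfirmationAsync(status); // hypothetical speech feedback
                    }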
Example #13
 protected override void OnNavigatedTo(NavigationEventArgs e)
 {
     base.OnNavigatedTo(e);
     speechRecognition = e.Parameter as SpeechRecognitionResult;
 }
        /// <summary>
        /// Uses the result from the speech recognizer to change the colors of the shapes.
        /// </summary>
        /// <param name="recoResult">The result from the recognition event</param>
        private void HandleRecognitionResult(SpeechRecognitionResult recoResult)
        {
            // Declare a string that will contain messages when the color rule matches GARBAGE.
            string garbagePrompt = "";

            // BACKGROUND: Check to see if the recognition result contains the semantic key for the background color,
            // and not a match for the GARBAGE rule, and change the color.
            if (recoResult.SemanticInterpretation.Properties.ContainsKey("background") && recoResult.SemanticInterpretation.Properties["background"][0].ToString() != "...")
            {
                string backgroundColor = recoResult.SemanticInterpretation.Properties["background"][0].ToString();
                colorRectangle.Fill = new SolidColorBrush(getColor(backgroundColor.ToLower()));
            }

            // If "background" was matched, but the color rule matched GARBAGE, prompt the user.
            else if (recoResult.SemanticInterpretation.Properties.ContainsKey("background") && recoResult.SemanticInterpretation.Properties["background"][0].ToString() == "...")
            {
                garbagePrompt       += "Didn't get the background color \n\nTry saying blue background\n";
                resultTextBlock.Text = garbagePrompt;
            }

            // BORDER: Check to see if the recognition result contains the semantic key for the border color,
            // and not a match for the GARBAGE rule, and change the color.
            if (recoResult.SemanticInterpretation.Properties.ContainsKey("border") && recoResult.SemanticInterpretation.Properties["border"][0].ToString() != "...")
            {
                string borderColor = recoResult.SemanticInterpretation.Properties["border"][0].ToString();
                colorRectangle.Stroke = new SolidColorBrush(getColor(borderColor.ToLower()));
            }

            // If "border" was matched, but the color rule matched GARBAGE, prompt the user.
            else if (recoResult.SemanticInterpretation.Properties.ContainsKey("border") && recoResult.SemanticInterpretation.Properties["border"][0].ToString() == "...")
            {
                garbagePrompt       += "Didn't get the border color\n\n Try saying red border\n";
                resultTextBlock.Text = garbagePrompt;
            }

            // CIRCLE: Check to see if the recognition result contains the semantic key for the circle color,
            // and not a match for the GARBAGE rule, and change the color.
            if (recoResult.SemanticInterpretation.Properties.ContainsKey("circle") && recoResult.SemanticInterpretation.Properties["circle"][0].ToString() != "...")
            {
                string circleColor = recoResult.SemanticInterpretation.Properties["circle"][0].ToString();
                colorCircle.Fill = new SolidColorBrush(getColor(circleColor.ToLower()));
            }

            // If "circle" was matched, but the color rule matched GARBAGE, prompt the user.
            else if (recoResult.SemanticInterpretation.Properties.ContainsKey("circle") && recoResult.SemanticInterpretation.Properties["circle"][0].ToString() == "...")
            {
                garbagePrompt       += "Didn't get the circle color\n\n Try saying green circle\n";
                resultTextBlock.Text = garbagePrompt;
            }

            // Initialize a string that will describe the user's color choices.
            string textBoxColors = "You selected -> \n";

            // Write the color choices contained in the semantics of the recognition result to the text box.
            foreach (KeyValuePair <String, IReadOnlyList <string> > child in recoResult.SemanticInterpretation.Properties)
            {
                // Check to see if any of the semantic values in recognition result contains a match for the GARBAGE rule.
                if (!child.Value[0].Equals("..."))
                {
                    // Cycle through the semantic keys and values and write them to the text box.
                    textBoxColors += (string.Format(" {0} {1}\n",
                                                    child.Value[0], child.Key ?? "null"));

                    resultTextBlock.Text = textBoxColors;
                }

                // If there was no match to the colors rule or if it matched GARBAGE, prompt the user.
                else
                {
                    resultTextBlock.Text = garbagePrompt;
                }
            }
        }
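The three ContainsKey/"..." checks follow an identical pattern. A hypothetical helper that collapses them ("..." being the GARBAGE match, as in the original):

        // Sketch: one lookup for "did this key match something other than GARBAGE?".
        private static bool TryGetSemantic(SpeechRecognitionResult recoResult, string key, out string value)
        {
            value = null;
            if (recoResult.SemanticInterpretation.Properties.ContainsKey(key) &&
                recoResult.SemanticInterpretation.Properties[key][0].ToString() != "...")
            {
                value = recoResult.SemanticInterpretation.Properties[key][0].ToString();
                return true;
            }
            return false;
        }

Each branch above then reduces to a single TryGetSemantic call plus the color change or garbage prompt.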
Example #16
        async Task RecordSpeechFromMicrophoneAsync(VoiceInformation voiceInformation, Func <SpeechRecognitionResult, Task> doNext)
        {
            if (!await AudioCapturePermissions.RequestMicrophonePermission())
            {
                return;
            }

            if (voiceInformation == null)
            {
                return;
            }


            if (!await DoRecognition())
            {
                //media.StopMedia();
                //In some cases DoRecognition ends prematurely e.g. when
                //the user allows access to the microphone but there is no
                //microphone available so do not stop media.
                await SpeakAndListen();
            }

            async Task <bool> DoRecognition()
            {
                using (SpeechRecognizer speechRecognizer = new SpeechRecognizer(new Windows.Globalization.Language(voiceInformation.Language)))
                {
                    SpeechRecognitionConstraints.ToList().ForEach(c => speechRecognizer.Constraints.Add(c));

                    speechRecognizer.Timeouts.InitialSilenceTimeout = TimeSpan.FromSeconds(SpeechRecognitionConstants.InitialSilenceTimeout);
                    speechRecognizer.Timeouts.EndSilenceTimeout     = TimeSpan.FromSeconds(SpeechRecognitionConstants.EndSilenceTimeout);

                    await speechRecognizer.CompileConstraintsAsync();

                    SpeechRecognitionResult result = await speechRecognizer.RecognizeAsync();

                    if (
                        !(result.Status == SpeechRecognitionResultStatus.Success &&
                          new HashSet <SpeechRecognitionConfidence>
                    {
                        SpeechRecognitionConfidence.High,
                        SpeechRecognitionConfidence.Medium,
                        SpeechRecognitionConfidence.Low
                    }.Contains(result.Confidence)
                          )
                        )
                    {
                        return(false);
                    }

                    if (result.Constraint.Tag == SpeechRecognitionConstants.GOBACKTAG)
                    {
                        if (UiNotificationService.CanGoBack)
                        {
                            await GoBack();

                            return(true);
                        }
                        else
                        {
                            return(false);
                        }
                    }
                    else
                    {//Options constraint succeeded
                        await doNext(result);

                        return(true);
                    }
                }
            }
        }
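Since SpeechRecognitionConfidence has only the four values listed, the HashSet membership test in DoRecognition reduces to one comparison; an equivalent, simpler acceptance check would be:

                    // Equivalent to the HashSet test above: anything but Rejected is accepted.
                    bool accepted = result.Status == SpeechRecognitionResultStatus.Success &&
                                    result.Confidence != SpeechRecognitionConfidence.Rejected;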
 private void SpeechRecognizer_ResultGenerated(object sender, SpeechRecognitionResult result)
 {
   if (result.Constraint != null)
     OnSpeechReceived(sender, new SpeechEventArgs(result.Text));
 }
Example #18
        /// <summary>
        /// Populates the page with content passed during navigation. Any saved state
        /// is also provided when the page is re-created from a previous session.
        /// </summary>
        /// <param name="sender">
        /// The source of the event; typically <see cref="NavigationHelper"/>
        /// </param>
        /// <param name="e">Event data that provides both the navigation parameter passed to
        /// <see cref="Frame.Navigate(Type, Object)"/> when this page was initially requested,
        /// and a dictionary of state preserved by this page during an earlier session.
        /// The state will be null the first time the page is visited.</param>
        private void NavigationHelper_LoadState(object sender, LoadStateEventArgs e)
        {
            if (e.NavigationParameter == null)
            {
                return;
            }

            if (e.NavigationParameter.GetType().Equals(typeof(string)))
            {
                string CS = e.NavigationParameter.ToString();
                this.DefaultViewModel["CS"] = CS;

                // Handle the embedded browser
                _htmltianqiV.Navigate(new Uri("ms-appx-web:///DataModel/dwhs/bwc_dwhs_listname.html"));
            }
            if (e.NavigationParameter.GetType().Equals(typeof(VoiceCommandActivatedEventArgs)))
            {
                VoiceCommandActivatedEventArgs commandArgs_thisone     = e.NavigationParameter as VoiceCommandActivatedEventArgs;
                SpeechRecognitionResult        speechRecognitionResult = commandArgs_thisone.Result;

                string voiceCommandName = speechRecognitionResult.RulePath[0];
                string textSpoken       = speechRecognitionResult.Text;
                string navigationTarget = speechRecognitionResult.SemanticInterpretation.Properties["NavigationTarget"][0];

                string key = textSpoken.Replace("换算", "").Replace("单位", "").Replace("器", "").Trim(); // strip "conversion" (换算), "unit" (单位) and the "-er" suffix (器) from the spoken text
                string uk  = "bwc_dwhs_listname";
                if (key.IndexOf("长度") >= 0)
                {
                    uk = "bwc_dwhs_changdu";
                }
                if (key.IndexOf("重量") >= 0)
                {
                    uk = "bwc_dwhs_zhongliang";
                }
                if (key.IndexOf("面积") >= 0)
                {
                    uk = "bwc_dwhs_mianji";
                }
                if (key.IndexOf("体积") >= 0)
                {
                    uk = "bwc_dwhs_tiji";
                }
                if (key.IndexOf("温度") >= 0)
                {
                    uk = "bwc_dwhs_wendu";
                }
                if (key.IndexOf("压力") >= 0)
                {
                    uk = "bwc_dwhs_yali";
                }
                if (key.IndexOf("能量") >= 0)
                {
                    uk = "bwc_dwhs_nengliang";
                }
                if (key.IndexOf("功率") >= 0)
                {
                    uk = "bwc_dwhs_gonglv";
                }
                _htmltianqiV.Navigate(new Uri("ms-appx-web:///DataModel/dwhs/" + uk + ".html"));
            }
        }
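The keyword-to-page if-chain above is a natural fit for a lookup table. A sketch with the same keywords and pages (hypothetical restructuring, assuming a System.Collections.Generic using; the lookup is meant to slot into the method above):

        // Sketch: table-driven replacement for the if-chain.
        private static readonly Dictionary<string, string> UnitPages = new Dictionary<string, string>
        {
            { "长度", "bwc_dwhs_changdu" },    // length
            { "重量", "bwc_dwhs_zhongliang" }, // weight
            { "面积", "bwc_dwhs_mianji" },     // area
            { "体积", "bwc_dwhs_tiji" },       // volume
            { "温度", "bwc_dwhs_wendu" },      // temperature
            { "压力", "bwc_dwhs_yali" },       // pressure
            { "能量", "bwc_dwhs_nengliang" },  // energy
            { "功率", "bwc_dwhs_gonglv" }      // power
        };

        // Inside NavigationHelper_LoadState, the chain then collapses to:
        // string uk = "bwc_dwhs_listname";
        // foreach (var pair in UnitPages)
        // {
        //     if (key.Contains(pair.Key)) { uk = pair.Value; } // later matches win, as in the original
        // }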
 internal static string SemanticInterpretation(this SpeechRecognitionResult result, string interpretationKey)
 {
     return(result.SemanticInterpretation.Properties[interpretationKey].FirstOrDefault());
 }
Example #20
    public int UpdateSpeechRecognition()
    {
        if (speechRecognizer != null)
        {
            if (speechRecognizeTask == null)
            {
                UnityEngine.WSA.Application.InvokeOnUIThread(() =>
                {
                    speechRecognizeTask = RecognizeSpeechAsync();
                }, true);
            }

            if (speechRecognizeTask != null)
            {
                // check for error
                if (speechRecognizeTask.IsFaulted)
                {
                    Debug.LogError("RecognizeSpeechAsync() has faulted.");
                    if (speechRecognizeTask.Exception != null)
                    {
                        Debug.LogError(speechRecognizeTask.Exception);
                    }

                    speechRecognizeTask = null;
                }
                else if (speechRecognizeTask.IsCanceled)
                {
                    speechRecognizeTask = null;
                }
                else if (speechRecognizeTask.IsCompleted)
                {
                    SpeechRecognitionResult result = speechRecognizeTask.Result;

                    if (result.Status == SpeechRecognitionResultStatus.Success)
                    {
                        if (result.Confidence != SpeechRecognitionConfidence.Rejected)
                        {
                            //Debug.LogError("Phrase: " + result.Text + ", Confidence: " + result.Confidence.ToString() + ", RawConf: " + result.RawConfidence);

                            float fConfidence = (float)result.RawConfidence; // (3f - (float)result.Confidence) / 3f;
                            if (fConfidence >= requiredPhraseConfidence)
                            {
                                isPhraseRecognized  = true;
                                recognizedPhraseTag = result.SemanticInterpretation.Properties.ContainsKey("<ROOT>") ?
                                                      result.SemanticInterpretation.Properties["<ROOT>"][0] : result.Text;
                                recognizedPhraseConfidence = fConfidence;
                            }
                        }
                    }
                    //else
                    //{
                    //    Debug.LogError("Speech recognition failed: " + result.Status.ToString());
                    //}

                    speechRecognizeTask = null;
                }
            }
        }

        return(0);
    }
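UpdateSpeechRecognition is written to be polled. In Unity the natural call site is a MonoBehaviour's Update loop (assumed wiring, not shown in the original):

    // Sketch: poll the recognizer once per frame from the owning MonoBehaviour.
    void Update()
    {
        UpdateSpeechRecognition();
    }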
 public UserCortanaCommand Spawn(SpeechRecognitionResult speechRecognitionResult)
 {
     //TODO: use argument
     UserCortanaCommand clone = new UserCortanaCommand(Name, Tasks.ToArray());
     return clone;
 }
Example #22
        private async void UseActivatedArgs(IActivatedEventArgs args, INavigationService service)
        {
            if (service == null)
            {
                service = WindowContext.GetForCurrentView().NavigationServices.FirstOrDefault();
            }

            if (service == null || args == null)
            {
                return;
            }

            if (args is ShareTargetActivatedEventArgs share)
            {
                var package = new DataPackage();

                try
                {
                    var operation = share.ShareOperation.Data;
                    if (operation.AvailableFormats.Contains(StandardDataFormats.ApplicationLink))
                    {
                        package.SetApplicationLink(await operation.GetApplicationLinkAsync());
                    }
                    if (operation.AvailableFormats.Contains(StandardDataFormats.Bitmap))
                    {
                        package.SetBitmap(await operation.GetBitmapAsync());
                    }
                    //if (operation.Contains(StandardDataFormats.Html))
                    //{
                    //    package.SetHtmlFormat(await operation.GetHtmlFormatAsync());
                    //}
                    //if (operation.Contains(StandardDataFormats.Rtf))
                    //{
                    //    package.SetRtf(await operation.GetRtfAsync());
                    //}
                    if (operation.AvailableFormats.Contains(StandardDataFormats.StorageItems))
                    {
                        package.SetStorageItems(await operation.GetStorageItemsAsync());
                    }
                    if (operation.AvailableFormats.Contains(StandardDataFormats.Text))
                    {
                        package.SetText(await operation.GetTextAsync());
                    }
                    //if (operation.Contains(StandardDataFormats.Uri))
                    //{
                    //    package.SetUri(await operation.GetUriAsync());
                    //}
                    if (operation.AvailableFormats.Contains(StandardDataFormats.WebLink))
                    {
                        package.SetWebLink(await operation.GetWebLinkAsync());
                    }
                }
                catch { }

                var query = "tg://";

                var contactId = await ContactsService.GetContactIdAsync(share.ShareOperation.Contacts.FirstOrDefault());

                if (contactId is int userId)
                {
                    var response = await _lifetime.ActiveItem.ProtoService.SendAsync(new CreatePrivateChat(userId, false));

                    if (response is Chat chat)
                    {
                        query = $"ms-contact-profile://meh?ContactRemoteIds=u" + userId;
                        App.DataPackages[chat.Id] = package.GetView();
                    }
                    else
                    {
                        App.DataPackages[0] = package.GetView();
                    }
                }
                else
                {
                    App.DataPackages[0] = package.GetView();
                }

                App.ShareOperation = share.ShareOperation;
                App.ShareWindow    = _window;

                var options = new Windows.System.LauncherOptions();
                options.TargetApplicationPackageFamilyName = Package.Current.Id.FamilyName;

                try
                {
                    await Windows.System.Launcher.LaunchUriAsync(new Uri(query), options);
                }
                catch
                {
                    // It's too early?
                }
            }
            else if (args is VoiceCommandActivatedEventArgs voice)
            {
                SpeechRecognitionResult speechResult = voice.Result;
                string command = speechResult.RulePath[0];

                if (command == "ShowAllDialogs")
                {
                    service.NavigateToMain(null);
                }
                if (command == "ShowSpecificDialog")
                {
                    //#TODO: Fix that this'll open a specific dialog
                    service.NavigateToMain(null);
                }
                else
                {
                    service.NavigateToMain(null);
                }
            }
            else if (args is ContactPanelActivatedEventArgs contact)
            {
                SetContactPanel(contact.ContactPanel);

                if (Application.Current.Resources.TryGet("PageHeaderBackgroundBrush", out SolidColorBrush backgroundBrush))
                {
                    contact.ContactPanel.HeaderColor = backgroundBrush.Color;
                }

                var contactId = await ContactsService.GetContactIdAsync(contact.Contact.Id);

                if (contactId is int userId)
                {
                    var response = await _lifetime.ActiveItem.ProtoService.SendAsync(new CreatePrivateChat(userId, false));

                    if (response is Chat chat)
                    {
                        service.NavigateToChat(chat);
                    }
                    else
                    {
                        ContactPanelFallback(service);
                    }
                }
                else
                {
                    ContactPanelFallback(service);
                }
            }
            else if (args is ProtocolActivatedEventArgs protocol)
            {
                if (service?.Frame?.Content is MainPage page)
                {
                    page.Activate(protocol.Uri.ToString());
                }
                else
                {
                    service.NavigateToMain(protocol.Uri.ToString());
                }

                if (App.ShareOperation != null)
                {
                    try
                    {
                        App.ShareOperation.ReportCompleted();
                        App.ShareOperation = null;
                    }
                    catch { }
                }

                if (App.ShareWindow != null)
                {
                    try
                    {
                        await App.ShareWindow.Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                        {
                            App.ShareWindow.Close();
                            App.ShareWindow = null;
                        });
                    }
                    catch { }
                }
            }
            else if (args is FileActivatedEventArgs file)
            {
                if (service?.Frame?.Content is MainPage page)
                {
                    //page.Activate(launch);
                }
                else
                {
                    service.NavigateToMain(string.Empty);
                }

                await new ThemePreviewPopup(file.Files[0].Path).ShowQueuedAsync();
            }
            else
            {
                var activate = args as ToastNotificationActivatedEventArgs;
                var launched = args as LaunchActivatedEventArgs;
                var launch   = activate?.Argument ?? launched?.Arguments;

                if (service?.Frame?.Content is MainPage page)
                {
                    page.Activate(launch);
                }
                else
                {
                    service.NavigateToMain(launch);
                }
            }
        }
        private async Task<AIResponse> ProcessRecognitionResultsAsync(SpeechRecognitionResult results, RequestExtras requestExtras, CancellationToken cancellationToken)
        {
            if (!string.IsNullOrWhiteSpace(results.Text))
            {
                var request = CreateAIRequest(results);

                requestExtras?.CopyTo(request);

                var response = await DataService.RequestAsync(request, cancellationToken);
                return response;   
            }
            else
            {
                return null;
            }
        }
 private void FillUi(SpeechRecognitionResult result)
 {
     TextConfidenceTextBlock.Text = result.TextConfidence.ToString();
     ConfidenceScoreTextBlock.Text = result.Details.ConfidenceScore.ToString();
     RuleNameTextBlock.Text = result.RuleName;
 }
        /// <summary>
        /// Uses the result from the speech recognizer to change the colors of the shapes.
        /// </summary>
        /// <param name="recoResult">The result from the recognition event</param>
        private void HandleRecognitionResult(SpeechRecognitionResult recoResult)
        {
            // Check the confidence level of the recognition result.
            if (recoResult.Confidence == SpeechRecognitionConfidence.High ||
            recoResult.Confidence == SpeechRecognitionConfidence.Medium)
            {
                // Declare a string that will contain messages when the color rule matches GARBAGE.
                string garbagePrompt = "";

                // BACKGROUND: Check to see if the recognition result contains the semantic key for the background color,
                // and not a match for the GARBAGE rule, and change the color.
                if (recoResult.SemanticInterpretation.Properties.ContainsKey("KEY_BACKGROUND") && recoResult.SemanticInterpretation.Properties["KEY_BACKGROUND"][0].ToString() != "...")
                {
                    string backgroundColor = recoResult.SemanticInterpretation.Properties["KEY_BACKGROUND"][0].ToString();
                    colorRectangle.Fill = new SolidColorBrush(getColor(backgroundColor));
                }

                // If "background" was matched, but the color rule matched GARBAGE, prompt the user.
                else if (recoResult.SemanticInterpretation.Properties.ContainsKey("KEY_BACKGROUND") && recoResult.SemanticInterpretation.Properties["KEY_BACKGROUND"][0].ToString() == "...")
                {

                    garbagePrompt += speechResourceMap.GetValue("SRGSBackgroundGarbagePromptText", speechContext).ValueAsString;
                    resultTextBlock.Text = garbagePrompt;
                }

                // BORDER: Check to see if the recognition result contains the semantic key for the border color,
                // and not a match for the GARBAGE rule, and change the color.
                if (recoResult.SemanticInterpretation.Properties.ContainsKey("KEY_BORDER") && recoResult.SemanticInterpretation.Properties["KEY_BORDER"][0].ToString() != "...")
                {
                    string borderColor = recoResult.SemanticInterpretation.Properties["KEY_BORDER"][0].ToString();
                    colorRectangle.Stroke = new SolidColorBrush(getColor(borderColor));
                }

                // If "border" was matched, but the color rule matched GARBAGE, prompt the user.
                else if (recoResult.SemanticInterpretation.Properties.ContainsKey("KEY_BORDER") && recoResult.SemanticInterpretation.Properties["KEY_BORDER"][0].ToString() == "...")
                {
                    garbagePrompt += speechResourceMap.GetValue("SRGSBorderGarbagePromptText", speechContext).ValueAsString;
                    resultTextBlock.Text = garbagePrompt;
                }

                // CIRCLE: Check to see if the recognition result contains the semantic key for the circle color,
                // and not a match for the GARBAGE rule, and change the color.
                if (recoResult.SemanticInterpretation.Properties.ContainsKey("KEY_CIRCLE") && recoResult.SemanticInterpretation.Properties["KEY_CIRCLE"][0].ToString() != "...")
                {
                    string circleColor = recoResult.SemanticInterpretation.Properties["KEY_CIRCLE"][0].ToString();
                    colorCircle.Fill = new SolidColorBrush(getColor(circleColor));
                }

                // If "circle" was matched, but the color rule matched GARBAGE, prompt the user.
                else if (recoResult.SemanticInterpretation.Properties.ContainsKey("KEY_CIRCLE") && recoResult.SemanticInterpretation.Properties["KEY_CIRCLE"][0].ToString() == "...")
                {
                    garbagePrompt += speechResourceMap.GetValue("SRGSCircleGarbagePromptText", speechContext).ValueAsString;
                    resultTextBlock.Text = garbagePrompt;
                }

                // Initialize a string that will describe the user's color choices.
                string textBoxColors = "You selected (Semantic Interpretation)-> \n";

                // Write the color choices contained in the semantics of the recognition result to the text box.
                foreach (KeyValuePair<String, IReadOnlyList<string>> child in recoResult.SemanticInterpretation.Properties)
                {

                    // Check to see if any of the semantic values in recognition result contains a match for the GARBAGE rule.
                    if (!child.Value[0].Equals("..."))
                    {

                        // Cycle through the semantic keys and values and write them to the text box.
                        textBoxColors += (string.Format(" {0} {1}\n",
                        child.Value[0], child.Key ?? "null"));

                        resultTextBlock.Text = textBoxColors;
                    }

                    // If there was no match to the colors rule or if it matched GARBAGE, prompt the user.
                    else
                    {
                        resultTextBlock.Text = garbagePrompt;
                    }
                }
            }

            // Prompt the user if recognition failed or recognition confidence is low.
            else if (recoResult.Confidence == SpeechRecognitionConfidence.Rejected ||
            recoResult.Confidence == SpeechRecognitionConfidence.Low)
            {
                resultTextBlock.Text = speechResourceMap.GetValue("SRGSGarbagePromptText", speechContext).ValueAsString;
            }
        }
        private async void InitializeSpeechRecognizer()
        {
            try
            {
                if (speechRecognizer != null)
                {
                    // Each RecognizeAsync() call starts a *new* recognition operation, so cancelling
                    // or closing a fresh call never touches the pending one. Disposing the recognizer
                    // tears down any in-flight recognition.
                    this.speechRecognizer.Dispose();
                    this.speechRecognizer = null;
                }
                speechRecognizer = new SpeechRecognizer();
                var topicConstraint = new SpeechRecognitionTopicConstraint(SpeechRecognitionScenario.Dictation, "Development");
                speechRecognizer.Constraints.Add(topicConstraint);
                await speechRecognizer.CompileConstraintsAsync();

                this.Operation = await speechRecognizer.RecognizeAsync();
                if (Operation.Status == SpeechRecognitionResultStatus.Success)
                {
                    ResultGenerated(Operation.Text);

                    // Dispose once the result has been handed off.
                    speechRecognizer.Dispose();
                    speechRecognizer = null;
                }
            }
            catch (Exception)
            {
                // Recognition may fail (privacy consent, missing microphone); leave the recognizer unset.
            }
        }
        async void ISpeechRecognition.StartListening()
        {
            _inactive = false;

            // Start recognition.
            try
            {
                if (_speechModule.TextToSpeech.IsSpeaking)
                {
                    _speechModule.LanguageModel.AI.Engine.Debugger.Log(
                        Galatea.Diagnostics.DebuggerLogLevel.Diagnostic,
                        "TTS is speaking; Listening paused...");
                }
                else
                {
                    //// Get out of this f*****g loop
                    //if (_isListening) return;

                    //_isListening = true;

                    // Start Listening
                    int ruleId = -1;
                    SpeechRecognitionStatus status = SpeechRecognitionStatus.Empty;
                    SpeechRecognitionResult speechRecognitionResult = await speechRecognizer.RecognizeAsync();

                    // If successful, display the recognition result.
                    if (speechRecognitionResult.Status == SpeechRecognitionResultStatus.Success)
                    {
                        if (string.IsNullOrEmpty(speechRecognitionResult.Text))
                        {
                            ruleId = 0;
                            status = speechRecognitionResult.Status.Convert();
                        }
                    }
                    // On failure, ruleId stays -1 and status stays Empty; the event below
                    // still fires so subscribers can react to the failed recognition.

                    // Fire Event
                    Recognized?.Invoke(this, new SpeechRecognizedEventArgs(ruleId, speechRecognitionResult.Text, null, status));

                }
            }
            catch (TaskCanceledException exception)
            {
                // TaskCanceledException will be thrown if you exit the scenario while the recognizer is actively
                // processing speech. Since this happens here when we navigate out of the scenario, don't try to
                // show a message dialog for this exception.
                System.Diagnostics.Debug.WriteLine("TaskCanceledException caught while recognition in progress (can be ignored):");
                System.Diagnostics.Debug.WriteLine(exception.ToString());
            }
            catch (System.InvalidOperationException exception)
            {
                // The recognizer sporadically throws InvalidOperationException; log it instead of crashing.
                _speechModule.LanguageModel.AI.Engine.Debugger.Log(Galatea.Diagnostics.DebuggerLogLevel.Error, exception.Message);
                _speechModule.LanguageModel.AI.Engine.Debugger.Log(Galatea.Diagnostics.DebuggerLogLevel.StackTrace, exception.StackTrace);
            }
            catch (Exception exception)
            {
                string msg;
                // Handle the speech privacy policy error.
                if ((uint)exception.HResult == HResultPrivacyStatementDeclined)
                {
                    msg = Galatea.Globalization.RoboticsResources.SpeechRecognition_PrivacySettings_NotAccepted;
                    throw new TeaSpeechException(msg, exception);
                }

                throw;
            }
        }
        // Override OnActivated and check for voice-command activation.
        protected override async void OnActivated(IActivatedEventArgs args)
        {
            base.OnActivated(args);

            if (args.Kind != ActivationKind.VoiceCommand)
            {
                return;
            }
            else
            {
                // get commands spoken
                VoiceCommandActivatedEventArgs commandArgs             = args as VoiceCommandActivatedEventArgs;
                SpeechRecognitionResult        speechRecognitionResult = commandArgs.Result;

                // use this for debugging
                MessageDialog msgDialog    = new MessageDialog("");
                string        voiceCmdName = speechRecognitionResult.RulePath[0];
                string        textSpoken   = speechRecognitionResult.Text;

                // need this list for phrases spoken and recognised.
                IReadOnlyList<string> recognisedVoiceCmdPhrases; // {xPosition} {yPosition}

                msgDialog.Content = ("Parameters so far: " + System.Environment.NewLine + voiceCmdName + System.Environment.NewLine + textSpoken);

                await msgDialog.ShowAsync();

                // decide which command - new game or make move
                switch (voiceCmdName)
                {
                case "newGame":
                    msgDialog.Content = "New Game Command";
                    break;

                case "makeMove":
                    msgDialog.Content = "Make a move Command";

                    // find the new positions to move to
                    // move [to] [square] X is {xPosition}, Y is {yPosition}
                    string xValue = "xDefault", yValue = "yDefault";

                    if (speechRecognitionResult.SemanticInterpretation.
                        Properties.TryGetValue("xPosition", out recognisedVoiceCmdPhrases))
                    {
                        // save the x position
                        xValue = recognisedVoiceCmdPhrases.First();
                    }

                    if (speechRecognitionResult.SemanticInterpretation.
                        Properties.TryGetValue("yPosition", out recognisedVoiceCmdPhrases))
                    {
                        // save the y position
                        yValue = recognisedVoiceCmdPhrases.First();
                    }

                    msgDialog.Content = "Move to [" + xValue + "], [" + yValue + "]";

                    break;

                default:
                    msgDialog.Content = "Unknown Command";
                    break;
                }

                await msgDialog.ShowAsync();
            }
        }
Example #29
 public static bool IsRecognisedWithLowConfidence(this SpeechRecognitionResult result)
 {
     return result.Confidence == SpeechRecognitionConfidence.Low;
 }
        /// <summary>
        /// Uses the result from the speech recognizer to change the colors of the shapes.
        /// </summary>
        /// <param name="recoResult">The result from the recognition event</param>
        private void HandleRecognitionResult(SpeechRecognitionResult recoResult)
        {
            // Check the confidence level of the recognition result.
            if (recoResult.Confidence == SpeechRecognitionConfidence.High ||
                recoResult.Confidence == SpeechRecognitionConfidence.Medium)
            {
                // Declare a string that will contain messages when the color rule matches GARBAGE.
                string garbagePrompt = "";

                // BACKGROUND: Check to see if the recognition result contains the semantic key for the background color,
                // and not a match for the GARBAGE rule, and change the color.
                if (recoResult.SemanticInterpretation.Properties.ContainsKey("KEY_BACKGROUND") && recoResult.SemanticInterpretation.Properties["KEY_BACKGROUND"][0].ToString() != "...")
                {
                    string backgroundColor = recoResult.SemanticInterpretation.Properties["KEY_BACKGROUND"][0].ToString();
                    colorRectangle.Fill = new SolidColorBrush(getColor(backgroundColor));
                }

                // If "background" was matched, but the color rule matched GARBAGE, prompt the user.
                else if (recoResult.SemanticInterpretation.Properties.ContainsKey("KEY_BACKGROUND") && recoResult.SemanticInterpretation.Properties["KEY_BACKGROUND"][0].ToString() == "...")
                {
                    garbagePrompt       += speechResourceMap.GetValue("SRGSBackgroundGarbagePromptText", speechContext).ValueAsString;
                    resultTextBlock.Text = garbagePrompt;
                }

                // BORDER: Check to see if the recognition result contains the semantic key for the border color,
                // and not a match for the GARBAGE rule, and change the color.
                if (recoResult.SemanticInterpretation.Properties.ContainsKey("KEY_BORDER") && recoResult.SemanticInterpretation.Properties["KEY_BORDER"][0].ToString() != "...")
                {
                    string borderColor = recoResult.SemanticInterpretation.Properties["KEY_BORDER"][0].ToString();
                    colorRectangle.Stroke = new SolidColorBrush(getColor(borderColor));
                }

                // If "border" was matched, but the color rule matched GARBAGE, prompt the user.
                else if (recoResult.SemanticInterpretation.Properties.ContainsKey("KEY_BORDER") && recoResult.SemanticInterpretation.Properties["KEY_BORDER"][0].ToString() == "...")
                {
                    garbagePrompt       += speechResourceMap.GetValue("SRGSBorderGarbagePromptText", speechContext).ValueAsString;
                    resultTextBlock.Text = garbagePrompt;
                }

                // CIRCLE: Check to see if the recognition result contains the semantic key for the circle color,
                // and not a match for the GARBAGE rule, and change the color.
                if (recoResult.SemanticInterpretation.Properties.ContainsKey("KEY_CIRCLE") && recoResult.SemanticInterpretation.Properties["KEY_CIRCLE"][0].ToString() != "...")
                {
                    string circleColor = recoResult.SemanticInterpretation.Properties["KEY_CIRCLE"][0].ToString();
                    colorCircle.Fill = new SolidColorBrush(getColor(circleColor));
                }

                // If "circle" was matched, but the color rule matched GARBAGE, prompt the user.
                else if (recoResult.SemanticInterpretation.Properties.ContainsKey("KEY_CIRCLE") && recoResult.SemanticInterpretation.Properties["KEY_CIRCLE"][0].ToString() == "...")
                {
                    garbagePrompt       += speechResourceMap.GetValue("SRGSCircleGarbagePromptText", speechContext).ValueAsString;
                    resultTextBlock.Text = garbagePrompt;
                }

                // Initialize a string that will describe the user's color choices.
                string textBoxColors = "You selected (Semantic Interpretation)-> \n";

                // Write the color choices contained in the semantics of the recognition result to the text box.
                foreach (KeyValuePair<String, IReadOnlyList<string>> child in recoResult.SemanticInterpretation.Properties)
                {
                    // Check to see if any of the semantic values in recognition result contains a match for the GARBAGE rule.
                    if (!child.Value[0].Equals("..."))
                    {
                        // Cycle through the semantic keys and values and write them to the text box.
                        textBoxColors += (string.Format(" {0} {1}\n",
                                                        child.Value[0], child.Key ?? "null"));

                        resultTextBlock.Text = textBoxColors;
                    }

                    // If there was no match to the colors rule or if it matched GARBAGE, prompt the user.
                    else
                    {
                        resultTextBlock.Text = garbagePrompt;
                    }
                }
            }

            // Prompt the user if recognition failed or recognition confidence is low.
            else if (recoResult.Confidence == SpeechRecognitionConfidence.Rejected ||
                     recoResult.Confidence == SpeechRecognitionConfidence.Low)
            {
                resultTextBlock.Text = speechResourceMap.GetValue("SRGSGarbagePromptText", speechContext).ValueAsString;
            }
        }
 internal abstract Task ProcessAsync(SpeechRecognitionResult input,
                                     IEnumerable<IVoiceCommandProcessor> processors);
Example #32
 private void HandleRecognitionResult(SpeechRecognitionResult result)
 {
     if(result.Text.Equals("make coffee", StringComparison.CurrentCultureIgnoreCase) || 
         result.Text.Equals("start coffee", StringComparison.CurrentCultureIgnoreCase) ||
         result.Text.Equals("i want coffee", StringComparison.CurrentCultureIgnoreCase) ||
         result.Text.Equals("coffee please", StringComparison.CurrentCultureIgnoreCase))
     {
         Debug.WriteLine("Starting your coffee.");
         EnableCoffeeMakerRelay();
     }
     else if(result.Text.Equals("stop", StringComparison.CurrentCultureIgnoreCase) ||
             result.Text.Equals("turn off", StringComparison.CurrentCultureIgnoreCase) ||
             result.Text.Equals("that's enough", StringComparison.CurrentCultureIgnoreCase))
     {
         Debug.WriteLine("Stopping your coffee.");
         DisableCoffeeMakerRelay();
     }
 }
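 // A sketch (not from the original source) of a more scalable variant of the dispatch
 // above: case-insensitive phrase sets replace the chain of Equals calls, so adding a
 // trigger phrase becomes a one-line change. Assumes the same relay helpers as above;
 // the method name is hypothetical.
 private static readonly HashSet<string> StartPhrases = new HashSet<string>(StringComparer.CurrentCultureIgnoreCase)
 {
     "make coffee", "start coffee", "i want coffee", "coffee please"
 };

 private static readonly HashSet<string> StopPhrases = new HashSet<string>(StringComparer.CurrentCultureIgnoreCase)
 {
     "stop", "turn off", "that's enough"
 };

 private void HandleRecognitionResultWithSets(SpeechRecognitionResult result)
 {
     if (StartPhrases.Contains(result.Text))
     {
         Debug.WriteLine("Starting your coffee.");
         EnableCoffeeMakerRelay();
     }
     else if (StopPhrases.Contains(result.Text))
     {
         Debug.WriteLine("Stopping your coffee.");
         DisableCoffeeMakerRelay();
     }
 }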
Example #33
        public override async Task OnStartAsync(StartKind startKind, IActivatedEventArgs args)
        {
            if (SettingsHelper.IsAuthorized)
            {
                if (args is ShareTargetActivatedEventArgs share)
                {
                    ShareOperation = share.ShareOperation;
                    NavigationService.Navigate(typeof(ShareTargetPage));
                }
                else if (args is VoiceCommandActivatedEventArgs voice)
                {
                    SpeechRecognitionResult speechResult = voice.Result;
                    string command = speechResult.RulePath[0];

                    if (command == "ShowAllDialogs")
                    {
                        NavigationService.Navigate(typeof(MainPage));
                    }
                    else if (command == "ShowSpecificDialog")
                    {
                        //#TODO: Fix that this'll open a specific dialog
                        NavigationService.Navigate(typeof(MainPage));
                    }
                    else
                    {
                        NavigationService.Navigate(typeof(MainPage));
                    }
                }
                else if (args is ContactPanelActivatedEventArgs contact)
                {
                    var backgroundBrush = Application.Current.Resources["TelegramBackgroundTitlebarBrush"] as SolidColorBrush;
                    contact.ContactPanel.HeaderColor = backgroundBrush.Color;

                    var annotationStore = await ContactManager.RequestAnnotationStoreAsync(ContactAnnotationStoreAccessType.AppAnnotationsReadWrite);

                    var store = await ContactManager.RequestStoreAsync(ContactStoreAccessType.AppContactsReadWrite);

                    // Resolve the Telegram user behind this contact via its annotation
                    // RemoteId; fall back to MainPage if any step fails. (The original
                    // goto-based flow fell through the Navigate label even after a
                    // successful DialogPage navigation, navigating twice.)
                    var navigated = false;

                    if (store != null && annotationStore != null)
                    {
                        var full = await store.GetContactAsync(contact.Contact.Id);

                        if (full != null)
                        {
                            var annotations = await annotationStore.FindAnnotationsForContactAsync(full);

                            var first = annotations.FirstOrDefault();
                            if (first != null)
                            {
                                var remote = first.RemoteId;
                                if (int.TryParse(remote.Substring(1), out int userId))
                                {
                                    NavigationService.Navigate(typeof(DialogPage), new TLPeerUser {
                                        UserId = userId
                                    });
                                    navigated = true;
                                }
                            }
                        }
                    }

                    if (!navigated)
                    {
                        NavigationService.Navigate(typeof(MainPage));
                    }
                }
                else if (args is ProtocolActivatedEventArgs protocol)
                {
                    NavigationService.Navigate(typeof(MainPage), protocol.Uri.ToString());
                }
                else
                {
                    var activate = args as ToastNotificationActivatedEventArgs;
                    var launch   = activate?.Argument;

                    NavigationService.Navigate(typeof(MainPage), launch);
                }
            }
            else
            {
                NavigationService.Navigate(typeof(SignInWelcomePage));
            }

            Window.Current.Activated         -= Window_Activated;
            Window.Current.Activated         += Window_Activated;
            Window.Current.VisibilityChanged -= Window_VisibilityChanged;
            Window.Current.VisibilityChanged += Window_VisibilityChanged;
            Window.Current.CoreWindow.Dispatcher.AcceleratorKeyActivated -= Dispatcher_AcceleratorKeyActivated;
            Window.Current.CoreWindow.Dispatcher.AcceleratorKeyActivated += Dispatcher_AcceleratorKeyActivated;

            UpdateBars();
            ApplicationView.GetForCurrentView().SetPreferredMinSize(new Size(320, 500));
            SystemNavigationManager.GetForCurrentView().AppViewBackButtonVisibility = AppViewBackButtonVisibility.Visible;

            Theme.Current.Update();
            App.RaiseThemeChanged();

            Task.Run(() => OnStartSync());
        }
 private void ProcessCommands(SpeechRecognitionResult result)
 {
     switch (result.Text)
     {
         case "stop":
             //StopVoiceRecognition();
             break;
         case "plus":
             FontSize += 2;
             break;
         case "minus":
             FontSize -= 2;
             break;
         case "next":
             GoToNext();
             break;
         case "back":
             GoBack();
             break;
     }
 }
        public static void RunCommand(VoiceCommandActivatedEventArgs cmd, Action action)
        {
            SpeechRecognitionResult result = cmd.Result;
            string commandName             = result.RulePath[0];

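            // Map each command name declared in the VCD file to the action that fulfills it.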
            var vcdLookup = new Dictionary<string, Action> {
                {
                    "WatchVideoCommand", () =>
                    {
                        CallApi("video");
                        action();
                    }
                },
                {
                    "DrawingCommand", () =>
                    {
                        CallApi("drawing");
                        action();
                    }
                },
                {
                    "PhotoEditCommand", () =>
                    {
                        CallApi("photo");
                        action();
                    }
                },
                {
                    "GoToMeetingCommand", () =>
                    {
                        CallApi("meeting");
                        action();
                    }
                },
                {
                    "WriteBirthdayCardCommand", () =>
                    {
                        CallApi("powerpointBirthday");
                        action();
                    }
                },
                {
                    "WriteCVCommand", () =>
                    {
                        CallApi("wordResume");
                        action();
                    }
                },
                {
                    "WriteResumeCommand", () =>
                    {
                        CallApi("wordResume");
                        action();
                    }
                },
                {
                    "WriteLetterCommand", () =>
                    {
                        CallApi("wordLetter");
                        action();
                    }
                },
                {
                    "WriteLetterToCommand", () =>
                    {
                        CallApi("wordLetter;" + result.Text.Substring(result.Text.LastIndexOf(" to") + 3).Trim());
                        action();
                    }
                },
                {
                    "WatchAVideoCommand", () =>
                    {
                        CallApi("youtube;" + result.Text.Trim().Replace("watch", ""));
                        action();
                    }
                }
            };

            // Typed Action values avoid the original async-void lambdas (which awaited
            // nothing) and the Delegate.DynamicInvoke indirection.
            if (vcdLookup.TryGetValue(commandName, out Action command))
            {
                command();
            }
        }
 /// <summary>
 /// Creates a new instance of the SpeechRecognizedEventArgs class.
 /// </summary>
 public SpeechRecognizedEventArgs(SpeechRecognitionResult result)
 {
     Result   = result;
     Canceled = false;
 }
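 // Recognized text can arrive with profane words wrapped in <profanity> tags;
 // this helper unwraps the tags while keeping the word itself.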
 static string GetSpeech(SpeechRecognitionResult result) {
     return new Regex(@"<profanity>(.*?)</profanity>").Replace(result.Text, match => match.Groups[1].Value);
 }
        public static async void StartListening(string exampleText = "", bool readBackEnabled = false, bool showConfirmation = false, bool showUI = true)
        {
            if (cortanaRecognizerState == CortanaRecognizerState.Listening)
            {
                return;
            }
            cortanaRecognizerState = CortanaRecognizerState.Listening;

            Debug.WriteLine("Entering: StartListening()");

            speechRecognizer = new SpeechRecognizer();
            speechRecognizer.StateChanged += speechRecognizer_StateChanged;
            speechRecognizer.RecognitionQualityDegrading += speechRecognizer_RecognitionQualityDegrading;

            // Set special commands
            if (showUI)
            {
                speechRecognizer.UIOptions.ExampleText       = exampleText;
                speechRecognizer.UIOptions.IsReadBackEnabled = readBackEnabled;
                speechRecognizer.UIOptions.ShowConfirmation  = showConfirmation;
            }

            Debug.WriteLine("Speech Recognizer Set");
            SpeechRecognitionResult speechRecognitionResult = null;

            Debug.WriteLine("Setting States");
            try {
                await speechRecognizer.CompileConstraintsAsync();

                if (showUI)
                {
                    speechResultTask = speechRecognizer.RecognizeWithUIAsync();
                }
                else
                {
                    speechResultTask = speechRecognizer.RecognizeAsync();
                }

                Debug.WriteLine("Beginning Recognition");

                // Await the operation instead of spinning on its Status property;
                // the original busy-wait loop burned a CPU core for the entire recognition.
                speechRecognitionResult = await speechResultTask;
            }
            catch (Exception) { }

            Debug.WriteLine("Recognition Received");

            cortanaRecognizerState = CortanaRecognizerState.NotListening;
            speechRecognizer       = null;
            speechResultTask       = null;

            if (speechRecognitionResult != null)
            {
                CortanaVoiceRecognitionResult?.Invoke(null, new VoiceRecognitionResultEventArgs(APIResponse.Successful, speechRecognitionResult));
            }
            else
            {
                CortanaVoiceRecognitionResult?.Invoke(null, new VoiceRecognitionResultEventArgs(APIResponse.Failed));
            }

            Debug.WriteLine("Exiting StartListening()");
        }
 private static bool isVoiceCommand(SpeechRecognitionResult commandResult)
 {
     // Properties[...] throws KeyNotFoundException when "commandMode" is absent,
     // so probe with TryGetValue before checking for voice activation.
     IReadOnlyList<string> modes = null;
     commandResult?.SemanticInterpretation?.Properties.TryGetValue("commandMode", out modes);
     return modes?.FirstOrDefault() == "voice";
 }
Example #40
        public async Task <RecognizedSpeech> Recognize(string constraints, bool ui)
        {
            SpeechRecognitionGrammarFileConstraint grammarFileConstraint = null;
            var  result  = new RecognizedSpeech();
            bool isTable = false;
            Dictionary<string, string> dictionary = null;

            if (!string.IsNullOrWhiteSpace(constraints))
            {
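                // A constraint of the form "{table:name}" draws its phrase list from the
                // named dictionary in MainPage.Instance.mainDictionary.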
                isTable = constraints.StartsWith("{table:");

                if (isTable)
                {
                    var name = constraints.Substring(7);
                    var i    = name.IndexOf("}", StringComparison.CurrentCultureIgnoreCase);
                    name = name.Substring(0, i);

                    var constraintBuilder = new StringBuilder();
                    dictionary = MainPage.Instance.mainDictionary[name];

                    Debug.WriteLine("table " + name + " count=" + dictionary.Count);

                    foreach (var key in dictionary.Keys)
                    {
                        constraintBuilder.Append(key.Replace(",", " "));
                        constraintBuilder.Append(",");
                    }

                    if (constraintBuilder.Length < 2)
                    {
                        result.error = -3;
                        return result;
                    }

                    constraints = constraintBuilder.ToString(0, constraintBuilder.Length - 1);
                    constraints = constraints.Replace(";", "-").Replace("&amp;", " and ").Replace("&", " and ");
                }

                //build grammar constraints
                var grammarFileTemplate =
                    await
                    StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///GrammarConstraintTemplate.grxml"));

                const string wordTemplate = "<item>{0}</item>";
                const string itemTemplate = "<item><one-of>{0}</one-of><tag>out=\"{1}\";</tag></item>";

                var    itemBuilder = new StringBuilder();
                var    items       = constraints.Split(';');
                string keyword     = null;
                foreach (var itemPart in items)
                {
                    var item = itemPart;

                    var equals = item.IndexOf('=');
                    if (equals > -1)
                    {
                        keyword = item.Substring(0, equals);
                        item    = item.Substring(equals + 1);
                    }

                    var words       = item.Split(',');
                    var wordBuilder = new StringBuilder();
                    foreach (var word in words)
                    {
                        wordBuilder.AppendFormat(wordTemplate, word);
                    }

                    if (!string.IsNullOrWhiteSpace(keyword))
                    {
                        itemBuilder.AppendFormat(itemTemplate, wordBuilder, keyword);
                    }
                    else
                    {
                        itemBuilder.Append(wordBuilder);
                    }
                }

                var localFolder = ApplicationData.Current.LocalFolder;

                var grammarTemplate = await FileIO.ReadTextAsync(grammarFileTemplate);

                var grammarFile =
                    await
                    localFolder.CreateFileAsync("GrammarConstraint.grxml", CreationCollisionOption.ReplaceExisting);

                var finalGrammarText = string.Format(grammarTemplate, itemBuilder);
                await FileIO.WriteTextAsync(grammarFile, finalGrammarText);

                grammarFileConstraint = new SpeechRecognitionGrammarFileConstraint(grammarFile, "constraints");
            }

            if (isRecognizing && recognizer != null)
            {
                await recognizer.StopRecognitionAsync();
            }

            recognizer = new SpeechRecognizer();

            if (grammarFileConstraint != null)
            {
                recognizer.Constraints.Add(grammarFileConstraint);
            }

            SpeechRecognitionResult recognize = null;

            try
            {
                isRecognizing = false;
                SpeechStatusChanged?.Invoke(this, new SpeechArgs {
                    Status = SpeechStatus.None
                });

                await recognizer.CompileConstraintsAsync();

                isRecognizing = true;
                SpeechStatusChanged?.Invoke(this, new SpeechArgs {
                    Status = SpeechStatus.Listening
                });

                recognize = await (ui ? recognizer.RecognizeWithUIAsync() : recognizer.RecognizeAsync());
            }
            catch (Exception e)
            {
                Debug.WriteLine(e.GetType() + ":" + e.Message);

                // recognize is still null here: its only assignment is the awaited call
                // that just threw, so there is no status to copy. Report the sentinel
                // confidence value and return.
                result.confidence = 5;
                return result;
            }
            finally
            {
                isRecognizing = false;
                SpeechStatusChanged?.Invoke(this, new SpeechArgs {
                    Status = isUserStopped ? SpeechStatus.Stopped : SpeechStatus.None
                });
            }

            result.status = isUserStopped ? SpeechRecognitionResultStatus.UserCanceled : recognize.Status;

            if (string.IsNullOrWhiteSpace(constraints))
            {
                result.text = recognize.Text;
                return result;
            }

            result.confidence = (int)recognize.Confidence;

            var text = recognize.Text.ToUpperInvariant();

            var    items2   = constraints.Split(';');
            string keyword2 = null;
            var    index    = 1;

            foreach (var itemPart in items2)
            {
                var item = itemPart;

                var equals = item.IndexOf('=');
                if (equals > -1)
                {
                    keyword2 = item.Substring(0, equals);
                    item     = item.Substring(equals + 1);
                }

                var words      = item.Split(',');
                var innerIndex = 1;
                foreach (var word in words)
                {
                    if (word.ToUpperInvariant().Equals(text))
                    {
                        result.text = keyword2 ?? word;
                        if (isTable)
                        {
                            result.action = dictionary[result.text];
                        }

                        result.index = items2.Length == 1 ? innerIndex : index;
                        return result;
                    }

                    innerIndex++;
                }

                index++;
            }

            result.text = recognize.Text;
            return result;
        }
 protected virtual void OnRecognitionCommandFound(SpeechRecognitionResult sender, VoiceRecognitionResult args)
 {
     RecognitionCommandFound?.Invoke(sender, args);
 }
Example #42
        public static async Task <Translate> SpeechContinuousRecognitionAsync(string filePath)
        {
            Utils.ConsoleWriteInfo($"Starting conversion of file: {filePath}");
            // Creates an instance of a speech config with specified subscription key and service region.
            // Replace with your own subscription key and service region (e.g., "westus").
            string       subscriptionKey    = W3Config.properties["subscriptionKey"];
            string       subscriptionRegion = W3Config.properties["subscriptionRegion"];
            SpeechConfig config             = SpeechConfig.FromSubscription(subscriptionKey, subscriptionRegion);

            string initialSilenceTimeoutMs = W3Config.properties["initialSilenceTimeoutMs"];
            string endSilenceTimeoutMs     = W3Config.properties["endSilenceTimeoutMs"];

            config.SetProperty(PropertyId.SpeechServiceConnection_InitialSilenceTimeoutMs, initialSilenceTimeoutMs);
            config.SetProperty(PropertyId.SpeechServiceConnection_EndSilenceTimeoutMs, endSilenceTimeoutMs);
            config.OutputFormat = OutputFormat.Detailed;

            string    language         = W3Config.properties["languageScope"];
            bool      endOfFile        = false;
            Translate translateContent = null;

            // Creates a speech recognizer from file
            using (AudioConfig audioInput = AudioConfig.FromWavFileInput(filePath))
                using (SpeechRecognizer recognizer = new SpeechRecognizer(config, language, audioInput))
                {
                    // Subscribes to events.
                    recognizer.Recognizing += (s, e) =>
                    {
                        // Console.SetCursorPosition(Console.CursorLeft, Console.CursorTop - 1);
                        // Utils.ConsoleWriteInfo($"{W3Config.properties["message:recognizing"]}: {e.Result.Text}");
                    };

                    recognizer.Recognized += (s, e) =>
                    {
                        SpeechRecognitionResult result = e.Result;
                        if (result.Reason == ResultReason.RecognizedSpeech)
                        {
                            Utils.ConsoleWriteInfo($"{W3Config.properties["message:recognizing"]}: {result.Text}");
                            translateContent = new Translate {
                                Filename = filePath,
                                Content  = result.Text
                            };
                            endOfFile = true;
                        }
                    };

                    recognizer.Canceled += (s, e) =>
                    {
                        Console.WriteLine($" | -- Recognition Canceled. Reason: {e.Reason}, ErrorDetails: {e.ErrorDetails}");
                    };


                    recognizer.SessionStarted += (s, e) =>
                    {
                        Console.WriteLine($" | -- {W3Config.properties["message:sessionStarted"]} \n");
                    };

                    recognizer.SessionStopped += (s, e) =>
                    {
                        Console.WriteLine($" | -- {W3Config.properties["message:sessionStopped"]}");
                    };

                    recognizer.SpeechEndDetected += (s, e) =>
                    {
                        Console.WriteLine($" | -- {W3Config.properties["message:speechEndDetected"]}");
                        endOfFile = true;
                    };

                    // Starts continuous recognition. Uses StopContinuousRecognitionAsync() to stop recognition.
                    await recognizer.StartContinuousRecognitionAsync().ConfigureAwait(false);

                    // Wait until one of the handlers above flips endOfFile; Task.Delay
                    // yields the thread instead of blocking it like Thread.Sleep.
                    while (!endOfFile)
                    {
                        await Task.Delay(500).ConfigureAwait(false);
                    }

                    // Stops recognition.
                    await recognizer.StopContinuousRecognitionAsync().ConfigureAwait(false);

                    Console.WriteLine("<================================================================>");
                    return translateContent;
                }
        }
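        // Sketch (not in the original source): a callback-driven alternative to the
        // endOfFile polling flag above, following the stop-signal pattern from the
        // official Speech SDK samples. It assumes a recognizer configured as in
        // SpeechContinuousRecognitionAsync; the helper name is hypothetical.
        private static async Task RecognizeUntilSessionStopsAsync(SpeechRecognizer recognizer)
        {
            var stopRecognition = new TaskCompletionSource<int>();

            // Either a stopped session or a cancellation releases the awaiter below.
            recognizer.SessionStopped += (s, e) => stopRecognition.TrySetResult(0);
            recognizer.Canceled      += (s, e) => stopRecognition.TrySetResult(0);

            await recognizer.StartContinuousRecognitionAsync().ConfigureAwait(false);
            await stopRecognition.Task.ConfigureAwait(false);
            await recognizer.StopContinuousRecognitionAsync().ConfigureAwait(false);
        }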
Example #43
 public static UserCortanaCommand ProcessUserCommand(string voiceCommandName, SpeechRecognitionResult speechRecognitionResult, CommandDiagnostics commandArgs)
 {
     SharedModel model = ModelHolder.Model;
     UserCortanaCommand command = null;
     if (model != null) {
         IList<UserCortanaCommand> commands = model.UserCortanaCommands;
         // FirstOrDefault, not First(): First() throws when no user command matches,
         // which would defeat the null check below.
         command = commands.FirstOrDefault(c => c.Name.Equals(voiceCommandName));
         if (command != null) {
             command = command.Spawn(speechRecognitionResult);
         }
     }
     return command;
 }
        private async void ExecuteVoiceCommand(SpeechRecognitionResult result)
        {
            bool isConnected = await LoadAndConnnect();
            if (!isConnected)
                return;

            string voiceCommandName = result.RulePath[0];
            string textSpoken = result.Text;
            
            switch (voiceCommandName)
            {
                case "PlayArtist":
                    searchType = SearchType.Artist;
                    string artistName = SemanticInterpretation("musicTopic", result);
                    allArtists = await AudioLibrary.GetArtists();
                    var filteredArtists = allArtists.Where(t => t.Label.ToLower().Contains(artistName.ToLower())).ToList();
                    if (filteredArtists.Count > 1)
                    {
                        searchHitState = SearchHitState.Multiple;
                        ReceivedCommandTextBlock.Text = "We found multiple artists. Choose one...";
                        SearchedItemsListView.ItemsSource = filteredArtists;
                    }
                    else if (filteredArtists.Count > 0)
                    {
                        searchHitState = SearchHitState.Single;
                        ReceivedCommandTextBlock.Text = "This is the artist we found...";
                        SearchedItemsListView.ItemsSource = filteredArtists;
                        Player.PlayArtist(filteredArtists[0]);
                        QuestionNameTextBlock.Text = "Did we get the right one?";
                        QuestionWrapper.Visibility = Windows.UI.Xaml.Visibility.Visible;
                    }
                    else
                    {
                        searchHitState = SearchHitState.None;
                        ReceivedCommandTextBlock.Text = "Sorry, we couldn't find what you asked for.";
                        //SearchedItemsListView.ItemsSource = allArtists;
                        QuestionNameTextBlock.Text = "Would you like to see a list of all artists?";
                        QuestionWrapper.Visibility = Windows.UI.Xaml.Visibility.Visible;
                    }
                    break;
                case "PlayMovie":
                    searchType = SearchType.Movie;
                    string movieName = SemanticInterpretation("movieTopic", result);
                    allMovies = await VideoLibrary.GetMovies();
                    var filteredMovies = allMovies.Where(t => t.Title.ToLower().Contains(movieName.ToLower())).ToList();
                    if (filteredMovies.Count > 1)
                    {
                        searchHitState = SearchHitState.Multiple;
                        ReceivedCommandTextBlock.Text = "We found multiple movies. Choose one...";
                        SearchedItemsListView.ItemsSource = filteredMovies;
                    }
                    else if (filteredMovies.Count > 0)
                    {
                        searchHitState = SearchHitState.Single;
                        ReceivedCommandTextBlock.Text = "This is the movie we found...";
                        SearchedItemsListView.ItemsSource = filteredMovies;
                        Player.PlayMovie(filteredMovies[0]);
                        QuestionNameTextBlock.Text = "Did we find the right one?";
                        QuestionWrapper.Visibility = Windows.UI.Xaml.Visibility.Visible;
                    }
                    else
                    {
                        searchHitState = SearchHitState.None;
                        ReceivedCommandTextBlock.Text = "Sorry, we couldn't find what you asked for. Here is the list of all movies.";
                        //SearchedItemsListView.ItemsSource = allMovies;
                        QuestionNameTextBlock.Text = "Would you like to see a list of all movies?";
                        QuestionWrapper.Visibility = Windows.UI.Xaml.Visibility.Visible;
                    }
                    break;
                case "PlayAlbum":
                    searchType = SearchType.Album;
                    string albumName = SemanticInterpretation("musicTopic", result);
                    allAlbums = await AudioLibrary.GetAlbums();
                    var filteredAlbums = allAlbums.Where(t => t.Title.ToLower().Contains(albumName.ToLower())).ToList();
                    if (filteredAlbums.Count > 1)
                    {
                        searchHitState = SearchHitState.Multiple;
                        ReceivedCommandTextBlock.Text = "We found multiple albums. Choose one...";
                        SearchedItemsListView.ItemsSource = filteredAlbums;
                    }
                    else if (filteredAlbums.Count > 0)
                    {
                        searchHitState = SearchHitState.Single;
                        ReceivedCommandTextBlock.Text = "This is the album we found...";
                        SearchedItemsListView.ItemsSource = filteredAlbums;
                        Player.PlayAlbum(filteredAlbums[0]);
                        QuestionNameTextBlock.Text = "Did we get the right one?";
                        QuestionWrapper.Visibility = Windows.UI.Xaml.Visibility.Visible;
                    }
                    else
                    {
                        searchHitState = SearchHitState.None;
                        ReceivedCommandTextBlock.Text = "Sorry, we couldn't find what you asked for. Here is the list of all albums.";
                        //SearchedItemsListView.ItemsSource = allAlbums;
                        QuestionNameTextBlock.Text = "Would you like to see a list of all albums?";
                        QuestionWrapper.Visibility = Windows.UI.Xaml.Visibility.Visible;
                    }
                    break;
                case "StartParty":
                    await Player.PlayPartyMode();
                    ReceivedCommandTextBlock.Text = "Started party mode!";
                    await Task.Delay(1000);
                    Frame.Navigate(typeof(CoverPage));
                    break;
                default:
                    break;
            }
            if (searchHitState == SearchHitState.Single)
            {
                GlobalVariables.CurrentTracker.SendEvent(EventCategories.VoiceCommand, EventActions.VoiceCommand, "Single" + voiceCommandName, 0);
            }
            else if (searchHitState == SearchHitState.None)
            {
                GlobalVariables.CurrentTracker.SendEvent(EventCategories.VoiceCommand, EventActions.VoiceCommand, "Zero" + voiceCommandName, 0);
            }
        }
Example #45
 public VoiceCommandInfo(SpeechRecognitionResult speechRecognitionResult)
 {
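     // RulePath[0] is the Name of the Command element that matched in the VCD file.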
     this.Result           = speechRecognitionResult;
     this.VoiceCommandName = speechRecognitionResult?.RulePath[0];
     this.TextSpoken       = speechRecognitionResult?.Text;
 }
Example #46
 private string SemanticInterpretation(string interpretationKey, SpeechRecognitionResult speechRecognition)
 {
     return speechRecognition.SemanticInterpretation.Properties[interpretationKey].FirstOrDefault();
 }
        private AIRequest CreateAIRequest(SpeechRecognitionResult recognitionResults)
        {
            var texts = new List<string> { recognitionResults.Text };
            var confidences = new List<float> { ConfidenceToFloat(recognitionResults.Confidence) };

            var aiRequest = new AIRequest();

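            // Fold up to five alternate hypotheses into the request so the AI service
            // receives a confidence-ranked list rather than a single guess.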
            var alternates = recognitionResults.GetAlternates(5);
            if (alternates != null)
            {
                foreach (var a in alternates)
                {
                    texts.Add(a.Text);
                    confidences.Add(ConfidenceToFloat(a.Confidence));
                }
            }
            aiRequest.Query = texts.ToArray();
            aiRequest.Confidence = confidences.ToArray();
            return aiRequest;
        }
Example #48
        protected override async void OnNavigatedTo(NavigationEventArgs e)
        {
            base.OnNavigatedTo(e);
            if (!IsInternet())
            {
                await new MessageDialog("Seems you are not connected to the Internet").ShowAsync();
                return;
            }
            else
            {
                progress.IsActive = true;
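                // When activated by a Cortana voice command, the navigation parameter
                // carries the SpeechRecognitionResult with the spoken source/destination.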
                speechRecognition = e.Parameter as SpeechRecognitionResult;
                if (speechRecognition != null)
                {
                    mSource      = this.SemanticInterpretation("source", speechRecognition);
                    mDestination = this.SemanticInterpretation("destination", speechRecognition);
                    if (mSource != null)
                    {
                        SearchSourceBox.Text = mSource;
                    }
                    if (mDestination != null)
                    {
                        SearchDestinationBox.Text = mDestination;
                    }
                }
                try
                {
                    var parameter = e.Parameter as WwwFormUrlDecoder;
                    _destination = parameter.GetFirstValueByName("nmlocation");
                    if (_destination == null || _destination.Equals(""))
                    {
                        _destination = "Gachibowli";
                    }
                    System.Diagnostics.Debug.WriteLine(_destination);
                    _isFromInsight = true;
                }
                catch (Exception)
                {
                }
                finally
                {
                    MyMap.MapServiceToken = "YP9WTSzOHUo0dbahsH8J~J-K6u01dT98SF4uCCKpiwA~AnGaYM6yJxoLF1tGHEIXHFskGfwSRJTr1S5aO1dB-TCXjQ1ZX0xEWgeYslbC3Fov";
                    Geolocator locator = new Geolocator();
                    locator.DesiredAccuracyInMeters = 50;
                    position = await locator.GetGeopositionAsync();

                    await MyMap.TrySetViewAsync(position.Coordinate.Point, 17D);

                    progress.IsActive = false;
                    mySlider.Value    = MyMap.ZoomLevel;
                    AddMapIcon(position.Coordinate.Point.Position.Latitude, position.Coordinate.Point.Position.Longitude);
                    CabsAPI api = new CabsAPI();
                    latlng = position.Coordinate.Point.Position.Latitude.ToString() + "," + position.Coordinate.Point.Position.Longitude.ToString();
                    ReverseGeoResposne res = await api.GetReverseCodingResultlatlng(Token, latlng);

                    if (res.Code == ResponseCode.SUCCESS)
                    {
                        CurrentSource        = res.FormattedAddress;
                        SearchSourceBox.Text = CurrentSource;
                        _source = CurrentSource;
                        string lat = position.Coordinate.Point.Position.Latitude.ToString();
                        string lng = position.Coordinate.Point.Position.Longitude.ToString();
                        _cabsView = new CabsListViewModel(lng, lat, Token);
                        if (_isFromInsight)
                        {
                            SearchDestinationBox.Text = _destination;
                            GeoResponse location;
                            var         localSettings = Windows.Storage.ApplicationData.Current.LocalSettings;
                            token    = localSettings.Values["Token"].ToString();
                            location = await api.GeoCodingResult(token, _destination);

                            if (location.Code == ResponseCode.SUCCESS)
                            {
                                dlat = location.Position.Latitude;
                                dlng = location.Position.Longitude;
                                AddMapIcon(double.Parse(dlat), double.Parse(dlng));
                                _cabsView.SetLatLng(dlat, dlng);
                                ShowLoader(true);
                                await _cabsView.RefreshView(CabsListViewModel.REFRESH_ESTIMATE);

                                CabsListView.ItemsSource = _cabsView.Cabs;
                                ShowLoader(false);
                            }
                            else
                            {
                                await new MessageDialog("Error fetching coordinates").ShowAsync();
                            }
                        }
                        else
                        {
                            ShowLoader(true);
                            await _cabsView.RefreshView(CabsListViewModel.REFRESH_SURGE);

                            CabsListView.ItemsSource = _cabsView.Cabs;
                            ShowLoader(false);
                        }
                    }
                    else
                    {
                        await new MessageDialog("No such location exists").ShowAsync();
                    }
                }
            }
        }
Example #49
 private void ProcessCommands(SpeechRecognitionResult result)
 {
     switch (result.Text)
     {
         case "stop":
             StopVoiceRecognition();
             break;
     }
 }
Example #50
 private string SemanticInterpretation(string interpretationKey, SpeechRecognitionResult speechRecognitionResult)
 {
     return speechRecognitionResult.SemanticInterpretation.Properties[interpretationKey].FirstOrDefault();
 }
Example #51
 /// <summary>
 /// Returns the semantic interpretation of a speech result. Returns null if there is no interpretation for
 /// that key.
 /// </summary>
 /// <param name="interpretationKey">The interpretation key.</param>
 /// <param name="speechRecognitionResult">The result to get an interpretation from.</param>
 /// <returns></returns>
 private string SemanticInterpretation(string interpretationKey, SpeechRecognitionResult speechRecognitionResult)
 {
     // TryGetValue makes the method match its documented contract: a missing key
     // returns null instead of throwing KeyNotFoundException.
     return speechRecognitionResult.SemanticInterpretation.Properties.TryGetValue(interpretationKey, out var values)
         ? values.FirstOrDefault()
         : null;
 }
 public static string NormalizeText(this SpeechRecognitionResult result)
 => result.Text.EndsWith("?") ? result.Text : $"{result.Text}?";