private async void STT_Freeform(object sender, RoutedEventArgs e)
    {
        this.speechRecognizer.Recognizer.Grammars.Clear();

        // Use the short message dictation grammar with the speech recognizer.
        this.speechRecognizer.Recognizer.Grammars.AddGrammarFromPredefinedType("message", SpeechPredefinedGrammar.Dictation);

        await this.speechRecognizer.Recognizer.PreloadGrammarsAsync();

        try
        {
            // Use the built-in UI to prompt the user and get the result.
            SpeechRecognitionUIResult recognitionResult = await this.speechRecognizer.RecognizeWithUIAsync();

            if (recognitionResult.ResultStatus == SpeechRecognitionUIStatus.Succeeded)
            {
                // Output the speech recognition result.
                txtDictationResult.Text = "You said: " + recognitionResult.RecognitionResult.Text;
            }
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.Message);
        }
    }
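This handler (and the similar web-search handler further down) references a speechRecognizer field that is created elsewhere in the page. A minimal sketch of that setup, where the page class, constructor, and field initialization point are assumptions and not part of the original sample:

    // Assumed setup for the examples that use this.speechRecognizer.
    // The page class and constructor are assumptions, not from the sample.
    private SpeechRecognizerUI speechRecognizer;

    public MainPage()
    {
        InitializeComponent();
        this.speechRecognizer = new SpeechRecognizerUI();
    }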
Example #2
        private async void SpeakButton_Click(object sender, EventArgs e)
        {
            try
            {
                await speechSynthesizer.SpeakTextAsync("Say the item name");

                this.recoWithUI = new SpeechRecognizerUI();
                recoWithUI.Recognizer.Grammars.AddGrammarFromPredefinedType("webSearch", SpeechPredefinedGrammar.WebSearch);
                SpeechRecognitionUIResult recoResultName = await recoWithUI.RecognizeWithUIAsync();

                Name.Text = recoResultName.ResultStatus == SpeechRecognitionUIStatus.Succeeded ? recoResultName.RecognitionResult.Text : "Unknown";

                if (recoResultName.ResultStatus != SpeechRecognitionUIStatus.Cancelled)
                {
                    await speechSynthesizer.SpeakTextAsync("Say the item price");

                    this.recoWithUI = new SpeechRecognizerUI();
                    SpeechRecognitionUIResult recoResultPrice = await recoWithUI.RecognizeWithUIAsync();

                    Amount.Text = GetOnlyNumberFromSpeech(recoResultPrice);
                }
            }
            catch
            {
                // Recognition was cancelled or failed; leave the fields unchanged.
            }
        }
Example #3
        private async void Sp(object sender, RoutedEventArgs e)
        {
            try
            {
                _recoWithUi = new SpeechRecognizerUI();
                _recoWithUi.Settings.ReadoutEnabled   = false;
                _recoWithUi.Settings.ShowConfirmation = false;
                _recoWithUi.Settings.ExampleText      = "";

                // Restrict recognition to the command phrases known to the app.
                string[] commands = _vita.GetAllCommands();
                _recoWithUi.Recognizer.Grammars.AddGrammarFromList("commands", commands);

                // Use the installed Russian (ru-RU) recognizer.
                IEnumerable <SpeechRecognizerInformation> russianRecognizers = from recognizerInfo in InstalledSpeechRecognizers.All
                                                                               where recognizerInfo.Language == "ru-RU"
                                                                               select recognizerInfo;

                _recoWithUi.Recognizer.SetRecognizer(russianRecognizers.First());
                SpeechRecognitionUIResult recoResult = await _recoWithUi.RecognizeWithUIAsync();

                //SpeechSynthesizer synth = new SpeechSynthesizer();
                //await synth.SpeakTextAsync(recoResult.RecognitionResult.Text);
                MoonPadTcpClient.Send(recoResult.RecognitionResult.Text);
                _fl = 1;
            }
            catch (Exception)
            {
                _fl = 1;
            }
        }
Example #4
        private async void FindFrequency_Click(object sender, RoutedEventArgs e)
        {
            try
            {
                SpeechRecognizerUI speechRecognition = new SpeechRecognizerUI();
                speechRecognition.Settings.ListenText  = "Enter Sentence!";
                speechRecognition.Settings.ExampleText = "plot will show frequency of letters (lower case)";
                SpeechSynthesizer synth = new SpeechSynthesizer();
                await synth.SpeakTextAsync("Say something");

                SpeechRecognitionUIResult recoResult = await speechRecognition.RecognizeWithUIAsync();

                if (recoResult.ResultStatus == SpeechRecognitionUIStatus.Succeeded)
                {
                    int[] alpha = new int[26];

                    // Strings are immutable, so the lower-cased text must be stored.
                    string text = recoResult.RecognitionResult.Text.ToLower();
                    foreach (char c in text)
                    {
                        if (c >= 'a' && c <= 'z')
                        {
                            alpha[c - 'a']++;
                        }
                    }

                    // One data point per letter of the alphabet, not per character of the text.
                    for (int i = 0; i < alpha.Length; i++)
                    {
                        DataPoint model = new DataPoint((char)('a' + i), alpha[i]);
                        CollectionCoordinates.Add(model);
                    }
                }
            }
            catch (Exception)
            {
                // Recognition was cancelled or failed; leave the plot unchanged.
            }
        }
Example #5
        private async void SpeakAppBr_Click(object sender, EventArgs e)
        {
            MessageBox.Show("Sorry! I only listen to you in English!");

            strLngFrom      = "en";
            btnFrom.Content = "English";

            this.speechRecognizer.Recognizer.Grammars.Clear();
            this.speechRecognizer.Recognizer.Grammars.AddGrammarFromPredefinedType("message", SpeechPredefinedGrammar.Dictation);
            await this.speechRecognizer.Recognizer.PreloadGrammarsAsync();

            try
            {
                SpeechRecognitionUIResult recognitionResult = await this.speechRecognizer.RecognizeWithUIAsync();

                if (recognitionResult.ResultStatus == SpeechRecognitionUIStatus.Succeeded)
                {
                    strTextToTranslate = recognitionResult.RecognitionResult.Text;
                    String strTranslatorAccessURI = "https://datamarket.accesscontrol.windows.net/v2/OAuth2-13";
                    System.Net.WebRequest req     = System.Net.WebRequest.Create(strTranslatorAccessURI);
                    req.Method      = "POST";
                    req.ContentType = "application/x-www-form-urlencoded";
                    req.BeginGetRequestStream(new AsyncCallback(RequestStreamReady), req);
                    txtToTrans.Text = recognitionResult.RecognitionResult.Text;
                }
                else
                {
                    //MessageBox.Show("Sorry! I didn't catch you.");
                }
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
Example #6
        private async void Listen()
        {
            this.recoWithUI = new SpeechRecognizerUI();

            SpeechRecognitionUIResult recoResult = await recoWithUI.RecognizeWithUIAsync();

            if (recoResult.ResultStatus == SpeechRecognitionUIStatus.Succeeded)
            {
                MessageBox.Show(string.Format("You said {0}.",
                                              recoResult.RecognitionResult.Text));
            }
        }
Example #7
        //private async Task UpdatePhraseListsAsync()
        //{
        //    foreach (VoiceCommandSet cs in VoiceCommandService.InstalledCommandSets.Values)
        //    {
        //        List<string> updatedListOfPhrases = GetPhrasesForUpdatedSiteToSearchPhraseList(cs.Language.ToLower());
        //        await cs.UpdatePhraseListAsync("siteToSearch", updatedListOfPhrases);
        //    }
        //}

        public async Task <string> RecognizeTextFromWebSearchGrammar(string exampleText)
        {
            string text = null;

            try
            {
                SpeechRecognizerUI sr = new SpeechRecognizerUI();
                sr.Recognizer.Grammars.AddGrammarFromPredefinedType("web", SpeechPredefinedGrammar.WebSearch);
                sr.Settings.ListenText       = "Listening...";
                sr.Settings.ExampleText      = exampleText;
                sr.Settings.ReadoutEnabled   = false;
                sr.Settings.ShowConfirmation = false;

                SpeechRecognitionUIResult result = await sr.RecognizeWithUIAsync();

                if (result != null)
                {
                    if (result.ResultStatus == SpeechRecognitionUIStatus.Succeeded)
                    {
                        if (result.RecognitionResult != null &&
                            result.RecognitionResult.TextConfidence != SpeechRecognitionConfidence.Rejected)
                        {
                            text = result.RecognitionResult.Text;
                        }
                    }
                    else
                    {
                        if (result.ResultStatus == SpeechRecognitionUIStatus.PrivacyPolicyDeclined)
                        {
                            Execute.BeginOnUIThread(() =>
                            {
                                var toast = new ToastPrompt()
                                {
                                    Title           = "Privacy policy declined",
                                    Message         = "You must accept the privacy policy to use speech recognition.",
                                    TextOrientation = Orientation.Vertical,
                                    TextWrapping    = TextWrapping.Wrap,
                                    Background      = new SolidColorBrush(Colors.Red),
                                };

                                toast.Show();
                            });
                        }
                    }
                }
            }
            catch
            {
                // Recognition failed or was cancelled; fall through and return null.
            }

            return(text);
        }
Example #8
        private async void VoicePwdButton_Clicked(object sender, RoutedEventArgs e)
        {
            SpeechRecognitionUIResult result = await Speech.recognizerUI.RecognizeWithUIAsync();

            if (result.ResultStatus == SpeechRecognitionUIStatus.Succeeded)
            {
                Settings.voicePwd.Value  = result.RecognitionResult.Text;
                this.isCurrentlySpeaking = true;
                await Speech.synthesizer.SpeakTextAsync("the current voice password is set to, " + result.RecognitionResult.Text);

                this.isCurrentlySpeaking = false;
            }
        }
Example #9
 private String GetOnlyNumberFromSpeech(SpeechRecognitionUIResult recoResultPrice)
 {
     String resultString = recoResultPrice.ResultStatus == SpeechRecognitionUIStatus.Succeeded ? recoResultPrice.RecognitionResult.Text : "0";
     try
     {
         Regex regexObj = new Regex(@"[^\d]");
         resultString = regexObj.Replace(resultString, "");
     }
     catch (ArgumentException)
     {
         // Syntax error in the regular expression
     }
     return resultString;
 }
Example #11
        private async void RetrieveMemoSpeech()
        {
            this.SpeechUI = new SpeechRecognizerUI();
            SpeechRecognitionUIResult recoResult = await SpeechUI.RecognizeWithUIAsync();

            // Guard against a cancelled or failed session, where RecognitionResult is not usable.
            if (recoResult.ResultStatus != SpeechRecognitionUIStatus.Succeeded)
            {
                return;
            }

            String phoneID = Phone.ID;
            String message = recoResult.RecognitionResult.Text;

            MessageBox.Show(string.Format("You said {0}.", recoResult.RecognitionResult.Text));

            // Call API
            // Got Call back

            Memos.Add(new MemoItem());
        }
Example #12
        public async void SpeechToText_Click(object sender, RoutedEventArgs e)
        {
            // Speech recognition only supports Spanish from Spain (es-ES), not from Mexico.
            var language = (from recognizer in InstalledSpeechRecognizers.All
                            where recognizer.Language == "es-ES"
                            select recognizer).FirstOrDefault();

            if (language == null)
            {
                return;
            }

            SpeechRecognizerUI speechRecognition = new SpeechRecognizerUI();

            speechRecognition.Recognizer.SetRecognizer(language);

            SpeechRecognitionUIResult recoResult = await speechRecognition.RecognizeWithUIAsync();

            if (recoResult.ResultStatus == SpeechRecognitionUIStatus.Succeeded)
            {
                txtPregunta.Text = recoResult.RecognitionResult.Text.Replace(".", "");
                LaunchSearch();
            }
        }
Example #13
        public async Task <string> GetMicrophoneSpeech(string SpeechListenText, string SpeechExampleText)
        {
            try
            {
                SpeechRecognizerUI sr = new SpeechRecognizerUI();
                sr.Settings.ListenText       = SpeechListenText;
                sr.Settings.ExampleText      = SpeechExampleText;
                sr.Settings.ReadoutEnabled   = false;
                sr.Settings.ShowConfirmation = false;

                SpeechRecognitionUIResult result = await sr.RecognizeWithUIAsync();

                if (result.ResultStatus == SpeechRecognitionUIStatus.Succeeded)
                {
                    return(result.RecognitionResult.Text);
                }
            }
            catch { }

            // Recognition failed, was cancelled, or threw; signal the caller.
            throw new Exception("Speech recognition did not return a result.");
        }
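A possible call site for this helper. The button handler, control name, and prompt strings below are assumptions used only for illustration, not part of the original project:

        // Hypothetical caller: GetMicrophoneSpeech throws when recognition fails
        // or is cancelled, so the call is wrapped in a try/catch.
        private async void DictateButton_Click(object sender, RoutedEventArgs e)
        {
            try
            {
                string spokenText = await GetMicrophoneSpeech("Listening...", "Ex. 'Buy milk tomorrow'");
                NotesTextBox.Text = spokenText;
            }
            catch (Exception)
            {
                MessageBox.Show("No speech was recognized.");
            }
        }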
Example #14
        private async void PhoneApplicationPage_Loaded(object sender, RoutedEventArgs e)
        {
            this.recoWithUI = new SpeechRecognizerUI();

            // Start recognition (the dictation grammar is loaded by default).
            SpeechRecognitionUIResult recoResult = await recoWithUI.RecognizeWithUIAsync();

            // Guard against a cancelled or failed session before reading the text.
            if (recoResult.ResultStatus != SpeechRecognitionUIStatus.Succeeded)
            {
                return;
            }

            String result = await Outils.RecoInteract(recoResult.RecognitionResult.Text);

            if (!String.IsNullOrEmpty(result))
            {
                JObject res = JsonConvert.DeserializeObject <JObject>(result);
                if (res["result"] != null && !String.IsNullOrEmpty(res["result"].ToString()))
                {
                    SpeechSynthesizer synth = new SpeechSynthesizer();

                    await synth.SpeakTextAsync(res["result"].ToString());
                }
            }
        }
Example #15
        // Initiate the capture of a voice note and store it to the
        // Azure database if the user is satisfied
        private async void speechBtn_Click(object sender, EventArgs e)
        {
            // Begin recognition using the default grammar and store the result.
            SpeechRecognitionUIResult recoResult = await recoWithUI.RecognizeWithUIAsync();

            // Check that a result was obtained
            if (recoResult.RecognitionResult != null)
            {
                // Determine if the user wants to save the note.
                var result = MessageBox.Show(string.Format("Heard you say \"{0}\" Save?", recoResult.RecognitionResult.Text), "Confirmation", MessageBoxButton.OKCancel);

                // Save the result to the Azure Mobile Service DB if the user is satisfied.
                if (result == MessageBoxResult.OK)
                {
                    var note = new VoiceNote {
                        Text = recoResult.RecognitionResult.Text
                    };
                    AddVoiceNoteAsync(note);
                }
            }
        }
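The AddVoiceNoteAsync helper called above is not part of this snippet. A sketch of how it might look with the Azure Mobile Services client (Microsoft.WindowsAzure.MobileServices); the App.MobileService field and the error handling are assumptions:

        // Possible shape of the helper used above (assumed, not from the original
        // project): insert the note into the Azure Mobile Services table.
        private async Task AddVoiceNoteAsync(VoiceNote note)
        {
            try
            {
                await App.MobileService.GetTable<VoiceNote>().InsertAsync(note);
            }
            catch (Exception ex)
            {
                MessageBox.Show("Could not save the voice note: " + ex.Message);
            }
        }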
Example #16
        private async Task RecognizeSpeech()
        {
            try
            {
                var localSpeechRecognizerUI = new SpeechRecognizerUI();

                localSpeechRecognizerUI.Settings.ListenText       = "Say your phrase...";
                localSpeechRecognizerUI.Settings.ExampleText      = "What's going on?";
                localSpeechRecognizerUI.Settings.ReadoutEnabled   = false;
                localSpeechRecognizerUI.Settings.ShowConfirmation = true;

                SpeechRecognitionUIResult recognitionResult = await localSpeechRecognizerUI.RecognizeWithUIAsync();

                // A cancelled or failed session has no usable RecognitionResult.
                if (recognitionResult.ResultStatus != SpeechRecognitionUIStatus.Succeeded)
                {
                    return;
                }

                Dispatcher.BeginInvoke(delegate { DetectedTextTextBox.Text = recognitionResult.RecognitionResult.Text; });
                await SayText(recognitionResult.RecognitionResult.Text);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                throw;
            }
        }
Example #17
        private async Task AskForVoice()
        {
            try
            {
                speechRecognizerUI.Settings.ListenText       = "Which voice?";
                speechRecognizerUI.Settings.ExampleText      = @"examples: '" + _voiceNames[0] + "', '" + _voiceNames[1] + "'";
                speechRecognizerUI.Settings.ReadoutEnabled   = true;
                speechRecognizerUI.Settings.ShowConfirmation = true;
                speechRecognizerUI.Recognizer.Grammars["voicesList"].Enabled = true;
                speechRecognizerUI.Recognizer.Grammars["colorList"].Enabled  = false;

                SpeechRecognitionUIResult result = await speechRecognizerUI.RecognizeWithUIAsync();

                // Semantic properties are available via result.RecognitionResult.Semantics
                // if the grammar provides them; only the recognized text is used here.
                SetVoiceFromCommand(result.RecognitionResult.Text);
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
                //throw;
            }
        }
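This handler toggles two list grammars, "voicesList" and "colorList", that must have been registered earlier. A sketch of that one-time setup; the method name, the color phrases, and where it runs are assumptions:

        // Assumed one-time setup for the grammars enabled/disabled above.
        private void InitializeGrammars()
        {
            speechRecognizerUI = new SpeechRecognizerUI();
            speechRecognizerUI.Recognizer.Grammars.AddGrammarFromList("voicesList", _voiceNames);
            speechRecognizerUI.Recognizer.Grammars.AddGrammarFromList("colorList",
                new[] { "red", "green", "blue" });  // placeholder phrases
        }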
Example #18
        private async void MicrophoneImage_Tap(object sender, System.Windows.Input.GestureEventArgs e)
        {
            this.speechRecognizer = new SpeechRecognizerUI();
            this.speechRecognizer.Recognizer.Grammars.Clear();
            this.speechRecognizer.Recognizer.Grammars.AddGrammarFromPredefinedType("search", SpeechPredefinedGrammar.WebSearch);
            await this.speechRecognizer.Recognizer.PreloadGrammarsAsync();

            try
            {
                // Use the built-in UI to prompt the user and get the result.
                SpeechRecognitionUIResult recognitionResult = await this.speechRecognizer.RecognizeWithUIAsync();

                if (recognitionResult.ResultStatus == SpeechRecognitionUIStatus.Succeeded)
                {
                    // Output the speech recognition result.
                    NewMessageTextBox.Text = recognitionResult.RecognitionResult.Text.Trim();
                }
            }
            catch (Exception ex)
            {
                MessageBox.Show(ex.Message);
            }
        }
Example #19
        private async void SpeechToText_Click(object sender, RoutedEventArgs e)
        {
            try
            {
                SpeechRecognizerUI speechRecognition = new SpeechRecognizerUI();
                speechRecognition.Settings.ListenText  = "Enter Observations!";
                speechRecognition.Settings.ExampleText = "Ex: X equals <value>, Y equals <value>";
                SpeechSynthesizer synth = new SpeechSynthesizer();
                await synth.SpeakTextAsync("X equals!?");

                SpeechRecognitionUIResult recoResult = await speechRecognition.RecognizeWithUIAsync();

                // Parse the X value only after a successful recognition; a cancelled
                // result would otherwise throw on RecognitionResult.Text.
                if (recoResult.ResultStatus != SpeechRecognitionUIStatus.Succeeded)
                {
                    return;
                }
                double x_val = Double.Parse(recoResult.RecognitionResult.Text);
                xCoordinates.Add(x_val);

                await synth.SpeakTextAsync("Y equals!?");

                recoResult = await speechRecognition.RecognizeWithUIAsync();

                if (recoResult.ResultStatus != SpeechRecognitionUIStatus.Succeeded)
                {
                    return;
                }
                double y_val = Double.Parse(recoResult.RecognitionResult.Text);
                yCoordinates.Add(y_val);

                coordinates.Add("X: " + x_val + "   Y: " + y_val);
                listCoordinates.ItemsSource = null;
                listCoordinates.ItemsSource = coordinates;
                DataPoint model = new DataPoint(x_val, y_val);
                CollectionCoordinates.Add(model);
            }
            catch (Exception)
            {
                // Double.Parse failed or recognition threw.
                MessageBox.Show("Some error, Say Clearly!");
            }
        }
Example #20
        public async Task OpenSpeechUI()
        {
            SpeechRecognizerUI recoWithUI;

            // Create an instance of SpeechRecognizerUI.
            recoWithUI = new SpeechRecognizerUI();
            var installed = InstalledSpeechRecognizers.All;

            if (installed.Any(o => o.Language == "en-US"))
            {
                recoWithUI.Recognizer.SetRecognizer(installed.Where(o => o.Language == "en-US").Single());


                // Uri searchGrammar = new Uri("ms-appx:///Assets/SRGSGrammar1.xml", UriKind.Absolute);

                // Add the SRGS grammar to the grammar set.
                //   recoWithUI.Recognizer.Grammars.AddGrammarFromUri("cities", searchGrammar);

                recoWithUI.Settings.ListenText  = "search for?";
                recoWithUI.Settings.ExampleText = " 'guides', 'guide', 'device' ";
                // Start recognition (load the dictation grammar by default).

                recoWithUI.Recognizer.Grammars.AddGrammarFromPredefinedType("typeName", SpeechPredefinedGrammar.Dictation);

                SpeechRecognitionUIResult recoResult = await recoWithUI.RecognizeWithUIAsync();

                // Do something with the recognition result.
                // MessageBox.Show(string.Format("You said {0}.", recoResult.RecognitionResult.Text),);


                //  DoSearch(recoResult.RecognitionResult.Text);
            }
            else
            {
                MessageBox.Show("not lang");
            }
        }
Example #21
        public static async Task <string> GetResult(string exampleText)
        {
            String             text = "";
            SpeechRecognizerUI sr   = new SpeechRecognizerUI();

            sr.Recognizer.Grammars.AddGrammarFromPredefinedType("web", SpeechPredefinedGrammar.WebSearch);
            sr.Settings.ListenText       = "Listening...";
            sr.Settings.ExampleText      = exampleText;
            sr.Settings.ReadoutEnabled   = false;
            sr.Settings.ShowConfirmation = false;

            SpeechRecognitionUIResult result = await sr.RecognizeWithUIAsync();

            if (result != null &&
                result.ResultStatus == SpeechRecognitionUIStatus.Succeeded &&
                result.RecognitionResult != null &&
                result.RecognitionResult.TextConfidence != SpeechRecognitionConfidence.Rejected)
            {
                await Speak("Looking for " + result.RecognitionResult.Text);

                text = result.RecognitionResult.Text;
            }
            return(text);
        }
Example #22
    private async void btnWebSearch_Click(object sender, RoutedEventArgs e)
    {
        this.speechRecognizer.Recognizer.Grammars.Clear();

        this.speechRecognizer.Recognizer.Grammars.AddGrammarFromPredefinedType("search", SpeechPredefinedGrammar.WebSearch);

        await this.speechRecognizer.Recognizer.PreloadGrammarsAsync();

        try
        {
            // Use the built-in UI to prompt the user and get the result.
            SpeechRecognitionUIResult recognitionResult = await this.speechRecognizer.RecognizeWithUIAsync();

            if (recognitionResult.ResultStatus == SpeechRecognitionUIStatus.Succeeded)
            {
                // Output the speech recognition result.
                this.txtWebSearchResult.Text = "You said: " + recognitionResult.RecognitionResult.Text;
            }
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.Message);
        }
    }
Example #23
 //all commands function callings
 private async void saycmnd_Click(object sender, EventArgs e)
 {
     this.recog = new SpeechRecognizerUI();
     this.recogResult = await recog.RecognizeWithUIAsync();
     if (recogResult.ResultStatus == SpeechRecognitionUIStatus.Succeeded)
     {
         if (recogResult.RecognitionResult.Text == "Where am I?")
         {
             GetCurrentLocation();
         }
         else if (recogResult.RecognitionResult.Text == "Navigation.")
         {
             SpeakCommand();
         }
         else if (recogResult.RecognitionResult.Text == "Change my map.")
         {
             SpeechSynthesizer speechdir = new SpeechSynthesizer();
             await speechdir.SpeakTextAsync("Which mode you want me to change, Hybrid, Terrain or Road?");
             ChangeMapMode();
         }
         else if(recogResult.RecognitionResult.Text == "Search place.")
         {
             SearchPlace();
         }
         else if (recogResult.RecognitionResult.Text == "Search people.")
         {
             GetPhoneContactToMap();
         }
         else if (recogResult.RecognitionResult.Text == "Call nearby.")
         {
             MessageBox.Show("The code is in progress...");
         }
         else
         {
             SpeechSynthesizer speecherr = new SpeechSynthesizer();
             await speecherr.SpeakTextAsync("Sorry, couldn't find that command");
         }
     }
 }
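The exact string comparisons above only match when dictation returns those phrases verbatim, punctuation included. Constraining recognition to a fixed phrase list makes the returned text predictable; a sketch, where the factory method and the grammar key "mapCommands" are assumptions:

 // Sketch (assumed): build a recognizer limited to the supported commands,
 // which could replace the plain "new SpeechRecognizerUI()" call above.
 private SpeechRecognizerUI CreateCommandRecognizer()
 {
     string[] commands =
     {
         "Where am I?", "Navigation.", "Change my map.",
         "Search place.", "Search people.", "Call nearby."
     };
     SpeechRecognizerUI recognizer = new SpeechRecognizerUI();
     recognizer.Recognizer.Grammars.AddGrammarFromList("mapCommands", commands);
     return recognizer;
 }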
Example #24
        private async void speech_Click(object sender, EventArgs eventArgs)
        {
            string message   = "Excuse me, what did you say?!";
            string txtbxType = string.Empty;

            if (GTaskSettings.IsFree)
            {
                GTaskSettings.Upsell();
            }
            else
            {
                try
                {
                    // If no textbox is selected, there is nowhere to put the text.
                    if (focusedTextbox == null)
                    {
                        MessageBoxResult o = MessageBox.Show("Please select the text box you want to use and try again.", "Which Text Box?", MessageBoxButton.OK);
                        return;
                    }

                    // Create an instance of SpeechRecognizerUI.
                    this.recoWithUI = new SpeechRecognizerUI();
                    recoWithUI.Settings.ReadoutEnabled   = false;
                    recoWithUI.Settings.ShowConfirmation = false;

                    if (focusedTextbox.Name == "txtbxTitle")
                    {
                        recoWithUI.Settings.ListenText  = "Listening for Task Title...";
                        recoWithUI.Settings.ExampleText = "Ex. 'Mow the lawn'";
                        txtbxType = "Title";
                    }
                    else
                    {
                        recoWithUI.Settings.ListenText  = "Listening for Tasks Notes...";
                        recoWithUI.Settings.ExampleText = "Ex. 'This needs to be done by Tuesday.'";
                        txtbxType = "Notes";
                    }

                    // Start recognition (load the dictation grammar by default).
                    SpeechRecognitionUIResult recoResult = await recoWithUI.RecognizeWithUIAsync();

                    // Do something with the recognition result.
                    string txtbxText       = focusedTextbox.Text;
                    string SpeakResult     = (recoResult.RecognitionResult == null) ? string.Empty : recoResult.RecognitionResult.Text;
                    string FinalText       = string.Empty;
                    int    SelectionStart  = focusedTextbox.SelectionStart;
                    int    SelectionLength = focusedTextbox.SelectionLength;
                    int    SelectionEnd    = SelectionStart + SelectionLength;

                    if (SpeakResult == string.Empty) //If nothing in speech result, don't do anything
                    {
                        return;
                    }

                    FinalText = SpeechHelper.FormatSpeech(SelectionStart, txtbxText, SelectionEnd, SpeakResult, txtbxType);

                    if (FinalText != String.Empty) //Results are returned
                    {
                        if (SelectionLength == 0)  //0 means it is an insert
                        {
                            focusedTextbox.Text = focusedTextbox.Text.Insert(SelectionStart, FinalText);
                            focusedTextbox.Select(SelectionStart + FinalText.Length, 0); //Set the cursor location to where the start was previously
                        }
                        else //greater than 0 means it is a replace
                        {
                            focusedTextbox.SelectedText = FinalText;
                            focusedTextbox.Select(SelectionStart + FinalText.Length, 0); //Set the cursor location to where the start was previously
                        }
                    }
                }
                catch
                {
                    if (GTaskSettings.MsgError)
                    {
                        MessageBox.Show(message);
                    }
                }
            }
        }
Example #25
        /// <summary>
        /// Method to instantiate recognizer with appropriate grammar and perform recognition.
        /// </summary>
        private async void HandleSpeech()
        {
            if (_handlingSpeech)
            {
                return;
            }

            _handlingSpeech = true;
            try
            {
                SpeechRecognizerUI        recognizer = new SpeechRecognizerUI();
                SpeechRecognitionUIResult result     = null;

                if (this.InputScope != null && (this.InputScope.Names[0] as InputScopeName).NameValue.Equals(InputScopeNameValue.Search))
                {
                    recognizer.Recognizer.Grammars.AddGrammarFromPredefinedType("WebSearchGrammar", SpeechPredefinedGrammar.WebSearch);
                }

                try
                {
                    result = await recognizer.RecognizeWithUIAsync();
                }
                catch (OperationCanceledException)
                {
                    return;
                }
                catch (Exception ex)
                {
                    if ((uint)ex.HResult == 0x80045508)
                    {
                        // This can occur when speech recognition is interrupted by navigation away from
                        // the app. We'll just swallow the exception to work around it.
                        return;
                    }

                    MessageBox.Show("An error occured. \n" + ex.Message);
                    return;
                }

                // The SpeechRecognizerUI component will handle cases where the speech was not recognized and prompt
                // user to retry. This check is just to make sure that the speech recognition request was not
                // canceled by the back button, navigation, etc.
                if (result.ResultStatus == SpeechRecognitionUIStatus.Succeeded)
                {
                    // Raise SpeechRecognized event
                    var handler = SpeechRecognized;
                    SpeechRecognizedEventArgs eventArgs = new SpeechRecognizedEventArgs(result.RecognitionResult);

                    if (handler != null)
                    {
                        handler(this, eventArgs);

                        if (eventArgs.Canceled)
                        {
                            return;
                        }
                    }

                    // Update display
                    string originalText = this.Text;

                    if (_useSelectedTextReplacement)
                    {
                        string newText = originalText.Substring(0, _selectionStart) + result.RecognitionResult.Text + originalText.Substring(_selectionEnd + 1);
                        this.Text = newText;
                        this.Select(_selectionStart, result.RecognitionResult.Text.Length);
                    }
                    else
                    {
                        this.Text = result.RecognitionResult.Text;
                        this.Focus();
                        this.Select(_selectionStart, result.RecognitionResult.Text.Length);
                    }
                }
            }
            finally
            {
                _handlingSpeech = false;
            }
        }
Example #26
        //Speech
        private async void speech_Click(object sender, EventArgs eventArgs)
        {
            string message   = "Excuse me, what did you say?!";
            string txtbxType = "Title";

            if (GTaskSettings.IsFree)
            {
                GTaskSettings.Upsell();
            }
            else
            {
                try
                {
                    // Create an instance of SpeechRecognizerUI.
                    this.recoWithUI = new SpeechRecognizerUI();
                    recoWithUI.Settings.ReadoutEnabled   = false;
                    recoWithUI.Settings.ShowConfirmation = false;

                    recoWithUI.Settings.ListenText  = "Listening for Task List Title...";
                    recoWithUI.Settings.ExampleText = "Ex. 'Grocery List'";

                    // Start recognition (load the dictation grammar by default).
                    SpeechRecognitionUIResult recoResult = await recoWithUI.RecognizeWithUIAsync();

                    // Do something with the recognition result.
                    string txtbxText       = txtbxTitle.Text;
                    string FinalText       = string.Empty;
                    int    SelectionStart  = txtbxTitle.SelectionStart;
                    int    SelectionLength = txtbxTitle.SelectionLength;
                    int    SelectionEnd    = SelectionStart + SelectionLength;
                    string SpeakResult     = (recoResult.RecognitionResult == null) ? string.Empty : recoResult.RecognitionResult.Text;

                    if (SpeakResult == string.Empty) //If nothing in speech result, don't do anything
                    {
                        return;
                    }

                    FinalText = SpeechHelper.FormatSpeech(SelectionStart, txtbxText, SelectionEnd, SpeakResult, txtbxType);

                    if (FinalText != String.Empty) //Results are returned
                    {
                        if (SelectionLength == 0)  //0 means it is an insert
                        {
                            txtbxTitle.Text = txtbxTitle.Text.Insert(SelectionStart, FinalText);
                            txtbxTitle.Select(SelectionStart + FinalText.Length, 0); //Set the cursor location to where the start was previously
                        }
                        else //greater than 0 means it is a replace
                        {
                            txtbxTitle.SelectedText = FinalText;
                            txtbxTitle.Select(SelectionStart + FinalText.Length, 0); //Set the cursor location to where the start was previously
                        }
                    }
                }
                catch
                {
                    if (GTaskSettings.MsgError)
                    {
                        MessageBox.Show(message);
                    }
                }
            }
        }
Example #27
        //Voice search through a command
        private async void SearchPlace()
        {
            SpeechSynthesizer commandsrch = new SpeechSynthesizer();
            await commandsrch.SpeakTextAsync("Which place do you want me to search?");

            recogsrch = new SpeechRecognizerUI();
            recogsrchResult = await recogsrch.RecognizeWithUIAsync();
            if (recogsrchResult.ResultStatus == SpeechRecognitionUIStatus.Succeeded)
            {
                mysearchgeoquery = new GeocodeQuery();
                mysearchgeoquery.GeoCoordinate = new GeoCoordinate(MyGeoPosition.Coordinate.Latitude, MyGeoPosition.Coordinate.Longitude);
                mysearchgeoquery.SearchTerm = recogsrchResult.RecognitionResult.Text;
                mysearchgeoquery.QueryCompleted += mysearchgeoquery_QueryCompleted;
                mysearchgeoquery.QueryAsync();
            }
        }