/// <summary>
/// Listens for a single "start" or "quit" voice command and dispatches it:
/// "start" begins recording (after a 2 s pause), "quit" exits the app.
/// NOTE(review): async void means exceptions are unobservable; if this is not
/// wired as an event handler, consider returning Task.
/// </summary>
private async void startDetect()
{
    // Create an instance of SpeechRecognizer.
    var speechRecognizer = new Windows.Media.SpeechRecognition.SpeechRecognizer();

    string[] responses = { "start", "quit" };

    // Add a list constraint so only the two command words are recognized.
    // (Tag renamed from the copy/paste "startOrStart"; nothing in this file reads it.)
    var listConstraint = new Windows.Media.SpeechRecognition.SpeechRecognitionListConstraint(responses, "startOrQuit");
    speechRecognizer.Constraints.Add(listConstraint);

    // Compile the constraint before recognition may start.
    await speechRecognizer.CompileConstraintsAsync();

    // Recognize with the built-in UI. RecognizeWithUIAsync throws (e.g. when the
    // speech privacy statement was declined) — guard it like the sibling
    // RecognizeWithListConstraint_Click does, instead of crashing the app.
    Windows.Media.SpeechRecognition.SpeechRecognitionResult speechRecognitionResult;
    try
    {
        speechRecognitionResult = await speechRecognizer.RecognizeWithUIAsync();
    }
    catch (Exception)
    {
        // Best effort: without a result there is no command to act on.
        return;
    }

    // The two commands are mutually exclusive, so use else-if.
    if (speechRecognitionResult.Text == "start")
    {
        await Task.Delay(2000);
        startRecAsync();
    }
    else if (speechRecognitionResult.Text == "quit")
    {
        CoreApplication.Exit();
    }
}
/// <summary>
/// Click handler: recognizes "Yes" or "No" via the system speech UI and shows
/// the result in <c>resultTextBlock</c>. Handles the privacy-statement-declined
/// HRESULT specially; other failures are shown in a message dialog.
/// </summary>
private async void RecognizeWithListConstraint_Click(object sender, RoutedEventArgs e)
{
    // Create an instance of SpeechRecognizer.
    var speechRecognizer = new Windows.Media.SpeechRecognition.SpeechRecognizer();

    // You could create any IEnumerable dynamically.
    string[] responses = { "Yes", "No" };

    // Add a list constraint to the recognizer.
    var listConstraint = new Windows.Media.SpeechRecognition.SpeechRecognitionListConstraint(responses, "yesOrNo");

    speechRecognizer.UIOptions.ExampleText = @"Ex. ""Yes"", ""No""";
    speechRecognizer.Constraints.Add(listConstraint);

    // Compile the constraint.
    await speechRecognizer.CompileConstraintsAsync();

    // Hide stale output while recognition is in progress.
    this.heardYouSayTextBlock.Visibility = this.resultTextBlock.Visibility = Visibility.Collapsed;

    // Start recognition.
    try
    {
        Windows.Media.SpeechRecognition.SpeechRecognitionResult speechRecognitionResult = await speechRecognizer.RecognizeWithUIAsync();

        // If successful, display the recognition result.
        if (speechRecognitionResult.Status == Windows.Media.SpeechRecognition.SpeechRecognitionResultStatus.Success)
        {
            this.heardYouSayTextBlock.Visibility = this.resultTextBlock.Visibility = Visibility.Visible;
            this.resultTextBlock.Text = speechRecognitionResult.Text;
        }
    }
    catch (Exception exception)
    {
        if ((uint)exception.HResult == App.HResultPrivacyStatementDeclined)
        {
            this.resultTextBlock.Visibility = Visibility.Visible;
            this.resultTextBlock.Text = "The privacy statement was declined.";
        }
        else
        {
            var messageDialog = new Windows.UI.Popups.MessageDialog(exception.Message, "Exception");
            // FIX: the original called ShowAsync().GetResults(), which throws
            // InvalidOperationException because the operation has not completed
            // yet. Await the IAsyncOperation instead.
            await messageDialog.ShowAsync();
        }
    }
}
/// <summary>
/// Conversation state machine driven each time the prompt audio finishes.
/// <c>msg</c> selects the phrase list the shared recognizer (<c>recon</c>)
/// listens for: 0 re-enters via Start3(), 1 = general Jarvis commands,
/// 2 = dictated message, 3 = navigate back to MainPage.
/// </summary>
private async void MediaElement_MediaEnded(object sender, RoutedEventArgs e)
{
    var speechRecognizer = recon;
    string[] responses = { "George", "John", "Tony", "Jason", "Antony", "Gabriel" };

    if (msg == 0)
    {
        // State 0 just restarts the prompt; no recognition happens here.
        // (This early return also made the original msg==0 result-handling
        // further down unreachable; that dead code is removed.)
        Start3();
        return;
    }
    else if (msg == 1)
    {
        responses = new string[] { "Hello", "What time is it", "Where are you from"
            , "Jarvis call my girlfriend", "Who are your Creators", "Bye", "What is your form", "bye" };
    }
    else if (msg == 2)
    {
        responses = new string[] { "A message" };
    }
    else if (msg == 3)
    {
        // FIX: return after navigating. The original fell through, compiled a
        // grammar and started recognition on a page being navigated away from.
        Frame.Navigate(typeof(MainPage));
        return;
    }

    con = new Windows.Media.SpeechRecognition.SpeechRecognitionListConstraint(responses, "yesOrNo");

    speechRecognizer.UIOptions.AudiblePrompt = "Say what you want to search for...";
    speechRecognizer.UIOptions.ExampleText = @"George";

    // FIX: `recon` is shared across MediaEnded events; without clearing,
    // constraints accumulate on every call and recompilation misbehaves.
    speechRecognizer.Constraints.Clear();
    speechRecognizer.Constraints.Add(con);

    // Compile the constraint (dictation grammar would be the default otherwise).
    await speechRecognizer.CompileConstraintsAsync();
    await Task.Delay(1000);

    // Start recognition (no UI).
    Windows.Media.SpeechRecognition.SpeechRecognitionResult speechRecognitionResult = await speechRecognizer.RecognizeAsync();
    if (speechRecognitionResult.Status == Windows.Media.SpeechRecognition.SpeechRecognitionResultStatus.Success)
    {
        Debug.WriteLine("bfgffgnf");
    }
    // FIX: log the recognized text, not the result object's type name.
    Debug.WriteLine(speechRecognitionResult.Text);

    // Dispatch on the recognized (or empty) text for the current state.
    if (msg == 1)
    {
        if (speechRecognitionResult.Text != "")
        {
            Message = speechRecognitionResult.Text;
            jarvis();
            return;
        }
        else
        {
            // Nothing heard: pause, then re-prompt.
            await Task.Delay(2000);
            Start3();
            return;
        }
    }
    else if (msg == 2)
    {
        if (speechRecognitionResult.Text != "")
        {
            Message = speechRecognitionResult.Text;
            ComposeEmail(contactt, Message);
        }
        else
        {
            await Task.Delay(2000);
            Start4();
        }
    }
}
/// <summary>
/// Conversation step driven by <c>msg</c> when the prompt audio ends:
/// 0 = pick a contact name, 1 = general Jarvis commands, 2 = dictate a message.
/// Uses a fresh recognizer and the system recognition UI.
/// </summary>
private async void MediaElement_MediaEnded(object sender, RoutedEventArgs e)
{
    var speechRecognizer = new Windows.Media.SpeechRecognition.SpeechRecognizer();
    string[] responses = { "George", "John", "Tony", "Jason", "Antony", "Gabriel" };

    // Pick the phrase list for the current state (msg == 0 keeps the names).
    if (msg == 1)
    {
        responses = new string[] { "Hello", "What time is it", "I was created at Hackathon"
            , "Jarvis call my girlfriend", "Who are your Creators", "Bye" };
    }
    else if (msg == 2)
    {
        responses = new string[] { "Good morning" };
    }

    if (msg == 0 || msg == 1 || msg == 2)
    {
        // One constraint per state; the original built and added it in three
        // identical branches.
        con = new Windows.Media.SpeechRecognition.SpeechRecognitionListConstraint(responses, "yesOrNo");
    }

    speechRecognizer.UIOptions.AudiblePrompt = "Say what you want to search for...";
    speechRecognizer.UIOptions.ExampleText = @"George";

    if (msg == 0 || msg == 1 || msg == 2)
    {
        speechRecognizer.Constraints.Add(con);
    }

    // Compile the constraint (dictation grammar would be the default otherwise).
    await speechRecognizer.CompileConstraintsAsync();

    // Start recognition with the system UI.
    Windows.Media.SpeechRecognition.SpeechRecognitionResult speechRecognitionResult = await speechRecognizer.RecognizeWithUIAsync();

    // Dispatch on the recognized (or empty) text for the current state.
    if (msg == 0)
    {
        if (speechRecognitionResult.Text != "")
        {
            Name = speechRecognitionResult.Text;
            findContact();
            return;
        }
        else
        {
            // Nothing heard: pause, then re-prompt.
            await Task.Delay(2000);
            Start();
        }
    }
    else if (msg == 1)
    {
        if (speechRecognitionResult.Text != "")
        {
            Message = speechRecognitionResult.Text;
            jarvis();
        }
        else
        {
            await Task.Delay(2000);
            Start2();
        }
    }
    else if (msg == 2)
    {
        // FIX: the original tested Text.Contains(""), which is always true for
        // any string, so the Start4() retry branch was unreachable. Test for a
        // non-empty result like the sibling branches do.
        if (speechRecognitionResult.Text != "")
        {
            Message = speechRecognitionResult.Text;
            ComposeEmail(contactt, Message);
        }
        else
        {
            await Task.Delay(2000);
            Start4();
        }
    }
}