Example No. 1
        private async void startRecAsync()
        {
            startRec.IsEnabled = false;
            var speechRecognizer = new Windows.Media.SpeechRecognition.SpeechRecognizer();
            //speechRecognizer.Timeouts.EndSilenceTimeout = TimeSpan.FromSeconds(1.2);
            // Compile the constraint.
            await speechRecognizer.CompileConstraintsAsync();

            Random rnd           = new Random();
            int    colors_number = rnd.Next(1, 7);   // length of the color sequence: 1 to 6

            string correct_colors = "";

            // Flash a random sequence of colors and build the expected answer string.
            for (int i = 0; i < colors_number; i++)
            {
                int temp_rnd = rnd.Next(1, 5);   // pick one of the four colors (1 to 4)
                switch (temp_rnd)
                {
                case 1:
                    correct_colors += "green ";
                    green.Fill = new SolidColorBrush(Windows.UI.Colors.Green);
                    await Task.Delay(1000);

                    green.Fill = new SolidColorBrush(Windows.UI.Colors.DarkGreen);
                    await Task.Delay(200);
                    break;

                case 2:
                    correct_colors += "red ";
                    red.Fill = new SolidColorBrush(Windows.UI.Colors.Red);
                    await Task.Delay(1000);

                    red.Fill = new SolidColorBrush(Windows.UI.Colors.DarkRed);
                    await Task.Delay(200);
                    break;

                case 3:
                    correct_colors += "blue ";
                    blue.Fill = new SolidColorBrush(Windows.UI.Colors.Blue);
                    await Task.Delay(1000);

                    blue.Fill = new SolidColorBrush(Windows.UI.Colors.DarkBlue);
                    await Task.Delay(200);
                    break;

                case 4:
                    correct_colors += "yellow ";
                    yellow.Fill = new SolidColorBrush(Windows.UI.Colors.Yellow);
                    await Task.Delay(1000);

                    yellow.Fill = new SolidColorBrush(Windows.UI.Colors.DarkOrange);
                    await Task.Delay(200);
                    break;
                }
            }
            string new_correct_colors = correct_colors.TrimEnd(' ');

            // Indicate that speech recognition is listening.
            elli.Fill = new SolidColorBrush(Windows.UI.Colors.Red);
            // Start recognition.
            Windows.Media.SpeechRecognition.SpeechRecognitionResult speechRecognitionResult = await speechRecognizer.RecognizeAsync();

            //Recognition with UI
            //Windows.Media.SpeechRecognition.SpeechRecognitionResult speechRecognitionResult = await speechRecognizer.RecognizeWithUIAsync();

            // Do something with the recognition result.

            textBlock1.Text = speechRecognitionResult.Text;
            elli.Fill       = new SolidColorBrush(Windows.UI.Colors.SteelBlue);

            if (new_correct_colors == speechRecognitionResult.Text)
            {
                textBlock2.Text = "Good work " + Thumbsup;
            }
            else
            {
                textBlock2.Text = "Wrong! Try again " + Disappointed;
            }
            await Task.Delay(3000);

            startRec.IsEnabled = true;
            startDetect();
        }
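
The grading step above compares the recognized text to the expected string with a strict equality check, which fails on stray capitalization or punctuation in the dictation result. A minimal sketch of a more tolerant comparison; the helper name NormalizeAnswer is hypothetical and not part of the original snippet:

        // Hypothetical helper: lower-case the text and strip punctuation so that
        // "Green, red." and "green red" grade as the same answer.
        private static string NormalizeAnswer(string text)
        {
            var builder = new System.Text.StringBuilder();
            foreach (char c in text.ToLowerInvariant())
            {
                if (char.IsLetter(c) || char.IsWhiteSpace(c))
                {
                    builder.Append(c);
                }
            }
            // Collapse any runs of whitespace left behind by stripped punctuation.
            return string.Join(" ", builder.ToString().Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries));
        }

        // Usage inside startRecAsync, replacing the strict equality check:
        // if (NormalizeAnswer(new_correct_colors) == NormalizeAnswer(speechRecognitionResult.Text)) { ... }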
Example No. 2
        // As of this time, UWP only offers microphone input to SpeechRecognizer, not file input
        public static async System.Threading.Tasks.Task<string> MicrophoneToTextAsync()
        {
            Windows.Media.SpeechRecognition.SpeechRecognizer speechRecognizer = new Windows.Media.SpeechRecognition.SpeechRecognizer();
            speechRecognizer.HypothesisGenerated += SpeechRecognizer_HypothesisGenerated;

            // Compile the dictation grammar by default.
            await speechRecognizer.CompileConstraintsAsync();

            // Start recognition.
            Windows.Media.SpeechRecognition.SpeechRecognitionResult speechRecognitionResult = await speechRecognizer.RecognizeAsync();

            Log.WriteLine("Text:" + speechRecognitionResult.Text);
            return speechRecognitionResult.Text;
        }
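
The snippet wires up HypothesisGenerated, but the handler itself is not shown. A minimal sketch of what it could look like, assuming it only logs the partial text while the user is still speaking:

        private static void SpeechRecognizer_HypothesisGenerated(
            Windows.Media.SpeechRecognition.SpeechRecognizer sender,
            Windows.Media.SpeechRecognition.SpeechRecognitionHypothesisGeneratedEventArgs args)
        {
            // Partial ("hypothesis") text arrives while the user is still speaking;
            // the final text still comes from the awaited RecognizeAsync call.
            Log.WriteLine("Hypothesis: " + args.Hypothesis.Text);
        }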
Example No. 3
        private async void OnTimer(object state)
        {
            var startTime   = (DateTime)state;
            var runningTime = Math.Round((DateTime.Now - startTime).TotalSeconds, 0);

            using (Windows.Media.SpeechRecognition.SpeechRecognizer recognizer =
                       new Windows.Media.SpeechRecognition.SpeechRecognizer())
            {
                //recognizer.Constraints.Add(new Windows.Media.SpeechRecognition.SpeechRecognitionTopicConstraint
                //    (Windows.Media.SpeechRecognition.SpeechRecognitionScenario.FormFilling, "Phone"));
                await recognizer.CompileConstraintsAsync();

                recognizer.Timeouts.InitialSilenceTimeout = TimeSpan.FromSeconds(5);
                recognizer.Timeouts.EndSilenceTimeout     = TimeSpan.FromSeconds(20);

                Windows.Media.SpeechRecognition.SpeechRecognitionResult aresult = await recognizer.RecognizeAsync();

                if (aresult.Status == Windows.Media.SpeechRecognition.SpeechRecognitionResultStatus.Success)
                {
                    await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
                    {
                        ExtendedExecutionSessionStatus.Text += aresult.Text + Environment.NewLine;
                    });
                }
            }
            await Dispatcher.RunAsync(CoreDispatcherPriority.Normal, () =>
            {
                ExtendedExecutionSessionStatus.Text += $"Extended execution has been active for {runningTime} seconds" + Environment.NewLine;
            });
        }
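
OnTimer is written as a timer callback that receives the session start time as its state argument, but the snippet does not show how the timer is created. One plausible wiring, assuming a System.Threading.Timer that fires every 30 seconds for the lifetime of the extended execution session; the interval, field name, and method name are assumptions:

        private System.Threading.Timer periodicTimer;   // hypothetical field

        private void StartPeriodicRecognition()
        {
            // Pass the start time as the timer state so OnTimer can report
            // how long the extended execution session has been running.
            periodicTimer = new System.Threading.Timer(OnTimer, DateTime.Now,
                                                       TimeSpan.Zero, TimeSpan.FromSeconds(30));
        }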
Example No. 4
        private async void Button_Click_1(object sender, RoutedEventArgs e)
        {
            await SayWithTheVoice(
                $"Hello {txtName.Text}, I am Sam, The Tip of the Sword, and the better looking AI.  You're looking fine today.  How can I help?",
                "Mark");


            // Create an instance of SpeechRecognizer.
            var speechRecognizer = new Windows.Media.SpeechRecognition.SpeechRecognizer();

            // Compile the dictation grammar by default.
            await speechRecognizer.CompileConstraintsAsync();

            // Start recognition.
            Windows.Media.SpeechRecognition.SpeechRecognitionResult speechRecognitionResult = await speechRecognizer.RecognizeAsync();

            // Do something with the recognition result.
            //var messageDialog = new Windows.UI.Popups.MessageDialog(speechRecognitionResult.Text, "Text spoken");
            //await messageDialog.ShowAsync();

            if (speechRecognitionResult.Text.Contains("coffee"))
            {
                await SayWithTheVoice("I'm sorry, I don't make coffee", "Mark");
            }
            else if (speechRecognitionResult.Text.Contains("chocolate"))
            {
                await SayWithTheVoice("Coming right up!", "Mark");
            }
            else
            {
                await SayWithTheVoice("I'm confused", "Mark");
            }
        }
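
SayWithTheVoice is defined elsewhere in this project. A rough sketch of what such a helper might look like, assuming a MediaElement named mediaElement exists in the page's XAML and that the requested voice is matched by display name (e.g. "Microsoft Mark"):

        private async Task SayWithTheVoice(string text, string voiceName)
        {
            using (var synthesizer = new Windows.Media.SpeechSynthesis.SpeechSynthesizer())
            {
                // Pick the first installed voice whose display name contains the requested name.
                foreach (var voice in Windows.Media.SpeechSynthesis.SpeechSynthesizer.AllVoices)
                {
                    if (voice.DisplayName.Contains(voiceName))
                    {
                        synthesizer.Voice = voice;
                        break;
                    }
                }

                var stream = await synthesizer.SynthesizeTextToStreamAsync(text);
                // Note: this returns as soon as playback starts; waiting for
                // MediaElement.MediaEnded would be needed to block until the speech finishes.
                mediaElement.SetSource(stream, stream.ContentType);
                mediaElement.Play();
            }
        }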
Example No. 5
        public async void SpeechAsync()
        {
            var compilationResult = await speechRecognizer.CompileConstraintsAsync();

            Windows.Media.SpeechRecognition.SpeechRecognitionResult speechRecognitionResult = await speechRecognizer.RecognizeAsync();

            // Group the phrases the recognizer commonly returns for each command,
            // including frequent mis-hearings such as "movies" for "moves".
            switch (speechRecognitionResult.Text.ToLower())
            {
            case "move forward":
            case "moves forward":
            case "movies forward":
            case "moves straight":
            case "movies straight":
                Forward_Click(Forward, null);
                break;

            case "move backward":
            case "movies backward":
            case "move backwards":
            case "moves backwards":
            case "move back":
            case "reverse":
                Backward_Click(Backward, null);
                break;

            case "move left":
            case "movies left":
                Left_Click(Left, null);
                break;

            case "move right":
            case "moves right":
            case "movies right":
                Right_Click(Right, null);
                break;

            case "light on":
            case "lights on":
                On_Click(On, null);
                break;

            case "stop":
                Stop_Click(Stop, null);
                break;

            default:
                break;
            }
        }
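
Several of the case labels above exist only to absorb mis-recognitions such as "movies forward". One way to reduce those is to replace the default dictation grammar with a SpeechRecognitionListConstraint that limits recognition to the exact command phrases. A sketch, with an illustrative phrase list and method name not taken from the original project:

        private async Task CompileCommandGrammarAsync()
        {
            string[] commands =
            {
                "move forward", "move backward", "move left", "move right",
                "light on", "stop"
            };

            // A list constraint restricts the recognizer to these phrases only,
            // which makes stray dictation results like "movies forward" far less likely.
            speechRecognizer.Constraints.Add(
                new Windows.Media.SpeechRecognition.SpeechRecognitionListConstraint(commands, "commands"));
            await speechRecognizer.CompileConstraintsAsync();
        }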
Example No. 6
        private async Task SpeakToMachine(object sender, RoutedEventArgs e)
        {
            var speechRecognizer = new Windows.Media.SpeechRecognition.SpeechRecognizer();
            // Compile the dictation grammar by default.
            await speechRecognizer.CompileConstraintsAsync();

            // Start recognition.
            Windows.Media.SpeechRecognition.SpeechRecognitionResult speechRecognitionResult = await speechRecognizer.RecognizeAsync();

            objTextBox.Text = speechRecognitionResult.Text;
            OnClick(sender, e);
        }
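
SpeechRecognizer implements IDisposable, and unlike Example No. 3 this snippet never disposes the instance it creates. A minimal variant that releases the recognizer once the result has been read; the method name here is only illustrative:

        private async Task SpeakToMachineAsync(object sender, RoutedEventArgs e)
        {
            using (var speechRecognizer = new Windows.Media.SpeechRecognition.SpeechRecognizer())
            {
                await speechRecognizer.CompileConstraintsAsync();
                Windows.Media.SpeechRecognition.SpeechRecognitionResult speechRecognitionResult =
                    await speechRecognizer.RecognizeAsync();

                objTextBox.Text = speechRecognitionResult.Text;
            }
            OnClick(sender, e);
        }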
Example No. 7
        /*
        private async void OnTextChanging(object sender, TextBoxTextChangingEventArgs e)
        {
            var synth = new SpeechSynthesizer();
            var textboxObj = (TextBox)sender;
            Windows.Media.SpeechSynthesis.SpeechSynthesisStream stream = await synth.SynthesizeTextToStreamAsync(textboxObj.Text);
            mediaElement.SetSource(stream, stream.ContentType);
            mediaElement.Play();
        }
        */

        private async Task SpeakToComputer(object sender, RoutedEventArgs e)
        {
            Debug.WriteLine("HEELEMOQHNOQOQWGWQGI\n");
            var speechRecognizer = new Windows.Media.SpeechRecognition.SpeechRecognizer();
            // Compile the dictation grammar by default.
            await speechRecognizer.CompileConstraintsAsync();

            // Start recognition.
            Windows.Media.SpeechRecognition.SpeechRecognitionResult speechRecognitionResult = await speechRecognizer.RecognizeAsync();

            objTextBox.Text = speechRecognitionResult.Text;
        }
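
All of these snippets assume the app already has access to the microphone: the Microphone capability must be declared in Package.appxmanifest, and the user has to accept the consent prompt on first use. One common way to trigger and check that consent before starting recognition is to initialize an audio-only MediaCapture, as sketched below; the helper name is an assumption:

        private static async Task<bool> RequestMicrophoneAccessAsync()
        {
            try
            {
                var settings = new Windows.Media.Capture.MediaCaptureInitializationSettings
                {
                    StreamingCaptureMode = Windows.Media.Capture.StreamingCaptureMode.Audio
                };
                // Initializing an audio-only capture shows the microphone consent
                // prompt if it has not been answered yet and throws if access is denied.
                using (var mediaCapture = new Windows.Media.Capture.MediaCapture())
                {
                    await mediaCapture.InitializeAsync(settings);
                }
                return true;
            }
            catch (UnauthorizedAccessException)
            {
                return false;
            }
        }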