Example #1
        public async Task<string> RecognizeSpeechAsync(string audioUrl)
        {
            // Convert the source audio to a WAV file the Speech SDK can read.
            var audioName = await CloudConvert.ConvertAudioToWavAsync(audioUrl);

            var config        = SpeechConfig.FromSubscription(Settings.SubscriptionKey, Settings.SubscriptionRegion);
            var textConverted = "";

            using (var audioInput = AudioConfig.FromWavFileInput(audioName))
                using (var recognizer = new Microsoft.CognitiveServices.Speech.SpeechRecognizer(config, audioInput))
                {
                    var result = await recognizer.RecognizeOnceAsync();

                    switch (result.Reason)
                    {
                    case ResultReason.NoMatch:
                        textConverted = "Sorry, I couldn't understand what you said.";
                        break;

                    case ResultReason.RecognizedSpeech:
                        textConverted = result.Text;
                        break;

                    default:
                        break;
                    }
                }

            // Remove the temporary WAV file and return the recognized text.
            File.Delete(audioName);
            return textConverted;
        }
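
A minimal usage sketch for Example #1 (the calling method, the audio URL, and the console output are illustrative assumptions, not part of the original code):

        // Hypothetical caller for the method above; the URL is a placeholder.
        public async Task HandleVoiceMessageAsync()
        {
            var text = await RecognizeSpeechAsync("https://example.com/voice-note.ogg");
            Console.WriteLine(text);
        }
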
Example #2
        public async Task RecognizeSpeechAsync()
        {
            // Speech config built from the hard-coded subscription key and region.
            var config = SpeechConfig.FromSubscription("541650d764734a37a947aa914ba310cc", "westeurope");

            using (var recognizer = new Microsoft.CognitiveServices.Speech.SpeechRecognizer(config))
            {
                var result = await recognizer.RecognizeOnceAsync();

                // Check the result and look for the "Mirror" wake word.
                bool   speech1on = false;
                string result2   = result.Text;

                if (result.Reason == ResultReason.RecognizedSpeech)
                {
                    if (result.Text.Contains("Mirror"))
                    {
                        // Drop everything up to and including the first space,
                        // i.e. strip the "Mirror" wake word from the command.
                        int i = result.Text.IndexOf(" ") + 1;

                        speech1on = true;
                        result2   = result.Text.Substring(i);
                    }
                    else
                    {
                        speech1on = false;
                    }
                    if (speech1on)
                    {
                        // Send the remaining text to the Wolfram|Alpha API.
                        var wa = new WAEngine {
                            APIKey = "3YYV6P-R5W8R7TY35"
                        };
                        WALogger.LogLevel        = WALogLevel.None;
                        WALogger.ConsoleLogLevel = WALogLevel.Verbose;

                        var question = result2;
                        var query    = new WAQuery()
                        {
                            Input = question, Format = WAQueryFormat.PlainText
                        };

                        // Show the recognized question in the UI.
                        speech.Visible = true;
                        speech.Text    = question;

                        query.PodStates.Add("test");
                        query.AppID = wa.APIKey;

                        string url     = query.FormatQuery();
                        var    result1 = wa.RunQuery(query);
                        // Every pod of interest gets the same handling: speak the
                        // plain-text answer and display it (raspuns is the UI
                        // label that shows the answer).
                        foreach (var pod in result1.Pods)
                        {
                            foreach (var subpod in pod.SubPods)
                            {
                                string titlu = pod.Title;

                                if (titlu.Contains("Result") ||
                                    titlu.Contains("Basic information") ||
                                    titlu.Contains("Estimates for") ||
                                    titlu.Contains("Biological") ||
                                    titlu.Contains("Definitions"))
                                {
                                    System.Speech.Synthesis.SpeechSynthesizer synth = new System.Speech.Synthesis.SpeechSynthesizer();

                                    // Configure the audio output.
                                    synth.SetOutputToDefaultAudioDevice();

                                    // Speak the answer aloud and display it.
                                    synth.Speak(subpod.PlainText);
                                    raspuns.Visible = true;
                                    raspuns.Text    = subpod.PlainText;
                                }
                            }
                        }
                    }
                }
            }
            // Keep listening: kick off the next recognition pass.
            await RecognizeSpeechAsync();
        }
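
A minimal sketch of how Example #2 might be started from the form it appears to belong to (the button and its handler name are hypothetical assumptions; speech and raspuns are the labels used above):

        // Hypothetical event handler; one call is enough, because the method
        // awaits itself at the end and so keeps listening for the next command.
        private async void startButton_Click(object sender, EventArgs e)
        {
            await RecognizeSpeechAsync();
        }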