Example #1
        public void OnResults(Bundle results)
        {
            var textView = FindViewById<TextView>(Resource.Id.SpeechTextView);
            var btnSayIt = FindViewById<Button>(Resource.Id.btnSpeak);
            var lang     = Java.Util.Locale.Default;

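            // The recognizer returns candidate transcriptions ordered by confidence; the first entry is the best match.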
            var matches = results.GetStringArrayList(SpeechRecognizer.ResultsRecognition);

            if (matches != null && matches.Count != 0)
            {
                textView.Text = matches[0];
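                // Run the best transcription through the NLP engine to find a matching intent.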
                var intentResult = nlp.GetMatchingIntent(textView.Text);
                if (intentResult != null)
                {
                    textView.Text += "\r\n" + "Awesome, I will get it done.";
                    textView.Text += "\r\n" + "Action: " + intentResult.Action;
                    speakText      = "Action Name is " + intentResult.Action;
                    if (intentResult.Parameters != null)
                    {
                        var speakTextBuilder = new System.Text.StringBuilder();
                        speakTextBuilder.Append(speakText);
                        var textViewBuilder = new System.Text.StringBuilder();
                        textViewBuilder.Append(textView.Text);
                        foreach (var parameter in intentResult.Parameters)
                        {
                            textViewBuilder.Append("\r\n" + "Parameter Name: " + parameter.Key);
                            textViewBuilder.Append("\r\n" + "Parameter Values: " + string.Join(", ", parameter.Value));
                            speakTextBuilder.Append(" with parameter as " + parameter.Key);
                            foreach (var item in parameter.Value)
                            {
                                speakTextBuilder.Append(" " + item);
                            }
                        }
                        textView.Text = textViewBuilder.ToString();
                        speakText     = speakTextBuilder.ToString();
                    }
                    else
                    {
                        textView.Text += "\r\n" + "No specific parameters mentioned.";
                    }

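                    // Invoke the speak button's click handler, which is assumed to read speakText aloud via TTS.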
                    btnSayIt.CallOnClick();
                }
                else
                {
                    textView.Text = "Sorry, I do not understand that... Can You Repeat";
                    speakText     = "Sorry, I do not understand that";
                    btnSayIt.CallOnClick();
                }
            }
            else
            {
                textView.Text = "No speech was recognised";
                speakText     = "No speech was recognised";
                btnSayIt.CallOnClick();
            }
        }
        public static void Main(string[] args)
        {
            var nlp = new NaturalLanguageProcessor.NaturalLanguageProcessor();
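            // Load the intent and screen-context definitions from the JSON configuration files.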
            var intentConfigurations  = JsonConvert.DeserializeObject<List<IntentConfiguration>>(File.ReadAllText("RPRSpeechIntents.json"));
            var contextConfigurations = JsonConvert.DeserializeObject<List<ContextConfiguration>>(File.ReadAllText("RPRScreenContexts.json"));

            nlp.SetConfiguration(intentConfigurations, contextConfigurations);

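            // Keep prompting until the entered screen context is not recognized.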
            while (true)
            {
                Console.WriteLine("Which screen are you in?");
                string screen = Console.ReadLine();

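                // Look up example utterances that are valid for the entered screen context.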
                var suggestions = nlp.GetSuggestions(screen);

                if (suggestions != null)
                {
                    Console.WriteLine("Some things you can ask me:");
                    suggestions.ForEach(s => {
                        Console.WriteLine(s);
                    });
                }
                else
                {
                    Console.WriteLine("Sorry, I do not understand the context.");
                    break;
                }


                Console.WriteLine("Okay, go ahead, I am listening");
                string userSearch   = Console.ReadLine();
                var    intentResult = nlp.GetMatchingIntent(userSearch);

                if (intentResult != null)
                {
                    Console.WriteLine("Awesome, I will get it done.");
                    Console.WriteLine("Action: " + intentResult.Action);
                    if (intentResult.Parameters != null)
                    {
                        foreach (var parameter in intentResult.Parameters)
                        {
                            Console.WriteLine("Parameter Name: " + parameter.Key);
                            Console.WriteLine("Parameter Values: " + string.Join(", ", parameter.Value));
                        }
                    }
                    else
                    {
                        Console.WriteLine("No specific parameters mentioned.");
                    }
                }
                else
                {
                    Console.Write("Sorry, I do not understand that.");
                }
                Console.ReadLine();
            }
        }
        void StartRecording()
        {
            // Cancel the previous task if it's running.
            recognitionTask?.Cancel();
            recognitionTask = null;


            var     audioSession = AVAudioSession.SharedInstance();
            NSError err;

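            // Configure the shared audio session for recording, with playback routed to the speaker.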
            err = audioSession.SetCategory(AVAudioSessionCategory.PlayAndRecord, AVAudioSessionCategoryOptions.DefaultToSpeaker);
            audioSession.SetMode(AVAudioSession.ModeMeasurement, out err);
            err = audioSession.SetActive(true, AVAudioSessionSetActiveOptions.NotifyOthersOnDeactivation);

            // Configure request so that results are returned before audio recording is finished
            recognitionRequest = new SFSpeechAudioBufferRecognitionRequest {
                ShouldReportPartialResults = true,
            };

            var inputNode = audioEngine.InputNode;

            if (inputNode == null)
            {
                throw new InvalidProgramException("Audio engine has no input node");
            }


            // A recognition task represents a speech recognition session.
            // We keep a reference to the task so that it can be cancelled.
            recognitionTask = speechRecognizer.GetRecognitionTask(recognitionRequest, (result, error) => {
                var isFinal = false;
                if (result != null)
                {
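                    // Restart the idle timer each time a partial result arrives.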
                    speechIdleTimer.Stop();
                    speechIdleTimer.Start();

                    textView.Text = result.BestTranscription.FormattedString;

                    isFinal = result.Final;
                }

                if (error != null || isFinal)
                {
                    if (result != null)
                    {
                        var intent = nlp.GetMatchingIntent(result.BestTranscription.FormattedString);

                        string resultText;
                        if (intent != null)
                        {
                            textView.Text += "\nAction is " + intent.Action + ".";
                            resultText     = "Action is " + intent.Action + ". ";
                            if (intent.Parameters != null)
                            {
                                intent.Parameters.ForEach(p => {
                                    resultText    += "Parameter " + p.Key + " with values" + string.Join(",", p.Value) + ". ";
                                    textView.Text += "\nParameter " + p.Key + " with values " + string.Join(",", p.Value) + ". ";
                                });
                            }
                        }
                        else
                        {
                            resultText = "Sorry, I did not get that.";
                        }

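                        // Speak the response back to the user with the system TTS voice.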
                        var su = new AVSpeechUtterance(resultText)
                        {
                            Rate            = AVSpeechUtterance.MaximumSpeechRate / 2,
                            Voice           = AVSpeechSynthesisVoice.FromLanguage("en-US"),
                            PitchMultiplier = 1.0f,
                            Volume          = 1
                        };

                        ss.SpeakUtterance(su);
                    }

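                    // Tear down the audio capture pipeline and restore the record button UI.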
                    audioEngine.Stop();
                    inputNode.RemoveTapOnBus(0);
                    recognitionRequest   = null;
                    recognitionTask      = null;
                    recordButton.Enabled = true;
                    //recordButton.SetTitle ("Start Recording", UIControlState.Normal);
                    recordButton.Hidden = false;
                    recordStatus.Hidden = true;
                    speechIdleTimer.Stop();
                }
            });

            var recordingFormat = inputNode.GetBusOutputFormat(0);

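            // Stream captured microphone buffers into the recognition request.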
            inputNode.InstallTapOnBus(0, 1024, recordingFormat, (buffer, when) => {
                recognitionRequest?.Append(buffer);
            });

            audioEngine.Prepare();
            audioEngine.StartAndReturnError(out err);
        }