public void DefaultSpeechRecognized(object sender, SpeechRecognizedEventArgs e)
{
    // Map the recognized phrase to one of the configured commands.
    recognizedCommand = AssistantHelper.GetRecognizedCommand(commands.Command, e.Result.Text);

    if (recognizedCommand.CommandText == Properties.Resources.AddNewCommand)
    {
        // Only open the "add new command" form if it is not already open.
        if (!AssistantHelper.IsWindowOpen<AddNewCommand>())
        {
            var addNewCommand = new AddNewCommand(_assistantService);
            addNewCommand.Show();
            Sara.SpeakAsync(recognizedCommand.Answer);
        }
        else
        {
            Sara.SpeakAsync(Properties.Resources.FormIsAlreadyOpen);
        }
    }
    else if (recognizedCommand.CommandText == Properties.Resources.ShowCommands)
    {
        if (!AssistantHelper.IsWindowOpen<CommandManagement>())
        {
            var commandManagement = new CommandManagement();
            commandManagement.Show();
            Sara.SpeakAsync(recognizedCommand.Answer);
        }
        else
        {
            Sara.SpeakAsync(Properties.Resources.FormIsAlreadyOpen);
        }
    }
    else if (recognizedCommand.CommandText == Properties.Resources.TimeForBreak)
    {
        // Pause recognition and flip the tray menu items (resume enabled, pause disabled).
        Sara.SpeakAsync(recognizedCommand.Answer);
        CancelRecognize();
        contextMenu.MenuItems[0].Enabled = true;
        contextMenu.MenuItems[1].Enabled = false;
    }
    else if (recognizedCommand.NeedsConfirmation)
    {
        // Switch to a dedicated recognizer that only accepts the confirmation phrases.
        Sara.SpeakAsync(Properties.Resources.ConfirmOperation);
        StopRecognize();
        recognizer = new SpeechRecognitionEngine();
        _speechRecognizerService.CreateNewSynthesizer(
            commands.Command.Where(x => x.IsConfimation).Select(x => x.CommandText).ToArray(),
            recognizer, Sara, listener,
            ConfirmationSpeechRecognized, RecognizerSpeechRecognized, ListenerSpeechRecognize);
    }
    else
    {
        _speechRecognizerService.ExecuteRecognizedAction(Sara, recognizedCommand);
    }
}
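// The handler above uses two AssistantHelper methods that are not shown in this excerpt.
// A minimal sketch of how they could look (an assumed implementation, not the project's
// actual code), assuming WinForms forms and a case-insensitive lookup over the configured
// commands; requires: using System.Linq; using System.Collections.Generic; using System.Windows.Forms;
public static class AssistantHelper
{
    // True when a form of type T is already open, so a second instance is not shown.
    public static bool IsWindowOpen<T>() where T : Form
    {
        return Application.OpenForms.OfType<T>().Any();
    }

    // Returns the configured command whose text matches the recognized phrase, or null.
    public static Command GetRecognizedCommand(IEnumerable<Command> commands, string recognizedText)
    {
        return commands.FirstOrDefault(c =>
            string.Equals(c.CommandText, recognizedText, StringComparison.OrdinalIgnoreCase));
    }
}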
public GothicPersonalAssistant(ISoundService soundService, ISpeechRecognizerService speechRecognizerService)
{
    _soundService = soundService;

    InitializeComponent();
    SetAssistantIcon(SelectedAssistantId);

    // Load the command definitions shipped with the Gothic assistant.
    commands = JsonConvert.DeserializeObject<CommandConfig>(File.ReadAllText(@"GothicAssistant/Commands.json"));

    // Register all configured command phrases with the recognizer and wire up the handlers.
    speechRecognizerService.CreateNewSynthesizer(
        commands.Command.Select(x => x.CommandText).ToArray(),
        recognizer, Bezi, listener,
        DefaultSpeechRecognized, RecognizerSpeechRecognized, ListenerSpeechRecognize);

    dispatcherTimer.Tick += dispatcherTimer_Tick;
    dispatcherTimer.Interval = new TimeSpan(0, 0, 1);
    dispatcherTimer.Start();
}
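// The deserialization above assumes a CommandConfig model whose exact shape is not shown
// in this excerpt. Inferred from the properties the handlers access (CommandText, Answer,
// NeedsConfirmation, IsConfimation), it could look like this sketch; the real model may
// carry additional fields:
public class CommandConfig
{
    public List<Command> Command { get; set; }
}

public class Command
{
    public string CommandText { get; set; }     // phrase the recognizer listens for
    public string Answer { get; set; }          // spoken reply for the command
    public bool NeedsConfirmation { get; set; } // true when the user must confirm before execution
    public bool IsConfimation { get; set; }     // marks yes/no confirmation phrases (spelling follows the original code)
}

// An illustrative GothicAssistant/Commands.json entry matching this sketch:
// { "Command": [ { "CommandText": "add new command", "Answer": "Opening the form", "NeedsConfirmation": false, "IsConfimation": false } ] }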
public ClassicPersonalAssistant(ISpeechRecognizerService speechRecognizerService, IAssistantService assistantService)
{
    _speechRecognizerService = speechRecognizerService;
    _assistantService = assistantService;

    InitializeComponent();
    UpdateCommandsList();

    // Register only the non-confirmation commands with the default recognizer; the
    // confirmation phrases get their own recognizer once a command asks for confirmation.
    _speechRecognizerService.CreateNewSynthesizer(
        commands.Command.Where(x => !x.IsConfimation).Select(x => x.CommandText).ToArray(),
        recognizer, Sara, listener,
        DefaultSpeechRecognized, RecognizerSpeechRecognized, ListenerSpeechRecognize);

    dispatcherTimer.Tick += dispatcherTimer_Tick;
    dispatcherTimer.Interval = new TimeSpan(0, 0, 1);
    dispatcherTimer.Start();

    Sara.SpeakAsync(Properties.Resources.SaraIntroduce);

    PrepareSystemTray();
    contextMenu.MenuItems[0].Enabled = false;
}
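// ISpeechRecognizerService.CreateNewSynthesizer itself is not shown in this excerpt. A
// minimal sketch of what such a method could do with System.Speech, assuming recognizer
// and listener are SpeechRecognitionEngine instances and Sara/Bezi are SpeechSynthesizer
// instances (parameter names and wiring are illustrative; the real service may differ):
public void CreateNewSynthesizer(
    string[] commandTexts,
    SpeechRecognitionEngine recognizer,
    SpeechSynthesizer synthesizer,
    SpeechRecognitionEngine listener,
    EventHandler<SpeechRecognizedEventArgs> defaultRecognized,
    EventHandler<SpeechRecognizedEventArgs> recognizerRecognized,
    EventHandler<SpeechRecognizedEventArgs> listenerRecognized)
{
    // Speak through the default audio device.
    synthesizer.SetOutputToDefaultAudioDevice();

    // Build a grammar that accepts only the configured command phrases.
    var grammar = new Grammar(new GrammarBuilder(new Choices(commandTexts)));
    recognizer.LoadGrammar(grammar);
    recognizer.SetInputToDefaultAudioDevice();

    // In this sketch both command handlers listen on the same engine; the real service
    // may split them differently.
    recognizer.SpeechRecognized += defaultRecognized;
    recognizer.SpeechRecognized += recognizerRecognized;
    recognizer.RecognizeAsync(RecognizeMode.Multiple);

    // The listener engine is assumed to use a separate grammar (e.g. a wake word);
    // only its handler is attached here.
    listener.SpeechRecognized += listenerRecognized;
}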