/// <summary>
/// Initializes the page, configures the bot client's speech stack, subscribes to its
/// conversation/speech callbacks, and kicks off the Direct Line conversation.
/// </summary>
public MainPage()
{
    this.InitializeComponent();

    // Free, on-device recognizer from Windows.Media.SpeechRecognition. Used below as the
    // keyword-trigger recognizer; it can also serve as the primary SpeechRecognizer
    // (see the #error note) for a no-cost alternative to Cognitive Services.
    var windowsSpeechRecognizer = new WindowsMediaSpeechRecognizer();

    // Create the client. By default, it will poll the REST endpoint provided by the direct line,
    // but optionally, we can give it a websocket implementation to use.
    _botClient = new Microsoft.Bot.Client.BotClient(BotConnection.DirectLineSecret, BotConnection.ApplicationName, new Microsoft.Bot.Client.DirectLine.WebSocketConnection())
    {
        // Use the speech synthesizer implementation in the WinRT Windows.Media.SpeechSynthesis namespace.
        SpeechSynthesizer = new WindowsMediaSpeechSynthesizer(),

        // Use the Cognitive Services Speech-To-Text API, with speech priming support, as the
        // speech recognizer. The #error below is intentional sample scaffolding: it stops the
        // build until a Bing Speech API key is supplied (replacing the null argument) or the
        // line is swapped for the free WindowsMediaSpeechRecognizer created above.
#error Please provide a Bing Speech API key, or replace this line with "SpeechRecognizer = windowsSpeechRecognizer,"
        SpeechRecognizer = new CognitiveServicesSpeechRecognizer(null),

        // Give us the ability to trigger speech recognition on keywords.
        TriggerRecognizer = windowsSpeechRecognizer
    };

    // Attach to the callbacks the client provides for observing the state of the bot.
    // Subscriptions happen BEFORE StartConversation so no early activity is missed.
    // This will be called every time the bot sends down an activity.
    _botClient.ConversationUpdated += OnConversationUpdated;

    // Speech-related events.
    _botClient.SpeechRecognitionStarted += OnSpeechRecognitionStarted;
    _botClient.IntermediateSpeechRecognitionResultReceived += OnIntermediateSpeechRecognitionResultReceived;
    _botClient.SpeechRecognitionEnded += OnSpeechRecognitionEnded;
    _botClient.FinalSpeechRecognitionResultReceived += OnFinalSpeechRecognitionResultReceived;
    _botClient.SpeechSynthesisEnded += OnSpeechSynthesisEnded;

    // Set triggers, so that, when the user says "listen" or "trivia bot",
    // the bot client will start speech recognition.
    _botClient.SetStartSpeechRecognitionTriggers(new string[] { "listen", "trivia bot" });

    _countdownTimer.PropertyChanged += UpdateCountdown;

    // Kick off the conversation. The Task is stored so later code can await/observe it
    // rather than blocking the UI thread here.
    _startConversationTask = _botClient.StartConversation();
}
/// <summary>
/// Invoked when the application is launched normally by the end user. Other entry points
/// will be used such as when the application is launched to open a specific file.
/// </summary>
/// <param name="e">Details about the launch request and process.</param>
protected override async void OnLaunched(LaunchActivatedEventArgs e)
{
    // Do not repeat app initialization when the Window already has content;
    // only build the navigation frame on a cold start.
    if (!(Window.Current.Content is Frame shell))
    {
        // Create a Frame to act as the navigation context and navigate to the first page.
        shell = new Frame();
        shell.NavigationFailed += OnNavigationFailed;

        if (e.PreviousExecutionState == ApplicationExecutionState.Terminated)
        {
            //TODO: Load state from previously suspended application
        }

        // Place the frame in the current Window.
        Window.Current.Content = shell;
    }

    if (e.PrelaunchActivated)
    {
        // Prelaunch: the app is warmed up in the background; skip navigation,
        // activation, and recognizer setup until a real launch occurs.
        return;
    }

    if (shell.Content == null)
    {
        // When the navigation stack isn't restored, navigate to the first page,
        // passing the launch arguments as the navigation parameter.
        shell.Navigate(typeof(MainPage), e.Arguments);
    }

    // Ensure the current window is active.
    Window.Current.Activate();

    // Lazily create the shared voice-command recognizer the first time we launch.
    if (VoiceCommandTrigger.SpeechRecognizer is null)
    {
        VoiceCommandTrigger.SpeechRecognizer = await WindowsMediaSpeechRecognizer.CreateAsync(Window.Current);
    }
}