/// <summary>
/// When activating the scenario, ensure we have permission from the user to access their microphone, and
/// provide an appropriate path for the user to enable access to the microphone if they haven't
/// given explicit permission for it.
/// </summary>
/// <param name="e">The navigation event details</param>
protected async override void OnNavigatedTo(NavigationEventArgs e)
{
    // Save the UI thread dispatcher to allow speech status messages to be shown on the UI.
    dispatcher = CoreWindow.GetForCurrentThread().Dispatcher;

    bool permissionGained = await AudioCapturePermissions.RequestMicrophonePermission();
    if (permissionGained)
    {
        // Enable the recognition buttons.
        btnRecognizeWithUI.IsEnabled = true;
        btnRecognizeWithoutUI.IsEnabled = true;

        // Scope localized speech resource lookups to the system speech language.
        Language speechLanguage = SpeechRecognizer.SystemSpeechLanguage;
        string langTag = speechLanguage.LanguageTag;
        speechContext = ResourceContext.GetForCurrentView();
        speechContext.Languages = new string[] { langTag };

        speechResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("LocalizationSpeechResources");

        PopulateLanguageDropdown();
        await InitializeRecognizer(SpeechRecognizer.SystemSpeechLanguage);
    }
    else
    {
        // No microphone access: explain how to grant it and disable all recognition UI.
        resultTextBlock.Visibility = Visibility.Visible;
        resultTextBlock.Text = "Permission to access capture resources was not given by the user; please set the application setting in Settings->Privacy->Microphone.";
        btnRecognizeWithUI.IsEnabled = false;
        btnRecognizeWithoutUI.IsEnabled = false;
        cbLanguageSelection.IsEnabled = false;
    }
}
/// <summary>
/// Page-load handler: kicks off person-group setup, requests microphone access, and —
/// when permission is granted — configures localized speech resources, initializes the
/// recognizer for the system speech language, and starts continuous recognition.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Routed event details (unused).</param>
private async void MainPage_Loaded(object sender, RoutedEventArgs e)
{
    // NOTE(review): called fire-and-forget; if SetupPersonGroup is async, its completion
    // and failures are not observed here — confirm this is intentional.
    SetupPersonGroup();

    // Save the UI thread dispatcher so speech events (raised on other threads) can update the UI.
    dispatcher = CoreWindow.GetForCurrentThread().Dispatcher;

    bool permissionGained = await RequestMicrophonePermission();
    if (!permissionGained)
        return; // No permission granted

    // Scope localized speech resource lookups to the system speech language.
    Language speechLanguage = SpeechRecognizer.SystemSpeechLanguage;
    string langTag = speechLanguage.LanguageTag;
    speechContext = ResourceContext.GetForCurrentView();
    speechContext.Languages = new string[] { langTag };
    speechResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("LocalizationSpeechResources");

    await InitializeRecognizer(SpeechRecognizer.SystemSpeechLanguage);

    try
    {
        await speechRecognizer.ContinuousRecognitionSession.StartAsync();
    }
    catch (Exception ex)
    {
        // Starting the session can fail (e.g. audio device problems); surface the message to the user.
        var messageDialog = new Windows.UI.Popups.MessageDialog(ex.Message, "Exception");
        await messageDialog.ShowAsync();
    }
}
/// <summary>
/// Switches the localization context of this object to the given language and refreshes
/// every binding that reads localized strings through the indexer.
/// </summary>
/// <param name="language">BCP-47 language tag to resolve resources against.</param>
public void UpdateCulture(string language)
{
    // Rebuild the view's resource context so it prefers the requested language.
    this.resoureContext = ResourceContext.GetForCurrentView();
    this.resoureContext.Languages = new List<string> { language };

    // Remember the selection for later queries.
    this.language = language;

    // "Item[]" invalidates all indexer-based bindings on this object.
    this.OnPropertyChanged("Item[]");
}
/// <summary>
/// Upon entering the scenario, ensure that we have permissions to use the Microphone. This may entail popping up
/// a dialog to the user on Desktop systems. Only enable functionality once we've gained that permission in order to
/// prevent errors from occurring when using the SpeechRecognizer. If speech is not a primary input mechanism, developers
/// should consider disabling appropriate parts of the UI if the user does not have a recording device, or does not allow
/// audio input.
/// </summary>
/// <param name="e">Unused navigation parameters</param>
protected async override void OnNavigatedTo(NavigationEventArgs e)
{
    rootPage = MainPage.Current;

    // Keep track of the UI thread dispatcher, as speech events will come in on a separate thread.
    dispatcher = CoreWindow.GetForCurrentThread().Dispatcher;

    // Prompt the user for permission to access the microphone. This request will only happen
    // once, it will not re-prompt if the user rejects the permission.
    bool permissionGained = await AudioCapturePermissions.RequestMicrophonePermission();
    if (permissionGained)
    {
        btnContinuousRecognize.IsEnabled = true;

        // Initialize resource map to retrieve localized speech strings.
        Language speechLanguage = SpeechRecognizer.SystemSpeechLanguage;
        string langTag = speechLanguage.LanguageTag;
        speechContext = ResourceContext.GetForCurrentView();
        speechContext.Languages = new string[] { langTag };
        speechResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("LocalizationSpeechResources");

        PopulateLanguageDropdown();
        await InitializeRecognizer(SpeechRecognizer.SystemSpeechLanguage);
    }
    else
    {
        // No microphone access: explain how to grant it and disable recognition UI.
        this.resultTextBlock.Visibility = Visibility.Visible;
        this.resultTextBlock.Text = "Permission to access capture resources was not given by the user, reset the application setting in Settings->Privacy->Microphone.";
        btnContinuousRecognize.IsEnabled = false;
        cbLanguageSelection.IsEnabled = false;
    }
}
/// <summary>
/// Background task entry point for the "AwfulVoiceCommandService" Cortana voice command
/// service: dispatches the recognized command to the matching update check, or launches
/// the app in the foreground for commands this version no longer knows.
/// </summary>
/// <param name="taskInstance">Connection to the hosting background service process.</param>
public async void Run(IBackgroundTaskInstance taskInstance)
{
    serviceDeferral = taskInstance.GetDeferral();

    // Register to receive an event if Cortana dismisses the background task. This will
    // occur if the task takes too long to respond, or if Cortana's UI is dismissed.
    // Any pending operations should be cancelled or waited on to clean up where possible.
    taskInstance.Canceled += OnTaskCanceled;

    var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

    // Load localized resources for strings sent to Cortana to be displayed to the user.
    cortanaResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("Resources");

    // Select the system language, which is what Cortana should be running as.
    cortanaContext = ResourceContext.GetForViewIndependentUse();

    // Get the currently used system date format
    dateFormatInfo = CultureInfo.CurrentCulture.DateTimeFormat;

    // This should match the uap:AppService and VoiceCommandService references from the
    // package manifest and VCD files, respectively. Make sure we've been launched by
    // a Cortana Voice Command.
    if (triggerDetails != null && triggerDetails.Name == "AwfulVoiceCommandService")
    {
        try
        {
            voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(
                triggerDetails);
            voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;

            VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();

            // Depending on the operation (defined in AdventureWorks:AdventureWorksCommands.xml)
            // perform the appropriate command.
            switch (voiceCommand.CommandName)
            {
                case "didMyThreadsUpdate":
                    await CheckForBookmarksForUpdates();
                    break;
                case "didMyPmUpdate":
                    await CheckPmsForUpdates();
                    break;
                default:
                    // As with app activation VCDs, we need to handle the possibility that
                    // an app update may remove a voice command that is still registered.
                    // This can happen if the user hasn't run an app since an update.
                    LaunchAppInForeground();
                    break;
            }
        }
        catch (Exception ex)
        {
            System.Diagnostics.Debug.WriteLine("Handling Voice Command failed " + ex.ToString());
        }
    }
    // NOTE(review): serviceDeferral is not completed on any path visible here; presumably
    // OnVoiceCommandCompleted / OnTaskCanceled complete it — confirm against those handlers.
}
/// <summary>
/// Initializes the control, points XAML at its default style, and resolves the localized
/// "MyString" resource from the ManagedPackage resource map.
/// </summary>
public MyCustomControl()
{
    this.DefaultStyleKey = typeof(MyCustomControl);

    // A fresh ResourceContext carries the default qualifier values for this thread.
    var resourceContext = new Windows.ApplicationModel.Resources.Core.ResourceContext();
    var resources = ResourceManager.Current.MainResourceMap
        .GetSubtree("ManagedPackage")
        .GetSubtree("Resources");
    MyString = resources.GetValue("MyString", resourceContext).ValueAsString;
}
/// <summary>
/// Background task entry point for the "DomojeeVoiceCommandService" Cortana voice command
/// service: forwards "JeedomInteractList" phrases to the Jeedom interact API and speaks
/// the reply back through Cortana; unknown commands launch the app in the foreground.
/// </summary>
/// <param name="taskInstance">Connection to the hosting background service process.</param>
public async void Run(IBackgroundTaskInstance taskInstance)
{
    serviceDeferral = taskInstance.GetDeferral();
    taskInstance.Canceled += OnTaskCanceled;

    var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

    // Load localized resources for strings sent to Cortana to be displayed to the user.
    cortanaResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("Resources");

    // Select the system language, which is what Cortana should be running as.
    cortanaContext = ResourceContext.GetForViewIndependentUse();

    // Get the currently used system date format.
    dateFormatInfo = CultureInfo.CurrentCulture.DateTimeFormat;

    if (triggerDetails != null && triggerDetails.Name == "DomojeeVoiceCommandService")
    {
        try
        {
            voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(
                triggerDetails);
            voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;

            VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();
            var userMessage = new VoiceCommandUserMessage();
            string message = "";

            // Issue a Jeedom request to resolve the spoken command.
            switch (voiceCommand.CommandName)
            {
                case "JeedomInteractList":
                    // The recognized phrase arrives as the first value of the "InteractList" property.
                    string CortanaVoiceCommande= voiceCommand.Properties["InteractList"][0];
                    await Jeedom.RequestViewModel.Instance.interactTryToReply(CortanaVoiceCommande);
                    message = Jeedom.RequestViewModel.Instance.InteractReply;
                    break;
                default:
                    LaunchAppInForeground();
                    break;
            }

            // Show and speak the same text, and pass it along if the user taps through to the app.
            userMessage.DisplayMessage = message;
            userMessage.SpokenMessage = message;
            var response = VoiceCommandResponse.CreateResponse(userMessage);
            response.AppLaunchArgument = message;
            await voiceServiceConnection.ReportSuccessAsync(response);
        }
        catch (Exception ex)
        {
            System.Diagnostics.Debug.WriteLine("Handling Voice Command failed " + ex.ToString());
        }
    }
}
/// <summary>
/// Reads the "FlowDirection" string from the app resources and maps it to a
/// <see cref="FlowDirection"/> value; anything other than "RightToLeft" yields LeftToRight.
/// </summary>
/// <param name="context">Optional resource context; when null the default lookup is used.</param>
public static FlowDirection GetFlowDirection(ResourceContext context = null)
{
    const string resourceKey = "FlowDirection";

    // Use the context-aware translation overload only when a context was supplied.
    string value = context == null ? resourceKey.t() : resourceKey.t(context);

    if (string.Equals("RightToLeft", value?.Trim()))
    {
        return FlowDirection.RightToLeft;
    }
    return FlowDirection.LeftToRight;
}
/// <summary>
/// Creates a resource manager bound to one subtree of the app's main resource map, using
/// the current view's resource context for lookups.
/// </summary>
/// <param name="subtree">Name of the resource subtree to read strings from.</param>
public ResourceManagerImpl(string subtree)
{
    // To list every resource compiled into the image, enumerate
    // ResourceManager.Current.MainResourceMap and print each entry to the debug output.
    resourceContext = ResourceContext.GetForCurrentView();
    stringResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree(subtree);
}
/// <summary>
/// Sets up the text-to-speech scenario: creates the synthesizer, scopes localized TTS
/// resources to the default voice's language, and fills the voice chooser list.
/// </summary>
public SynthesizeTextScenario()
{
    InitializeComponent();

    synthesizer = new SpeechSynthesizer();

    // Localize speech resource lookups to the language of the default voice.
    string defaultVoiceLanguage = SpeechSynthesizer.DefaultVoice.Language;
    speechContext = ResourceContext.GetForCurrentView();
    speechContext.Languages = new[] { defaultVoiceLanguage };
    speechResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("LocalizationTTSResources");

    InitializeListboxVoiceChooser();
}
/// <summary>
/// Translates <paramref name="key"/> using the given resource context, optionally from a
/// specific resource file; falls back to the key itself when the lookup fails.
/// </summary>
/// <param name="key">Resource key to resolve.</param>
/// <param name="context">Resource context controlling language selection; may be null.</param>
/// <param name="file">Resource file (subtree) name; defaults to "Resources" when null.</param>
public static string t(this string key, ResourceContext context, string file = null)
{
    try
    {
        if (context == null)
        {
            // No explicit context: defer to the context-free overload.
            return key.t(file);
        }

        // Default to the app's "Resources" file when no file name was supplied.
        var subtree = ResourceManager.Current.MainResourceMap.GetSubtree(file ?? "Resources");
        return subtree.GetValue(key, context).ValueAsString;
    }
    catch
    {
        // Lookup failed (missing key, unknown file, ...): surface the key unchanged.
        return key;
    }
}
/// <summary>
/// Maps a <see cref="Status"/> value to its localized display message from the STATUS
/// resource file.
/// </summary>
/// <param name="status">The status value to localize.</param>
/// <param name="context">Optional resource context controlling language selection.</param>
/// <returns>The localized message for the status.</returns>
/// <exception cref="ArgumentOutOfRangeException">Thrown for unrecognized status values.</exception>
public static string GetLocalizedMessage(this Status status, ResourceContext context = null)
{
    // Resolve the resource key for the status, then run it through the shared translator.
    string resourceKey;
    switch (status)
    {
        case Status.YES:
            resourceKey = "STATUS_YES";
            break;
        case Status.NO:
            resourceKey = "STATUS_NO";
            break;
        case Status.LIMITED:
            resourceKey = "STATUS_LIMITED";
            break;
        case Status.UNKNOWN:
            resourceKey = "STATUS_UNKNOWN";
            break;
        default:
            // FIX: the original threw the bare Exception base type; a specific argument
            // exception is more precise and still caught by any existing catch (Exception).
            throw new ArgumentOutOfRangeException(nameof(status), status, "unknown status type");
    }
    return resourceKey.t(context, R.File.STATUS);
}
/// <summary>
/// Loads all label captions for this settings page from the EcosCab.Core label resources,
/// using the language chosen in the application settings.
/// </summary>
private void SetLabelsText()
{
    // Build a context pinned to the user's configured language.
    var labelContext = new ResourceContext { Languages = new string[] { AppManager.AppData.Language } };
    ResourceMap labels = ResourceManager.Current.MainResourceMap.GetSubtree("EcosCab.Core.Resources.Labels/");

    tbTitle.Text = labels.GetValue("Settings", labelContext).ValueAsString.ToCapital();
    cbLanguage.Header = labels.GetValue("Language", labelContext).ValueAsString.ToCapital();
    cbRecentItemsCount.Header = labels.GetValue("RecentItemsCount", labelContext).ValueAsString.ToCapital();
    swRunAfterReversing.Header = labels.GetValue("RunAfterReversing", labelContext).ValueAsString.ToCapital();
    swRunAfterReversing.OnContent = labels.GetValue("Yes", labelContext).ValueAsString.ToCapital();
    swRunAfterReversing.OffContent = labels.GetValue("No", labelContext).ValueAsString.ToCapital();
}
/// <summary>
/// Background task entry point for the "VoiceCommandService" Cortana voice command
/// service: handles "turnOnItem"/"turnOffItem" by forwarding the spoken target and the
/// desired state to the client connection.
/// </summary>
/// <param name="taskInstance">Connection to the hosting background service process.</param>
public async void Run(IBackgroundTaskInstance taskInstance)
{
    serviceDeferral = taskInstance.GetDeferral();
    taskInstance.Canceled += OnTaskCancelled;

    var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

    // Load localized resources for strings sent to Cortana to be displayed to the user.
    cortanaResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("Resources");

    // Select the system language, which is what Cortana should be running as.
    cortanaContext = ResourceContext.GetForViewIndependentUse();

    if (triggerDetails != null && triggerDetails.Name == "VoiceCommandService")
    {
        try
        {
            voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
            voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;

            VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();
            var client = await this.GetClient();

            // Depending on the operation (defined in the VoiceCommands.xml file)
            // perform the appropriate command.
            switch (voiceCommand.CommandName)
            {
                case "turnOnItem":
                    // The spoken device name arrives as the first "target" property value.
                    var onTarget = voiceCommand.Properties["target"][0];
                    await SendCompletionMessageForOnOff(client, onTarget, true);
                    break;
                case "turnOffItem":
                    var offTarget = voiceCommand.Properties["target"][0];
                    await SendCompletionMessageForOnOff(client, offTarget, false);
                    break;
                default:
                    // As with app activation VCDs, we need to handle the possibility that
                    // an app update may remove a voice command that is still registered.
                    // This can happen if the user hasn't run an app since an update.
                    //LaunchAppInForeground();
                    break;
            }
        }
        catch (Exception ex)
        {
            System.Diagnostics.Debug.WriteLine("Handling Voice Command failed " + ex.ToString());
        }
    }
}
/// <summary>
/// Background task entry point for the "HolVoiceCommandService" Cortana voice command
/// service: answers the "SayHello" command with a fixed greeting shown and spoken by Cortana.
/// </summary>
/// <param name="taskInstance">Connection to the hosting background service process.</param>
public async void Run(IBackgroundTaskInstance taskInstance)
{
    serviceDeferral = taskInstance.GetDeferral();
    taskInstance.Canceled += OnTaskCanceled;

    var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

    // Load localized resources for strings sent to Cortana to be displayed to the user.
    cortanaResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("Resources");

    // Select the system language, which is what Cortana should be running as.
    cortanaContext = ResourceContext.GetForViewIndependentUse();

    if (triggerDetails != null && triggerDetails.Name == "HolVoiceCommandService")
    {
        try
        {
            voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(
                triggerDetails);
            voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;

            VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();

            switch (voiceCommand.CommandName)
            {
                case "SayHello":
                    // Cortana both displays and speaks the reply.
                    var userMessage = new VoiceCommandUserMessage();
                    userMessage.DisplayMessage = "Hello!";
                    userMessage.SpokenMessage = "Your app says hi. It is having a great time.";
                    var response = VoiceCommandResponse.CreateResponse(userMessage);
                    await voiceServiceConnection.ReportSuccessAsync(response);
                    break;
                default:
                    // Unrecognized command: nothing to do in this sample.
                    break;
            }
        }
        catch (Exception ex)
        {
            System.Diagnostics.Debug.WriteLine("Handling Voice Command failed " + ex.ToString());
        }
    }
}
/* Disabled experiment: a worker that polls the connector and pushes incoming
   messages into the synthesis text box.
string dfa = "";

class msgQueue
{
    public string msg;
};

private async void dowork(object obj)
{
    while (true)
    {
        string temp = conn.getMessage();
        if (temp != null)
        {
            textToSynthesize.Text = temp;
        }
    }
}
*/

/// <summary>
/// Sets up the text-to-speech scenario: creates the synthesizer, scopes localized TTS
/// resources to the default voice's language, starts the Azure Service Bus subscription
/// receiver for the "ordermeal" topic, and fills the voice chooser list.
/// </summary>
public SynthesizeTextScenario()
{
    InitializeComponent();
    synthesizer = new SpeechSynthesizer();

    // Localize speech resource lookups to the language of the default voice.
    speechContext = ResourceContext.GetForCurrentView();
    speechContext.Languages = new string[] { SpeechSynthesizer.DefaultVoice.Language };
    speechResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("LocalizationTTSResources");

    conn = new azureConnector();
    //conn.sendSBMessageToTopic("Hello hello", "ordermeal");
    //conn.sendSBMessageToTopic("Oh Burger Burger", "ordermeal");
    conn.runSubscriptionReceiver("ordermeal", "orderMealSubscription");
    //workerThread msgChecker = new workerThread(this.dowork);
    //msgChecker.Start(new msgQueue() {msg = ""});

    InitializeListboxVoiceChooser();
}
/// <summary>
/// Demonstrates resource lookup: appends a greeting for the current culture to the
/// output, then the same greeting resolved against an explicit fr-FR resource context.
/// </summary>
/// <param name="outputBlock">Text block that accumulates the demo output.</param>
public static void Run(Windows.UI.Xaml.Controls.TextBlock outputBlock)
{
    outputBlock.Text += String.Format("\nThe current culture is {0}.\n", CultureInfo.CurrentCulture.Name);
    ResourceLoader rl = new ResourceLoader();

    // Display greeting using the resources of the current culture.
    string greeting = rl.GetString("Greeting");
    outputBlock.Text += String.Format("{0}\n", String.IsNullOrEmpty(greeting) ? "Здравствуйте" : greeting);

    // Display greeting using fr-FR resources.
    ResourceContext ctx = new Windows.ApplicationModel.Resources.Core.ResourceContext();
    ctx.Languages = new string[] { "fr-FR" };
    ResourceMap rmap = ResourceManager.Current.MainResourceMap.GetSubtree("Resources");

    // FIX: ValueAsString is a property on ResourceCandidate, not a method — the original
    // called it with (), which does not compile.
    string newGreeting = rmap.GetValue("Greeting", ctx).ValueAsString;

    outputBlock.Text += String.Format("\n\nCulture of Current Context: {0}\n", ctx.Languages[0]);
    outputBlock.Text += String.Format("{0}\n", String.IsNullOrEmpty(newGreeting) ? greeting : newGreeting);
}
/// <summary>
/// Background task entrypoint for the rule voice command service. Voice Commands using the
/// <VoiceCommandService Target="..."> tag invoke this when recognized by Cortana.
/// "turnOnLight" and "turnOffLight" POST the desired lamp state to the settings web API
/// and report the outcome back to Cortana; unknown commands launch the app in the foreground.
///
/// Background tasks must respond to activation by Cortana within 0.5 seconds, and must
/// report progress to Cortana every 5 seconds (unless Cortana is waiting for user input).
/// Cortana dismisses its UI if it loses focus, which terminates the background task; use
/// Remote Debugging to debug background task behaviors.
/// </summary>
/// <param name="taskInstance">Connection to the hosting background service process.</param>
public async void Run(IBackgroundTaskInstance taskInstance)
{
    serviceDeferral = taskInstance.GetDeferral();

    // Register to receive an event if Cortana dismisses the background task. This will
    // occur if the task takes too long to respond, or if Cortana's UI is dismissed.
    // Any pending operations should be cancelled or waited on to clean up where possible.
    taskInstance.Canceled += OnTaskCanceled;

    var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

    // Load localized resources for strings sent to Cortana to be displayed to the user.
    cortanaResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("Resources");

    // Select the system language, which is what Cortana should be running as.
    cortanaContext = ResourceContext.GetForViewIndependentUse();

    // Get the currently used system date format.
    dateFormatInfo = CultureInfo.CurrentCulture.DateTimeFormat;

    VoiceCommandResponse response = null;

    // This should match the uap:AppService and RuleVoiceCommandService references from the
    // package manifest and VCD files, respectively. Make sure we've been launched by
    // a Cortana Voice Command.
    if (triggerDetails != null && triggerDetails.Name == this.GetType().Name)
    {
        try
        {
            voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(
                triggerDetails);
            voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;

            VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();

            // One client per invocation; hoisted header setup shared by both commands.
            HttpClient client = new HttpClient();
            client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));

            switch (voiceCommand.CommandName)
            {
                case "turnOnLight":
                    {
                        // FIX: the original serialized IsOn = false here — identical to the
                        // turnOffLight payload — so the light could never be turned on.
                        string postBody = JsonConvert.SerializeObject(new Settings { IsOn = true });
                        var webResponse = await client.PostAsync(
                            "http://hiremotemeetcortana.azurewebsites.net/api/settings",
                            new StringContent(postBody, Encoding.UTF8, "application/json"));
                        if (webResponse.IsSuccessStatusCode)
                        {
                            var turnOnLightMessage = new VoiceCommandUserMessage
                            {
                                DisplayMessage = "Wakeup Light has been turned on ",
                                SpokenMessage = "Wakeup Light has been turned on "
                            };
                            response = VoiceCommandResponse.CreateResponse(turnOnLightMessage);
                            await voiceServiceConnection.ReportSuccessAsync(response);
                        }
                        else
                        {
                            var turnOnLightMessage = new VoiceCommandUserMessage
                            {
                                DisplayMessage = "Something went wrong",
                                SpokenMessage = "Something went wrong"
                            };
                            response = VoiceCommandResponse.CreateResponse(turnOnLightMessage);
                            await voiceServiceConnection.ReportFailureAsync(response);
                        }
                    }
                    break;
                case "turnOffLight":
                    {
                        string turnOffLightBody = JsonConvert.SerializeObject(new Settings { IsOn = false });
                        var saveRurnOffLight = await client.PostAsync(
                            "http://hiremotemeetcortana.azurewebsites.net/api/settings",
                            new StringContent(turnOffLightBody, Encoding.UTF8, "application/json"));
                        if (saveRurnOffLight.IsSuccessStatusCode)
                        {
                            var turnOnLightMessage = new VoiceCommandUserMessage
                            {
                                DisplayMessage = "Wakeup Light has been turned off ",
                                SpokenMessage = "Wakeup Light has been turned off "
                            };
                            response = VoiceCommandResponse.CreateResponse(turnOnLightMessage);
                            await voiceServiceConnection.ReportSuccessAsync(response);
                        }
                        else
                        {
                            var turnOnLightMessage = new VoiceCommandUserMessage
                            {
                                DisplayMessage = "Something went wrong",
                                SpokenMessage = "Something went wrong"
                            };
                            response = VoiceCommandResponse.CreateResponse(turnOnLightMessage);
                            await voiceServiceConnection.ReportFailureAsync(response);
                        }
                    }
                    break;
                default:
                    // As with app activation VCDs, we need to handle the possibility that
                    // an app update may remove a voice command that is still registered.
                    // This can happen if the user hasn't run an app since an update.
                    LaunchAppInForeground();
                    break;
            }
        }
        catch (Exception ex)
        {
            System.Diagnostics.Debug.WriteLine("Handling Voice Command failed " + ex.ToString());
        }
    }
    // NOTE(review): serviceDeferral is not completed on any path visible here; presumably
    // OnVoiceCommandCompleted / OnTaskCanceled complete it — confirm against those handlers.
}
/// <summary>
/// Upon entering the scenario, ensure that we have permissions to use the Microphone. This may entail popping up
/// a dialog to the user on Desktop systems. Only enable functionality once we've gained that permission in order to
/// prevent errors from occurring when using the SpeechRecognizer. If speech is not a primary input mechanism, developers
/// should consider disabling appropriate parts of the UI if the user does not have a recording device, or does not allow
/// audio input.
/// </summary>
/// <param name="e">Unused navigation parameters</param>
protected async override void OnNavigatedTo(NavigationEventArgs e)
{
    rootPage = MainPage.Current;

    // Keep track of the UI thread dispatcher, as speech events will come in on a separate thread.
    dispatcher = CoreWindow.GetForCurrentThread().Dispatcher;

    // Prompt the user for permission to access the microphone. This request will only happen
    // once, it will not re-prompt if the user rejects the permission.
    bool permissionGained = await AudioCapturePermissions.RequestMicrophonePermission();
    if (permissionGained)
    {
        btnContinuousRecognize.IsEnabled = true;
    }
    else
    {
        resultTextBlock.Text = "Permission to access capture resources was not given by the user, reset the application setting in Settings->Privacy->Microphone.";
    }

    // NOTE(review): unlike the permission-gated variant of this handler, resource setup and
    // recognizer initialization below run even when permission was denied — confirm intended.
    Language speechLanguage = SpeechRecognizer.SystemSpeechLanguage;
    string langTag = speechLanguage.LanguageTag;
    speechContext = ResourceContext.GetForCurrentView();
    speechContext.Languages = new string[] { langTag };
    speechResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("LocalizationSpeechResources");

    PopulateLanguageDropdown();

    // Initialize the recognizer. Since the recognizer is disposed on scenario exit, we need to make sure we re-initialize it on scenario
    // entrance, as the xaml page may not get destroyed between openings of the scenario.
    await InitializeRecognizer(SpeechRecognizer.SystemSpeechLanguage);
}
/// <summary>
/// Stores the owning voice-command service and the recognized command, and acquires a
/// view-independent resource context for later localized lookups.
/// </summary>
/// <param name="service">Background service that owns this handler.</param>
/// <param name="command">The voice command recognized by Cortana.</param>
public void SetArgs(WheelmapVoiceCommandService service, VoiceCommand command)
{
    // A background task has no view, so the context must be view-independent.
    context = ResourceContext.GetForViewIndependentUse();

    this.voiceCommand = command;
    this.service = service;
}
/// <summary>
/// Resolves a resource string for an optional language, falling back to the neutral
/// resources culture. Returns the key itself outside the appx model, and null when no
/// resource map is loaded or the resource candidate is missing.
/// </summary>
/// <param name="resourceName">Key of the resource to resolve.</param>
/// <param name="languageName">Preferred language, or null to use only the neutral culture.</param>
private string GetResourceString(string resourceName, string languageName)
{
    if (!WinRTInterop.Callbacks.IsAppxModel())
    {
        // on desktop we usually don't have resource strings. so we just return the key name
        return resourceName;
    }

    if (_resourceMap == null)
    {
        return null;
    }

    ResourceCandidate candidate;
    if (languageName == null && _neutralResourcesCultureName == null)
    {
        // Nothing to override: use the default view-independent context.
        candidate = _resourceMap.GetValue(resourceName, ResourceContext.GetForViewIndependentUse());
    }
    else
    {
        // Build a "language" qualifier of the form "<language>;<neutral>", where each
        // part appears only when it is known.
        string qualifier = (languageName == null ? "" : languageName + ";")
            + (_neutralResourcesCultureName ?? "");
        var context = new ResourceContext();
        context.QualifierValues["language"] = qualifier;
        candidate = _resourceMap.GetValue(resourceName, context);
    }

    return candidate == null ? null : candidate.ValueAsString;
}
/// <summary>
/// Loads the on/off captions for the remote-connection switch from the EcosCab string
/// resources, using the language chosen in the application settings.
/// </summary>
private void SetLabelsText()
{
    // Build a context pinned to the user's configured language.
    var labelContext = new ResourceContext();
    labelContext.Languages = new string[] { AppManager.AppData.Language };

    ResourceMap labels = ResourceManager.Current.MainResourceMap.GetSubtree("EcosCab.StringResources.Labels/");

    swUseRemoteConnection.OnContent = labels.GetValue("Yes", labelContext).ValueAsString.ToCapital();
    swUseRemoteConnection.OffContent = labels.GetValue("No", labelContext).ValueAsString.ToCapital();
}
/// <summary>
/// Binds this resource manager to the Microsoft.Azure.Devices.Client string resources,
/// resolved through the current view's resource context.
/// </summary>
public ResourceManagerImpl()
{
    resourceContext = Windows.ApplicationModel.Resources.Core.ResourceContext.GetForCurrentView();

    var mainMap = Windows.ApplicationModel.Resources.Core.ResourceManager.Current.MainResourceMap;
    stringResourceMap = mainMap.GetSubtree("Microsoft.Azure.Devices.Client/Resources");
}
/// <summary>
/// Initializes the scenario page and captures the resource context of the current view,
/// so later lookups can be compared against its default qualifier values.
/// </summary>
public Scenario7()
{
    this.InitializeComponent();
    defaultContextForCurrentView = ResourceContext.GetForCurrentView();
}
/// <summary>
/// Background task entrypoint for the "BandOnTheRunVoiceCommandService" Cortana voice
/// command service. Voice Commands using the <VoiceCommandService Target="..."> tag invoke
/// this when recognized by Cortana. "showbandinformation" replies with a (currently
/// hard-coded) band status summary.
///
/// Background tasks must respond to activation by Cortana within 0.5 seconds, and must
/// report progress to Cortana every 5 seconds (unless Cortana is waiting for user input).
/// Cortana dismisses its UI if it loses focus, which terminates the background task; use
/// Remote Debugging to debug background task behaviors.
/// </summary>
/// <param name="taskInstance">Connection to the hosting background service process.</param>
public async void Run(IBackgroundTaskInstance taskInstance)
{
    serviceDeferral = taskInstance.GetDeferral();

    // Register to receive an event if Cortana dismisses the background task. This will
    // occur if the task takes too long to respond, or if Cortana's UI is dismissed.
    // Any pending operations should be cancelled or waited on to clean up where possible.
    taskInstance.Canceled += OnTaskCanceled;

    var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

    // Load localized resources for strings sent to Cortana to be displayed to the user.
    cortanaResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("Resources");

    // Select the system language, which is what Cortana should be running as.
    cortanaContext = ResourceContext.GetForViewIndependentUse();

    // Get the currently used system date format.
    dateFormatInfo = CultureInfo.CurrentCulture.DateTimeFormat;

    // This should match the uap:AppService and VoiceCommandService references from the
    // package manifest and VCD files, respectively. Make sure we've been launched by
    // a Cortana Voice Command.
    if (triggerDetails != null && triggerDetails.Name == "BandOnTheRunVoiceCommandService")
    {
        try
        {
            voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(
                triggerDetails);
            voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;

            VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();
            VoiceCommandUserMessage userMessage = new VoiceCommandUserMessage();

            // Depending on the operation (defined in AdventureWorks:AdventureWorksCommands.xml)
            // perform the appropriate command.
            switch (voiceCommand.CommandName)
            {
                case "showbandinformation":
                    // hardcoded - needs to be hooked into real data flow.
                    userMessage.DisplayMessage = "Band 1 \n" +
                        "status: connected\n" +
                        "Motion: Jogging\n" +
                        "Speed: 10kph\n" +
                        "Skin Temp: 37\n" +
                        "UV: medium";
                    // FIX: removed the stray empty statement (";;") that followed this
                    // assignment in the original.
                    userMessage.SpokenMessage = "Showing band information";
                    var response = VoiceCommandResponse.CreateResponse(userMessage);
                    await voiceServiceConnection.ReportSuccessAsync(response);
                    break;
                default:
                    break;
            }
        }
        catch (Exception ex)
        {
            System.Diagnostics.Debug.WriteLine("Handling Voice Command failed " + ex.ToString());
        }
        finally
        {
            // NOTE(review): the deferral is completed only when launched by the expected
            // trigger; other activations leave it pending — confirm OnTaskCanceled covers that.
            if (this.serviceDeferral != null)
            {
                this.serviceDeferral.Complete();
            }
        }
    }
}
/// The methods provided in this section are simply used to allow
/// NavigationHelper to respond to the page's navigation methods.
///
/// Page specific logic should be placed in event handlers for the
/// <see cref="Common.NavigationHelper.LoadState"/>
/// and <see cref="Common.NavigationHelper.SaveState"/>.
/// The navigation parameter is available in the LoadState method
/// in addition to page state preserved during an earlier session.
protected async override void OnNavigatedTo(NavigationEventArgs e)
{
    navigationHelper.OnNavigatedTo(e);

    // Prompt the user for permission to access the microphone. This request will only happen
    // once, it will not re-prompt if the user rejects the permission.
    bool permissionGained = await AudioCapturePermissions.RequestMicrophonePermission();

    speechLanguage = SpeechRecognizer.SystemSpeechLanguage;
    string langTag = speechLanguage.LanguageTag;

    // Initialize resource map to retrieve localized speech strings.
    speechContext = ResourceContext.GetForCurrentView();

    IReadOnlyList<Language> supportedLanguages = SpeechRecognizer.SupportedGrammarLanguages;
    if (supportedLanguages.Count > 1)
    {
        // Multiple grammar languages available: pin recognition to zh-Hans-CN when the
        // app language override says so, otherwise fall back to en-US.
        if (Windows.Globalization.ApplicationLanguages.PrimaryLanguageOverride == "zh-Hans-CN")
        {
            speechContext.Languages = new string[] { "zh-Hans-CN" };
            speechLanguage = new Windows.Globalization.Language("zh-Hans-CN");
        }
        else
        {
            speechContext.Languages = new string[] { "en-US" };
            speechLanguage = new Windows.Globalization.Language("en-US");
        }
    }
    else
    {
        // Only one grammar language: use the system speech language as-is.
        speechContext.Languages = new string[] { langTag };
    }

    speechResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("LocalizationSpeechResources");

    // Initialize the command recognizer.
    await InitializeRecognizer(speechLanguage);

    // Initialize the note recognizer.
    await InitializeRecognizerNote(speechLanguage);

    // NOTE(review): both recognizers are initialized even when microphone permission was
    // denied; only the session start below is gated on permission — confirm intended.
    if (speechRecognizer.State == SpeechRecognizerState.Idle && permissionGained)
    {
        try
        {
            await speechRecognizer.ContinuousRecognitionSession.StartAsync();
        }
        catch (Exception ex)
        {
            var messageDialog = new Windows.UI.Popups.MessageDialog(ex.Message, "Exception");
            await messageDialog.ShowAsync();
        }
    }
}
/// <summary>
/// Lazily captures the process-wide default ResourceContext (double-checked locking on
/// s_objectForLock), derives a best-fit CultureInfo and a serialized fallback language
/// list from its languages, and subscribes to qualifier changes so cached values can be
/// refreshed when the language configuration changes.
/// </summary>
/// <param name="resourceManager">Resource manager to read the default context from;
/// when null, ResourceManager.Current is used.</param>
private static void InitializeStaticGlobalResourceContext(global::Windows.ApplicationModel.Resources.Core.ResourceManager resourceManager)
{
    if (s_globalResourceContext == null)
    {
        lock (s_objectForLock)
        {
            // Second check: another thread may have finished while we waited for the lock.
            if (s_globalResourceContext == null)
            {
                resourceManager = resourceManager ?? global::Windows.ApplicationModel.Resources.Core.ResourceManager.Current;
                if (resourceManager != null)
                {
                    // DefaultContext is marked obsolete; the warning is suppressed deliberately.
#pragma warning disable 618
                    ResourceContext globalResourceContext = resourceManager.DefaultContext;
#pragma warning restore 618
                    if (globalResourceContext != null)
                    {
                        // Snapshot the language list before deriving culture data from it.
                        List<String> languages = new List<string>(globalResourceContext.Languages);
                        s_globalResourceContextBestFitCultureInfo = GetBestFitCultureFromLanguageList(languages);
                        s_globalResourceContextFallBackList = ReadOnlyListToString(languages);
                        s_globalResourceContext = globalResourceContext;

                        // React to qualifier changes (e.g. the user edits preferred languages).
                        s_globalResourceContext.QualifierValues.MapChanged += new MapChangedEventHandler<string, string>(GlobalResourceContextChanged);
                    }
                }
            }
        }
    }
}
/// <summary>
/// Background task entrypoint. Voice Commands using the <VoiceCommandService Target="...">
/// tag will invoke this when they are recognized by Cortana, passing along details of the
/// invocation.
///
/// Background tasks must respond to activation by Cortana within 0.5 seconds, and must
/// report progress to Cortana every 5 seconds (unless Cortana is waiting for user
/// input). There is no execution time limit on the background task managed by Cortana,
/// but developers should use plmdebug (https://msdn.microsoft.com/en-us/library/windows/hardware/jj680085%28v=vs.85%29.aspx)
/// on the Cortana app package in order to prevent Cortana timing out the task during
/// debugging.
///
/// Cortana dismisses its UI if it loses focus. This will cause it to terminate the background
/// task, even if the background task is being debugged. Use of Remote Debugging is recommended
/// in order to debug background task behaviors. In order to debug background tasks, open the
/// project properties for the app package (not the background task project), and enable
/// Debug -> "Do not launch, but debug my code when it starts". Alternatively, add a long
/// initial progress screen, and attach to the background task process while it executes.
/// </summary>
/// <param name="taskInstance">Connection to the hosting background service process.</param>
public async void Run(IBackgroundTaskInstance taskInstance)
{
    serviceDeferral = taskInstance.GetDeferral();

    // Register to receive an event if Cortana dismisses the background task. This will
    // occur if the task takes too long to respond, or if Cortana's UI is dismissed.
    // Any pending operations should be cancelled or waited on to clean up where possible.
    taskInstance.Canceled += OnTaskCanceled;

    var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

    // Load localized resources for strings sent to Cortana to be displayed to the user.
    cortanaResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("Resources");

    // Select the system language, which is what Cortana should be running as.
    cortanaContext = ResourceContext.GetForViewIndependentUse();

    // Get the currently used system date format.
    dateFormatInfo = CultureInfo.CurrentCulture.DateTimeFormat;

    // This should match the uap:AppService and VoiceCommandService references from the
    // package manifest and VCD files, respectively. Make sure we've been launched by
    // a Cortana Voice Command.
    //
    // BUGFIX: the null check on triggerDetails must happen BEFORE dereferencing
    // triggerDetails.Name. The original read .Name first, so a non-AppService
    // activation (triggerDetails == null) threw NullReferenceException and the
    // guard in the boolean expression below never had a chance to run.
    string expectedName = typeof(TheVoiceCommandService).ToString();
    bool launchedByCortanaVoiceCommand =
        triggerDetails != null && expectedName.Contains(triggerDetails.Name);

    if (launchedByCortanaVoiceCommand)
    {
        try
        {
            voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
            voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;

            VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();

            switch (voiceCommand.CommandName)
            {
                default:
                    // No specific handler for this command: hand off to the foreground app.
                    LaunchAppInForeground();
                    break;
            }
        }
        catch (Exception ex)
        {
            System.Diagnostics.Debug.WriteLine("Handling Voice Command failed " + ex.ToString());
        }
    }
}
private static ResourceContext GetCurrentCulture() { var rc = new ResourceContext { Languages = new List<string> { Windows.System.UserProfile.GlobalizationPreferences.Languages[0] } }; CurrentLanguage = rc.Languages[0]; return rc; }
private async void PlayTTS(string message) { speechContext = ResourceContext.GetForCurrentView(); speechContext.Languages = new string[] { SpeechSynthesizer.DefaultVoice.Language }; synthesizer = new SpeechSynthesizer(); var voices = SpeechSynthesizer.AllVoices; VoiceInformation currentVoice = synthesizer.Voice; VoiceInformation voice = null; foreach (VoiceInformation item in voices.OrderBy(p => p.Language)) { string tag = item.Language; if(tag.Equals("zh-CN") && item.Gender == VoiceGender.Female) { voice = item; } } if(null != voice) { synthesizer.Voice = voice; SpeechSynthesisStream synthesisStream = await synthesizer.SynthesizeTextToStreamAsync(message); media.AutoPlay = true; media.SetSource(synthesisStream, synthesisStream.ContentType); media.Play(); } }
// Obtain instances of the Resource Map and Resource Context provided by
// the Windows Modern Resource Manager (MRM).
//
// Not thread-safe. Only call this once on one thread for each object instance.
// For example, System.Runtime.ResourceManager only calls this from its constructors,
// guaranteeing that this only gets called once, on one thread, for each new instance
// of System.Runtime.ResourceManager.
//
// Throws exceptions.
// Only returns true if the function succeeded completely (resource map found and
// a cloned context with a non-null fallback list was produced).
// Outputs exceptionInfo since it may be needed for debugging purposes
// if an exception is thrown by one of Initialize's callees.
public override bool Initialize(string libpath, string reswFilename, out PRIExceptionInfo exceptionInfo)
{
    Debug.Assert(libpath != null);
    Debug.Assert(reswFilename != null);
    exceptionInfo = null;

    if (InitializeStatics())
    {
        // AllResourceMaps can throw ERROR_MRM_MAP_NOT_FOUND,
        // although in that case we are not sure for which package it failed to find
        // resources (if we are looking for resources for a framework package,
        // it might throw ERROR_MRM_MAP_NOT_FOUND if the app package
        // resources could not be loaded, even if the framework package
        // resources are properly configured). So we will not fill in the
        // exceptionInfo structure at this point since we don't have any
        // reliable information to include in it.
        IReadOnlyDictionary<String, ResourceMap> resourceMapDictionary = s_globalResourceManager.AllResourceMaps;

        if (resourceMapDictionary != null)
        {
            string packageSimpleName = FindPackageSimpleNameForFilename(libpath);

#if netstandard
            // If we have found a simple package name for the assembly, make sure it is not a
            // *.resources.dll that an application may have packaged in its AppX. This is to
            // enforce that AppX apps use PRI resources.
            if (packageSimpleName != null)
            {
                if (packageSimpleName.EndsWith(".resources.dll", StringComparison.CurrentCultureIgnoreCase))
                {
                    // Pretend we didn't get a package name. When an attempt is made to get a
                    // resource string, the GetString implementation will see that we are going
                    // to use the modern resource manager but we don't have a PRI, and will throw
                    // an exception indicating so. This forces the developer to supply a valid
                    // PRI-based resource.
                    packageSimpleName = null;
                }
            }
#endif // netstandard

            if (packageSimpleName != null)
            {
                ResourceMap packageResourceMap = null;

                // Win8 enforces that package simple names are unique (for example, the App Store will not
                // allow two apps with the same package simple name). That is why the Modern Resource Manager
                // keys access to resources based on the package simple name.
                if (resourceMapDictionary.TryGetValue(packageSimpleName, out packageResourceMap))
                {
                    if (packageResourceMap != null)
                    {
                        // GetSubtree returns null when it cannot find resource strings
                        // named "reswFilename/*" for the package we are looking up.
                        reswFilename = UriUtility.UriEncode(reswFilename);
                        _resourceMap = packageResourceMap.GetSubtree(reswFilename);

                        if (_resourceMap == null)
                        {
                            // Resource map missing for this package/resw pair: report the
                            // identifying details back to the caller for diagnostics.
                            exceptionInfo = new PRIExceptionInfo();
                            exceptionInfo._PackageSimpleName = packageSimpleName;
                            exceptionInfo._ResWFile = reswFilename;
                        }
                        else
                        {
                            _clonedResourceContext = s_globalResourceContext.Clone();

                            if (_clonedResourceContext != null)
                            {
                                // Will need to be changed the first time it is used. But we
                                // can't set it to "" since we will take a lock on it.
                                _clonedResourceContextFallBackList = ReadOnlyListToString(_clonedResourceContext.Languages);

                                if (_clonedResourceContextFallBackList != null)
                                    return true;
                            }
                        }
                    }
                }
            }
        }
    }

    // Any failure along the way (no statics, no maps, no package name, no subtree,
    // no cloned context) falls through to false.
    return false;
}
void Scenario10_SearchMultipleResourceIds(ResourceContext context, string[] resourceIds) { this.Scenario10TextBlock.Text = ""; var dimensionMap = ResourceManager.Current.MainResourceMap.GetSubtree("dimensions"); foreach (var id in resourceIds) { NamedResource namedResource; if (dimensionMap.TryGetValue(id, out namedResource)) { var resourceCandidates = namedResource.ResolveAll(context); Scenario10_ShowCandidates(id, resourceCandidates); } } }