/// <summary>
/// Initialize Speech Recognizer and compile constraints.
/// </summary>
/// <param name="recognizerLanguage">Language to use for the speech recognizer</param>
/// <returns>Awaitable task.</returns>
private async Task InitializeRecognizer(Language recognizerLanguage)
{
    if (speechRecognizer != null)
    {
        // Clean up prior to re-initializing this scenario.
        speechRecognizer.StateChanged -= SpeechRecognizer_StateChanged;
        this.speechRecognizer.Dispose();
        this.speechRecognizer = null;
    }

    // Create an instance of SpeechRecognizer.
    speechRecognizer = new SpeechRecognizer(recognizerLanguage);

    // Provide feedback to the user about the state of the recognizer.
    speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;

    // Add a web search topic constraint to the recognizer.
    var webSearchGrammar = new SpeechRecognitionTopicConstraint(SpeechRecognitionScenario.WebSearch, "webSearch");
    speechRecognizer.Constraints.Add(webSearchGrammar);

    // RecognizeWithUIAsync allows developers to customize the prompts.
    speechRecognizer.UIOptions.AudiblePrompt = "Say what you want to search for...";
    speechRecognizer.UIOptions.ExampleText = speechResourceMap.GetValue("WebSearchUIOptionsExampleText", speechContext).ValueAsString;

    // Compile the constraint.
    SpeechRecognitionCompilationResult compilationResult = await speechRecognizer.CompileConstraintsAsync();

    // Check to make sure that the constraints were in a proper format and the recognizer was able to compile them.
    if (compilationResult.Status != SpeechRecognitionResultStatus.Success)
    {
        // Disable the recognition buttons.
        btnRecognizeWithUI.IsEnabled = false;
        btnRecognizeWithoutUI.IsEnabled = false;

        // Let the user know that the grammar didn't compile properly.
        resultTextBlock.Visibility = Visibility.Visible;
        resultTextBlock.Text = "Unable to compile grammar.";
    }
}
public static void Run(Windows.UI.Xaml.Controls.TextBlock outputBlock)
{
    outputBlock.Text += String.Format("\nThe current culture is {0}.\n", CultureInfo.CurrentCulture.Name);
    ResourceLoader rl = new ResourceLoader();

    // Display the greeting using the resources of the current culture.
    string greeting = rl.GetString("Greeting");
    outputBlock.Text += String.Format("{0}\n", String.IsNullOrEmpty(greeting) ? "Здравствуйте" : greeting);

    // Display the greeting using fr-FR resources.
    ResourceContext ctx = new Windows.ApplicationModel.Resources.Core.ResourceContext();
    ctx.Languages = new string[] { "fr-FR" };
    ResourceMap rmap = ResourceManager.Current.MainResourceMap.GetSubtree("Resources");

    // ValueAsString is a property, not a method.
    string newGreeting = rmap.GetValue("Greeting", ctx).ValueAsString;

    outputBlock.Text += String.Format("\n\nCulture of Current Context: {0}\n", ctx.Languages[0]);
    outputBlock.Text += String.Format("{0}\n", String.IsNullOrEmpty(newGreeting) ? greeting : newGreeting);
}
/// <summary>
/// Initialize Speech Recognizer and compile constraints.
/// </summary>
/// <param name="recognizerLanguage">Language to use for the speech recognizer</param>
/// <returns>Awaitable task.</returns>
private async Task InitializeRecognizer(Language recognizerLanguage)
{
    if (speechRecognizer != null)
    {
        // Clean up prior to re-initializing this scenario.
        speechRecognizer.StateChanged -= SpeechRecognizer_StateChanged;
        this.speechRecognizer.Dispose();
        this.speechRecognizer = null;
    }

    // Create an instance of SpeechRecognizer.
    speechRecognizer = new SpeechRecognizer(recognizerLanguage);

    // Provide feedback to the user about the state of the recognizer.
    speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;

    // Compile the dictation topic constraint, which optimizes for dictated speech.
    var dictationConstraint = new SpeechRecognitionTopicConstraint(SpeechRecognitionScenario.Dictation, "dictation");
    speechRecognizer.Constraints.Add(dictationConstraint);
    SpeechRecognitionCompilationResult compilationResult = await speechRecognizer.CompileConstraintsAsync();

    // RecognizeWithUIAsync allows developers to customize the prompts.
    speechRecognizer.UIOptions.AudiblePrompt = "Dictate a phrase or sentence...";
    speechRecognizer.UIOptions.ExampleText = speechResourceMap.GetValue("DictationUIOptionsExampleText", speechContext).ValueAsString;

    // Check to make sure that the constraints were in a proper format and the recognizer was able to compile them.
    if (compilationResult.Status != SpeechRecognitionResultStatus.Success)
    {
        // Disable the recognition buttons.
        btnContinuousRecognize.IsEnabled = false;
        btnClearText.IsEnabled = false;

        // Let the user know that the grammar didn't compile properly.
        resultTextBlock.Visibility = Visibility.Visible;
        resultTextBlock.Text = "Unable to compile grammar.";
    }
}
/// <summary>
/// Gets the full names of the system users.
/// </summary>
/// <returns>Awaitable task.</returns>
private async Task GetFullNameSystemUsers()
{
    try
    {
        // Retrieve the users.
        // The URL will change in 2016 to include the API version - api/data/v8.0/systemusers
        HttpResponseMessage retrieveResponse =
            await httpClient.GetAsync("systemusers?$select=fullname&$orderby=fullname asc");
        if (retrieveResponse.IsSuccessStatusCode)
        {
            // Await the content rather than blocking on .Result.
            string responseBody = await retrieveResponse.Content.ReadAsStringAsync();
            JObject jRetrieveResponse = JObject.Parse(responseBody);
            dynamic systemUserObject = JsonConvert.DeserializeObject(jRetrieveResponse.ToString());

            var userMessage = new VoiceCommandUserMessage();
            var destinationsContentTiles = new List<VoiceCommandContentTile>();
            string recordsRetrieved = cortanaResourceMap.GetValue("UsersRetrieved", cortanaContext).ValueAsString;
            userMessage.DisplayMessage = userMessage.SpokenMessage = recordsRetrieved;

            // Cortana displays at most 10 content tiles, so cap the list.
            int i = 0;
            foreach (var data in systemUserObject.value)
            {
                if (i >= 10)
                {
                    break;
                }
                var destinationTile = new VoiceCommandContentTile();
                destinationTile.ContentTileType = VoiceCommandContentTileType.TitleOnly;
                destinationTile.Title = data.fullname.Value;
                destinationsContentTiles.Add(destinationTile);
                i++;
            }

            var response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);
            await voiceServiceConnection.ReportSuccessAsync(response);
        }
    }
    catch (Exception ex)
    {
        // Swallowed in the original sample; surface it if a UI is available.
        //ShowException(ex);
    }
}
private async void InitializeRecognizer()
{
    // Wait (up to 3 seconds) for any prior teardown to signal completion.
    _resetEvent.WaitOne(3000);

    string wakeupStr = ResourceMap.GetValue("HeyCortana", ResourceContext).ValueAsString;
    _recognizer.Constraints.Add(
        new SpeechRecognitionListConstraint(
            new List<string>() { wakeupStr }, "Wakeup"));

    // Every other localized string in the map counts as a "wrong" wake word.
    List<string> wrongWakeStrings = new List<string>();
    foreach (var item in ResourceMap)
    {
        //string str = item.Value.Candidates.First().ValueAsString;
        string str = ResourceMap.GetValue(item.Key, ResourceContext).ValueAsString;
        if (str != wakeupStr)
        {
            wrongWakeStrings.Add(str);
        }
    }
    _recognizer.Constraints.Add(
        new SpeechRecognitionListConstraint(wrongWakeStrings, "Wrong"));

    var result = await _recognizer.CompileConstraintsAsync();
    if (result.Status != SpeechRecognitionResultStatus.Success)
    {
        throw new Exception("Failed to compile wake-word constraints: " + result.Status);
    }

    DispatcherHelper.CheckBeginInvokeOnUI(() =>
    {
        WakeUpString = ResourceMap.GetValue("HeyCortana", ResourceContext).ValueAsString;
    });
}
/// <summary>
/// Initialize Speech Recognizer and compile constraints.
/// </summary>
/// <param name="recognizerLanguage">Language to use for the speech recognizer</param>
/// <returns>Awaitable task.</returns>
private async Task InitializeRecognizer(Language recognizerLanguage)
{
    if (speechRecognizer != null)
    {
        // Clean up prior to re-initializing this scenario.
        speechRecognizer.StateChanged -= SpeechRecognizer_StateChanged;
        this.speechRecognizer.Dispose();
        this.speechRecognizer = null;
    }

    try
    {
        // Create an instance of SpeechRecognizer.
        speechRecognizer = new SpeechRecognizer(recognizerLanguage);

        // Provide feedback to the user about the state of the recognizer.
        speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;

        // Add a list constraint for each localized command, grouping alternate
        // phrasings of the same command under a shared tag.
        speechRecognizer.Constraints.Add(
            new SpeechRecognitionListConstraint(
                new List<string>()
                {
                    speechResourceMap.GetValue("ListGrammarGoHome", speechContext).ValueAsString
                }, "Home"));
        speechRecognizer.Constraints.Add(
            new SpeechRecognitionListConstraint(
                new List<string>()
                {
                    speechResourceMap.GetValue("ListGrammarGoToContosoStudio", speechContext).ValueAsString
                }, "GoToContosoStudio"));
        speechRecognizer.Constraints.Add(
            new SpeechRecognitionListConstraint(
                new List<string>()
                {
                    speechResourceMap.GetValue("ListGrammarShowMessage", speechContext).ValueAsString,
                    speechResourceMap.GetValue("ListGrammarOpenMessage", speechContext).ValueAsString
                }, "Message"));
        speechRecognizer.Constraints.Add(
            new SpeechRecognitionListConstraint(
                new List<string>()
                {
                    speechResourceMap.GetValue("ListGrammarSendEmail", speechContext).ValueAsString,
                    speechResourceMap.GetValue("ListGrammarCreateEmail", speechContext).ValueAsString
                }, "Email"));
        speechRecognizer.Constraints.Add(
            new SpeechRecognitionListConstraint(
                new List<string>()
                {
                    speechResourceMap.GetValue("ListGrammarCallNitaFarley", speechContext).ValueAsString,
                    speechResourceMap.GetValue("ListGrammarCallNita", speechContext).ValueAsString
                }, "CallNita"));
        speechRecognizer.Constraints.Add(
            new SpeechRecognitionListConstraint(
                new List<string>()
                {
                    speechResourceMap.GetValue("ListGrammarCallWayneSigmon", speechContext).ValueAsString,
                    speechResourceMap.GetValue("ListGrammarCallWayne", speechContext).ValueAsString
                }, "CallWayne"));

        // RecognizeWithUIAsync allows developers to customize the prompts.
        string uiOptionsText = string.Format("Try saying '{0}', '{1}' or '{2}'",
            speechResourceMap.GetValue("ListGrammarGoHome", speechContext).ValueAsString,
            speechResourceMap.GetValue("ListGrammarGoToContosoStudio", speechContext).ValueAsString,
            speechResourceMap.GetValue("ListGrammarShowMessage", speechContext).ValueAsString);
        speechRecognizer.UIOptions.ExampleText = uiOptionsText;
        helpTextBlock.Text = string.Format("{0}\n{1}",
            speechResourceMap.GetValue("ListGrammarHelpText", speechContext).ValueAsString,
            uiOptionsText);

        // Compile the constraint.
        SpeechRecognitionCompilationResult compilationResult = await speechRecognizer.CompileConstraintsAsync();

        // Check to make sure that the constraints were in a proper format and the recognizer was able to compile them.
        if (compilationResult.Status != SpeechRecognitionResultStatus.Success)
        {
            // Disable the recognition buttons.
            btnRecognizeWithUI.IsEnabled = false;
            btnRecognizeWithoutUI.IsEnabled = false;

            // Let the user know that the grammar didn't compile properly.
            resultTextBlock.Visibility = Visibility.Visible;
            resultTextBlock.Text = "Unable to compile grammar.";
        }
        else
        {
            btnRecognizeWithUI.IsEnabled = true;
            btnRecognizeWithoutUI.IsEnabled = true;
            resultTextBlock.Visibility = Visibility.Collapsed;
        }
    }
    catch (Exception ex)
    {
        if ((uint)ex.HResult == HResultRecognizerNotFound)
        {
            btnRecognizeWithUI.IsEnabled = false;
            btnRecognizeWithoutUI.IsEnabled = false;
            resultTextBlock.Visibility = Visibility.Visible;
            resultTextBlock.Text = "Speech Language pack for selected language not installed.";
        }
        else
        {
            var messageDialog = new Windows.UI.Popups.MessageDialog(ex.Message, "Exception");
            await messageDialog.ShowAsync();
        }
    }
}
/// <summary>
/// Initialize Speech Recognizer and compile constraints.
/// </summary>
/// <param name="recognizerLanguage">Language to use for the speech recognizer</param>
/// <returns>Awaitable task.</returns>
private async Task InitializeRecognizer(Language recognizerLanguage)
{
    if (speechRecognizer != null)
    {
        // Clean up prior to re-initializing this scenario.
        speechRecognizer.ContinuousRecognitionSession.Completed -= ContinuousRecognitionSession_Completed;
        speechRecognizer.ContinuousRecognitionSession.ResultGenerated -= ContinuousRecognitionSession_ResultGenerated;
        speechRecognizer.StateChanged -= SpeechRecognizer_StateChanged;
        this.speechRecognizer.Dispose();
        this.speechRecognizer = null;
    }

    try
    {
        // Initialize the SRGS-compliant XML file.
        // For more information about grammars for Windows apps and how to
        // define and use SRGS-compliant grammars in your app, see
        // https://msdn.microsoft.com/en-us/library/dn596121.aspx

        // Determine the language code being used.
        string languageTag = recognizerLanguage.LanguageTag;
        string fileName = String.Format("SRGS\\{0}\\SRGSColors.xml", languageTag);
        StorageFile grammarContentFile = await Package.Current.InstalledLocation.GetFileAsync(fileName);

        resultTextBlock.Text = speechResourceMap.GetValue("SRGSHelpText", speechContext).ValueAsString;

        // Initialize the SpeechRecognizer and add the grammar.
        speechRecognizer = new SpeechRecognizer(recognizerLanguage);

        // Provide feedback to the user about the state of the recognizer. This can be used to provide
        // visual feedback to help the user understand whether they're being heard.
        speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;

        SpeechRecognitionGrammarFileConstraint grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammarContentFile);
        speechRecognizer.Constraints.Add(grammarConstraint);
        SpeechRecognitionCompilationResult compilationResult = await speechRecognizer.CompileConstraintsAsync();

        // Check to make sure that the constraints were in a proper format and the recognizer was able to compile them.
        if (compilationResult.Status != SpeechRecognitionResultStatus.Success)
        {
            // Disable the recognition button.
            btnContinuousRecognize.IsEnabled = false;

            // Let the user know that the grammar didn't compile properly.
            resultTextBlock.Text = "Unable to compile grammar.";
        }
        else
        {
            // Set EndSilenceTimeout to give users more time to complete speaking a phrase.
            speechRecognizer.Timeouts.EndSilenceTimeout = TimeSpan.FromSeconds(1.2);

            // Handle continuous recognition events. Completed fires when various error states occur. ResultGenerated fires when
            // some recognized phrases occur, or the garbage rule is hit.
            speechRecognizer.ContinuousRecognitionSession.Completed += ContinuousRecognitionSession_Completed;
            speechRecognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;

            btnContinuousRecognize.IsEnabled = true;
            resultTextBlock.Text = speechResourceMap.GetValue("SRGSHelpText", speechContext).ValueAsString;
            resultTextBlock.Visibility = Visibility.Visible;
        }
    }
    catch (Exception ex)
    {
        if ((uint)ex.HResult == HResultRecognizerNotFound)
        {
            btnContinuousRecognize.IsEnabled = false;
            resultTextBlock.Visibility = Visibility.Visible;
            resultTextBlock.Text = "Speech Language pack for selected language not installed.";
        }
        else
        {
            var messageDialog = new Windows.UI.Popups.MessageDialog(ex.Message, "Exception");
            await messageDialog.ShowAsync();
        }
    }
}
public string GetLocalizedValue(object value, string suffix, CultureIdentifier culture, params object[] args)
{
    if (value == null)
    {
        return "<!! NULL !!>";
    }

    ILocalizable localizableValue = value as ILocalizable;
    string localizedValue;
    object valueKey = Tuple.Create(value, suffix);
    if (!localizedValueMap.TryGetValue(valueKey, out localizedValue))
    {
        string valueType = value.GetType().Name;
        string valueName = value.ToString();
        Enum enumValue = value as Enum;
        bool isFlags = enumValue != null &&
            enumValue.GetType().GetTypeInfo().GetCustomAttributes<FlagsAttribute>().Any();
        if (isFlags && ((int)value) != 0)
        {
            // Localize each set flag individually, then join the parts.
            List<string> localizedEnumParts = new List<string>();
            foreach (var flag in Enum.GetValues(enumValue.GetType()))
            {
                if (((int)flag != 0) && enumValue.HasFlag((Enum)flag))
                {
                    localizedEnumParts.Add(GetRawLocalizedValue(flag, culture));
                }
            }
            localizedValue = string.Join(", ", localizedEnumParts);
        }
        else if (localizableValue != null)
        {
            // If it's localized, then we'll let them determine
            // the appropriate way to get the raw localized value.
            localizedValue = localizableValue.ToString(this);
        }
        else
        {
            string localizedValueKey = string.Format("{0}_{1}", valueType, valueName);
            if (!string.IsNullOrWhiteSpace(suffix))
            {
                localizedValueKey = localizedValueKey + "_" + suffix;
            }
#if NETFX_CORE
            ResourceContext context = new ResourceContext();
            context.Languages = new string[] { ResolveCulture(culture).ToString() };
            ResourceCandidate localizedCandidate = map.GetValue(localizedValueKey, context);
            if (localizedCandidate == null)
            {
                System.Diagnostics.Debug.WriteLine($"Unable to find localized value {localizedValueKey}");
            }
            localizedValue = localizedCandidate?.ValueAsString ?? "!! Missing Resource for " + localizedValueKey + " !!";
#else
            localizedValue = this.ResourceLoader.GetString(localizedValueKey, ResolveCulture(culture))
                ?? "!! Missing Resource for " + localizedValueKey + " !!";
#endif
        }
        localizedValueMap[valueKey] = localizedValue;
    }

    if (localizableValue != null)
    {
        args = args == null || args.Length == 0 ? localizableValue.LocalizedContext : args;
    }
    if (args != null && args.Length > 0)
    {
        return string.Format(localizedValue, args);
    }
    return localizedValue;
}
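// A usage sketch for the flags branch above (the Permissions enum, the
// localizer instance, and its resource entries are hypothetical): each set
// flag is localized via GetRawLocalizedValue and the parts are joined with
// ", ", so a combined value renders as e.g. "Read, Write".
//
// [Flags]
// enum Permissions { None = 0, Read = 1, Write = 2 }
//
// string text = localizer.GetLocalizedValue(
//     Permissions.Read | Permissions.Write, null, currentCulture);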
private async Task SendCompletionMessageForReport(string target)
{
    #region ShowProgressScreen
    // If this operation is expected to take longer than 0.5 seconds, the task must
    // provide a progress response to Cortana prior to starting the operation, and
    // provide updates at most every 5 seconds.
    try
    {
        string loadingPowerOnOff = string.Format(
            cortanaResourceMap.GetValue("LoadingReport", cortanaContext).ValueAsString, target);
        await ShowProgressScreen(loadingPowerOnOff);
    }
    catch (Exception e)
    {
        Debug.WriteLine(e.Message);
    }
    #endregion

    var userMessage = new VoiceCommandUserMessage();
    var destinationsContentTiles = new List<VoiceCommandContentTile>();

    #region checkConnection
    //if (_connection == null)
    //{
    //    Debug.WriteLine("_connection is null");
    //    _connection = new AppServiceConnection();
    //    _connection.AppServiceName = "VoiceCommandService";
    //    _connection.PackageFamilyName = "CommaxIotApp_5s72hevbe334y";
    //    var status = await _connection.OpenAsync();
    //    switch (status)
    //    {
    //        case AppServiceConnectionStatus.AppNotInstalled:
    //            Debug.WriteLine("The app AppServicesProvider is not installed. Deploy AppServicesProvider to this device and try again.");
    //            _connection = null;
    //            break;
    //        case AppServiceConnectionStatus.AppUnavailable:
    //            Debug.WriteLine("The app AppServicesProvider is not available. This could be because it is currently being updated or was installed to a removable device that is no longer available.");
    //            _connection = null;
    //            break;
    //        case AppServiceConnectionStatus.AppServiceUnavailable:
    //            Debug.WriteLine(string.Format("The app AppServicesProvider is installed but it does not provide the app service {0}.", _connection.AppServiceName));
    //            _connection = null;
    //            break;
    //        case AppServiceConnectionStatus.Unknown:
    //            Debug.WriteLine("An unknown error occurred while we were trying to open an AppServiceConnection.");
    //            _connection = null;
    //            break;
    //    }
    //}
    #endregion

    #region Logic
    if (_connection == null)
    {
        #region When the CommonService lookup fails
        //userMessage = new VoiceCommandUserMessage();
        //destinationsContentTiles = new List<VoiceCommandContentTile>();
        if (true)
        {
            // In this scenario, perhaps someone has modified data on your service outside of your
            // control. If you're accessing a remote service, having a background task that
            // periodically refreshes the phrase list so it's likely to be in sync is ideal.
            // This is unlikely to occur for this sample app, however.
            string foundNoTargetToPowerHandle = string.Format(
                cortanaResourceMap.GetValue("FoundNoTargetToPowerHandle", cortanaContext).ValueAsString, target);
            userMessage.DisplayMessage = foundNoTargetToPowerHandle;
            userMessage.SpokenMessage = foundNoTargetToPowerHandle;
            // Note: this branch composes the failure message but never reports
            // a response back to Cortana.
        }
        #endregion
    }
    else
    {
        var message = new ValueSet();
        message.Add("Type", "Common");
        message.Add("Command", "GetStatus");
        message.Add("Target", target);

        AppServiceResponse responseData = await _connection.SendMessageAsync(message);
        bool isOpen = false;
        if (responseData.Status == AppServiceResponseStatus.Success && responseData.Message.ContainsKey("Result"))
        {
            isOpen = (bool)responseData.Message["Result"];
        }
        string strOpen = (isOpen) ? "On" : "Off";
        userMessage.DisplayMessage = string.Format(
            cortanaResourceMap.GetValue("ReportStatus", cortanaContext).ValueAsString, target, strOpen);
        userMessage.SpokenMessage = string.Format(
            cortanaResourceMap.GetValue("ReportStatus", cortanaContext).ValueAsString, target, strOpen);

        //var response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);
        var response = VoiceCommandResponse.CreateResponse(userMessage);
        await voiceServiceConnection.ReportSuccessAsync(response);
    }
    #endregion
}
async void Get_ss_true()
{
    // await Analysis_Json();
    Loading.IsActive = true;
    string url = "http://s.moreplay.cn/apps/ss/ss.json";
    Uri uri = new Uri(url);
    HttpClient client = new HttpClient();
    try
    {
        AccoutJson = await client.GetStringAsync(uri);
    }
    catch
    {
        // System.Diagnostics.Debug.WriteLine("ErrorCode: " + e);
        var NetError = resourceMap.GetValue("NetError", resourceContext);
        var messageDig = new MessageDialog(NetError.ValueAsString);
        // Show the dialog and capture whether the user chose to exit.
        var result = await messageDig.ShowAsync();
        IsError = true;
    }

    if (IsError == false)
    {
        JObject JSON = JObject.Parse(AccoutJson);
        JsonSerializer json = JsonSerializer.Create();
        // Deserialize the JSON string into the account object.
        account = json.Deserialize<Rootobject>(new JsonTextReader(new StringReader(AccoutJson)));
        // ToJsonObject(AccoutJson);

        // If this is a subscription (pro) account, use the pro list.
        if (isPro == true)
        {
            ss_pro();
            // JArray ss_pro_array = JArray.Parse(JSON["ss_pro"].ToString());
            // JArray jlist = JArray.Parse(JSON["ss"].ToString());
        }
        else
        {
            ss();
        }

        // Get the exact number of SS accounts in the JSON.
        // int Quantity_ss = (int)JSON["quantity_ss"];
        // System.Diagnostics.Debug.WriteLine(e.Message);

        // Build an array of the SS accounts.
        // JArray jlist = JArray.Parse(JSON["ss"].ToString());
        // Random rnd = new Random();
        // Pick a random index from the range below.
        // int r = rnd.Next(0, Quantity_ss);
        /*
         * jlist.ToObject<List<Items>>();
         * List<Items> items = ((JArray)jlist).Select(x => new Items
         * {
         *     Ip = (string)x["ip"],
         *     Port = (string)x["port"],
         *     PW = (string)x["pw"]
         * }).ToList();
         */
        /*
         * string ss_Str = string.Join("", jlist[r]); // convert the array entry to a string
         * ss_Str = ss_Str.TrimEnd('"');
         * ss_Str = ss_Str.TrimStart('"');
         * Regex r1 = new Regex("\"\"");
         * ss_Str = r1.Replace(ss_Str, "\",\"", Quantity_ss - 2);
         * ss_Str = "{\"" + ss_Str + "\"}";
         * // Convert the randomly selected account array into JSON.
         * JObject ss_Arr = (JObject)JsonConvert.DeserializeObject(ss_Str);
         *
         * IP.Text = (string)ss_Arr["ip"];
         * Port.Text = (string)ss_Arr["port"];
         * Password.Text = (string)ss_Arr["pw"];
         * Encryption.Text = (string)ss_Arr["jm"];
         */

        Loading.IsActive = false;
        List.Visibility = Visibility.Visible;
        title.Visibility = Visibility.Collapsed;

        /*
         * } catch {
         *     var messageDig = new MessageDialog("网络异常,请检查网络!");
         *     // Show the dialog and capture whether the user chose to exit.
         *     var result = await messageDig.ShowAsync();
         * }
         */
    }
}
/// <summary>
/// Retrieves a string resource for the specified culture using the resource map.
/// </summary>
/// <param name="name">The name of the string resource.</param>
/// <param name="culture">The culture to retrieve a matching string for. May be <see langword="null"/>.</param>
/// <returns>A localized string.</returns>
internal static string GetResource(string name, CultureInfo culture)
{
    Windows.ApplicationModel.Resources.Core.ResourceContext context;
    if (culture == null || culture.IsNeutralCulture)
    {
        context = Context;
        if (context == null)
        {
            context = Windows.ApplicationModel.Resources.Core.ResourceContext.GetForViewIndependentUse();
        }
    }
    else
    {
        context = new Windows.ApplicationModel.Resources.Core.ResourceContext();
        context.Languages = new string[] { culture.TwoLetterISOLanguageName };
    }

    Windows.ApplicationModel.Resources.Core.ResourceCandidate resourceCandidate =
        ResourceMap.GetValue("NerdyDuck.Logging/Resources/" + name, context);
    if (resourceCandidate == null)
    {
        throw new ArgumentOutOfRangeException(nameof(name));
    }
    return resourceCandidate.ValueAsString;
}
/// <summary>
/// Retrieves a string resource using the resource map.
/// </summary>
/// <param name="name">The name of the string resource.</param>
/// <returns>A localized string.</returns>
internal static string GetResource(string name)
{
    Windows.ApplicationModel.Resources.Core.ResourceContext context = Context;
    if (context == null)
    {
        context = Windows.ApplicationModel.Resources.Core.ResourceContext.GetForViewIndependentUse();
    }

    Windows.ApplicationModel.Resources.Core.ResourceCandidate resourceCandidate =
        ResourceMap.GetValue("NerdyDuck.Scheduler/Resources/" + name, context);
    if (resourceCandidate == null)
    {
        throw new ArgumentOutOfRangeException(nameof(name));
    }
    return resourceCandidate.ValueAsString;
}
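// A minimal standalone sketch of the same lookup pattern used by the two
// helpers above, written directly against the WinRT API without the wrapper
// (the "Resources/Greeting" resource name is an assumption for illustration):
internal static string GetGreetingFor(string languageTag)
{
    var context = new Windows.ApplicationModel.Resources.Core.ResourceContext();
    context.Languages = new string[] { languageTag };
    var candidate = Windows.ApplicationModel.Resources.Core.ResourceManager.Current
        .MainResourceMap.GetValue("Resources/Greeting", context);
    // GetValue yields no candidate when nothing matches; fall back to empty.
    return candidate?.ValueAsString ?? string.Empty;
}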
/// <summary>
/// Initialize Speech Recognizer and compile constraints.
/// </summary>
/// <param name="recognizerLanguage">Language to use for the speech recognizer</param>
/// <returns>Awaitable task.</returns>
private async Task InitializeRecognizer(Language recognizerLanguage)
{
    if (speechRecognizer != null)
    {
        // Clean up prior to re-initializing this scenario.
        speechRecognizer.StateChanged -= SpeechRecognizer_StateChanged;
        speechRecognizer.ContinuousRecognitionSession.Completed -= ContinuousRecognitionSession_Completed;
        speechRecognizer.ContinuousRecognitionSession.ResultGenerated -= ContinuousRecognitionSession_ResultGenerated;
        this.speechRecognizer.Dispose();
        this.speechRecognizer = null;
    }

    try
    {
        this.speechRecognizer = new SpeechRecognizer(recognizerLanguage);

        // Provide feedback to the user about the state of the recognizer. This can be used to provide visual feedback in the form
        // of an audio indicator to help the user understand whether they're being heard.
        speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;

        // Build a command-list grammar. Commands should ideally be drawn from a resource file for localization, and
        // be grouped into tags for alternate forms of the same command.
        speechRecognizer.Constraints.Add(
            new SpeechRecognitionListConstraint(
                new List<string>()
                {
                    speechResourceMap.GetValue("ListGrammarGoHome", speechContext).ValueAsString
                }, "Home"));
        speechRecognizer.Constraints.Add(
            new SpeechRecognitionListConstraint(
                new List<string>()
                {
                    speechResourceMap.GetValue("ListGrammarGoToContosoStudio", speechContext).ValueAsString
                }, "GoToContosoStudio"));
        speechRecognizer.Constraints.Add(
            new SpeechRecognitionListConstraint(
                new List<string>()
                {
                    speechResourceMap.GetValue("ListGrammarShowMessage", speechContext).ValueAsString,
                    speechResourceMap.GetValue("ListGrammarOpenMessage", speechContext).ValueAsString
                }, "Message"));
        speechRecognizer.Constraints.Add(
            new SpeechRecognitionListConstraint(
                new List<string>()
                {
                    speechResourceMap.GetValue("ListGrammarSendEmail", speechContext).ValueAsString,
                    speechResourceMap.GetValue("ListGrammarCreateEmail", speechContext).ValueAsString
                }, "Email"));
        speechRecognizer.Constraints.Add(
            new SpeechRecognitionListConstraint(
                new List<string>()
                {
                    speechResourceMap.GetValue("ListGrammarCallNitaFarley", speechContext).ValueAsString,
                    speechResourceMap.GetValue("ListGrammarCallNita", speechContext).ValueAsString
                }, "CallNita"));
        speechRecognizer.Constraints.Add(
            new SpeechRecognitionListConstraint(
                new List<string>()
                {
                    speechResourceMap.GetValue("ListGrammarCallWayneSigmon", speechContext).ValueAsString,
                    speechResourceMap.GetValue("ListGrammarCallWayne", speechContext).ValueAsString
                }, "CallWayne"));

        // Update the help text in the UI to show localized examples.
        string uiOptionsText = string.Format("Try saying '{0}', '{1}' or '{2}'",
            speechResourceMap.GetValue("ListGrammarGoHome", speechContext).ValueAsString,
            speechResourceMap.GetValue("ListGrammarGoToContosoStudio", speechContext).ValueAsString,
            speechResourceMap.GetValue("ListGrammarShowMessage", speechContext).ValueAsString);
        listGrammarHelpText.Text = string.Format("{0}\n{1}",
            speechResourceMap.GetValue("ListGrammarHelpText", speechContext).ValueAsString,
            uiOptionsText);

        SpeechRecognitionCompilationResult result = await speechRecognizer.CompileConstraintsAsync();
        if (result.Status != SpeechRecognitionResultStatus.Success)
        {
            // Disable the recognition button.
            btnContinuousRecognize.IsEnabled = false;

            // Let the user know that the grammar didn't compile properly.
            resultTextBlock.Visibility = Visibility.Visible;
            resultTextBlock.Text = "Unable to compile grammar.";
        }
        else
        {
            btnContinuousRecognize.IsEnabled = true;
            resultTextBlock.Visibility = Visibility.Collapsed;

            // Handle continuous recognition events. Completed fires when various error states occur. ResultGenerated fires when
            // some recognized phrases occur, or the garbage rule is hit.
            speechRecognizer.ContinuousRecognitionSession.Completed += ContinuousRecognitionSession_Completed;
            speechRecognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;
        }
    }
    catch (Exception ex)
    {
        if ((uint)ex.HResult == HResultRecognizerNotFound)
        {
            btnContinuousRecognize.IsEnabled = false;
            resultTextBlock.Visibility = Visibility.Visible;
            resultTextBlock.Text = "Speech Language pack for selected language not installed.";
        }
        else
        {
            var messageDialog = new Windows.UI.Popups.MessageDialog(ex.Message, "Exception");
            await messageDialog.ShowAsync();
        }
    }
}
private async void Scenario12Button_Show_Click(object sender, RoutedEventArgs e)
{
    // Two coding patterns will be used:
    //   1. Get a ResourceContext on the UI thread using GetForCurrentView and pass
    //      it to the non-UI thread.
    //   2. Get a ResourceContext on the non-UI thread using GetForViewIndependentUse.
    //
    // Two analogous patterns could be used for ResourceLoader instead of ResourceContext.

    // Pattern 1: get a ResourceContext for the UI thread.
    ResourceContext defaultContextForUiThread = ResourceContext.GetForCurrentView();

    // Pattern 2: we'll create a view-independent context in the non-UI worker thread.

    // We need some things in order to display results in the UI (doing that
    // for purposes of this sample, to show that work was actually done in the
    // worker thread):
    List<string> uiDependentResourceList = new List<string>();
    List<string> uiIndependentResourceList = new List<string>();

    // Use a worker thread for the heavy lifting so the UI isn't blocked.
    await Windows.System.Threading.ThreadPool.RunAsync(
        (source) =>
        {
            ResourceMap stringResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("Resources");

            // Pattern 1: the defaultContextForUiThread variable was created above and is visible here.

            // Pattern 2: get a view-independent ResourceContext.
            ResourceContext defaultViewIndependentResourceContext = ResourceContext.GetForViewIndependentUse();

            // NOTE: The ResourceContext obtained using GetForViewIndependentUse() has no scale qualifier
            // value set. If this ResourceContext is used in its default state to retrieve a resource, that
            // will work provided that the resource does not have any scale-qualified variants. But if the
            // resource has any scale-qualified variants, then that will fail at runtime.
            //
            // A scale qualifier value on this ResourceContext can be set programmatically. If that is done,
            // then the ResourceContext can be used to retrieve a resource that has scale-qualified variants.
            // But if the scale qualifier is reset (e.g., using the ResourceContext.Reset() method), then
            // it will return to the default state with no scale qualifier value set, and cannot be used
            // to retrieve any resource that has scale-qualified variants.

            // Simulate processing a number of items; a single string resource is
            // sufficient to demonstrate both patterns.
            for (var i = 0; i < 4; i++)
            {
                // Pattern 1: use the ResourceContext from the UI thread.
                string listItem1 = stringResourceMap.GetValue("string1", defaultContextForUiThread).ValueAsString;
                uiDependentResourceList.Add(listItem1);

                // Pattern 2: use the view-independent ResourceContext.
                string listItem2 = stringResourceMap.GetValue("string1", defaultViewIndependentResourceContext).ValueAsString;
                uiIndependentResourceList.Add(listItem2);
            }
        });

    // Display the results in one go. (A more finessed design might add results
    // in the UI asynchronously, but that goes beyond what this sample is
    // demonstrating.)
    ViewDependentResourcesList.ItemsSource = uiDependentResourceList;
    ViewIndependentResourcesList.ItemsSource = uiIndependentResourceList;
}
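// The NOTE above says a scale qualifier can be set programmatically on a
// view-independent ResourceContext. A minimal sketch of doing so (the "140"
// scale value is an arbitrary example; "string1" is the resource used by the
// scenario above):
private static string GetScaleQualifiedString1()
{
    ResourceContext viewIndependentContext = ResourceContext.GetForViewIndependentUse();
    // With the qualifier set, this context can resolve scale-qualified variants.
    viewIndependentContext.QualifierValues["Scale"] = "140";
    return ResourceManager.Current.MainResourceMap
        .GetSubtree("Resources").GetValue("string1", viewIndependentContext).ValueAsString;
}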
public async void DownloadAction()
{
    Progress<DownloadOperation> progress = null;
    if (string.IsNullOrEmpty(Url))
    {
        ContentDialog notFoundLinkFileDialog = new ContentDialog()
        {
            Title = resourceMap.GetValue("titleErrorDownloadFileDialog", resourceContext).ValueAsString,
            Content = resourceMap.GetValue("contentErrorDownloadFileDialog", resourceContext).ValueAsString,
            PrimaryButtonText = "ОК"
        };
        ContentDialogResult result = await notFoundLinkFileDialog.ShowAsync();
        return;
    }

    FolderPicker folderPicker = new FolderPicker
    {
        SuggestedStartLocation = PickerLocationId.Downloads,
        ViewMode = PickerViewMode.Thumbnail
    };
    folderPicker.FileTypeFilter.Add("*");
    StorageFolder folder = await folderPicker.PickSingleFolderAsync();
    if (folder != null)
    {
        Uri downloadUrl = new Uri(Url);
        String fileName = Path.GetFileName(downloadUrl.ToString());
        StorageFile file = await folder.CreateFileAsync(fileName, CreationCollisionOption.GenerateUniqueName);

        downloadOperation = backgroundDownloader.CreateDownload(downloadUrl, file);
        progress = new Progress<DownloadOperation>(x => ProgressChanged(downloadOperation));
        cancellationToken = new CancellationTokenSource();
        try
        {
            DownloadFile newFile = new DownloadFile
            {
                Id = downloadOperation.Guid,
                Name = fileName
            };
            toastNotification.SendUpdatableToastWithProgress(newFile.Name);
            newFile.FileSize = (downloadOperation.Progress.TotalBytesToReceive / 1024).ToString() + " kb";
            newFile.DateTime = DateTime.Now;
            newFile.Type = FType;
            newFile.Description = Description;
            newFile.Status = Status;
            Files.Add(newFile);

            await downloadOperation.StartAsync().AsTask(cancellationToken.Token, progress);
            if (downloadOperation.Progress.Status == BackgroundTransferStatus.Completed)
            {
                toastNotification.SendCompletedToast(fileName);
                dataStorage.Save(Files);
                UpdateTileAction();
            }
            IsEnableButtons = false;
            Url = Description = "";
            FType = FileType.None;
        }
        catch (TaskCanceledException)
        {
            Status = resourceMap.GetValue("canceledStatus", resourceContext).ValueAsString;
            await downloadOperation.ResultFile.DeleteAsync();
            downloadOperation = null;
        }
        catch (Exception)
        {
            Status = "File not found";
            var messageDialog = new MessageDialog("No internet connection has been found.");
            await messageDialog.ShowAsync();
            await downloadOperation.ResultFile.DeleteAsync();
            downloadOperation = null;
        }
    }
}
private async Task InitializeRecognizer(Language recognizerLanguage, byte initType)
{
    if (speechRecognizer != null)
    {
        Debug.WriteLine("[INIT] - Cleanup recognizer");
        // Clean up prior to re-initializing this scenario.
        speechRecognizer.StateChanged -= SpeechRecognizer_StateChanged;
        speechRecognizer.ContinuousRecognitionSession.Completed -= ContinuousRecognitionSession_Completed;
        speechRecognizer.ContinuousRecognitionSession.ResultGenerated -= ContinuousRecognitionSession_ResultGenerated;
        speechRecognizer.HypothesisGenerated -= SpeechRecognizer_HypothesisGenerated;
        this.speechRecognizer.Dispose();
        this.speechRecognizer = null;
    }

    this.speechRecognizer = new SpeechRecognizer(recognizerLanguage);

    switch (initType)
    {
        case 0:
            // Initialize the resource map to retrieve localized speech strings.
            string langTag = recognizerLanguage.LanguageTag;
            speechContext.Languages = new string[] { langTag };
            speechResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("triggerStrings");
            try
            {
                // Provide feedback to the user about the state of the recognizer. This can be used to provide visual feedback in the form
                // of an audio indicator to help the user understand whether they're being heard.
                speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;

                // Build a command-list grammar. Commands should ideally be drawn from a resource file for localization, and
                // be grouped into tags for alternate forms of the same command.
                speechRecognizer.Constraints.Add(
                    new SpeechRecognitionListConstraint(
                        new List<string>()
                        {
                            speechResourceMap.GetValue("startCmd", speechContext).ValueAsString
                        }, "Home"));

                SpeechRecognitionCompilationResult res = await speechRecognizer.CompileConstraintsAsync();
                if (res.Status != SpeechRecognitionResultStatus.Success)
                {
                    Debug.WriteLine("Unable to compile grammar!!!");
                }
                else
                {
                    // Handle continuous recognition events. Completed fires when various error states occur. ResultGenerated fires when
                    // some recognized phrases occur, or the garbage rule is hit.
                    speechRecognizer.ContinuousRecognitionSession.Completed += ContinuousRecognitionSession_Completed;
                    speechRecognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;
                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine("[INIT] - Error while building the list constraint: " + ex.Message);
            }
            break;

        case 1:
            // Provide feedback to the user about the state of the recognizer. This can be used to provide visual feedback in the form
            // of an audio indicator to help the user understand whether they're being heard.
            speechRecognizer.StateChanged += SpeechRecognizer_StateChanged;

            // Apply the dictation topic constraint to optimize for dictated freeform speech.
            var dictationConstraint = new SpeechRecognitionTopicConstraint(SpeechRecognitionScenario.Dictation, "dictation");
            speechRecognizer.Constraints.Add(dictationConstraint);

            SpeechRecognitionCompilationResult result = await speechRecognizer.CompileConstraintsAsync();
            if (result.Status != SpeechRecognitionResultStatus.Success)
            {
                Debug.WriteLine("[INIT] - Grammar compilation FAILED");
            }
            else
            {
                Debug.WriteLine("[INIT] - Grammar compilation OK");
            }

            // Handle continuous recognition events. Completed fires when various error states occur. ResultGenerated fires when
            // some recognized phrases occur, or the garbage rule is hit. HypothesisGenerated fires during recognition, and
            // allows us to provide incremental feedback based on what the user's currently saying.
            speechRecognizer.ContinuousRecognitionSession.Completed += ContinuousRecognitionSession_Completed;
            speechRecognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;
            speechRecognizer.HypothesisGenerated += SpeechRecognizer_HypothesisGenerated;
            break;
    }
}
void UpdateContent()
{
    var text = Element.Text;
    var elementImage = Element.Image;

    // No image, just the text.
    if (elementImage == null)
    {
        Control.Content = text;
        return;
    }

    // if (UWP.App.UniversalApi >= 4)
    //CompositionSurfaceBrush imageBrush = compositor.CreateSurfaceBrush();
    //LoadedImageSurface loadedSurface = LoadedImageSurface.StartLoadFromUri(new Uri("ms-appx:///Assets/myPic.jpg"), new Size(200.0, 400.0));

    FrameworkElement image;
    var element = Element as Ao3TrackReader.Controls.Button;

    // Look up the packaged image file so we can read its scale qualifier.
    ResourceMap resMap = ResourceManager.Current.MainResourceMap.GetSubtree("Files");
    ResourceContext resContext = ResourceContext.GetForCurrentView();
    var res = resMap.GetValue(elementImage.File, resContext);
    int scale = 100;
    try
    {
        scale = int.Parse(res?.GetQualifierValue("scale"));
    }
    catch
    {
    }

#if True
    var bmpicon = new BitmapIcon { UriSource = new Uri("ms-appx:///" + elementImage.File) };
    image = bmpicon;
    if (element.ImageWidth > 0)
    {
        image.Width = element.ImageWidth;
    }
    if (element.ImageHeight > 0)
    {
        image.Height = element.ImageHeight;
    }
    bmpicon.SizeChanged += (sender, e) =>
    {
        (Element as Xamarin.Forms.IVisualElementController).InvalidateMeasure(Xamarin.Forms.Internals.InvalidationTrigger.RendererReady);
    };
    UpdateIconColor();
#else
    var bmp = new BitmapImage(new Uri("ms-appx:///" + elementImage.File));
    image = new WImage
    {
        Source = bmp,
        VerticalAlignment = VerticalAlignment.Center,
        HorizontalAlignment = HorizontalAlignment.Center,
        Stretch = Stretch.Uniform
    };
    if (element.ImageWidth > 0)
    {
        image.Width = element.ImageWidth;
    }
    if (element.ImageHeight > 0)
    {
        image.Height = element.ImageHeight;
    }
    bmp.ImageOpened += (sender, args) =>
    {
        // Scale the natural pixel size back to layout units using the qualifier.
        if (element.ImageWidth > 0)
        {
            image.Width = element.ImageWidth;
        }
        else
        {
            image.Width = bmp.PixelWidth * 100 / scale;
        }
        if (element.ImageHeight > 0)
        {
            image.Height = element.ImageHeight;
        }
        else
        {
            image.Height = bmp.PixelHeight * 100 / scale;
        }
        (Element as Xamarin.Forms.IVisualElementController).InvalidateMeasure(Xamarin.Forms.Internals.InvalidationTrigger.RendererReady);
    };
#endif

    // No text, just the image.
    if (string.IsNullOrEmpty(text))
    {
        Control.Content = image;
        return;
    }

    // Both image and text, so we need to build a container for them.
    Control.Content = CreateContentContainer(Element.ContentLayout, image, text);
}
/// <summary>
/// Handle the Trip Cancellation task. This task demonstrates how to prompt a user
/// for confirmation of an operation, show users a progress screen while performing
/// a long-running task, and show a completion screen.
/// </summary>
/// <param name="destination">The name of a destination, expected to match the phrase list.</param>
/// <returns></returns>
private async Task SendCompletionMessageForCancellation(string destination)
{
    // Begin loading data to search for the target store. If this operation is going to take a long time,
    // for instance, requiring a response from a remote web service, consider inserting a progress screen
    // here, in order to prevent Cortana from timing out.
    string progressScreenString = string.Format(
        cortanaResourceMap.GetValue("ProgressLookingForTripToDest", cortanaContext).ValueAsString, destination);
    await ShowProgressScreen(progressScreenString);

    Model.TripStore store = new Model.TripStore();
    await store.LoadTrips();

    // We might have multiple trips to the destination. For now, we just pick the first.
    IEnumerable<Model.Trip> trips = store.Trips.Where(p => p.Destination == destination);
    Model.Trip trip = null;
    if (trips.Count() > 1)
    {
        // If there is more than one trip, provide a disambiguation screen rather than just picking one.
        // More advanced logic might be ideal here (i.e., if there's a significant number of items,
        // you may want to fall back to a link to your app where you can provide a deeper search experience).
        string disambiguationDestinationString = string.Format(
            cortanaResourceMap.GetValue("DisambiguationWhichTripToDest", cortanaContext).ValueAsString, destination);
        string disambiguationRepeatString =
            cortanaResourceMap.GetValue("DisambiguationRepeat", cortanaContext).ValueAsString;
        trip = await DisambiguateTrips(trips, disambiguationDestinationString, disambiguationRepeatString);
    }
    else
    {
        // One or no trips exist with that destination, so retrieve it, or return null.
        trip = trips.FirstOrDefault();
    }

    var userPrompt = new VoiceCommandUserMessage();
    VoiceCommandResponse response;
    if (trip == null)
    {
        var userMessage = new VoiceCommandUserMessage();
        // In this scenario, perhaps someone has modified data on your service outside of this
        // app's control. If you're accessing a remote service, having a background task that
        // periodically refreshes the phrase list so it's likely to be in sync is ideal.
        // This is unlikely to occur for this sample app, however.
        string noSuchTripToDestination = string.Format(
            cortanaResourceMap.GetValue("NoSuchTripToDestination", cortanaContext).ValueAsString, destination);
        userMessage.DisplayMessage = userMessage.SpokenMessage = noSuchTripToDestination;
        response = VoiceCommandResponse.CreateResponse(userMessage);
        await voiceServiceConnection.ReportSuccessAsync(response);
    }
    else
    {
        // Prompt the user for confirmation that we've selected the correct trip to cancel.
        string cancelTripToDestination = string.Format(
            cortanaResourceMap.GetValue("CancelTripToDestination", cortanaContext).ValueAsString, destination);
        userPrompt.DisplayMessage = userPrompt.SpokenMessage = cancelTripToDestination;

        var userReprompt = new VoiceCommandUserMessage();
        string confirmCancelTripToDestination = string.Format(
            cortanaResourceMap.GetValue("ConfirmCancelTripToDestination", cortanaContext).ValueAsString, destination);
        userReprompt.DisplayMessage = userReprompt.SpokenMessage = confirmCancelTripToDestination;

        //REVERT
        var cancelledContentTiles = new List<VoiceCommandContentTile>();
        if (xyz != null)
        {
            cancelledContentTiles.Add(xyz);
        }

        response = VoiceCommandResponse.CreateResponseForPrompt(userPrompt, userReprompt, cancelledContentTiles);
        var voiceCommandConfirmation = await voiceServiceConnection.RequestConfirmationAsync(response);

        // If RequestConfirmationAsync returns null, Cortana's UI has likely been dismissed.
        if (voiceCommandConfirmation != null)
        {
            if (voiceCommandConfirmation.Confirmed == true)
            {
                string cancellingTripToDestination = string.Format(
                    cortanaResourceMap.GetValue("CancellingTripToDestination", cortanaContext).ValueAsString, destination);
                await ShowProgressScreen(cancellingTripToDestination);

                // Perform the operation to remove the trip from the app's data.
                // Since the background task runs within the app package of the installed app,
                // we can access local files belonging to the app without issue.
                await store.DeleteTrip(trip);

                // Provide a completion message to the user.
                var userMessage = new VoiceCommandUserMessage();
                string cancelledTripToDestination = string.Format(
                    cortanaResourceMap.GetValue("CancelledTripToDestination", cortanaContext).ValueAsString, destination);
                userMessage.DisplayMessage = userMessage.SpokenMessage = cancelledTripToDestination;
                response = VoiceCommandResponse.CreateResponse(userMessage, cancelledContentTiles); //REVERT cancelledContentTiles
                response.AppLaunchArgument = destination; //REVERT
                await voiceServiceConnection.ReportSuccessAsync(response);
            }
            else
            {
                // Confirm no action for the user.
                var userMessage = new VoiceCommandUserMessage();
                string keepingTripToDestination = string.Format(
                    cortanaResourceMap.GetValue("KeepingTripToDestination", cortanaContext).ValueAsString, destination);
                userMessage.DisplayMessage = userMessage.SpokenMessage = keepingTripToDestination;
                response = VoiceCommandResponse.CreateResponse(userMessage);
                response.AppLaunchArgument = destination; //REVERT
                await voiceServiceConnection.ReportSuccessAsync(response);
            }
        }
    }
}
/// <summary>
/// Search for, and change the state of, lights in a room.
/// </summary>
private async Task SendCompletionMessageChangingState(string room, bool state)
{
    // Begin loading data to search for the target light. If this operation is going to take a long time,
    // for instance, requiring a response from a remote web service, consider inserting a progress screen
    // here, in order to prevent Cortana from timing out.
    string progressScreenString = string.Format(
        cortanaResourceMap.GetValue("ProgressLookingForLightInRoom", cortanaContext).ValueAsString, room);
    await ShowProgressScreen(progressScreenString);

    Model.LightStore store = new Model.LightStore();
    await store.LoadLights();

    // We might have multiple lights in the room. For now, we just pick the first.
    IEnumerable<Model.Light> lights = store.Lights.Where(p => p.Room == room);
    Model.Light light = null;
    if (lights.Count() > 1)
    {
        // If there is more than one light, provide a disambiguation screen rather than just picking one.
        // More advanced logic might be ideal here (i.e., if there's a significant number of items,
        // you may want to fall back to a link to your app where you can provide a deeper search experience).
        string disambiguationRoomString = string.Format(
            cortanaResourceMap.GetValue("DisambiguationWhichRoom", cortanaContext).ValueAsString, room, state);
        string disambiguationRepeatString =
            cortanaResourceMap.GetValue("DisambiguationRepeat", cortanaContext).ValueAsString;
        light = await DisambiguateLights(lights, disambiguationRoomString, disambiguationRepeatString);
    }
    else
    {
        // One or no lights exist in that room, so retrieve it, or return null.
        light = lights.FirstOrDefault();
    }

    var userPrompt = new VoiceCommandUserMessage();
    VoiceCommandResponse response;
    if (light == null)
    {
        var userMessage = new VoiceCommandUserMessage();
        // In this scenario, perhaps someone has modified data on your service outside of this
        // app's control. If you're accessing a remote service, having a background task that
        // periodically refreshes the phrase list so it's likely to be in sync is ideal.
        // This is unlikely to occur for this sample app, however.
        string noSuchLightInRoom = string.Format(
            cortanaResourceMap.GetValue("NoSuchLightInRoom", cortanaContext).ValueAsString, room);
        userMessage.DisplayMessage = userMessage.SpokenMessage = noSuchLightInRoom;
        response = VoiceCommandResponse.CreateResponse(userMessage);
        await voiceServiceConnection.ReportSuccessAsync(response);
    }
    else
    {
        string stateString = state ? "on" : "off";

        // Prompt the user for confirmation that we've selected the correct light to change.
        string changeLightStateInRoom = string.Format(
            cortanaResourceMap.GetValue("ChangeLightStateInRoom", cortanaContext).ValueAsString, room, stateString);
        userPrompt.DisplayMessage = userPrompt.SpokenMessage = changeLightStateInRoom;

        var userReprompt = new VoiceCommandUserMessage();
        string confirmChangeLightStateInRoom = string.Format(
            cortanaResourceMap.GetValue("ConfirmChangeLightStateInRoom", cortanaContext).ValueAsString, room, stateString);
        userReprompt.DisplayMessage = userReprompt.SpokenMessage = confirmChangeLightStateInRoom;

        response = VoiceCommandResponse.CreateResponseForPrompt(userPrompt, userReprompt);
        var voiceCommandConfirmation = await voiceServiceConnection.RequestConfirmationAsync(response);

        // If RequestConfirmationAsync returns null, Cortana's UI has likely been dismissed.
        if (voiceCommandConfirmation != null)
        {
            if (voiceCommandConfirmation.Confirmed == true)
            {
                string changingLightStateInRoom = string.Format(
                    cortanaResourceMap.GetValue("ChangingLightStateInRoom", cortanaContext).ValueAsString, room, stateString);
                await ShowProgressScreen(changingLightStateInRoom);

                // Perform the operation to change the light's state.
                // Since the background task runs within the app package of the installed app,
                // we can access local data belonging to the app without issue.
                Debug.Write(light.State);
                light.State = state;
                Debug.Write(light.State);

                // Provide a completion message to the user.
                var userMessage = new VoiceCommandUserMessage();
                string changedLightStateInRoom = string.Format(
                    cortanaResourceMap.GetValue("ChangedLightStateInRoom", cortanaContext).ValueAsString, room, stateString);
                userMessage.DisplayMessage = userMessage.SpokenMessage = changedLightStateInRoom;
                response = VoiceCommandResponse.CreateResponse(userMessage);
                await voiceServiceConnection.ReportSuccessAsync(response);
            }
            else
            {
                // Confirm no action for the user.
                var userMessage = new VoiceCommandUserMessage();
                string keepingLightSettings = string.Format(
                    cortanaResourceMap.GetValue("KeepingLightSettings", cortanaContext).ValueAsString, room);
                userMessage.DisplayMessage = userMessage.SpokenMessage = keepingLightSettings;
                response = VoiceCommandResponse.CreateResponse(userMessage);
                await voiceServiceConnection.ReportSuccessAsync(response);
            }
        }
    }
}
/// <summary>
/// Query the remote service for the requested condition and show the results
/// as content tiles. This demonstrates a simple response flow in Cortana.
/// </summary>
/// <param name="condition">The condition to look up, expected to be in the phrase list.</param>
/// <returns></returns>
private async Task ShowInformation(string condition)
{
    // If this operation is expected to take longer than 0.5 seconds, the task must
    // provide a progress response to Cortana prior to starting the operation, and
    // provide updates at most every 5 seconds.
    string loadingInsiderCondition = string.Format(
        cortanaResourceMap.GetValue("LoadingInsiderCondition", cortanaContext).ValueAsString, condition);
    await ShowProgressScreen(loadingInsiderCondition);

    var userMessage = new VoiceCommandUserMessage();
    var destinationsContentTiles = new List<VoiceCommandContentTile>();

    // Set a title message for the page.
    string message = cortanaResourceMap.GetValue("currentCondition", cortanaContext).ValueAsString;
    userMessage.DisplayMessage = message;
    userMessage.SpokenMessage = message;

    // Fill in tiles to display the information without launching the app.
    string result = null;
    using (HttpClient _httpClient = new HttpClient())
    {
        CancellationTokenSource _cts = new CancellationTokenSource();
        try
        {
            // The data to POST.
            var postData = new HttpFormUrlEncodedContent(
                new List<KeyValuePair<string, string>>
                {
                    new KeyValuePair<string, string>("param1", condition),
                    //new KeyValuePair<string, string>("param2", "abcd")
                });
            HttpResponseMessage httpresponse = await _httpClient.PostAsync(
                new Uri("http://" + "10.168.32.44" + ":0808/"), postData).AsTask(_cts.Token);
            // Cancellation is implemented via the CancellationTokenSource.
            // HttpContent.ReadAsStringAsync() - read the response data as a string
            // HttpContent.ReadAsByteArrayAsync() - read the response data as a byte[]
            // HttpContent.ReadAsStreamAsync() - read the response data as a stream
            if ((int)httpresponse.StatusCode != 200)
            {
                result = string.Format("连接失败,状态码为:{0}", ((int)httpresponse.StatusCode));
            }
            result += await httpresponse.Content.ReadAsStringAsync();
            //result += Environment.NewLine;
            Debug.WriteLine(result);
        }
        catch (TaskCanceledException)
        {
            result += "取消了";
            //result += Environment.NewLine;
        }
        catch (Exception)
        {
            //result += ex.ToString();
            result += "连接失败";
        }
    }

    char[] separator = { ';' };
    string[] results = result.Split(separator);
    if (results.Length == 1)
    {
        // A single field means the server returned an error message.
        var destinationTile = new VoiceCommandContentTile();
        destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
        destinationTile.Image = await Package.Current.InstalledLocation.GetFileAsync("VoiceService\\Images\\weather.png");
        //destinationTile.Image = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Pics/weather.jpg"));
        destinationTile.Title = "错误信息";
        destinationTile.TextLine1 = Trim100(result);
        destinationsContentTiles.Add(destinationTile);
    }
    else
    {
        for (int i = 0; i < 3 && i < results.Length; i++)
        {
            var destinationTile = new VoiceCommandContentTile();
            destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
            destinationTile.Image = await Package.Current.InstalledLocation.GetFileAsync("VoiceService\\Images\\weather.png");
            //string Destination = string.Format("地点{0}", i);
            //destinationTile.AppLaunchArgument = string.Format("destination={0}", Destination);
            switch (i)
            {
                case 0:
                    destinationTile.Title = "温度(摄氏度)";
                    destinationTile.TextLine1 = Trim100(results[i]);
                    destinationsContentTiles.Add(destinationTile);
                    break;
                case 1:
                    destinationTile.Title = "温度(华氏度)";
                    destinationTile.TextLine1 = Trim100(results[i]);
                    destinationsContentTiles.Add(destinationTile);
                    break;
                case 2:
                    destinationTile.Title = "湿度";
                    destinationTile.TextLine1 = Trim100(results[i]);
                    destinationsContentTiles.Add(destinationTile);
                    break;
            }
        }
    }

    var response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);
    //response.AppLaunchArgument = string.Format(destination);
    await voiceServiceConnection.ReportSuccessAsync(response);
}
public async void Run(IBackgroundTaskInstance taskInstance)
{
    serviceDeferral = taskInstance.GetDeferral();

    // Register to receive an event if Cortana dismisses the background task. This will
    // occur if the task takes too long to respond, or if Cortana's UI is dismissed.
    // Any pending operations should be cancelled or waited on to clean up where possible.
    taskInstance.Canceled += OnTaskCanceled;

    var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

    // Load localized resources for strings sent to Cortana to be displayed to the user.
    cortanaResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("Resources");

    // Select the system language, which is what Cortana should be running as.
    cortanaContext = ResourceContext.GetForViewIndependentUse();

    // Get the currently used system date format.
    dateFormatInfo = CultureInfo.CurrentCulture.DateTimeFormat;

    // This should match the uap:AppService and VoiceCommandService references from the
    // package manifest and VCD files, respectively. Make sure we've been launched by
    // a Cortana Voice Command.
    if (triggerDetails != null && triggerDetails.Name == "GeneralQueryVoiceCommandService")
    {
        try
        {
            voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
            voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;

            // GetVoiceCommandAsync establishes the initial connection to Cortana, and must be called prior to any
            // messages sent to Cortana. Attempting to use ReportSuccessAsync, ReportProgressAsync, etc.
            // prior to calling this will produce undefined behavior.
            VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();
            var interpretation = voiceCommand.SpeechRecognitionResult.SemanticInterpretation;

            string clientId = cortanaResourceMap.GetValue("ClientId", cortanaContext).ValueAsString;
            string userName = cortanaResourceMap.GetValue("Domain", cortanaContext).ValueAsString;
            string rootSiteUrl = cortanaResourceMap.GetValue("rootSite", cortanaContext).ValueAsString;
            StringBuilder searchAPIUrl = new StringBuilder();

            switch (voiceCommand.CommandName)
            {
                case "SharePointWhatsCheckedOutQueryCommand":
                    // The middle operand of this query string was masked ("******")
                    // in the source; userName (loaded above) is assumed here.
                    searchAPIUrl = searchAPIUrl.Append("/_api/search/query?querytext='CheckoutUserOWSUSER:" + userName + "'");
                    await SearchCheckedOutDocumentsAsync(rootSiteUrl, searchAPIUrl.ToString());
                    break;
                case "SPSearchContentCommand":
                    var searchSiteName = voiceCommand.Properties["searchsite"][0];
                    var searchText = voiceCommand.Properties["dictatedSearchText"][0];
                    searchAPIUrl = searchAPIUrl.Append("/_api/search/query?querytext='" + searchText + "'");
                    await SearchSharePointDocumentsAsync(rootSiteUrl, searchAPIUrl.ToString());
                    break;
                default:
                    // As with app activation VCDs, we need to handle the possibility that
                    // an app update may remove a voice command that is still registered.
                    // This can happen if the user hasn't run an app since an update.
                    LaunchAppInForeground();
                    break;
            }
        }
        catch (Exception ex)
        {
            System.Diagnostics.Debug.WriteLine("Handling Voice Command failed " + ex.ToString());
        }
    }
}
public string GetString(string name, System.Globalization.CultureInfo culture)
{
    // Note: the culture parameter is unused; resourceContext determines the language.
    var value = stringResourceMap.GetValue(name, resourceContext).ValueAsString;
    return ValidateNotEmpty(value);
}
public string GetUnlocalized(string category, string id) => NullIfEmpty(resourceMap.GetValue($"/{category}/{id}", unlocalizedContext).ValueAsString);
/// <summary>
/// Returns the string value of the resource passed as the argument.
/// </summary>
/// <param name="stringName">Name of the resource</param>
/// <returns>Value of the resource as a string</returns>
public string GetString(string stringName)
{
    var resourceValue = ResourceMap.GetValue(stringName, ResourceContext);
    return resourceValue.ValueAsString;
}
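// A minimal sketch of how the ResourceMap and ResourceContext members used by
// the wrapper above are typically initialized (the static field layout is an
// assumption; "Resources" is the default subtree for string resources):
private static readonly Windows.ApplicationModel.Resources.Core.ResourceMap ResourceMap =
    Windows.ApplicationModel.Resources.Core.ResourceManager.Current.MainResourceMap.GetSubtree("Resources");
private static readonly Windows.ApplicationModel.Resources.Core.ResourceContext ResourceContext =
    Windows.ApplicationModel.Resources.Core.ResourceContext.GetForViewIndependentUse();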