/// <summary>
/// Creates a new task from the phrase Cortana dictated, persists it,
/// refreshes live tiles, and returns the confirmation response.
/// </summary>
public override VoiceCommandResponse Execute(IWorkbook workbook, IPersistenceLayer persistenceLayer, Windows.ApplicationModel.VoiceCommands.VoiceCommand voiceCommand)
{
    // Normalize the dictated text: capitalize, strip whitespace and a trailing period.
    string title = this.ExtractPropertyFromVoiceCommand(voiceCommand, "speech");
    title = title.FirstLetterToUpper().Trim().Trim('.');

    var newTask = new Task
    {
        Added = DateTime.Now,
        Modified = DateTime.Now,
        Title = title,
        Folder = workbook.Folders[0]
    };

    // Apply the user's configured defaults for priority, dates and context.
    newTask.Priority = workbook.Settings.GetValue<TaskPriority>(CoreSettings.DefaultPriority);
    newTask.Due = ModelHelper.GetDefaultDueDate(workbook.Settings);
    newTask.Start = ModelHelper.GetDefaultStartDate(workbook.Settings);
    newTask.Context = ModelHelper.GetDefaultContext(workbook);

    persistenceLayer.Save();
    this.UpdateLiveTiles(workbook);

    var userMessage = new VoiceCommandUserMessage
    {
        DisplayMessage = string.Format(StringResources.Notification_NewTaskCreatedNoDueDateFormat, title),
        // Spoken form drops the quoted-title placeholder from the same resource string.
        SpokenMessage = StringResources.Notification_NewTaskCreatedNoDueDateFormat.Replace("\"{0}\"", string.Empty)
    };

    this.SignalForegroundAppWorkbookChanged();

    return VoiceCommandResponse.CreateResponse(userMessage);
}
/// <summary>
/// Reports the computed success probability back to Cortana, with a deep-link
/// argument so the app can navigate to the relevant page when tapped.
/// Returns Task (rather than async void) so callers can await it and observe
/// exceptions; async void swallows them and can crash the process.
/// </summary>
private async Task SendCompletionMessageForDestination(double proablity, DSAVoiceCommand voiceCommand)
{
    // Message Cortana will both show and speak (percentage, two decimals).
    var userMessage = new VoiceCommandUserMessage();
    userMessage.DisplayMessage = $"Die Chance auf erfolg ist: {proablity:P2}";
    userMessage.SpokenMessage = $"Die Chance auf erfolg ist: {proablity:P2}";

    var response = VoiceCommandResponse.CreateResponse(userMessage);

    // Pass the command's properties so the foreground app can deep link.
    response.AppLaunchArgument = $"{voiceCommand.Eigentschaft1};{voiceCommand.Eigentschaft2};{voiceCommand.Eigentschaft3}";

    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Tells the user how many must-do tasks exist on Denna. Returns Task
/// (rather than async void) so the caller can await it and observe exceptions.
/// </summary>
async Task taskcounter()
{
    var count = TodoService.GetMustDoList().Count;

    var userMessage = new VoiceCommandUserMessage();
    if (count == 0)
    {
        userMessage.DisplayMessage = "No tasks on Denna";
        userMessage.SpokenMessage = "You have no tasks ! Add one";
    }
    else
    {
        userMessage.DisplayMessage = "You have " + count + " tasks" + " on Denna";
        userMessage.SpokenMessage = "You have " + count + " tasks";
    }
    VoiceCommandResponse response = VoiceCommandResponse.CreateResponse(userMessage);

    // Deep link back into the app when the user taps the Cortana response.
    response.AppLaunchArgument = "agsonCortana";
    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Switches all lights (Hue group 0) on or off via the bridge REST API and
/// reports the outcome to Cortana.
/// </summary>
/// <param name="lightState">"on" or "off", forwarded to the bridge payload.</param>
private async Task HandleLightsOnOrOff(string lightState)
{
    await ShowProgressScreen("Hold on");

    VoiceCommandUserMessage userMessage = new VoiceCommandUserMessage();
    string defaultMessage = $"Turning your lights {lightState}";
    JObject on = new JObject();
    try
    {
        SetOnOrOff(lightState, on);
        // Await the request: the original fire-and-forget PutAsync meant HTTP
        // failures were never caught here and success was reported regardless.
        await httpClient.PutAsync($"{baseUrl}/groups/0/action", new StringContent(on.ToString()));
        userMessage.DisplayMessage = defaultMessage;
        userMessage.SpokenMessage = defaultMessage;
    }
    catch (Exception)
    {
        SetError(userMessage);
        VoiceCommandResponse errResponse = VoiceCommandResponse.CreateResponse(userMessage);
        await voiceServiceConnection.ReportFailureAsync(errResponse);
        // Don't fall through and also report success after reporting failure.
        return;
    }
    VoiceCommandResponse response = VoiceCommandResponse.CreateResponse(userMessage);
    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Background-task entry point for the "VoiceCommandService" app service.
/// Answers the "CheckTemperature" voice command with a canned temperature.
/// (async void is required here: Run is the background-task entry point.)
/// </summary>
public async void Run(IBackgroundTaskInstance taskInstance)
{
    // Hold a deferral for the lifetime of the async work.
    serviceDeferral = taskInstance.GetDeferral();
    try
    {
        AppServiceTriggerDetails triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;
        if (triggerDetails != null && triggerDetails.Name.Equals("VoiceCommandService"))
        {
            voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
            VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();
            // Perform the appropriate command depending on the operation defined in VCD
            switch (voiceCommand.CommandName)
            {
                case "CheckTemperature":
                    VoiceCommandUserMessage userMessage = new VoiceCommandUserMessage();
                    userMessage.DisplayMessage = "The current temperature is 23 degrees";
                    userMessage.SpokenMessage = "The current temperature is 23 degrees";
                    VoiceCommandResponse response = VoiceCommandResponse.CreateResponse(userMessage, null);
                    await voiceServiceConnection.ReportSuccessAsync(response);
                    break;
                default:
                    break;
            }
        }
    }
    finally
    {
        // Complete even if an await throws; otherwise the deferral leaks and the
        // background task lingers until the system times it out.
        serviceDeferral.Complete();
    }
}
/// <summary>
/// Search for, and show details related to a single trip, if the trip can be
/// found. This demonstrates a simple response flow in Cortana.
/// </summary>
/// <param name="destination">The destination, expected to be in the phrase list.</param>
/// <returns></returns>
private async Task SendCompletionMessageForDestination(string destination)
{
    // Cortana requires a progress response within 0.5s for longer operations,
    // with updates at most every 5 seconds thereafter.
    var loadingMessage = string.Format(
        cortanaResourceMap.GetValue("Loading", cortanaContext).ValueAsString,
        destination);
    await ShowProgressScreen(loadingMessage);

    // Placeholder result: simply acknowledge the command.
    var ackMessage = new VoiceCommandUserMessage
    {
        DisplayMessage = "OK",
        SpokenMessage = "OK"
    };
    await voiceServiceConnection.ReportSuccessAsync(VoiceCommandResponse.CreateResponse(ackMessage));
}
/// <summary>
/// Locates the device, queries the search index for cabs within 100 units of
/// the current position, and reports each matching vehicle as a Cortana tile.
/// </summary>
/// <returns>System.Threading.Tasks.Task.</returns>
private async Task SearchCabsNearby()
{
    var geolocator = new Geolocator();
    var position = await geolocator.GetGeopositionAsync().AsTask();

    var currentPoint = new LocationPoint
    {
        Latitude = position.Coordinate.Point.Position.Latitude,
        Longitude = position.Coordinate.Point.Position.Longitude
    };

    var searchResult = this.searchClient.SearchDocuments<GpsSensorRecord>(
        "*",
        SearchDocument.FilterTextForDistanceFromPoint("geoCoordinates", currentPoint, 100));

    if (!searchResult.Any())
    {
        await this.SendErrorMessageAsync("No cabs available");
        return;
    }

    // One title-only tile per vehicle found.
    var tiles = searchResult
        .Select(record => new VoiceCommandContentTile
        {
            ContentTileType = VoiceCommandContentTileType.TitleOnly,
            Title = record.VehicleId
        })
        .ToList();

    var message = new VoiceCommandUserMessage();
    message.DisplayMessage = message.SpokenMessage = "Found the following cabs near you...";

    await this.voiceCommandServiceConnection.ReportSuccessAsync(
        VoiceCommandResponse.CreateResponse(message, tiles));
}
/// <summary>
/// Finds up to 10 sessions matching the given keyword(s), ranked by relevance,
/// and shows them to the user via Cortana. Failures are logged, not rethrown.
/// </summary>
private async Task FindSessionsByTag(string tags)
{
    try
    {
        var list = _agendaService.FindSessionsByKeyword(tags);
        var results = list.Where(f => f.Value > 0)
                          .OrderByDescending(f => f.Value)
                          .Select(l => l.Key)
                          .Take(10)
                          .ToList();

        var userMessage = new VoiceCommandUserMessage();
        if (results.Any())
        {
            // Use List.Count property instead of re-enumerating with Count().
            userMessage.DisplayMessage = "Showing top " + results.Count + " sessions related to " + tags;
            userMessage.SpokenMessage = "Showing top " + results.Count + " sessions related to " + tags;
        }
        else
        {
            userMessage.DisplayMessage = "There are no results for " + tags;
            userMessage.SpokenMessage = "There are no results for " + tags;
        }
        await ShowResults(results, userMessage);
    }
    catch (Exception exception)
    {
        // Best effort: log and let Cortana time out rather than crash the service.
        Debug.WriteLine(exception.Message);
    }
}
/// <summary>
/// Prompts the user for a yes/no confirmation via Cortana.
/// </summary>
/// <param name="SelectedTask">The task the confirmation relates to (not used here).</param>
/// <param name="Message">The initial prompt to show and speak.</param>
/// <param name="SecondMessage">The re-prompt used if the user wasn't understood.</param>
/// <returns>1 when confirmed, 0 when declined, -1 when Cortana's UI was dismissed.</returns>
private async Task<int> CheckInput(string SelectedTask, string Message, string SecondMessage)
{
    var promptMessage = new VoiceCommandUserMessage();
    var repromptMessage = new VoiceCommandUserMessage();
    promptMessage.DisplayMessage = promptMessage.SpokenMessage = Message;
    repromptMessage.DisplayMessage = repromptMessage.SpokenMessage = SecondMessage;

    var response = VoiceCommandResponse.CreateResponseForPrompt(promptMessage, repromptMessage);
    var confirmation = await VoiceServiceConnection.RequestConfirmationAsync(response);

    // Null means Cortana's UI was likely dismissed before the user answered.
    if (confirmation == null)
    {
        return -1;
    }
    return confirmation.Confirmed ? 1 : 0;
}
/// <summary>
/// Shows the leaderboard tile and asks (with prompt + re-prompt) whether the
/// user wants to play; launches the app on confirmation.
/// </summary>
private async Task SendAnswer()
{
    var destContentTiles = new List<VoiceCommandContentTile>
    {
        new VoiceCommandContentTile()
        {
            ContentTileType = VoiceCommandContentTileType.TitleWithText,
            Title = "Leaderboard",
            TextLine1 = "1. Vladimir - 9337\n2. Petri - 8000"
        }
    };

    var userMessagePlay = new VoiceCommandUserMessage();
    userMessagePlay.DisplayMessage = "Do you want to play?";
    userMessagePlay.SpokenMessage = "Yes, you are 1337 points behind Vladimir. Do you want to play?";

    var userMessagePlay2 = new VoiceCommandUserMessage();
    userMessagePlay2.DisplayMessage = "You are far behind. Do you want to play the game?";
    userMessagePlay2.SpokenMessage = "You are far behind. Do you want to play the game now?";

    var resp2 = VoiceCommandResponse.CreateResponseForPrompt(userMessagePlay, userMessagePlay2, destContentTiles);
    var confResp2 = await voiceServiceConnection.RequestConfirmationAsync(resp2);

    // RequestConfirmationAsync returns null when Cortana's UI is dismissed;
    // the original dereferenced it unconditionally and could throw NRE.
    if (confResp2 != null && confResp2.Confirmed)
    {
        var umP = new VoiceCommandUserMessage();
        umP.DisplayMessage = "Do you want to play?";
        umP.SpokenMessage = "You are 1337 points behind Vladimir. Do you want to play?";
        var resp3 = VoiceCommandResponse.CreateResponse(umP);
        await voiceServiceConnection.RequestAppLaunchAsync(resp3);
    }
}
/// <summary>
/// Background-task entry point for the "JeedomAppVoiceCommandService" app
/// service: resolves the voice command, forwards interactions to Jeedom, and
/// reports Jeedom's reply to Cortana. (async void is required for Run.)
/// </summary>
public async void Run(IBackgroundTaskInstance taskInstance)
{
    serviceDeferral = taskInstance.GetDeferral();
    taskInstance.Canceled += OnTaskCanceled;
    var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;
    cortanaResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("Resources");
    cortanaContext = ResourceContext.GetForViewIndependentUse();
    dateFormatInfo = CultureInfo.CurrentCulture.DateTimeFormat;
    if (triggerDetails != null && triggerDetails.Name == "JeedomAppVoiceCommandService")
    {
        try
        {
            voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(
                triggerDetails);
            voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;
            VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();
            var userMessage = new VoiceCommandUserMessage();
            string message = "";
            // Route the command: ask Jeedom for the interaction reply, otherwise open the app.
            switch (voiceCommand.CommandName)
            {
                case "JeedomInteractList":
                    string spokenInteraction = voiceCommand.Properties["InteractList"][0];
                    await Jeedom.RequestViewModel.Instance.interactTryToReply(spokenInteraction);
                    message = Jeedom.RequestViewModel.Instance.InteractReply;
                    break;
                default:
                    LaunchAppInForeground();
                    break;
            }
            userMessage.DisplayMessage = message;
            userMessage.SpokenMessage = message;
            var response = VoiceCommandResponse.CreateResponse(userMessage);
            response.AppLaunchArgument = message;
            await voiceServiceConnection.ReportSuccessAsync(response);
        }
        catch (Exception ex)
        {
            // Logging re-enabled: the original commented it out, silently
            // swallowing every failure and making issues impossible to diagnose.
            System.Diagnostics.Debug.WriteLine("Handling Voice Command failed " + ex.ToString());
        }
    }
}
/// <summary>
/// Asks the user (via Cortana disambiguation) which common contribution type
/// applies. Returns null when no types are available.
/// </summary>
private async Task<VoiceCommandDisambiguationResult> ReportForContributionTypeAsync()
{
    var types = this.typeContainer.GetCommonTypes().ToList();
    if (!types.Any())
    {
        return null;
    }

    var userPrompt = new VoiceCommandUserMessage();
    userPrompt.DisplayMessage = userPrompt.SpokenMessage = "What type of contribution is it?";

    var repeatPrompt = new VoiceCommandUserMessage();
    repeatPrompt.DisplayMessage = repeatPrompt.SpokenMessage = "Sorry, what type of contribution is it?";

    // One title-only tile per type; the type object rides along as AppContext.
    var tiles = new List<VoiceCommandContentTile>();
    foreach (var type in types)
    {
        tiles.Add(new VoiceCommandContentTile
        {
            ContentTileType = VoiceCommandContentTileType.TitleOnly,
            AppContext = type,
            Title = type.Name
        });
    }

    var prompt = VoiceCommandResponse.CreateResponseForPrompt(userPrompt, repeatPrompt, tiles);
    return await voiceServiceConnection.RequestDisambiguationAsync(prompt);
}
/// <summary>
/// Asks the user (via Cortana disambiguation) which of their area technologies
/// applies, showing at most 10 tiles. Returns null when no areas exist.
/// </summary>
private async Task<VoiceCommandDisambiguationResult> ReportForContributionAreaAsync()
{
    var allAreas = this.areaContainer.GetMyAreaTechnologies().ToList();
    // Cap the tile count at 10.
    var areas = allAreas.Count > 10 ? allAreas.Take(10).ToList() : allAreas;
    if (!areas.Any())
    {
        return null;
    }

    var userPrompt = new VoiceCommandUserMessage();
    userPrompt.DisplayMessage = userPrompt.SpokenMessage = "Which area is it?";

    var repeatPrompt = new VoiceCommandUserMessage();
    repeatPrompt.DisplayMessage = repeatPrompt.SpokenMessage = "Sorry, which area is it?";

    // One title-only tile per area; the area object rides along as AppContext.
    var tiles = new List<VoiceCommandContentTile>();
    foreach (var area in areas)
    {
        tiles.Add(new VoiceCommandContentTile
        {
            ContentTileType = VoiceCommandContentTileType.TitleOnly,
            AppContext = area,
            Title = area.Name
        });
    }

    var prompt = VoiceCommandResponse.CreateResponseForPrompt(userPrompt, repeatPrompt, tiles);
    return await voiceServiceConnection.RequestDisambiguationAsync(prompt);
}
/// <summary>
/// Background-task entry point for the "IMCommandVoice" app service.
/// Answers the "oldback" voice command with a canned temperature message.
/// (async void is required here: Run is the background-task entry point.)
/// </summary>
public async void Run(IBackgroundTaskInstance taskInstance)
{
    // Hold a deferral for the lifetime of the async work.
    serviceDeferral = taskInstance.GetDeferral();
    try
    {
        AppServiceTriggerDetails triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;
        if (triggerDetails != null && triggerDetails.Name.Equals("IMCommandVoice"))
        {
            voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
            VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();
            // Perform the appropriate command depending on the operation defined in VCD
            switch (voiceCommand.CommandName)
            {
                case "oldback":
                    VoiceCommandUserMessage userMessage = new VoiceCommandUserMessage();
                    userMessage.DisplayMessage = "The current temperature is 23 degrees";
                    userMessage.SpokenMessage = "The current temperature is 23 degrees";
                    VoiceCommandResponse response = VoiceCommandResponse.CreateResponse(userMessage, null);
                    await voiceServiceConnection.ReportSuccessAsync(response);
                    break;
                default:
                    break;
            }
        }
    }
    finally
    {
        // Complete even if an await throws; otherwise the deferral leaks and the
        // background task lingers until the system times it out.
        serviceDeferral.Complete();
    }
}
/// <summary>
/// Reads upcoming events and reports them to Cortana: a progress message while
/// fetching, then a tile list for multiple events or a spoken summary for one.
/// </summary>
private static async Task HandleReadEventsCommandAsync(VoiceCommandServiceConnection connection)
{
    ReadRepository readRepository = new ReadRepository();

    // Progress message so the user sees feedback while we fetch.
    var userMessage = new VoiceCommandUserMessage();
    userMessage.DisplayMessage = "Buscando eventos próximos ..";
    userMessage.SpokenMessage = "Buscando eventos próximos ";
    var response = VoiceCommandResponse.CreateResponse(userMessage);
    await connection.ReportProgressAsync(response);

    var notices = await readRepository.GetNextEvents();

    if (notices == null || notices.Count == 0)
    {
        // The original dereferenced notices.Count before its null check and
        // called First() on a possibly-empty list; report cleanly instead.
        userMessage.SpokenMessage = userMessage.DisplayMessage = "No tiene eventos próximos";
        response = VoiceCommandResponse.CreateResponse(userMessage);
    }
    else if (notices.Count > 1)
    {
        userMessage.SpokenMessage = userMessage.DisplayMessage = $"El dia de hoy se realizan {notices.Count} eventos";
        // At most 5 tiles — the original's `count <= 5` guard let 6 through.
        var titleList = new List<VoiceCommandContentTile>();
        foreach (var noticeModel in notices.Take(5))
        {
            titleList.Add(new VoiceCommandContentTile
            {
                Title = noticeModel.Title.ToString(),
                ContentTileType = VoiceCommandContentTileType.TitleWithText,
                TextLine1 = noticeModel.Date.ToString()
            });
        }
        response = VoiceCommandResponse.CreateResponse(userMessage, titleList);
        await connection.ReportProgressAsync(response);
    }
    else
    {
        // Exactly one upcoming event.
        userMessage.SpokenMessage = userMessage.DisplayMessage = $"Usted tiene {notices.First().Title} eventos próximos";
        response = VoiceCommandResponse.CreateResponse(userMessage);
    }

    await connection.ReportSuccessAsync(response);
}
/// <summary>
/// Builds one icon-and-text tile per nearby sight (image, name, description)
/// and reports the list to Cortana as a success response.
/// </summary>
private async Task ShowNearestResults(List<Sight> nearest)
{
    var userMessage = new VoiceCommandUserMessage
    {
        DisplayMessage = "Here are your closest Sights:",
        SpokenMessage = "Here are your closest sights"
    };

    var tiles = new List<VoiceCommandContentTile>();
    foreach (var sight in nearest)
    {
        // Packaged assets need the ms-appx resolver; anything else is a plain file path.
        var image = sight.ImagePath.StartsWith("ms-appx")
            ? await StorageFile.GetFileFromApplicationUriAsync(new Uri(sight.ImagePath))
            : await StorageFile.GetFileFromPathAsync(sight.ImagePath);

        tiles.Add(new VoiceCommandContentTile
        {
            ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText,
            Image = image,
            Title = sight.Name,
            TextLine1 = sight.Description,
            AppContext = sight.Id,
            AppLaunchArgument = sight.Id.ToString("D")
        });
    }

    await _voiceServiceConnection.ReportSuccessAsync(
        VoiceCommandResponse.CreateResponse(userMessage, tiles));
}
/// <summary>
/// Renders each session as a Cortana tile (title, first speaker, room, and a
/// streamed speaker photo) and reports the list as a success response.
/// </summary>
private async Task ShowResults(List<Session> results, VoiceCommandUserMessage userMessage)
{
    var destinationsContentTiles = new List<VoiceCommandContentTile>();
    foreach (var kvp in results)
    {
        var destinationTile = new VoiceCommandContentTile();
        destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
        // The original assigned AppLaunchArgument twice; only the last value
        // (kvp.Title) survived, so assign it once.
        destinationTile.AppLaunchArgument = kvp.Title;
        destinationTile.TextLine1 = kvp.Title.GetValidString();
        destinationTile.TextLine2 = kvp.Speakers[0].Name.GetValidString();
        destinationTile.TextLine3 = kvp.Location.Room.GetValidString();
        // NOTE(review): assumes every session has at least one speaker with a photo URI — confirm upstream.
        IRandomAccessStreamReference thumbnail = RandomAccessStreamReference.CreateFromUri(new Uri(kvp.Speakers[0].Photo));
        destinationTile.Image = await StorageFile.CreateStreamedFileFromUriAsync(kvp.Title, new Uri(kvp.Speakers[0].Photo), thumbnail);
        destinationsContentTiles.Add(destinationTile);
    }
    var response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);
    response.AppLaunchArgument = "session";
    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Sends <paramref name="message"/> to Cortana as progress, failure, or
/// success depending on <paramref name="result"/>.
/// </summary>
private async Task ReportResultAsync(VoiceReportResult result, string message)
{
    var userMessage = new VoiceCommandUserMessage();
    userMessage.DisplayMessage = userMessage.SpokenMessage = message;
    var response = VoiceCommandResponse.CreateResponse(userMessage);

    if (result == VoiceReportResult.Progress)
    {
        await this.voiceServiceConnection.ReportProgressAsync(response);
    }
    else if (result == VoiceReportResult.Fail)
    {
        await this.voiceServiceConnection.ReportFailureAsync(response);
    }
    else if (result == VoiceReportResult.Success)
    {
        await this.voiceServiceConnection.ReportSuccessAsync(response);
    }
}
/// <summary>
/// Finds up to 10 sessions matching the given keyword(s), ranked by relevance,
/// and shows them to the user via Cortana. Failures are logged, not rethrown.
/// </summary>
private async Task FindSessionsByTag(string tags)
{
    try
    {
        var list = _agendaService.FindSessionsByKeyword(tags);
        var results = list.Where(f => f.Value > 0)
                          .OrderByDescending(f => f.Value)
                          .Select(l => l.Key)
                          .Take(10)
                          .ToList();

        var userMessage = new VoiceCommandUserMessage();
        if (results.Any())
        {
            // Use List.Count property instead of re-enumerating with Count().
            userMessage.DisplayMessage = "Showing top " + results.Count + " sessions related to " + tags;
            userMessage.SpokenMessage = "Showing top " + results.Count + " sessions related to " + tags;
        }
        else
        {
            userMessage.DisplayMessage = "There are no results for " + tags;
            userMessage.SpokenMessage = "There are no results for " + tags;
        }
        await ShowResults(results, userMessage);
    }
    catch (Exception exception)
    {
        // Best effort: log and let Cortana time out rather than crash the service.
        Debug.WriteLine(exception.Message);
    }
}
/// <summary>
/// Searches the cabs in area.
/// </summary>
/// <param name="area">The area.</param>
/// <returns>System.Threading.Tasks.Task.</returns>
private async Task SearchCabsInArea(string area)
{
    // Await instead of blocking on .Result — blocking inside an async method
    // risks deadlock and wraps failures in AggregateException.
    var locationData = await new LocationData(BingApiKey).GetBoundingBoxCoordinates($"{area},India");

    var searchResult = this.searchClient.SearchDocuments<GpsSensorRecord>(
        "*",
        SearchDocument.FilterTextForLocationBounds("geoCoordinates", locationData));

    if (!searchResult.Any())
    {
        await this.SendErrorMessageAsync("No cabs available");
        return;
    }

    // One title-only tile per vehicle found.
    var tilelist = searchResult.Select(
        result => new VoiceCommandContentTile
        {
            ContentTileType = VoiceCommandContentTileType.TitleOnly,
            Title = result.VehicleId
        }).ToList();

    var successmessage = new VoiceCommandUserMessage();
    successmessage.DisplayMessage = successmessage.SpokenMessage = $"Found the following cabs in {area}...";
    var response = VoiceCommandResponse.CreateResponse(successmessage, tilelist);
    await this.voiceCommandServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Confirms a stored fixed amount to the user (display + spoken + tile).
/// Input that is not a whole number is ignored without feedback, as before.
/// </summary>
private async Task SendCompletionMessageForFixedAmount(string amount, string subject)
{
    var userMessage = new VoiceCommandUserMessage();
    int amountnumber;
    if (int.TryParse(amount, out amountnumber))
    {
        // The original passed amount/subject to String.Format with a format
        // string containing no placeholders (CA2241) — the literal below is
        // exactly what was displayed; the extra arguments were ignored.
        userMessage.DisplayMessage = "Das habe ich gespeichert.";
        userMessage.SpokenMessage = String.Format("Ich habe {0} {1} gespeichert.", amount, subject);

        var contentTiles = new List<VoiceCommandContentTile>();
        contentTiles.Add(new VoiceCommandContentTile()
        {
            ContentTileType = VoiceCommandContentTileType.TitleOnly,
            Title = String.Format("{0} {1} gespeichert.", amount, subject)
        });
        var response = VoiceCommandResponse.CreateResponse(userMessage, contentTiles);
        await voiceServiceConnection.ReportSuccessAsync(response);
    }
    // NOTE(review): non-numeric amounts are silently ignored and the delay below
    // runs regardless — confirm both are intended.
    await Task.Delay(2000);
}
/// <summary>
/// Sets the thermostat in the given room via the Z-Wave HTTP API and reports
/// the outcome to Cortana.
/// </summary>
/// <returns>True when the thermostat was set; false otherwise.</returns>
private async Task<bool> SendCompletionMessageForSetTemperature(string room, string temperature)
{
    if (Login())
    {
        try
        {
            await _Client.GetAsync($"http://10.0.0.10:8083/ZWaveAPI/Run/devices[20].instances[0].commandClasses[0x43].Set(1,{temperature})");

            var successMessage = new VoiceCommandUserMessage();
            successMessage.DisplayMessage = successMessage.SpokenMessage =
                $"Ich habe das Thermostat im {room} auf {temperature} °C gestellt";
            await voiceServiceConnection.ReportSuccessAsync(VoiceCommandResponse.CreateResponse(successMessage));
            return true;
        }
        catch (Exception)
        {
            // Best effort: fall through to the failure message below.
        }
    }

    var failureMessage = new VoiceCommandUserMessage();
    failureMessage.DisplayMessage = failureMessage.SpokenMessage =
        $"Ich konnte das Thermostat im {room} nicht stellen";
    await voiceServiceConnection.ReportSuccessAsync(VoiceCommandResponse.CreateResponse(failureMessage));
    return false;
}
/// <summary>
/// Dims the background light in the given room via the Z-Wave HTTP API and
/// reports the outcome to Cortana. "ein"/"aus" map to 99/0; numeric values
/// are clamped to the 0–99 range the device accepts.
/// </summary>
/// <returns>True when the dimmer was set; false otherwise.</returns>
private async Task<bool> SendCompletionMessageForSetDimmer(string room, string value)
{
    if (Login())
    {
        try
        {
            // Translate spoken on/off words, then clamp numeric input to 0–99.
            if (string.Compare("ein", value, true) == 0)
            {
                value = "99";
            }
            if (string.Compare("aus", value, true) == 0)
            {
                value = "0";
            }
            if (Convert.ToInt16(value) >= 100)
            {
                value = "99";
            }
            if (Convert.ToInt16(value) < 0)
            {
                value = "0";
            }

            await _Client.GetAsync($"http://10.0.0.10:8083/ZWaveAPI/Run/devices[14].instances[0].commandClasses['SwitchMultilevel'].Set({value})");

            var successMessage = new VoiceCommandUserMessage();
            successMessage.DisplayMessage = successMessage.SpokenMessage =
                $"Ich habe das Hintergrundlicht im {room} auf {value} % gedimmt";
            await voiceServiceConnection.ReportSuccessAsync(VoiceCommandResponse.CreateResponse(successMessage));
            return true;
        }
        catch (Exception)
        {
            // Best effort (including non-numeric input): fall through to the failure message.
        }
    }

    var failureMessage = new VoiceCommandUserMessage();
    failureMessage.DisplayMessage = failureMessage.SpokenMessage =
        $"Ich konnte das Hintergrundlicht im {room} nicht dimmen";
    await voiceServiceConnection.ReportSuccessAsync(VoiceCommandResponse.CreateResponse(failureMessage));
    return false;
}
/// <summary>
/// Fetches the blog feed and shows the five most recent items as Cortana tiles.
/// </summary>
private async Task ShowLatestNews()
{
    string progress = "Getting the latest news...";
    await ShowProgressScreen(progress);

    RssService feedService = new RssService();
    var news = await feedService.GetNews("http://blog.qmatteoq.com/feed");

    VoiceCommandUserMessage message = new VoiceCommandUserMessage();
    message.DisplayMessage = "Here are the latest news";
    message.SpokenMessage = "Here are the latest news";

    // One title-only tile per item, capped at five.
    List<VoiceCommandContentTile> contentTiles = new List<VoiceCommandContentTile>();
    foreach (FeedItem item in news.Take(5))
    {
        contentTiles.Add(new VoiceCommandContentTile
        {
            ContentTileType = VoiceCommandContentTileType.TitleOnly,
            Title = item.Title,
            TextLine1 = item.PublishDate.ToString("g")
        });
    }

    await _voiceServiceConnection.ReportSuccessAsync(
        VoiceCommandResponse.CreateResponse(message, contentTiles));
}
/// <summary>
/// Looks up <paramref name="keyword"/> on Baidu Baike: shows progress first,
/// then reports the summary, with an image tile when one is available.
/// </summary>
private async Task QueryBaikeByKeyword(string keyword)
{
    // Progress message while the query runs.
    var progressMessage = new VoiceCommandUserMessage();
    progressMessage.DisplayMessage = progressMessage.SpokenMessage = $"正在查询{keyword}";
    await voiceServiceConnection.ReportProgressAsync(VoiceCommandResponse.CreateResponse(progressMessage));

    var data = await QueryBaike.BaiduBaike.QueryByKeyword(keyword);

    var resultMessage = new VoiceCommandUserMessage();
    resultMessage.DisplayMessage = resultMessage.SpokenMessage = data.Summary;

    VoiceCommandResponse queryResponse;
    if (data.Image == null)
    {
        queryResponse = VoiceCommandResponse.CreateResponse(resultMessage);
    }
    else
    {
        var tiles = new List<VoiceCommandContentTile>
        {
            new VoiceCommandContentTile() { Image = data.Image, ContentTileType = data.TileType }
        };
        queryResponse = VoiceCommandResponse.CreateResponse(resultMessage, tiles);
    }

    queryResponse.AppLaunchArgument = keyword;
    await voiceServiceConnection.ReportSuccessAsync(queryResponse);
}
/// <summary>
/// Asks Cortana to disambiguate between six dice results, each shown as an
/// icon-and-text tile, and traces the user's pick.
/// </summary>
private async Task DisambiguateAsync(string promptMessage, string repromptMessage)
{
    var prompt = new VoiceCommandUserMessage();
    prompt.DisplayMessage = prompt.SpokenMessage = promptMessage;
    var reprompt = new VoiceCommandUserMessage();
    reprompt.DisplayMessage = reprompt.SpokenMessage = repromptMessage;

    var contentTiles = new List<VoiceCommandContentTile>();
    for (var i = 1; i < 7; i++)
    {
        var tile = new VoiceCommandContentTile();
        tile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
        tile.Image = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(new Uri($"ms-appx:///ControlPanel.BackgroundServices/Assets/68_Dice_{i}.png"));
        tile.AppContext = i;
        tile.AppLaunchArgument = $"type={i}";
        tile.Title = $"The dice result is {i}";
        contentTiles.Add(tile);
    }

    // `response` is a field, shared with the rest of this service.
    response = VoiceCommandResponse.CreateResponseForPrompt(prompt, reprompt, contentTiles);
    try
    {
        var result = await voiceServiceConn.RequestDisambiguationAsync(response);
        if (result != null)
        {
            System.Diagnostics.Debug.WriteLine(result);
        }
    }
    catch (Exception ex)
    {
        // Don't swallow silently — at least trace the failure
        // (e.g. Cortana UI dismissed mid-prompt).
        System.Diagnostics.Debug.WriteLine(ex);
    }
}
/// <summary>
/// Shows cab details for the given destination as a Cortana content tile.
/// </summary>
private async void SendCompletionMessageForDestination(string destination)
{
    var userMessage = new VoiceCommandUserMessage();
    userMessage.DisplayMessage = "Here’s your cab details.";
    userMessage.SpokenMessage = "Ola cab /Uber Cab.";

    var destinationsContentTiles = new List<VoiceCommandContentTile>();
    var destinationTile = new VoiceCommandContentTile();
    destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
    destinationTile.Image = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///ContosoCabs.VoiceCommands/Images/cablogo.png"));
    destinationTile.AppLaunchArgument = destination;
    destinationTile.Title = "Hyderabad";
    destinationTile.TextLine1 = "you have been amazing";
    destinationsContentTiles.Add(destinationTile);

    // Create the VoiceCommandResponse from the userMessage and list of content tiles.
    var response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);

    // The original used curly “smart quotes”, which made the whole thing one
    // format string with a {0} placeholder but zero arguments — a guaranteed
    // FormatException at runtime. Use straight quotes with a real argument.
    response.AppLaunchArgument = string.Format("destination={0}", "Hyderabad");
    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Show a progress screen. These should be posted at least every 5 seconds for a
/// long-running operation, such as accessing network resources over a mobile
/// carrier network.
/// </summary>
/// <param name="message">The message to display, relating to the task being performed.</param>
/// <returns></returns>
protected async Task ShowProgressScreen(string message)
{
    var progressMessage = new VoiceCommandUserMessage();
    progressMessage.DisplayMessage = progressMessage.SpokenMessage = message;
    await voiceServiceConnection.ReportProgressAsync(VoiceCommandResponse.CreateResponse(progressMessage));
}
/// <summary>
/// Downloads the blog RSS feed and presents up to five recent posts to Cortana.
/// </summary>
private async Task ShowLatestNews()
{
    string progressText = "Getting the latest news...";
    await ShowProgressScreen(progressText);

    var feedService = new RssService();
    var news = await feedService.GetNews("http://blog.qmatteoq.com/feed");

    var message = new VoiceCommandUserMessage();
    message.DisplayMessage = message.SpokenMessage = "Here are the latest news";

    // Newest five items, one title-only tile each.
    var contentTiles = new List<VoiceCommandContentTile>();
    foreach (FeedItem item in news.Take(5))
    {
        var tile = new VoiceCommandContentTile();
        tile.ContentTileType = VoiceCommandContentTileType.TitleOnly;
        tile.Title = item.Title;
        tile.TextLine1 = item.PublishDate.ToString("g");
        contentTiles.Add(tile);
    }

    var response = VoiceCommandResponse.CreateResponse(message, contentTiles);
    await _voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Reports the current free parking places for Outlook, including an estimated
/// fill-up time when the parking is filling and will be full within the hour.
/// </summary>
private async Task SendCompletionMessageForParkingPlace()
{
    await ShowProgressScreen("I'm getting free parking places for Outlook");
    var places = await LocationHelper.GetParkingPlaces();

    var message = $"There are {places.Current} free places and this is {places.TrendString}.";
    if (places.Trend == ParkTrend.FillingFast || places.Trend == ParkTrend.FillingSlow)
    {
        if (places.RemainingMinutes < 60)
        {
            var fillTime = DateTime.Now.AddMinutes(places.RemainingMinutes);
            // Use H:mm so 8:05 doesn't render as "8:5" — the original
            // interpolated Hour/Minute without zero-padding the minutes.
            message += $" Parking place will be full at {fillTime:H:mm}.";
        }
        else
        {
            message += $" It will take at least one hour before parking is full.";
        }
    }

    var userMessage = new VoiceCommandUserMessage();
    userMessage.DisplayMessage = message;
    userMessage.SpokenMessage = message;

    var response = VoiceCommandResponse.CreateResponse(userMessage);
    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Builds a user message from the given texts and reports success to Cortana
/// together with the supplied content tiles.
/// </summary>
private static async Task AsyncReportSuccess(VoiceCommandServiceConnection connection, string spokenMessage, string displayMessage, IEnumerable<VoiceCommandContentTile> contentTiles)
{
    var message = new VoiceCommandUserMessage
    {
        SpokenMessage = spokenMessage,
        DisplayMessage = displayMessage
    };
    await connection.ReportSuccessAsync(VoiceCommandResponse.CreateResponse(message, contentTiles));
}
/// <summary>
/// Reports <paramref name="responseMessage"/> to Cortana as a success,
/// using the same text for display and speech.
/// </summary>
private async Task SendResponse(string responseMessage)
{
    var message = new VoiceCommandUserMessage
    {
        DisplayMessage = responseMessage,
        SpokenMessage = responseMessage
    };
    await voiceServiceConnection.ReportSuccessAsync(VoiceCommandResponse.CreateResponse(message));
}
/// <summary>
/// Reports progress to Cortana with separate spoken and displayed texts.
/// </summary>
private static async Task AsyncReportProgress(VoiceCommandServiceConnection connection, string spokenMessage, string displayMessage)
{
    var message = new VoiceCommandUserMessage
    {
        SpokenMessage = spokenMessage,
        DisplayMessage = displayMessage
    };
    await connection.ReportProgressAsync(VoiceCommandResponse.CreateResponse(message));
}
/// <summary>
/// Asks Cortana to launch the foreground app, speaking a "launching" message.
/// </summary>
private async void LaunchAppInForeground()
{
    var message = new VoiceCommandUserMessage
    {
        SpokenMessage = "开启 App 中...请稍后"
    };
    var launchResponse = VoiceCommandResponse.CreateResponse(message);
    launchResponse.AppLaunchArgument = "";
    await vcConnection.RequestAppLaunchAsync(launchResponse);
}
/// <summary>
/// Reports a generic "succeeded" (已成功) message to Cortana on the given connection.
/// </summary>
public async void ReportSuccess(VoiceCommandServiceConnection voiceCommandServiceConnection)
{
    var message = new VoiceCommandUserMessage();
    message.SpokenMessage = message.DisplayMessage = "已成功";
    await voiceCommandServiceConnection.ReportSuccessAsync(VoiceCommandResponse.CreateResponse(message));
}
/// <summary>
/// Shows a Cortana progress screen with the given message (displayed and spoken).
/// </summary>
private async Task ShowProgressScreen(string message)
{
    var progress = new VoiceCommandUserMessage
    {
        DisplayMessage = message,
        SpokenMessage = message
    };

    await vcConnection.ReportProgressAsync(VoiceCommandResponse.CreateResponse(progress));
}
/// <summary>
/// Background entry point for the "DomojeeVoiceCommandService" Cortana app service.
/// Forwards the recognized interact phrase to Jeedom and speaks back its reply.
/// </summary>
public async void Run(IBackgroundTaskInstance taskInstance)
{
    serviceDeferral = taskInstance.GetDeferral();
    taskInstance.Canceled += OnTaskCanceled;

    var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

    cortanaResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("Resources");
    cortanaContext = ResourceContext.GetForViewIndependentUse();
    dateFormatInfo = CultureInfo.CurrentCulture.DateTimeFormat;

    if (triggerDetails != null && triggerDetails.Name == "DomojeeVoiceCommandService")
    {
        try
        {
            voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(
                triggerDetails);
            voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;

            VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();

            var userMessage = new VoiceCommandUserMessage();
            string message = "";

            // Ask Jeedom for the reply to the spoken interact phrase.
            switch (voiceCommand.CommandName)
            {
                case "JeedomInteractList":
                    string spokenPhrase = voiceCommand.Properties["InteractList"][0];
                    await Jeedom.RequestViewModel.Instance.interactTryToReply(spokenPhrase);
                    message = Jeedom.RequestViewModel.Instance.InteractReply;
                    break;

                default:
                    // Unknown command (e.g. a stale VCD after an app update): hand off
                    // to the foreground app. The original fell through here and also
                    // reported an empty success message after launching.
                    LaunchAppInForeground();
                    return;
            }

            userMessage.DisplayMessage = message;
            userMessage.SpokenMessage = message;

            var response = VoiceCommandResponse.CreateResponse(userMessage);
            response.AppLaunchArgument = message;
            await voiceServiceConnection.ReportSuccessAsync(response);
        }
        catch (Exception ex)
        {
            System.Diagnostics.Debug.WriteLine("Handling Voice Command failed " + ex.ToString());
        }
    }
}
/// <summary>
/// Provide a simple response that launches the app. Expected to be used in the
/// case where the voice command could not be recognized (eg, a VCD/code mismatch.)
/// </summary>
protected async Task LaunchAppInForeground(VoiceCommandUserMessage userMessage = null, string args = null)
{
    if (userMessage == null)
    {
        // No caller-supplied message: fall back to the localized default prompts.
        userMessage = new VoiceCommandUserMessage
        {
            SpokenMessage = "OPEN_APP_SpokenMessage".t(context, R.File.CORTANA),
            DisplayMessage = "OPEN_APP_DisplayMessage".t(context, R.File.CORTANA)
        };
    }

    var launchResponse = VoiceCommandResponse.CreateResponse(userMessage);
    launchResponse.AppLaunchArgument = args ?? "";

    await voiceServiceConnection.RequestAppLaunchAsync(launchResponse);
}
/// <summary>
/// Asks Cortana to launch the game in the foreground with an empty launch argument.
/// </summary>
private async void launchAppInForeground()
{
    var message = new VoiceCommandUserMessage { SpokenMessage = "Launching superGame" };

    var launchResponse = VoiceCommandResponse.CreateResponse(message);
    launchResponse.AppLaunchArgument = "";

    await voiceServiceConection.RequestAppLaunchAsync(launchResponse);
}
/// <summary>
/// Shows a Cortana progress message (displayed and spoken) while a longer
/// operation is running.
/// </summary>
private async Task SendProgressMessageAsync(string message)
{
    var progress = new VoiceCommandUserMessage
    {
        DisplayMessage = message,
        SpokenMessage = message
    };

    // Show the progress message.
    var response = VoiceCommandResponse.CreateResponse(progress);
    await _voiceCommandServiceConnection.ReportProgressAsync(response);
}
/// <summary>
/// Reports the highest scorer to Cortana. A progress screen is shown first because
/// the lookup may exceed Cortana's 0.5 second response window.
/// </summary>
private async void sendCompletionMessageForHighScorer()
{
    await ShowProgressScreen("Finding the highest scorer");

    var answer = new VoiceCommandUserMessage
    {
        DisplayMessage = "The person with the highest score is Damien",
        SpokenMessage = "The person with the highest score is Damien"
    };

    await voiceServiceConection.ReportSuccessAsync(VoiceCommandResponse.CreateResponse(answer));
}
/// <summary>
/// Switches the requested relay ("light"/"lamp" or "fan") on or off via the relay
/// controller and reports the outcome to Cortana. If the controller is not ready,
/// an error message is reported instead.
/// </summary>
private async Task SendCompletionMessageForOnOff(RelayNodeClient client, string target, bool turnItOn)
{
    var userMessage = new VoiceCommandUserMessage();

    if (!client.IsReady)
    {
        string noController = string.Format(
            cortanaResourceMap.GetValue("noControllerFound", cortanaContext).ValueAsString,
            target);
        userMessage.DisplayMessage = noController;
        userMessage.SpokenMessage = noController;
    }
    else
    {
        // Map the spoken target onto a relay index; anything unrecognized falls
        // back to the light relay (same as the original's `default` case).
        int relayId;
        switch (target)
        {
            case "fan":
                relayId = 1;
                break;
            case "light":
            case "lamp":
            default:
                relayId = 0;
                break;
        }

        client.SetRelay(relayId, turnItOn);

        string resourceKey = turnItOn ? "turnedOnMessage" : "turnedOffMessage";
        string confirmation = string.Format(
            cortanaResourceMap.GetValue(resourceKey, cortanaContext).ValueAsString,
            target);
        userMessage.DisplayMessage = confirmation;
        userMessage.SpokenMessage = confirmation;
    }

    var response = VoiceCommandResponse.CreateResponse(userMessage);
    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Background entry point for the "HolVoiceCommandService" Cortana app service.
/// Greets the user when the "SayHello" voice command is recognized.
/// </summary>
public async void Run(IBackgroundTaskInstance taskInstance)
{
    serviceDeferral = taskInstance.GetDeferral();
    taskInstance.Canceled += OnTaskCanceled;

    var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;
    if (triggerDetails == null || triggerDetails.Name != "HolVoiceCommandService")
    {
        return;
    }

    try
    {
        voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
        voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;

        VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();
        if (voiceCommand.CommandName == "SayHello")
        {
            var greeting = new VoiceCommandUserMessage
            {
                DisplayMessage = "お店で合言葉話してね。",
                SpokenMessage = "ごきげんよう。"
            };
            await voiceServiceConnection.ReportSuccessAsync(VoiceCommandResponse.CreateResponse(greeting));
        }
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("Handling Voice Command failed " + ex.ToString());
    }
}
/// <summary>
/// Speaks a scripted (German) retort to Cortana instead of starting the presentation.
/// </summary>
private async void ShowEndMyPresentation()
{
    //string message = "Okay Oliver, ich starte jetzt deinen Vortrag und wünsch Dir viel Erfolg.";
    string message = "Oliver! Mein Name ist nicht Siri. Und du solltest mich lieber nicht noch mal so ansprechen, sonst bin ich echt sauer.";

    var userMessage = new VoiceCommandUserMessage
    {
        SpokenMessage = message,
        DisplayMessage = message
    };

    await voiceServiceConnection.ReportSuccessAsync(VoiceCommandResponse.CreateResponse(userMessage));
}
// Checks whether the given school class appears on today's substitution schedule
// and prepares a yes/no ("Ja"/"Nein") answer message for Cortana.
//
// NOTE(review): the answer is built but never reported back to Cortana (no
// Report*Async call) and SpokenMessage is never set, so this method currently
// has no user-visible effect — confirm whether the reporting call is missing.
private async void DoSomething(string classname)
{
    SubstitutionSchedules.LoadSubstitutionSchedulesFromWeb loader = new SubstitutionSchedules.LoadSubstitutionSchedulesFromWeb();
    // Names of all classes that have at least one substitution today.
    List<string> list = await loader.LoadSchoolClassWithSubstitutionNames(DateTime.Today);
    VoiceCommandUserMessage answer = new VoiceCommandUserMessage();
    bool hasSubstitution = list.Contains<string>(classname);
    if (hasSubstitution)
    {
        answer.DisplayMessage = "Ja";
    }
    else
    {
        answer.DisplayMessage = "Nein";
    }
}
/// <summary>
/// Background entry point for the "HolVoiceCommandService" Cortana app service.
/// Replies with a greeting when the "SayHello" voice command is recognized.
/// </summary>
public async void Run(IBackgroundTaskInstance taskInstance)
{
    serviceDeferral = taskInstance.GetDeferral();
    taskInstance.Canceled += OnTaskCanceled;

    var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

    // Load localized resources for strings sent to Cortana to be displayed to the user.
    cortanaResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("Resources");

    // Select the system language, which is what Cortana should be running as.
    cortanaContext = ResourceContext.GetForViewIndependentUse();

    if (triggerDetails == null || triggerDetails.Name != "HolVoiceCommandService")
    {
        return;
    }

    try
    {
        voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
        voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;

        VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();
        if (voiceCommand.CommandName == "SayHello")
        {
            var greeting = new VoiceCommandUserMessage
            {
                DisplayMessage = "Hello!",
                SpokenMessage = "Your app says hi. It is having a great time."
            };
            await voiceServiceConnection.ReportSuccessAsync(VoiceCommandResponse.CreateResponse(greeting));
        }
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine("Handling Voice Command failed " + ex.ToString());
    }
}
/// <summary>
/// Handles the "sendMessage" voice command: takes the dictated message and replies
/// to Cortana with the bot's answer (bot integration currently stubbed out, so the
/// reply is an empty string). Completes the service deferral when done.
/// </summary>
protected override async void OnRun(IBackgroundTaskInstance taskInstance)
{
    this.serviceDeferral = taskInstance.GetDeferral();

    var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

    // Get the voice command service connection from the trigger details.
    voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
    VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();

    VoiceCommandResponse response;

    switch (voiceCommand.CommandName)
    {
        case "sendMessage":
            // The message the user has spoken.
            var message = voiceCommand.Properties["message"][0];

            // Bot integration is disabled; the reply is a placeholder.
            //var bot = new Bot();
            string firstResponse = ""; //await bot.SendMessageAndGetResponseFromBot(message);

            // Create the response message for Cortana to read back.
            // (Removed an unused second VoiceCommandUserMessage the original built
            // here but never sent.)
            var responseMessage = new VoiceCommandUserMessage();
            responseMessage.DisplayMessage = responseMessage.SpokenMessage = firstResponse;

            // Create a response and ask Cortana to respond with success.
            response = VoiceCommandResponse.CreateResponse(responseMessage);
            await voiceServiceConnection.ReportSuccessAsync(response);
            break;
    }

    if (this.serviceDeferral != null)
    {
        // Complete the service deferral.
        this.serviceDeferral.Complete();
    }
}
/// <summary>
/// Handles the "getPatientData" voice command by reading back a hard-coded patient
/// record; always completes the service deferral, even on failure.
/// </summary>
protected override async void OnRun(IBackgroundTaskInstance taskInstance)
{
    this.serviceDeferral = taskInstance.GetDeferral();
    taskInstance.Canceled += OnTaskCanceled;

    var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;
    VoiceCommandResponse response;

    try
    {
        voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
        voiceServiceConnection.VoiceCommandCompleted += VoiceCommandCompleted;

        VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();

        switch (voiceCommand.CommandName)
        {
            case "getPatientData":
                // (Removed an unused VoiceCommandUserMessage the original populated
                // here but never sent — only responseMessage reaches Cortana.)
                var responseMessage = new VoiceCommandUserMessage();
                responseMessage.DisplayMessage = responseMessage.SpokenMessage =
                    "Patient Name: John Spartan\nAge: 47\nBlood Type: O+\nPatient ID: 000S00117";

                response = VoiceCommandResponse.CreateResponse(responseMessage);
                await voiceServiceConnection.ReportSuccessAsync(response);
                break;
        }
    }
    catch (Exception ex)
    {
        Debug.WriteLine(ex.Message);
    }
    finally
    {
        if (this.serviceDeferral != null)
        {
            //Complete the service deferral
            this.serviceDeferral.Complete();
        }
    }
}
/// <summary>
/// Responds to a content search ("movies" vs. TV shows) with a disambiguation
/// prompt plus one content tile per recommended item.
/// </summary>
private async void SendCompletionMessageForSearchContent(string content)
{
    var userMessage = new VoiceCommandUserMessage();
    userMessage.DisplayMessage = "Which one do you wanna see?";
    userMessage.SpokenMessage = "Which one do you wanna see?";

    var contentItemTiles = new List<VoiceCommandContentTile>();

    // Guard before indexing: the original indexed `data[2]`/`data[3]` first and only
    // checked `data != null` afterwards, which would throw if data were null.
    if (data != null)
    {
        List<Video> itemData = new List<Video>();
        if (content.Equals("movies"))
        {
            // Movies are entries 3 and 4 of the recommendations.
            itemData.Add(data[2]);
            itemData.Add(data[3]);
        }
        else
        {
            // TV shows are entries 1 and 2 of the recommendations.
            itemData.Add(data[0]);
            itemData.Add(data[1]);
        }

        foreach (var item in itemData)
        {
            contentItemTiles.Add(new VoiceCommandContentTile()
            {
                ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText,
                AppLaunchArgument = item.Id.ToString(),
                Title = item.Title,
                TextLine1 = item.Desc
            });
        }
    }

    var response = VoiceCommandResponse.CreateResponse(userMessage, contentItemTiles);
    response.AppLaunchArgument = "";
    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Builds a randomized three-part insult and reports it to Cortana (displayed and
/// spoken). NSFW by design.
/// </summary>
private async void GetSassy()
{
    String[] firstPart = { "lazy", "stupid", "insecure", "idiotic", "slimy", "slutty", "smelly", "pompous", "communist", "dicknose", "pie-eating", "racist", "elitist", "trashy", "drug-loving", "butterface", "tone deaf", "ugly", "creepy" };
    String[] secondPart = { "douche", "ass", "turd", "rectum", "butt", "c**k", "shit", "crotch", "bitch", "prick", "s**t", "taint", "f**k", "dick", "boner", "shart", "nut", "sphincter" };
    String[] thirdPart = { "pilot", "canoe", "captain", "pirate", "hammer", "knob", "box", "jockey", "nazi", "waffle", "goblin", "blossum", "biscuit", "clown", "socket", "monster", "hound", "dragon", "balloon"};

    Random rand = new Random();
    // Random.Next's upper bound is EXCLUSIVE, so pass the full array length;
    // the original used `rand.Next(0, Length - 1)`, which could never select the
    // last entry of each word bank.
    String responseText = "Make your own damn coffee you "
        + firstPart[rand.Next(firstPart.Length)] + " "
        + secondPart[rand.Next(secondPart.Length)] + " "
        + thirdPart[rand.Next(thirdPart.Length)] + "!";

    var userMessage = new VoiceCommandUserMessage();
    userMessage.DisplayMessage = responseText;
    userMessage.SpokenMessage = responseText;

    var response = VoiceCommandResponse.CreateResponse(userMessage);
    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Queries Wikipedia for images matching the spoken text and, when something is
/// found, shows a single (placeholder) result tile in Cortana's UI.
/// </summary>
private async void ShowResultsInCortana(string textSpoken)
{
    var userFeedBackMessage = new VoiceCommandUserMessage
    {
        DisplayMessage = "Showing results",
        SpokenMessage = "Here are your results"
    };

    // Query Wikipedia.
    IKnowledgeBase knowledgeBase = new Wikipedia();
    var images = await knowledgeBase.GetImagesByTitle(textSpoken);
    if (images == null)
    {
        //TODO: Implement error
        return;
    }

    // The user can tap on the visual content to launch the app.
    // Pass in a launch argument to enable the app to deep link to a
    // page relevant to the item displayed on the content tile.
    var resultTile = new VoiceCommandContentTile
    {
        AppLaunchArgument = "Add Argument to APP",
        Title = "Titulo do Tile",
        TextLine1 = "Texto 01 do Tile",
        TextLine2 = "Texto 02 do Tile",
        TextLine3 = "Texto 03 do Tile",
        ContentTileType = VoiceCommandContentTileType.TitleWithText
    };

    var contentTiles = new List<VoiceCommandContentTile> { resultTile };
    var response = VoiceCommandResponse.CreateResponse(userFeedBackMessage, contentTiles);
    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Background entry point for the "DoctorCortanaEndpoint" app service. Answers the
/// "codebaseQuery" voice command; on failure it asks Cortana to launch the app with
/// an apology message.
/// </summary>
public async void Run(IBackgroundTaskInstance taskInstance)
{
    serviceDeferral = taskInstance.GetDeferral();
    taskInstance.Canceled += TaskInstance_Canceled;
    var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;
    if (triggerDetails != null && triggerDetails.Name == "DoctorCortanaEndpoint")
    {
        try
        {
            VoiceCommandServiceConnection vsvc = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
            vsvc.VoiceCommandCompleted += Vsvc_VoiceCommandCompleted;
            VoiceCommand vcmd = await vsvc.GetVoiceCommandAsync();
            switch (vcmd.CommandName)
            {
                case "codebaseQuery":
                    VoiceCommandUserMessage successmsg = new VoiceCommandUserMessage();
                    successmsg.DisplayMessage = "Malaika says it's made of mostly PHP, CSharp, and JavaScript code.";
                    successmsg.SpokenMessage = "My friend Malaika says that Mr.Henson made ArcherSysOS out of PHP,JavaScript and C Sharp.";
                    VoiceCommandResponse vcr = VoiceCommandResponse.CreateResponse(successmsg);
                    // This is the command's final answer, so complete it: the original
                    // called ReportProgressAsync here, which leaves Cortana waiting
                    // for a terminal success/failure report.
                    await vsvc.ReportSuccessAsync(vcr);
                    break;
            }
        }
        catch (Exception e)
        {
            VoiceCommandServiceConnection vsvc = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
            vsvc.VoiceCommandCompleted += Vsvc_VoiceCommandCompleted;
            VoiceCommandUserMessage err = new VoiceCommandUserMessage();
            err.DisplayMessage = e.Message;
            err.SpokenMessage = " I'm Sorry, but I can't talk to the ArcherSysOS team right now. try again later";
            var errresp = VoiceCommandResponse.CreateResponse(err);
            await vsvc.RequestAppLaunchAsync(errresp);
        }
    }
}
/// <summary>
/// Background entry point for the "CodecampSessionsVoiceCommandService" Cortana app
/// service. Preloads the session agenda, then dispatches the recognized voice command.
/// </summary>
public async void Run(IBackgroundTaskInstance taskInstance)
{
    serviceDeferral = taskInstance.GetDeferral();
    taskInstance.Canceled += OnTaskCanceled;

    var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;
    if (triggerDetails == null || triggerDetails.Name != "CodecampSessionsVoiceCommandService")
    {
        return;
    }

    voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
    voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;

    VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();
    await _agendaService.GetSessionsAsync();

    switch (voiceCommand.CommandName)
    {
        case "sayPresentationDescription":
            var reminder = new VoiceCommandUserMessage
            {
                DisplayMessage = "You already forgot? You are going to talk about how I can help developers to create voice activated apps",
                SpokenMessage = "You already forgot? You are going to talk about how I can help developers to create voice activated apps. By the way...asshole, stop forcing me to help you with this stupid presentation. You're lucky I can't use curse words"
            };
            await voiceServiceConnection.ReportSuccessAsync(VoiceCommandResponse.CreateResponse(reminder));
            break;

        case "findSessionsWithCortana":
            var tags = voiceCommand.SpeechRecognitionResult.SemanticInterpretation.Properties["search"][0];
            await FindSessionsByTag(tags);
            break;

        default:
            // As with app activation VCDs, we need to handle the possibility that
            // an app update may remove a voice command that is still registered.
            // This can happen if the user hasn't run an app since an update.
            LaunchAppInForeground();
            break;
    }
}
/// <summary>
/// Background entry point for the "ArcherVM.CortanaPlusEndpoint" app service.
/// Handles the "openArcherVM" voice command by telling the user the requested web
/// app is being opened, then completes the task deferral.
/// </summary>
public async void Run(IBackgroundTaskInstance taskInstance)
{
    BackgroundTaskDeferral _defferal = taskInstance.GetDeferral();
    taskInstance.Canceled += OnTaskCanceled;
    var triggerDetals = taskInstance.TriggerDetails as AppServiceTriggerDetails;
    if (triggerDetals != null && triggerDetals.Name == "ArcherVM.CortanaPlusEndpoint")
    {
        try
        {
            voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetals);
            voiceServiceConnection.VoiceCommandCompleted += VoiceServiceConnection_VoiceCommandCompleted;
            VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();
            switch (voiceCommand.CommandName)
            {
                case "openArcherVM":
                    var app = voiceCommand.Properties["App"][0];
                    var userMessage = new VoiceCommandUserMessage();
                    userMessage.DisplayMessage = "Here's your web app.";
                    userMessage.SpokenMessage = "Let me get Malaika to open up " + app + " for you.";
                    VoiceCommandResponse vcr = VoiceCommandResponse.CreateResponse(userMessage);
                    // The original built this response but never sent it, so Cortana
                    // received no reply; report it as the command's result.
                    await voiceServiceConnection.ReportSuccessAsync(vcr);
                    break;
            }
        }
        catch (Exception e)
        {
            // Don't swallow failures silently (the original had an empty catch).
            System.Diagnostics.Debug.WriteLine("Handling Voice Command failed " + e.ToString());
        }
    }
    // The deferral is a local, so nothing else can ever complete it; without this
    // the background task would never be reported as finished.
    _defferal.Complete();
}
/// <summary>
/// Handles the "read namedays" voice command: reports progress while fetching, then
/// speaks today's namedays — with a content tile listing them when there are several.
/// </summary>
private static async Task HandleReadNamedaysCommandAsync(VoiceCommandServiceConnection connection)
{
    var userMessage = new VoiceCommandUserMessage();
    userMessage.DisplayMessage = "Fetching today's namedays for you";
    userMessage.SpokenMessage = "Fetching today's namedays for you";
    var response = VoiceCommandResponse.CreateResponse(userMessage);
    await connection.ReportProgressAsync(response);

    var today = DateTime.Now.Date;
    var namedays = await NamedayRepository.GetAllNamedaysAsync();
    var todaysNameday = namedays.Find(e => e.Day == today.Day && e.Month == today.Month);

    if (todaysNameday == null)
    {
        // List<T>.Find returns null when no entry matches today's date; the
        // original dereferenced the result unconditionally and would have thrown.
        userMessage.SpokenMessage = userMessage.DisplayMessage = "There are no namedays today";
        response = VoiceCommandResponse.CreateResponse(userMessage);
    }
    else if (todaysNameday.Names.Count() == 1)
    {
        var namedaysAsString = todaysNameday.NamesAsString;
        userMessage.SpokenMessage = userMessage.DisplayMessage = $"It is {namedaysAsString}'s nameday today";
        response = VoiceCommandResponse.CreateResponse(userMessage);
    }
    else
    {
        var namedaysAsString = todaysNameday.NamesAsString;
        userMessage.SpokenMessage = $"Today's namedays are: {namedaysAsString}";
        userMessage.DisplayMessage = "Here are today's namedays:";

        var tile = new VoiceCommandContentTile();
        tile.ContentTileType = VoiceCommandContentTileType.TitleOnly;
        tile.Title = namedaysAsString;

        response = VoiceCommandResponse.CreateResponse(userMessage, new List<VoiceCommandContentTile> { tile });
    }

    await connection.ReportSuccessAsync(response);
}
/// <summary>
/// Builds one content tile per session (title, first speaker, room, speaker photo)
/// and reports them to Cortana together with the given header message.
/// </summary>
private async Task ShowResults(List<Session> results, VoiceCommandUserMessage userMessage)
{
    var destinationsContentTiles = new List<VoiceCommandContentTile>();

    foreach (var session in results)
    {
        var tile = new VoiceCommandContentTile();
        tile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
        tile.TextLine1 = session.Title.GetValidString();
        tile.TextLine2 = session.Speakers[0].Name.GetValidString();
        tile.TextLine3 = session.Location.Room.GetValidString();

        IRandomAccessStreamReference thumbnail =
            RandomAccessStreamReference.CreateFromUri(new Uri(session.Speakers[0].Photo));
        tile.Image = await StorageFile.CreateStreamedFileFromUriAsync(
            session.Title, new Uri(session.Speakers[0].Photo), thumbnail);

        // The original assigned AppLaunchArgument twice (GetValidString() first,
        // then the raw title); only the final raw-title assignment took effect,
        // so that is the one kept.
        tile.AppLaunchArgument = session.Title;

        destinationsContentTiles.Add(tile);
    }

    var response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);
    response.AppLaunchArgument = "session";
    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Background task entrypoint. Voice Commands using the <VoiceCommandService Target="...">
/// tag will invoke this when they are recognized by Cortana, passing along details of the
/// invocation.
///
/// Background tasks must respond to activation by Cortana within 0.5 seconds, and must
/// report progress to Cortana every 5 seconds (unless Cortana is waiting for user
/// input). There is no execution time limit on the background task managed by Cortana,
/// but developers should use plmdebug (https://msdn.microsoft.com/en-us/library/windows/hardware/jj680085%28v=vs.85%29.aspx)
/// on the Cortana app package in order to prevent Cortana timing out the task during
/// debugging.
///
/// Cortana dismisses its UI if it loses focus. This will cause it to terminate the background
/// task, even if the background task is being debugged. Use of Remote Debugging is recommended
/// in order to debug background task behaviors.
/// </summary>
/// <param name="taskInstance">Connection to the hosting background service process.</param>
public async void Run(IBackgroundTaskInstance taskInstance)
{
    serviceDeferral = taskInstance.GetDeferral();

    // Register to receive an event if Cortana dismisses the background task. This will
    // occur if the task takes too long to respond, or if Cortana's UI is dismissed.
    // Any pending operations should be cancelled or waited on to clean up where possible.
    taskInstance.Canceled += OnTaskCanceled;

    var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

    // Load localized resources for strings sent to Cortana to be displayed to the user.
    cortanaResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("Resources");

    // Select the system language, which is what Cortana should be running as.
    cortanaContext = ResourceContext.GetForViewIndependentUse();

    // Get the currently used system date format.
    dateFormatInfo = CultureInfo.CurrentCulture.DateTimeFormat;

    // This should match the uap:AppService and RuleVoiceCommandService references from the
    // package manifest and VCD files, respectively. Make sure we've been launched by
    // a Cortana Voice Command.
    if (triggerDetails != null && triggerDetails.Name == this.GetType().Name)
    {
        try
        {
            voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(
                triggerDetails);
            voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;

            VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();
            HttpClient client = new HttpClient();

            switch (voiceCommand.CommandName)
            {
                case "turnOnLight":
                    // BUGFIX: the original serialized IsOn = false here — identical to
                    // the turn-off branch — so "turn on" actually turned the light off.
                    await PostLightStateAsync(client, true, "Wakeup Light has been turned on ");
                    break;

                case "turnOffLight":
                    await PostLightStateAsync(client, false, "Wakeup Light has been turned off ");
                    break;

                default:
                    // As with app activation VCDs, we need to handle the possibility that
                    // an app update may remove a voice command that is still registered.
                    // This can happen if the user hasn't run an app since an update.
                    LaunchAppInForeground();
                    break;
            }
        }
        catch (Exception ex)
        {
            System.Diagnostics.Debug.WriteLine("Handling Voice Command failed " + ex.ToString());
        }
    }
}

/// <summary>
/// POSTs the desired light state to the settings service and reports success or
/// failure to Cortana (both branches share the same request/report shape).
/// </summary>
/// <param name="client">The HTTP client to issue the request with.</param>
/// <param name="isOn">Desired on/off state of the wakeup light.</param>
/// <param name="successText">Message shown and spoken when the POST succeeds.</param>
private async Task PostLightStateAsync(HttpClient client, bool isOn, string successText)
{
    string postBody = JsonConvert.SerializeObject(new Settings { IsOn = isOn });
    client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));

    var webResponse = await client.PostAsync(
        "http://hiremotemeetcortana.azurewebsites.net/api/settings",
        new StringContent(postBody, Encoding.UTF8, "application/json"));

    VoiceCommandUserMessage message;
    if (webResponse.IsSuccessStatusCode)
    {
        message = new VoiceCommandUserMessage { DisplayMessage = successText, SpokenMessage = successText };
        await voiceServiceConnection.ReportSuccessAsync(VoiceCommandResponse.CreateResponse(message));
    }
    else
    {
        message = new VoiceCommandUserMessage { DisplayMessage = "Something went wrong", SpokenMessage = "Something went wrong" };
        await voiceServiceConnection.ReportFailureAsync(VoiceCommandResponse.CreateResponse(message));
    }
}
/// <summary>
/// Provide a simple response that launches the app. Expected to be used in the
/// case where the voice command could not be recognized (eg, a VCD/code mismatch.)
/// </summary>
private async void LaunchAppInForeground()
{
    var message = new VoiceCommandUserMessage { SpokenMessage = "Launching remote wakeup light alarm" };

    var launchResponse = VoiceCommandResponse.CreateResponse(message);
    launchResponse.AppLaunchArgument = "";

    await voiceServiceConnection.RequestAppLaunchAsync(launchResponse);
}
/// <summary>
/// Provide a simple response that launches the app. Expected to be used in the
/// case where the voice command could not be recognized (eg, a VCD/code mismatch.)
/// </summary>
private async void LaunchAppInForeground()
{
    var message = new VoiceCommandUserMessage
    {
        SpokenMessage = cortanaResourceMap.GetValue("LaunchingAdventureWorks", cortanaContext).ValueAsString
    };

    var launchResponse = VoiceCommandResponse.CreateResponse(message);
    launchResponse.AppLaunchArgument = "";

    await voiceServiceConnection.RequestAppLaunchAsync(launchResponse);
}
/// <summary>
/// Search for, and show details related to a single trip, if the trip can be
/// found. This demonstrates a simple response flow in Cortana.
/// </summary>
/// <param name="destination">The destination, expected to be in the phrase list.</param>
/// <returns></returns>
private async Task SendCompletionMessageForDestination(string destination)
{
    // If this operation is expected to take longer than 0.5 seconds, the task must
    // provide a progress response to Cortana prior to starting the operation, and
    // provide updates at most every 5 seconds.
    string loadingTripToDestination = string.Format(
        cortanaResourceMap.GetValue("LoadingTripToDestination", cortanaContext).ValueAsString,
        destination);
    await ShowProgressScreen(loadingTripToDestination);

    Model.TripStore store = new Model.TripStore();
    await store.LoadTrips();

    // Look for the specified trip. The destination *should* be pulled from the grammar we
    // provided, and the subsequently updated phrase list, so it should be a 1:1 match, including case.
    // However, we might have multiple trips to the destination. For now, we just pick the first.
    IEnumerable<Model.Trip> trips = store.Trips.Where(p => p.Destination == destination);

    var userMessage = new VoiceCommandUserMessage();
    var destinationsContentTiles = new List<VoiceCommandContentTile>();
    if (trips.Count() == 0)
    {
        // In this scenario, perhaps someone has modified data on your service outside of your
        // control. If you're accessing a remote service, having a background task that
        // periodically refreshes the phrase list so it's likely to be in sync is ideal.
        // This is unlikely to occur for this sample app, however.
        string foundNoTripToDestination = string.Format(
            cortanaResourceMap.GetValue("FoundNoTripToDestination", cortanaContext).ValueAsString,
            destination);
        userMessage.DisplayMessage = foundNoTripToDestination;
        userMessage.SpokenMessage = foundNoTripToDestination;
    }
    else
    {
        // Set a title message for the page.
        string message = "";
        if (trips.Count() > 1)
        {
            message = cortanaResourceMap.GetValue("PluralUpcomingTrips", cortanaContext).ValueAsString;
        }
        else
        {
            message = cortanaResourceMap.GetValue("SingularUpcomingTrip", cortanaContext).ValueAsString;
        }
        userMessage.DisplayMessage = message;
        userMessage.SpokenMessage = message;

        // Fill in tiles for each destination, to display information about the trips without
        // launching the app.
        // BUGFIX: the fallback-text counter was declared inside the loop in the
        // original, so it was reset to 1 on every iteration and `i++` had no effect.
        int i = 1;
        foreach (Model.Trip trip in trips)
        {
            var destinationTile = new VoiceCommandContentTile();

            // To handle UI scaling, Cortana automatically looks up files with FileName.scale-<n>.ext formats based on the requested filename.
            // See the VoiceCommandService\Images folder for an example.
            destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
            destinationTile.Image = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///AdventureWorks.VoiceCommands/Images/GreyTile.png"));

            destinationTile.AppLaunchArgument = string.Format("destination={0}", trip.Destination);
            destinationTile.Title = trip.Destination;
            if (trip.StartDate != null)
            {
                destinationTile.TextLine1 = trip.StartDate.Value.ToString(dateFormatInfo.LongDatePattern);
            }
            else
            {
                destinationTile.TextLine1 = trip.Destination + " " + i;
            }

            destinationsContentTiles.Add(destinationTile);
            i++;
        }
    }

    var response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);
    if (trips.Count() > 0)
    {
        response.AppLaunchArgument = string.Format("destination={0}", destination);
    }
    await voiceServiceConnection.ReportSuccessAsync(response);
}