/// <summary>
/// Builds a Cortana success screen listing the nearest sights, one content tile per sight.
/// </summary>
/// <param name="nearest">Sights to display; each must have a resolvable ImagePath.</param>
private async Task ShowNearestResults(List<Sight> nearest)
{
    var userMessage = new VoiceCommandUserMessage
    {
        DisplayMessage = "Here are your closest Sights:",
        SpokenMessage = "Here are your closest sights"
    };

    var sightsContentTiles = new List<VoiceCommandContentTile>();
    foreach (var sight in nearest)
    {
        var sightTile = new VoiceCommandContentTile();
        sightTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;

        // ms-appx URIs must be resolved through the application URI API;
        // anything else is treated as a file-system path.
        // Ordinal comparison: URI schemes are not culture-sensitive (CA1310).
        if (sight.ImagePath.StartsWith("ms-appx", StringComparison.Ordinal))
        {
            sightTile.Image = await StorageFile.GetFileFromApplicationUriAsync(new Uri(sight.ImagePath));
        }
        else
        {
            sightTile.Image = await StorageFile.GetFileFromPathAsync(sight.ImagePath);
        }

        sightTile.Title = sight.Name;
        sightTile.TextLine1 = sight.Description;
        sightTile.AppContext = sight.Id;
        // "D" keeps the id round-trippable when the app is deep-linked.
        sightTile.AppLaunchArgument = sight.Id.ToString("D");
        sightsContentTiles.Add(sightTile);
    }

    var response = VoiceCommandResponse.CreateResponse(userMessage, sightsContentTiles);
    await _voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Reports cab details for the requested destination back to Cortana with a
/// logo tile that deep-links into the app.
/// </summary>
/// <param name="destination">Destination spoken by the user; used as the tile's launch argument.</param>
private async void SendCompletionMessageForDestination(string destination)
{
    // NOTE(review): async void — exceptions thrown here are unobservable.
    // Kept for signature compatibility; consider returning Task.
    var userMessage = new VoiceCommandUserMessage();
    userMessage.DisplayMessage = "Here’s your cab details.";
    userMessage.SpokenMessage = "Ola cab /Uber Cab.";

    var destinationsContentTiles = new List<VoiceCommandContentTile>();
    var destinationTile = new VoiceCommandContentTile();
    destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
    destinationTile.Image = await StorageFile.GetFileFromApplicationUriAsync(
        new Uri("ms-appx:///ContosoCabs.VoiceCommands/Images/cablogo.png"));
    destinationTile.AppLaunchArgument = destination;
    destinationTile.Title = "Hyderabad";
    destinationTile.TextLine1 = "you have been amazing";
    destinationsContentTiles.Add(destinationTile);

    // Create the VoiceCommandResponse from the userMessage and list of content tiles.
    var response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);

    // BUG FIX: the original used curly (smart) quotes inside the format string
    // ("destination={0}”, “Hyderabad"), making it a single literal, so {0} had
    // no argument and string.Format threw FormatException at runtime.
    response.AppLaunchArgument = string.Format("destination={0}", "Hyderabad");
    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Shows the matching sessions as Cortana content tiles, each with a streamed
/// speaker photo, then reports success.
/// </summary>
/// <param name="results">Sessions to display; each needs at least one speaker with a photo URL.</param>
/// <param name="userMessage">Message Cortana displays/speaks above the tiles.</param>
private async Task ShowResults(List<Session> results, VoiceCommandUserMessage userMessage)
{
    var destinationsContentTiles = new List<VoiceCommandContentTile>();
    foreach (var session in results)
    {
        var destinationTile = new VoiceCommandContentTile();
        destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
        // BUG FIX: the original assigned AppLaunchArgument twice — the second,
        // raw-Title assignment silently discarded the sanitized GetValidString()
        // value. Keep only the sanitized one.
        destinationTile.AppLaunchArgument = session.Title.GetValidString();
        destinationTile.TextLine1 = session.Title.GetValidString();
        destinationTile.TextLine2 = session.Speakers[0].Name.GetValidString();
        destinationTile.TextLine3 = session.Location.Room.GetValidString();

        // Stream the remote speaker photo into the tile image.
        var photoUri = new Uri(session.Speakers[0].Photo);
        IRandomAccessStreamReference thumbnail = RandomAccessStreamReference.CreateFromUri(photoUri);
        destinationTile.Image = await StorageFile.CreateStreamedFileFromUriAsync(session.Title, photoUri, thumbnail);

        destinationsContentTiles.Add(destinationTile);
    }

    var response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);
    response.AppLaunchArgument = "session";
    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Fetches the newest posts from the blog's RSS feed and shows up to five of
/// them to Cortana as title-only tiles.
/// </summary>
private async Task ShowLatestNews()
{
    await ShowProgressScreen("Getting the latest news...");

    var feedService = new RssService();
    var news = await feedService.GetNews("http://blog.qmatteoq.com/feed");

    const string text = "Here are the latest news";
    var message = new VoiceCommandUserMessage
    {
        DisplayMessage = text,
        SpokenMessage = text
    };

    // One tile per post, capped at five; "g" is the general short date/time pattern.
    var contentTiles = news.Take(5)
        .Select(item => new VoiceCommandContentTile
        {
            ContentTileType = VoiceCommandContentTileType.TitleOnly,
            Title = item.Title,
            TextLine1 = item.PublishDate.ToString("g")
        })
        .ToList();

    var response = VoiceCommandResponse.CreateResponse(message, contentTiles);
    await _voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Fetches a random fact and reports it to Cortana as a "Did you know..." card.
/// Tapping the tile launches the app with the fact so it can be favorited.
/// </summary>
/// <param name="interpretation">Semantic interpretation of the recognized command (unused here).</param>
private async Task ProcessGenerateFactAsync(SpeechRecognitionSemanticInterpretation interpretation)
{
    await Helpers.ProgressHelper.ShowProgressScreenAsync(voiceServiceConnection, "Okay, get ready...");

    string fact = await Helpers.FactHelper.GetFactAsync();

    // BUG FIX: the original assigned TextLine1 twice (first "", then the hint),
    // and wrapped these pure property assignments in a try/catch that silently
    // swallowed every exception. Neither served a purpose.
    var destinationsContentTiles = new List<VoiceCommandContentTile>();
    var destinationTile = new VoiceCommandContentTile
    {
        ContentTileType = VoiceCommandContentTileType.TitleWithText,
        AppContext = null,
        // Deep-link argument so the foreground app can show this exact fact.
        AppLaunchArgument = "fact=" + fact,
        Title = fact,
        TextLine1 = "(tap to add to favorites)"
    };
    destinationsContentTiles.Add(destinationTile);

    VoiceCommandResponse response = VoiceCommandResponse.CreateResponse(
        new VoiceCommandUserMessage
        {
            DisplayMessage = "Did you know...",
            SpokenMessage = fact
        },
        destinationsContentTiles);

    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Prompts the user to disambiguate between the six possible dice results.
/// </summary>
/// <param name="promptMessage">Initial disambiguation prompt.</param>
/// <param name="repromptMessage">Prompt repeated after an out-of-grammar response.</param>
private async Task DisambiguateAsync(string promptMessage, string repromptMessage)
{
    var prompt = new VoiceCommandUserMessage();
    prompt.DisplayMessage = prompt.SpokenMessage = promptMessage;

    var reprompt = new VoiceCommandUserMessage();
    reprompt.DisplayMessage = reprompt.SpokenMessage = repromptMessage;

    // One tile per die face (1..6), each with its matching image.
    var contentTiles = new List<VoiceCommandContentTile>();
    for (var i = 1; i < 7; i++)
    {
        var tile = new VoiceCommandContentTile();
        tile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
        tile.Image = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(
            new Uri($"ms-appx:///ControlPanel.BackgroundServices/Assets/68_Dice_{i}.png"));
        tile.AppContext = i;
        tile.AppLaunchArgument = $"type={i}";
        tile.Title = $"The dice result is {i}";
        contentTiles.Add(tile);
    }

    // NOTE(review): `response` is assigned without a declaration here, so it is
    // presumably a field of the enclosing class — confirm.
    response = VoiceCommandResponse.CreateResponseForPrompt(prompt, reprompt, contentTiles);
    try
    {
        var result = await voiceServiceConn.RequestDisambiguationAsync(response);
        if (result != null)
        {
            System.Diagnostics.Debug.WriteLine(result);
        }
    }
    catch (Exception ex)
    {
        // BUG FIX: the original swallowed the exception silently; log it so a
        // dismissed or failed prompt is at least visible during debugging.
        System.Diagnostics.Debug.WriteLine(ex);
    }
}
/// <summary>
/// Shows the leaderboard, asks the user whether they want to play, and when
/// confirmed asks Cortana to launch the app with a final prompt.
/// </summary>
private async Task SendAnswer()
{
    // Leaderboard tile shown alongside the confirmation prompt.
    var leaderboardTile = new VoiceCommandContentTile
    {
        ContentTileType = VoiceCommandContentTileType.TitleWithText,
        Title = "Leaderboard",
        TextLine1 = "1. Vladimir - 9337\n2. Petri - 8000"
    };
    var tiles = new List<VoiceCommandContentTile> { leaderboardTile };

    // First prompt, plus the re-prompt used after an unrecognized answer.
    var firstPrompt = new VoiceCommandUserMessage
    {
        DisplayMessage = "Do you want to play?",
        SpokenMessage = "Yes, you are 1337 points behind Vladimir. Do you want to play?"
    };
    var retryPrompt = new VoiceCommandUserMessage
    {
        DisplayMessage = "You are far behind. Do you want to play the game?",
        SpokenMessage = "You are far behind. Do you want to play the game now?"
    };

    var confirmationRequest = VoiceCommandResponse.CreateResponseForPrompt(firstPrompt, retryPrompt, tiles);
    var confirmation = await voiceServiceConnection.RequestConfirmationAsync(confirmationRequest);

    if (confirmation.Confirmed)
    {
        // User said yes: hand off to the app with one last message.
        var launchMessage = new VoiceCommandUserMessage
        {
            DisplayMessage = "Do you want to play?",
            SpokenMessage = "You are 1337 points behind Vladimir. Do you want to play?"
        };
        var launchResponse = VoiceCommandResponse.CreateResponse(launchMessage);
        await voiceServiceConnection.RequestAppLaunchAsync(launchResponse);
    }
}
/// <summary>
/// Looks up upcoming events and reports them to Cortana: a tile list when there
/// are several, a single spoken summary when there is exactly one.
/// </summary>
/// <param name="connection">Active voice-command connection to Cortana.</param>
private static async Task HandleReadEventsCommandAsync(VoiceCommandServiceConnection connection)
{
    // Renamed so the local no longer shadows the type name.
    ReadRepository readRepository = new ReadRepository();

    // Progress message so the user sees feedback while the lookup runs.
    var userMessage = new VoiceCommandUserMessage();
    userMessage.DisplayMessage = "Buscando eventos próximos ..";
    userMessage.SpokenMessage = "Buscando eventos próximos ";
    var response = VoiceCommandResponse.CreateResponse(userMessage);
    await connection.ReportProgressAsync(response);

    var notices = await readRepository.GetNextEvents();

    if (notices != null && notices.Count > 1)
    {
        userMessage.SpokenMessage = userMessage.DisplayMessage =
            $"El dia de hoy se realizan {notices.Count} eventos";

        var titleList = new List<VoiceCommandContentTile>();
        // First six notices (the original `count <= 5` admitted six — confirm intended limit).
        foreach (var noticeModel in notices.Take(6))
        {
            titleList.Add(new VoiceCommandContentTile
            {
                Title = noticeModel.Title.ToString(),
                ContentTileType = VoiceCommandContentTileType.TitleWithText,
                TextLine1 = noticeModel.Date.ToString()
            });
        }
        response = VoiceCommandResponse.CreateResponse(userMessage, titleList);
        await connection.ReportProgressAsync(response);
    }
    else if (notices != null && notices.Count == 1)
    {
        // BUG FIX: the original reached notices.First() even for an empty list,
        // which threw InvalidOperationException.
        userMessage.SpokenMessage = userMessage.DisplayMessage =
            $"Usted tiene {notices.First().Title} eventos próximos";
        response = VoiceCommandResponse.CreateResponse(userMessage);
    }
    // With no events at all, the original progress message is reported as the result.
    // (The original's `catch (Exception) { throw; }` was a no-op and was removed.)
    await connection.ReportSuccessAsync(response);
}
/// <summary>
/// Rolls a virtual die and reports the result to Cortana with a matching die-face tile.
/// </summary>
/// <param name="interpretation">Semantic interpretation of the recognized command (unused here).</param>
private async Task ServiceCommandHandleAsync(SpeechRecognitionSemanticInterpretation interpretation)
{
    var progressMessage = "Get ready";
    await Helpers.VoiceCommandResponseHelper.ReportProgressAsync(voiceServiceConn, progressMessage, progressMessage);

    // Map a time-seeded random integer onto 1..6.
    var rolled = new Random((int)DateTime.UtcNow.Ticks).Next() % 6 + 1;
    System.Diagnostics.Debug.WriteLine(rolled);

    var contentTiles = new List<VoiceCommandContentTile>();
    try
    {
        var dieTile = new VoiceCommandContentTile
        {
            ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText,
            Image = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(
                new Uri($"ms-appx:///ControlPanel.BackgroundServices/Assets/68_Dice_{rolled}.png")),
            AppContext = rolled,
            AppLaunchArgument = "DiceResult=" + rolled,
            Title = $"The dice result is {rolled}"
        };
        contentTiles.Add(dieTile);
    }
    catch (Exception ex)
    {
        // Best effort: still report the result even if the die image can't load.
        System.Diagnostics.Debug.WriteLine(ex.Message);
    }

    var successMessage = $"You got {rolled}";
    await Helpers.VoiceCommandResponseHelper.ReportSuccessAsync(voiceServiceConn, successMessage, successMessage, contentTiles);
}
/// <summary>
/// Downloads the blog's RSS feed and reports the five most recent posts to
/// Cortana as title-only tiles.
/// </summary>
private async Task ShowLatestNews()
{
    string progress = "Getting the latest news...";
    await ShowProgressScreen(progress);

    var feedService = new RssService();
    var news = await feedService.GetNews("http://blog.qmatteoq.com/feed");

    string text = "Here are the latest news";
    var message = new VoiceCommandUserMessage
    {
        DisplayMessage = text,
        SpokenMessage = text
    };

    var contentTiles = new List<VoiceCommandContentTile>();
    foreach (FeedItem item in news.Take(5))
    {
        // "g" = general short date/time pattern for the publish timestamp.
        contentTiles.Add(new VoiceCommandContentTile
        {
            ContentTileType = VoiceCommandContentTileType.TitleOnly,
            Title = item.Title,
            TextLine1 = item.PublishDate.ToString("g")
        });
    }

    var response = VoiceCommandResponse.CreateResponse(message, contentTiles);
    await _voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Asks the user (via Cortana disambiguation) to pick one of the given nodes.
/// </summary>
/// <param name="nodes">Candidate nodes; at most the first 10 are offered.</param>
/// <param name="isToiletSearch">True when the search was for wheelchair-accessible
/// toilets, which changes which status line is shown.</param>
/// <returns>The chosen node, the single node when no choice is needed,
/// or null when the list is empty or Cortana was dismissed.</returns>
public async Task<Node> AskUserForNode(List<Node> nodes, bool isToiletSearch)
{
    // Use List<T>.Count (property) instead of the LINQ Count() extension — no enumeration.
    if (nodes == null || nodes.Count == 0)
    {
        return null;
    }
    if (nodes.Count == 1)
    {
        return nodes.First();
    }
    // Cortana shows a limited number of tiles; cap the choices at 10.
    if (nodes.Count > 10)
    {
        nodes = nodes.GetRange(0, 10);
    }

    var contentTiles = new List<VoiceCommandContentTile>();
    int index = 1;
    foreach (var node in nodes)
    {
        var tile = new VoiceCommandContentTile();
        tile.ContentTileType = VoiceCommandContentTileType.TitleWithText;
        tile.AppContext = node;
        tile.Title = index + ": " + node.name;
        if (isToiletSearch && node.wheelchairToiletStatus == "yes")
        {
            // On a toilet search only nodes with toilet status "yes" qualify.
            tile.TextLine1 = Stati.From(node.wheelchairToiletStatus).GetLocalizedToiletMessage(context);
        }
        else
        {
            // The node was not chosen because of its toilet status; this also covers
            // Toilet-type nodes that have no toilet-status rating.
            tile.TextLine1 = Stati.From(node.wheelchairStatus).GetLocalizedMessage(context);
        }
        tile.TextLine2 = node.DistanceString;
        contentTiles.Add(tile);
        index++;
    }

    var userPrompt = new VoiceCommandUserMessage();
    userPrompt.SpokenMessage = "CHOOSE_A_PLACE_SpokenMessage_1".t(context, R.File.CORTANA);
    userPrompt.DisplayMessage = "CHOOSE_A_PLACE_DisplayMessage_1".t(context, R.File.CORTANA);
    var userPrompt2 = new VoiceCommandUserMessage();
    userPrompt2.SpokenMessage = "CHOOSE_A_PLACE_SpokenMessage_2".t(context, R.File.CORTANA);
    userPrompt2.DisplayMessage = "CHOOSE_A_PLACE_DisplayMessage_2".t(context, R.File.CORTANA);

    var response = VoiceCommandResponse.CreateResponseForPrompt(userPrompt, userPrompt2, contentTiles);
    response.AppLaunchArgument = new WheelmapParams().ToString();

    // Null result means Cortana was dismissed before the user chose.
    var voiceCommandDisambiguationResult = await voiceServiceConnection.RequestDisambiguationAsync(response);
    if (voiceCommandDisambiguationResult != null)
    {
        return (Node)voiceCommandDisambiguationResult.SelectedItem.AppContext;
    }
    return null;
}
/// <summary>
/// Searches YouTube for the query, shows each hit as a tile with its thumbnail,
/// and speaks the concatenated result titles.
/// </summary>
/// <param name="searchQuery">The user's spoken search phrase.</param>
private async Task HandleSearch(string searchQuery)
{
    var loadingText = $"searching for {searchQuery} ...";
    await ShowProgressScreen(loadingText);

    var service = new YouTubeSvc();
    // Query the service for matching videos.
    var searchItems = await service.ListItems(searchQuery, App.MainPageViewModel.MaxResults, "video");

    var userMessage = new VoiceCommandUserMessage();
    var destinationsContentTiles = new List<VoiceCommandContentTile>();

    if (!searchItems.Any())
    {
        string foundNoItem = "Result of your search query is empty.";
        userMessage.DisplayMessage = foundNoItem;
        userMessage.SpokenMessage = foundNoItem;
    }
    else
    {
        // The original declared `int i = 1` inside the loop and incremented it,
        // resetting it every iteration — dead code, removed.
        string message = string.Empty;
        foreach (var foundItem in searchItems)
        {
            var destinationTile = new VoiceCommandContentTile();
            // To handle UI scaling, Cortana automatically looks up
            // FileName.scale-<n>.ext variants of the requested image.
            destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
            destinationTile.Image = await StorageFile.GetFileFromApplicationUriAsync(new Uri(foundItem.Thumbnail));
            destinationTile.Title = foundItem.Title;
            destinationTile.TextLine1 = foundItem.Description;
            message += $"{foundItem.Title}.";
            destinationsContentTiles.Add(destinationTile);
        }
        userMessage.DisplayMessage = message;
        userMessage.SpokenMessage = message;
    }

    var response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);
    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Reads today's events and reports them to Cortana: a tile list when there are
/// several, a single spoken summary when there is exactly one.
/// </summary>
/// <param name="connection">Active voice-command connection to Cortana.</param>
private static async Task HandleReadNamedaysCommandAsync(VoiceCommandServiceConnection connection)
{
    // Progress message so the user sees feedback while the lookup runs.
    var userMessage = new VoiceCommandUserMessage();
    userMessage.DisplayMessage = "Los eventos que se realizan el dia de hoy son";
    userMessage.SpokenMessage = "Los eventos que se realizan el dia de hoy son";
    var response = VoiceCommandResponse.CreateResponse(userMessage);
    await connection.ReportProgressAsync(response);

    var notices = await ReadResitory.GetNoticesInDay();

    if (notices != null && notices.Count() > 1)
    {
        userMessage.SpokenMessage = userMessage.DisplayMessage =
            $"El dia de hoy se realizan {notices.Count()} eventos";

        var titleList = new List<VoiceCommandContentTile>();
        // First six notices (the original `count <= 5` admitted six — confirm intended limit).
        foreach (var noticeModel in notices.Take(6))
        {
            titleList.Add(new VoiceCommandContentTile
            {
                Title = noticeModel.Title.ToString(),
                ContentTileType = VoiceCommandContentTileType.TitleWithText,
                TextLine1 = noticeModel.Date
            });
        }
        response = VoiceCommandResponse.CreateResponse(userMessage, titleList);
        await connection.ReportProgressAsync(response);
    }
    else if (notices != null && notices.Any())
    {
        // BUG FIX: the original called notices.First() even when the sequence
        // was empty, which threw InvalidOperationException.
        userMessage.SpokenMessage = userMessage.DisplayMessage =
            $"El evento que se realiza hoy es {notices.First().Title} eventos";
        response = VoiceCommandResponse.CreateResponse(userMessage);
    }
    // With no events at all, the original progress message is reported as the result.
    await connection.ReportSuccessAsync(response);
}
/// <summary>
/// Looks up a random interesting fact and has Cortana display and speak it,
/// with a tappable tile that deep-links back into the app.
/// </summary>
/// <param name="interpretation">Semantic interpretation of the recognized command (unused here).</param>
private async Task ProcessInterestingFactAsync(SpeechRecognitionSemanticInterpretation interpretation)
{
    await Core.Helpers.BackgroundProgressHelper.ShowProgressScreen(voiceServiceConnection, "Okay,get ready");

    // The fact Cortana will speak.
    string fact = await Core.Helpers.FactHelper.GetRandomFactAsync();

    var destinationContentTiles = new List<VoiceCommandContentTile>();
    var destinationTile = new VoiceCommandContentTile();
    try
    {
        // Tile size/style Cortana uses to render the card.
        destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith280x140IconAndText;
        // Image shown inside the tile.
        destinationTile.Image = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(
            new Uri("ms-appx:///OfficePoint.Service.Background/Images/Fact_280.png"));
        destinationTile.AppContext = null;
        // Launch argument so the foreground app can deep-link to this command type.
        destinationTile.AppLaunchArgument = "type=" + VoiceCommandType.InterestingQueryFact;
        destinationTile.Title = fact;
        destinationTile.TextLine1 = "";
        destinationContentTiles.Add(destinationTile);
    }
    catch (Exception ex)
    {
        // Best effort: if the tile image cannot load, the response is still
        // reported without the card. BUG FIX: the original swallowed silently.
        System.Diagnostics.Debug.WriteLine(ex);
    }

    VoiceCommandResponse voiceResponse = VoiceCommandResponse.CreateResponse(
        new VoiceCommandUserMessage
        {
            // Text Cortana shows.
            DisplayMessage = "did you know...",
            // The randomly chosen fact Cortana speaks.
            SpokenMessage = fact,
        }, destinationContentTiles);

    // Send the response back to Cortana.
    await voiceServiceConnection.ReportSuccessAsync(voiceResponse);
}
/// <summary>
/// Demonstrates providing the user with a choice between multiple items.
/// </summary>
/// <typeparam name="T">Type of the items to choose between.</typeparam>
/// <param name="items">The set of items to choose between</param>
/// <param name="titleFunc">
/// A function that returns the title of the item.
/// </param>
/// <param name="descriptionFunc">
/// A function that returns the description of the item.
/// </param>
/// <param name="message">The initial disambiguation message</param>
/// <param name="secondMessage">Repeat prompt retry message</param>
/// <returns>The selected item, or default(T) when Cortana is dismissed.</returns>
private async Task<T> Disambiguate<T>(IEnumerable<T> items, Func<T, string> titleFunc, Func<T, string> descriptionFunc, string message, string secondMessage)
{
    // Create the first prompt message.
    var userPrompt = new VoiceCommandUserMessage();
    userPrompt.DisplayMessage = userPrompt.SpokenMessage = message;

    // Create a re-prompt message if the user responds with an out-of-grammar response.
    // BUG FIX: the original assigned secondMessage to userPrompt again, which
    // overwrote the first prompt and left the re-prompt message empty.
    var userReprompt = new VoiceCommandUserMessage();
    userReprompt.DisplayMessage = userReprompt.SpokenMessage = secondMessage;

    // Create tiles for each item. Ideally, should be limited to a small number of items.
    var destinationContentTiles = new List<VoiceCommandContentTile>();
    foreach (T item in items)
    {
        var destinationTile = new VoiceCommandContentTile();

        // Use a generic background image. This can be fetched from a service call,
        // potentially, but be aware of network latencies and ensure Cortana does
        // not time out.
        destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
        destinationTile.Image = await Package.Current.InstalledLocation.GetFileAsync("CortanaTodo.Background\\Images\\TodoIcon.png");

        // The AppContext can be any arbitrary object, and will be maintained for
        // the response.
        destinationTile.AppContext = item;

        // Format title and description
        destinationTile.Title = titleFunc(item);
        destinationTile.TextLine1 = descriptionFunc(item);

        destinationContentTiles.Add(destinationTile);
    }

    // Cortana will handle re-prompting if the user does not provide a valid response.
    var response = VoiceCommandResponse.CreateResponseForPrompt(userPrompt, userReprompt, destinationContentTiles);

    // If Cortana is dismissed in this operation, null will be returned.
    var result = await voiceConnection.RequestDisambiguationAsync(response);
    if (result != null)
    {
        return (T)result.SelectedItem.AppContext;
    }
    return default(T);
}
/// <summary>
/// Reports the current week-of-year number under the three CalendarWeekRule
/// conventions (first day, first four-day week, first full week).
/// </summary>
/// <param name="interpretation">Semantic interpretation of the recognized command (unused here).</param>
private async Task ProcessWeekOfYearAsync(SpeechRecognitionSemanticInterpretation interpretation)
{
    // BUG FIX: the original wrote `new DateTimeFormatInfo.CurrentInfo`, which
    // does not compile — CurrentInfo is a static property, not a constructor.
    DateTimeFormatInfo dfi = DateTimeFormatInfo.CurrentInfo;
    Calendar cal = dfi.Calendar;
    var firstDay = cal.GetWeekOfYear(DateTime.Today, CalendarWeekRule.FirstDay, dfi.FirstDayOfWeek);
    var firstFourDay = cal.GetWeekOfYear(DateTime.Today, CalendarWeekRule.FirstFourDayWeek, dfi.FirstDayOfWeek);
    var firstFullDay = cal.GetWeekOfYear(DateTime.Today, CalendarWeekRule.FirstFullWeek, dfi.FirstDayOfWeek);

    // NOTE(review): this spoken text lacks spaces around the numbers and appends
    // firstFourDay with no label — confirm the intended wording before changing.
    string fullDayLabel = "Today is week:" + firstDay + "by the first day rule" + firstFourDay;

    var destinationContentTile = new List<VoiceCommandContentTile>();
    var destinationTile = new VoiceCommandContentTile();
    try
    {
        destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWithText;
        destinationTile.AppContext = null;
        destinationTile.AppLaunchArgument = "type=" + VoiceCommandType.WeekOfYearQueryCommand;
        destinationTile.Title = DateTime.Today.ToString(dfi.LongDatePattern);
        destinationTile.TextLine1 = "today is week #" + firstDay + "by the first day rule";
        destinationTile.TextLine2 = "Week #" + firstFourDay + "by the first four day rule";
        destinationTile.TextLine3 = " Week #" + firstFullDay + "by the first full week rule";
        destinationContentTile.Add(destinationTile);
    }
    catch (Exception ex)
    {
        // Best effort: report the response even without the tile.
        // BUG FIX: the original swallowed the exception silently.
        System.Diagnostics.Debug.WriteLine(ex);
    }

    VoiceCommandResponse response2 = VoiceCommandResponse.CreateResponse(new VoiceCommandUserMessage
    {
        DisplayMessage = "Week of the year",
        SpokenMessage = fullDayLabel,
    }, destinationContentTile);
    await voiceServiceConnection.ReportSuccessAsync(response2);
}
/// <summary>
/// Looks up the predicted future population (Germany) for the given year,
/// shows it as a Cortana tile, opens the related Power BI dashboards, and
/// reports success.
/// </summary>
/// <param name="year">Year to predict the population for.</param>
private async Task SendCompletionMessageForFuturePopulationML(string year)
{
    // Operations expected to take longer than 0.5 seconds must report progress
    // to Cortana first, and keep updating at most every 5 seconds.
    string calculatingFuturePopulation = string.Format(
        cortanaResourceMap.GetValue("CalculatingPopulation", cortanaContext).ValueAsString,
        "Germany", year);
    await ShowProgressScreen(calculatingFuturePopulation);

    // REST call to the predictive ML service; returns the raw population figure.
    var result = await InvokeRequestResponseServicePredictiveML(year);
    string population = Convert.ToDouble(result).ToString("#,##,, Million", CultureInfo.InvariantCulture);

    // Visual card with the prediction.
    var responseContentTile = new VoiceCommandContentTile
    {
        ContentTileType = VoiceCommandContentTileType.TitleWithText,
        AppLaunchArgument = "Germany",
        Title = "Germany" + " " + year,
        TextLine1 = "Population: " + Math.Round(Convert.ToDouble(result), 2).ToString()
    };
    // CreateResponse expects a list of tiles.
    var tileList = new List<VoiceCommandContentTile> { responseContentTile };

    // Message Cortana shows and speaks on the response page.
    string message = String.Format(
        cortanaResourceMap.GetValue("ShowFuturePopulation", cortanaContext).ValueAsString,
        "Germany", year, population);
    var userMessage = new VoiceCommandUserMessage
    {
        DisplayMessage = message,
        SpokenMessage = message
    };

    var response = VoiceCommandResponse.CreateResponse(userMessage, tileList);

    // General dashboard.
    await Windows.System.Launcher.LaunchUriAsync(new Uri(@"https://app.powerbi.com/groups/me/dashboards/1e13afdf-70f8-4d7c-b4f5-c95499802d44"));
    // Country-specific report.
    await Windows.System.Launcher.LaunchUriAsync(new Uri(@"https://app.powerbi.com/groups/me/reports/6ae73462-1d4b-4bb7-928f-75d23fc6bc84/ReportSection?filter=World/Country eq '" + "Germany" + "'"));

    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Looks up the historical women-proportion statistic for a country/year,
/// shows it as a Cortana tile, opens the related Power BI dashboards, and
/// reports success.
/// </summary>
/// <param name="country">Country to query.</param>
/// <param name="year">Year to query.</param>
/// <param name="searchType">Which statistic the REST service should return.</param>
private async Task SendCompletionMessageForPastWomenProportion(string country, string year, string searchType)
{
    // Operations expected to take longer than 0.5 seconds must report progress
    // to Cortana first, and keep updating at most every 5 seconds.
    string calculatingWomenProportion = string.Format(
        cortanaResourceMap.GetValue("CalculatingWomenProportion", cortanaContext).ValueAsString,
        country, year);
    await ShowProgressScreen(calculatingWomenProportion);

    // REST call that returns the raw proportion value.
    var result = await InvokeRequestResponseService(country, year, searchType);
    string womenProportion = Convert.ToDouble(result).ToString("#.##") + "%";

    // Visual card with the statistic.
    var responseContentTile = new VoiceCommandContentTile
    {
        ContentTileType = VoiceCommandContentTileType.TitleWithText,
        AppLaunchArgument = country,
        Title = country + " " + year,
        TextLine1 = "Women proportion: " + womenProportion
    };
    // CreateResponse expects a list of tiles.
    var tileList = new List<VoiceCommandContentTile> { responseContentTile };

    // Message Cortana shows and speaks on the response page.
    string message = String.Format(
        cortanaResourceMap.GetValue("ShowWomenProportion", cortanaContext).ValueAsString,
        country, year, womenProportion);
    var userMessage = new VoiceCommandUserMessage
    {
        DisplayMessage = message,
        SpokenMessage = message
    };

    var response = VoiceCommandResponse.CreateResponse(userMessage, tileList);

    // General dashboard.
    await Windows.System.Launcher.LaunchUriAsync(new Uri(@"https://app.powerbi.com/groups/me/dashboards/1e13afdf-70f8-4d7c-b4f5-c95499802d44"));
    // Year-filtered report.
    await Windows.System.Launcher.LaunchUriAsync(new Uri(@"https://app.powerbi.com/groups/me/reports/16a72348-d0d6-4add-989e-aad4cf560073/ReportSection?filter=World/Year eq '" + year + "'"));

    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Picks one joke at random and presents it as a Cortana tile, splitting the
/// text across the three tile lines in 100-character chunks (max 300 characters).
/// </summary>
private async Task TellAJoke()
{
    var msgback = new VoiceCommandUserMessage();
    var jokes = await GetDuans();

    // Pick a random joke from the list.
    var chosen = jokes[new Random(DateTime.Now.Millisecond).Next(jokes.Count)];

    var jokeTile = new VoiceCommandContentTile
    {
        ContentTileType = VoiceCommandContentTileType.TitleWithText,
        Title = $"来自{chosen.Author}的段子:"
    };

    // Anything past 300 characters is dropped; each tile line holds 100.
    var jokeStr = $"{chosen.Content}";
    if (jokeStr.Length > 300)
    {
        jokeStr = jokeStr.Substring(0, 300);
    }
    if (jokeStr.Length >= 200)
    {
        jokeTile.TextLine1 = jokeStr.Substring(0, 100);
        jokeTile.TextLine2 = jokeStr.Substring(100, 100);
        jokeTile.TextLine3 = jokeStr.Substring(200);
    }
    else if (jokeStr.Length >= 100)
    {
        jokeTile.TextLine1 = jokeStr.Substring(0, 100);
        jokeTile.TextLine2 = jokeStr.Substring(100);
    }
    else
    {
        jokeTile.TextLine1 = jokeStr;
    }

    var jokeTiles = new List<VoiceCommandContentTile> { jokeTile };

    msgback.DisplayMessage = msgback.SpokenMessage = "找到一枚段子:";
    var response = VoiceCommandResponse.CreateResponse(msgback, jokeTiles);
    await _serviceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Searches the catalog with the spoken filter and shows up to ten matching
/// items as Cortana tiles with name and price.
/// </summary>
/// <param name="filter">The spoken filter phrase.</param>
private async Task SendCompletionMessageForFilter(string filter)
{
    var loadingSearchByType = string.Format(
        _cortanaResourceMap.GetValue("Cortana_Loading", _cortanaContext).ValueAsString, filter);
    await ShowProgressScreen(loadingSearchByType);

    // TODO: choose the provider depending on configuration.
    _catalogProvider = new LocalCatalogProvider();
    // `?.` removed: the provider was just assigned, it cannot be null here.
    var items = await _catalogProvider.GetItemsByVoiceCommandAsync(filter);

    var userMessage = new VoiceCommandUserMessage();
    var listContentTiles = new List<VoiceCommandContentTile>();
    if (items == null || !items.Any())
    {
        var foundNoSearchByType = string.Format(
            _cortanaResourceMap.GetValue("Cortana_foundNoSearchByType", _cortanaContext).ValueAsString, filter);
        userMessage.DisplayMessage = foundNoSearchByType;
        userMessage.SpokenMessage = foundNoSearchByType;
    }
    else
    {
        foreach (CatalogItem item in items.Take(10))
        {
            var typeTile = new VoiceCommandContentTile();
            typeTile.ContentTileType = VoiceCommandContentTileType.TitleWithText;
            typeTile.AppLaunchArgument = item.Id.ToString();
            typeTile.Title = item.Name;
            typeTile.TextLine1 = $"{item.Price.ToString()}$";
            listContentTiles.Add(typeTile);
        }
    }

    // BUG FIX: the original called items.Count() unconditionally, which threw a
    // NullReferenceException whenever the provider returned null.
    // NOTE(review): this overwrites the "not found" message set above — confirm intended.
    var message = WaitingForResult(filter, items?.Count() ?? 0);
    userMessage.DisplayMessage = message;
    userMessage.SpokenMessage = message;

    var response = VoiceCommandResponse.CreateResponse(userMessage, listContentTiles);
    await _voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Replies to the user with the given text. When a location is supplied, the
/// tile shows the reverse-geocoded address plus a static map image fetched
/// from the Google Static Maps API (falling back to the splash screen on error).
/// </summary>
/// <param name="text">Message Cortana displays and speaks.</param>
/// <param name="point">Optional location used for the address/map tile.</param>
private async void RespondTouser(string text, Geopoint point = null)
{
    // NOTE(review): async void — exceptions thrown here are unobservable.
    // Kept for signature compatibility; consider returning Task.
    var userMessage = new VoiceCommandUserMessage();
    userMessage.DisplayMessage = userMessage.SpokenMessage = text;

    var destinationsContentTiles = new List<VoiceCommandContentTile>();
    var destinationTile = new VoiceCommandContentTile();
    destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith280x140IconAndText;
    if (point != null)
    {
        destinationTile.Title = await GeocodeHelper.GetAddress(point);
        userMessage.SpokenMessage = "Seems you are here, " + destinationTile.Title;
        try
        {
            // Download a static map centered on the user and cache it locally,
            // because the tile image must come from a file URI.
            // BUG FIX: the original never disposed the HttpClient, and leaked the
            // file stream if WriteAsync threw; both are now in `using` blocks.
            using (var http = new HttpClient())
            {
                var httpres = await http.GetAsync(new Uri($"https://maps.googleapis.com/maps/api/staticmap?center={point.Position.Latitude},{point.Position.Longitude}&zoom=16&size=560x280&markers=Red|label:G|{point.Position.Latitude},{point.Position.Longitude}", UriKind.RelativeOrAbsolute));
                var buf = await httpres.Content.ReadAsBufferAsync();
                var f = await ApplicationData.Current.LocalFolder.CreateFileAsync("CortanaResp.png", CreationCollisionOption.OpenIfExists);
                using (var fileStream = await f.OpenAsync(FileAccessMode.ReadWrite))
                {
                    await fileStream.WriteAsync(buf);
                }
            }
            destinationTile.Image = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appdata:///local/CortanaResp.png", UriKind.RelativeOrAbsolute));
        }
        catch
        {
            // Network/storage failure: fall back to a plain tile with the splash image.
            destinationTile.Title = text;
            destinationTile.Image = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Assets/SplashScreen.scale-200.png", UriKind.RelativeOrAbsolute));
        }
    }
    else
    {
        destinationTile.Title = text;
        destinationTile.Image = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Assets/SplashScreen.scale-200.png", UriKind.RelativeOrAbsolute));
    }
    destinationsContentTiles.Add(destinationTile);

    VoiceCommandResponse response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);
    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Reports the (hard-coded sample) trip to Las Vegas back to Cortana with a
/// spoken/displayed message plus a tappable content tile that deep-links into the app.
/// </summary>
/// <param name="destination">Destination spoken by the user (unused by this sample).</param>
private async void SendCompletionMessageForDestination(string destination)
{
    // Take action and determine when the next trip to destination occurs.
    // Replace the hardcoded strings used here with strings
    // appropriate for your application.

    // First, create the VoiceCommandUserMessage with the strings
    // that Cortana will show and speak.
    var userMessage = new VoiceCommandUserMessage();
    userMessage.DisplayMessage = "Here’s your trip.";
    userMessage.SpokenMessage = "Your trip to Vegas is on August 3rd.";

    // Optionally, present visual information about the answer.
    // For this example, create a VoiceCommandContentTile with an
    // icon and a string.
    var destinationsContentTiles = new List<VoiceCommandContentTile>();
    var destinationTile = new VoiceCommandContentTile();
    destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;

    // The user can tap on the visual content to launch the app.
    // Pass in a launch argument to enable the app to deep link to a
    // page relevant to the item displayed on the content tile.
    // BUG FIX: the original used curly (smart) quotes inside the format string
    // ("destination={0}”, “Las Vegas"), making it one literal with no argument
    // for {0}, so string.Format threw FormatException at runtime.
    destinationTile.AppLaunchArgument = string.Format("destination={0}", "Las Vegas");
    destinationTile.Title = "Las Vegas";
    destinationTile.TextLine1 = "August 3rd 2015";
    destinationsContentTiles.Add(destinationTile);

    // Create the VoiceCommandResponse from the userMessage and list
    // of content tiles.
    var response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);

    // Cortana will present a "Go to app_name" link that the user
    // can tap to launch the app. Pass in a launch argument to enable
    // the app to deep link to a page relevant to the voice command.
    // (Same smart-quote FormatException fix as above.)
    response.AppLaunchArgument = string.Format("destination={0}", "Las Vegas");

    // Ask Cortana to display the user message and content tile and
    // also speak the user message.
    await voiceServiceConnection.ReportSuccessAsync(response);
}
private async Task seeABoringPic()
{
    // Pick three random "boring pictures" and present them to Cortana as
    // 280x140 image tiles streamed from their thumbnail URLs.
    var message = new VoiceCommandUserMessage();
    var allPics = await GetPics();
    int[] chosen = GetRandomArray(3, 0, allPics.Count); // pick three of the newest boring pics
    var selection = new List<BoringPic> { allPics[chosen[0]], allPics[chosen[1]], allPics[chosen[2]] };

    var tiles = new List<VoiceCommandContentTile>();
    int index = 1;
    foreach (var pic in selection)
    {
        var thumbUrl = pic.Thumb[0].URL;
        var thumbUri = new Uri(thumbUrl, UriKind.Absolute);
        var tile = new VoiceCommandContentTile
        {
            ContentTileType = VoiceCommandContentTileType.TitleWith280x140IconAndText,
            // Stream the remote thumbnail instead of downloading it up front.
            Image = await StorageFile.CreateStreamedFileFromUriAsync(
                Path.GetFileName(thumbUrl), thumbUri, RandomAccessStreamReference.CreateFromUri(thumbUri)),
            AppContext = pic,
            Title = $"第{index}张:",
            TextLine1 = pic.Content == null ? "" : pic.Content,
            TextLine2 = $"来自{pic.Author}上传的无聊图",
            TextLine3 = ""
        };
        tiles.Add(tile);
        index++;
    }

    message.DisplayMessage = message.SpokenMessage = "找到最近的三张无聊图";
    var response = VoiceCommandResponse.CreateResponse(message, tiles);
    await _serviceConnection.ReportSuccessAsync(response);
}
async void SendCompletionMessageFortodolist()
{
    // Reads today's to-do list from TodoService and reports it through Cortana;
    // speaks a prompt to add a task when the list is empty.
    var destinationsContentTiles = new List<VoiceCommandContentTile>();
    var mycol = TodoService.GetTodayList();
    VoiceCommandResponse response = null;
    if (mycol.Count == 0)
    {
        var userMessage = new VoiceCommandUserMessage();
        userMessage.DisplayMessage = "Nothing in Denna";
        userMessage.SpokenMessage = "You have no tasks ! add one.";
        response = VoiceCommandResponse.CreateResponse(userMessage);
    }
    else
    {
        var userMessage = new VoiceCommandUserMessage();
        userMessage.DisplayMessage = "Here's your to do list on Denna";
        userMessage.SpokenMessage = "Here's your to do list";
        foreach (var item in mycol)
        {
            var destinationTile = new VoiceCommandContentTile();
            destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWithText;
            destinationTile.AppLaunchArgument = "agsonCortana";
            destinationTile.Title = item.Subject;
            destinationTile.TextLine1 = item.Detail;
            destinationTile.TextLine2 = item.StartTime.ToString();
            destinationsContentTiles.Add(destinationTile);
        }
        // FIX: the response was rebuilt on every loop iteration (all but the last
        // instance discarded); create it once after all tiles are collected.
        response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);
    }

    // Tapping the response deep-links back into the app.
    response.AppLaunchArgument = "agsonCortana";

    // Ask Cortana to display the user message and content tiles and
    // also speak the user message.
    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Provide the user with a way to identify which record to select.
/// </summary>
/// <param name="records">The set of records</param>
/// <returns>The chosen record, or null when there is at most one record or Cortana was dismissed.</returns>
private async Task<Record> DisambiguateRecords(IEnumerable<Record> records)
{
    if (records.Count() > 1)
    {
        // Create the first prompt message.
        var userPrompt = new VoiceCommandUserMessage();
        userPrompt.DisplayMessage = userPrompt.SpokenMessage = "Which record do you want to select?";

        // Create a re-prompt message if the user responds with an out-of-grammar response.
        var userReprompt = new VoiceCommandUserMessage();
        userReprompt.DisplayMessage = userReprompt.SpokenMessage = "Sorry, which one do you want to select?";

        // Create a card for each item.
        // FIX: removed the unused counter 'i' (incremented but never read).
        var destinationContentTiles = new List<VoiceCommandContentTile>();
        foreach (Record record in records)
        {
            var destinationTile = new VoiceCommandContentTile();
            destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
            //destinationTile.Image = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///AdventureWorks.VoiceCommands/Images/GreyTile.png"));

            // The AppContext can be any arbitrary object.
            destinationTile.AppContext = record;
            destinationTile.Title = record.recordName;
            destinationContentTiles.Add(destinationTile);
        }

        // Cortana handles re-prompting if no valid response.
        var response = VoiceCommandResponse.CreateResponseForPrompt(userPrompt, userReprompt, destinationContentTiles);

        // If cortana is dismissed in this operation, null is returned.
        var voiceCommandDisambiguationResult = await voiceServiceConnection.RequestDisambiguationAsync(response);
        if (voiceCommandDisambiguationResult != null)
        {
            return (Record)voiceCommandDisambiguationResult.SelectedItem.AppContext;
        }
    }
    return null;
}
private async Task SendCompletionMessageForDestination(string v)
{
    // Reports a simple completion message ("I'm done! I sent a {v}") through Cortana.
    VoiceCommandUserMessage um = new VoiceCommandUserMessage();
    string msg = $"I'm done! I sent a {v}";
    um.SpokenMessage = msg;
    um.DisplayMessage = msg;

    // FIX: the original constructed a VoiceCommandContentTile, set its ContentTileType,
    // and never added it to this list, so it had no effect; the dead tile was removed.
    // NOTE(review): if a tile was actually intended here, it also needs a Title (and
    // Image) before being added — confirm with the original author.
    var destinationsContentTiles = new List<VoiceCommandContentTile>();

    var response = VoiceCommandResponse.CreateResponse(um, destinationsContentTiles);
    await voiceServiceConnection.ReportSuccessAsync(response);
}
private async Task FindCommandHandleAsync(SpeechRecognitionSemanticInterpretation interpretation)
{
    // Handles the dice voice command: reports progress, shows the six faces as
    // disambiguation tiles, then reports the face the user selected.
    var searchQuery = string.Empty;
    if (interpretation.Properties.ContainsKey("DiceNum"))
    {
        searchQuery = interpretation.Properties["DiceNum"].FirstOrDefault();
    }

    // FIX: IsNullOrWhiteSpace already subsumes IsNullOrEmpty; the double check was redundant.
    if (!string.IsNullOrWhiteSpace(searchQuery))
    {
        response = VoiceCommandResponse.CreateResponse(new VoiceCommandUserMessage() { SpokenMessage = "Get ready", DisplayMessage = "Get ready" });
        await voiceServiceConn.ReportProgressAsync(response);

        var promptStr = "Select a result";
        var repromptStr = "Please select a result";
        var contentTiles = new List<VoiceCommandContentTile>();
        // One tile per die face (1..6).
        for (var i = 1; i < 7; i++)
        {
            var tile = new VoiceCommandContentTile();
            tile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
            tile.Image = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(new Uri($"ms-appx:///ControlPanel.BackgroundServices/Assets/68_Dice_{i}.png"));
            tile.AppContext = i;
            tile.AppLaunchArgument = $"type={i}";
            tile.Title = $"The dice result is {i}";
            contentTiles.Add(tile);
        }

        // Null result means Cortana was dismissed before a choice was made.
        var result = await Helpers.VoiceCommandResponseHelper.RequestDisambiguationAsync(voiceServiceConn, promptStr, repromptStr, contentTiles);
        if (result != null)
        {
            contentTiles.Clear();
            contentTiles.Add(result.SelectedItem);
            var successStr = "You select a dice";
            await Helpers.VoiceCommandResponseHelper.ReportSuccessAsync(voiceServiceConn, successStr, successStr, contentTiles);
        }
    }
}
/// <summary>
/// Builds one title-only Cortana tile per k in [0, 9], each holding the state
/// probability P(k) computed by the selected queueing model.
/// </summary>
/// <param name="Lambda">Rate parameter passed to the model — presumably the arrival rate, per the M/M/* naming; confirm.</param>
/// <param name="Mu">Rate parameter passed to the model — presumably the service rate; confirm.</param>
/// <param name="model">1 = M/M/1 model, 2 = M/M/inf model; any other value leaves the tile titles unset.</param>
/// <returns>Ten content tiles, one per k.</returns>
private List <VoiceCommandContentTile> GetLikelihoodForSelectedModel(double Lambda, double Mu, int model)
{
    var resultContentTiles = new List <VoiceCommandContentTile>();
    for (int k = 0; k <= 9; k++)
    {
        var modelTile = new VoiceCommandContentTile();
        modelTile.ContentTileType = VoiceCommandContentTileType.TitleOnly;
        if (model == 1)
        {
            // M/M/1 model probability for state k.
            modelTile.Title = Models.MM1.CortanaCalkPk(Lambda, Mu, k);
        }
        else if (model == 2)
        {
            // M/M/infinity model probability for state k.
            modelTile.Title = Models.MMinf.CortanaCalkPk(Lambda, Mu, k);
        }
        resultContentTiles.Add(modelTile);
    }
    return(resultContentTiles);
}
/// <summary>
/// Searches a folder and its sub-folders for passwords matching the given search
/// string and builds one Cortana tile per match.
/// </summary>
/// <param name="identifiant">The identifier being searched for.</param>
/// <param name="dossier">The folder to explore.</param>
/// <returns>A list of tiles for Cortana to display.</returns>
public static List <VoiceCommandContentTile> GetMotDePasseTile(string identifiant, Dossier dossier)
{
    var tiles = new List <VoiceCommandContentTile>();
    foreach (var entry in GetMotDePasse(identifiant, dossier))
    {
        tiles.Add(new VoiceCommandContentTile
        {
            ContentTileType = VoiceCommandContentTileType.TitleWithText,
            Title = entry.Titre,
            TextLine1 = entry.Login,
            TextLine2 = entry.MotDePasseObjet
        });
    }
    return tiles;
}
/// <summary>
/// Retrieves system users (full names, ascending) from the Web API and reports
/// up to the first 10 as title-only Cortana tiles.
/// </summary>
/// <returns></returns>
private async Task GetFullNameSystemUsers()
{
    try
    {
        //Retrieve
        //The URL will change in 2016 to include the API version - api/data/v8.0/systemusers
        HttpResponseMessage retrieveResponse = await httpClient.GetAsync("systemusers?$select=fullname&$orderby=fullname asc");
        if (retrieveResponse.IsSuccessStatusCode)
        {
            // FIX: ReadAsStringAsync().Result blocked the thread (sync-over-async,
            // deadlock risk on a context-bound thread); await it instead.
            string responseBody = await retrieveResponse.Content.ReadAsStringAsync();
            JObject jRetrieveResponse = JObject.Parse(responseBody);
            dynamic systemUserObject = JsonConvert.DeserializeObject(jRetrieveResponse.ToString());

            var userMessage = new VoiceCommandUserMessage();
            var destinationsContentTiles = new List<VoiceCommandContentTile>();
            string recordsRetrieved = string.Format(
                cortanaResourceMap.GetValue("UsersRetrieved", cortanaContext).ValueAsString);
            userMessage.DisplayMessage = userMessage.SpokenMessage = recordsRetrieved;

            // Cap at 10 tiles.
            int i = 0;
            foreach (var data in systemUserObject.value)
            {
                if (i >= 10)
                {
                    break;
                }
                var destinationTile = new VoiceCommandContentTile();
                destinationTile.ContentTileType = VoiceCommandContentTileType.TitleOnly;
                destinationTile.Title = data.fullname.Value;
                destinationsContentTiles.Add(destinationTile);
                i++;
            }

            var response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);
            await voiceServiceConnection.ReportSuccessAsync(response);
        }
    }
    catch (Exception ex)
    {
        // NOTE(review): exceptions are intentionally swallowed here (ShowException is
        // commented out); consider at least logging before release.
        //ShowException(ex);
    }
}
private async Task <CabEstimate> AvailableList(IEnumerable <CabEstimate> selected, string selectionMessage, string secondSelectionMessage)
{
    // Asks the user to choose between the available cab estimates via Cortana
    // disambiguation; returns the chosen estimate, or null if Cortana was dismissed.
    var userPrompt = new VoiceCommandUserMessage();
    userPrompt.DisplayMessage = userPrompt.SpokenMessage = selectionMessage;

    // Re-prompt used when the user gives an out-of-grammar response.
    var userReprompt = new VoiceCommandUserMessage();
    userReprompt.DisplayMessage = userReprompt.SpokenMessage = secondSelectionMessage;

    var tiles = new List <VoiceCommandContentTile>();
    foreach (var cab in selected)
    {
        var tile = new VoiceCommandContentTile();
        tile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;

        // Provider-specific logo.
        string logoUri = cab.Provider.Equals("UBER")
            ? "ms-appx:///ContosoCabs.VoiceCommandService/img/uber.png"
            : "ms-appx:///ContosoCabs.VoiceCommandService/img/ola.png";
        tile.Image = await StorageFile.GetFileFromApplicationUriAsync(new Uri(logoUri));

        // Keep the full estimate around so the caller can recover the selection.
        tile.AppContext = cab;
        tile.Title = cab.Provider;
        tile.TextLine1 = cab.Type;
        tile.TextLine2 = "ETA : " + cab.Eta;
        tile.TextLine3 = "Es. Fare: " + cab.CurrentEstimate.LowRange + "-" + cab.CurrentEstimate.HighRange;
        tiles.Add(tile);
    }

    var response = VoiceCommandResponse.CreateResponseForPrompt(userPrompt, userReprompt, tiles);

    // Null result means Cortana was dismissed before a choice was made.
    var disambiguationResult = await voiceServiceConnection.RequestDisambiguationAsync(response);
    if (disambiguationResult == null)
    {
        return null;
    }
    return (CabEstimate)disambiguationResult.SelectedItem.AppContext;
}
private async Task seeFreshNews()
{
    // Fetch the ten newest "fresh news" items and show them to Cortana as
    // 68x68 image tiles streamed from their thumbnail URLs.
    var msgback = new VoiceCommandUserMessage();
    var news = await GetNews();
    var p = news.Take(10).ToList(); // take the 10 newest items
    var picTiles = new List<VoiceCommandContentTile>();
    // FIX: removed the unused loop counter 'i' (incremented but never read).
    foreach (var item in p)
    {
        var file_name = Path.GetFileName(item.Thumb_c);
        var uri = new Uri(item.Thumb_c, UriKind.Absolute);
        var newsTile = new VoiceCommandContentTile();
        newsTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
        // Stream the remote thumbnail instead of downloading it up front.
        newsTile.Image = await StorageFile.CreateStreamedFileFromUriAsync(
            file_name, uri, RandomAccessStreamReference.CreateFromUri(uri));
        newsTile.AppContext = item;
        newsTile.Title = $"{item.Title}";
        newsTile.TextLine1 = $"@{item.Tag[0].Title}";
        newsTile.TextLine2 = $"by {item.Author.Name}";
        newsTile.TextLine3 = "";
        picTiles.Add(newsTile);
    }
    msgback.DisplayMessage = msgback.SpokenMessage = "找到最近的十条新鲜事";
    var response = VoiceCommandResponse.CreateResponse(msgback, picTiles);
    await _serviceConnection.ReportSuccessAsync(response);
}
private async Task GetNotesForPerson(string person)
{
    // Shows a single hard-coded note tile for the requested person; tapping the
    // tile deep-links back into the app with a "person=..." argument.
    var message = new VoiceCommandUserMessage
    {
        DisplayMessage = "Here are your notes.",
        SpokenMessage = "Tom is a bastard man"
    };

    var noteTile = new VoiceCommandContentTile();
    noteTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
    noteTile.Image = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///Images/GreyTile.scale-100.png"));
    noteTile.AppLaunchArgument = $"person={person}";
    noteTile.Title = "Tom";
    noteTile.TextLine1 = "Because he is";

    var tiles = new List <VoiceCommandContentTile> { noteTile };
    var response = VoiceCommandResponse.CreateResponse(message, tiles);
    await this.voiceServiceConnection.ReportSuccessAsync(response);
}
private static async Task HandleReadNamedaysCommandAsync(VoiceCommandServiceConnection connection)
{
    // Reports today's namedays: a single spoken sentence for one name, or a tile
    // listing all names when there are several.
    var userMessage = new VoiceCommandUserMessage();
    userMessage.DisplayMessage = "Fetching today's namedays for you";
    userMessage.SpokenMessage = "Fetching today's namedays for you";
    var response = VoiceCommandResponse.CreateResponse(userMessage);
    // Lookup may take a moment — keep Cortana informed.
    await connection.ReportProgressAsync(response);

    var today = DateTime.Now.Date;
    var namedays = await NamedayRepository.GetAllNamedaysAsync();
    var todaysNameday = namedays.Find(e => e.Day == today.Day && e.Month == today.Month);

    // FIX: List<T>.Find returns null when no entry matches today's date; the original
    // then dereferenced it and crashed with a NullReferenceException. Report a
    // friendly message instead.
    if (todaysNameday == null)
    {
        userMessage.SpokenMessage = userMessage.DisplayMessage = "There are no namedays today";
        response = VoiceCommandResponse.CreateResponse(userMessage);
    }
    else
    {
        var namedaysAsString = todaysNameday.NamesAsString;
        if (todaysNameday.Names.Count() == 1)
        {
            userMessage.SpokenMessage = userMessage.DisplayMessage = $"It is {namedaysAsString}'s nameday today";
            response = VoiceCommandResponse.CreateResponse(userMessage);
        }
        else
        {
            userMessage.SpokenMessage = $"Today's namedays are: {namedaysAsString}";
            userMessage.DisplayMessage = "Here are today's namedays:";
            var tile = new VoiceCommandContentTile();
            tile.ContentTileType = VoiceCommandContentTileType.TitleOnly;
            tile.Title = namedaysAsString;
            response = VoiceCommandResponse.CreateResponse(userMessage, new List<VoiceCommandContentTile> { tile });
        }
    }
    await connection.ReportSuccessAsync(response);
}
private async void ShowTVProgramm()
{
    // Presents today's TV movies as Cortana content tiles.
    var userMessage = new VoiceCommandUserMessage();
    userMessage.DisplayMessage = "TV Programm ";
    userMessage.SpokenMessage = "Folgende Filme kommen heute im Fernsehen.";

    var taskTiles = new List<VoiceCommandContentTile>();

    var taskTile = new VoiceCommandContentTile();
    taskTile.ContentTileType = VoiceCommandContentTileType.TitleWithText;
    // FIX: the launch arguments had been corrupted to "action=tv¶m=..." — the "&par"
    // of "&param" was mangled into a pilcrow (HTML-entity "&para;" corruption), so the
    // deep link would never have carried a usable "param" value. Restored the intended
    // query string here and below.
    taskTile.AppLaunchArgument = "action=tv&param=Titanic";
    taskTile.Title = "Titanic";
    taskTile.TextLine1 = "Schnulze";
    taskTiles.Add(taskTile);

    taskTile = new VoiceCommandContentTile();
    taskTile.ContentTileType = VoiceCommandContentTileType.TitleWithText;
    taskTile.AppLaunchArgument = "action=tv&param=PacificRim";
    taskTile.Title = "Pacific Rim";
    taskTile.TextLine1 = "Megacooler Action-Film";
    taskTiles.Add(taskTile);

    // Create the VoiceCommandResponse from the userMessage and list of content tiles.
    // (The large block of commented-out sample code that used to follow has been removed.)
    var response = VoiceCommandResponse.CreateResponse(userMessage, taskTiles);

    // Ask Cortana to display the user message and content tiles and
    // also speak the user message.
    await voiceServiceConnection.ReportSuccessAsync(response);
}
private async Task<VoiceCommandResponse> CortanaList(List<VoiceCommandContentTile> destContentTiles, List<XElement> commandList, List<XComment> commentList, int cmdsTook, int cmdCountingNo)
{
    // Pages through the installed voice commands (4 per full page plus a "more" tile,
    // or all remaining commands on the last page), lets the user pick one via Cortana
    // disambiguation, then asks for a yes/no confirmation.
    var destinationContentTiles = destContentTiles;
    var cmdList = commandList;
    var cmtList = commentList;
    var commandsTook = cmdsTook;
    var commandsCountingNo = cmdCountingNo;

    // Messages Cortana shows/speaks for the prompt and the out-of-grammar re-prompt.
    var msgback = new VoiceCommandUserMessage();
    msgback.DisplayMessage = msgback.SpokenMessage = CortanaReply;
    var msgRepeat = new VoiceCommandUserMessage();
    msgRepeat.DisplayMessage = msgRepeat.SpokenMessage = CortanaSecondReply;
    var moreCommands = "Select next page commands";
    var iconsFolder = await ApplicationData.Current.LocalFolder.GetFolderAsync("builditmedia");

    destinationContentTiles.Clear();
    if (cmdList.Count - commandsTook <= 5)
    {
        // Last page: emit every remaining command.
        for (int i = commandsCountingNo; i <= cmdList.Count - 1; i++)
        {
            destinationContentTiles.Add(await BuildCommandTileAsync(cmdList[commandsTook], iconsFolder, "Description:"));
            commandsTook++;
        }
    }
    else
    {
        // Full page: four commands plus a "next page" tile.
        // NOTE(review): this branch matches comments starting with " Description:"
        // (leading space) while the branch above matches "Description:"; preserved as-is.
        for (int i = 0; i < 4; i++)
        {
            destinationContentTiles.Add(await BuildCommandTileAsync(cmdList[commandsTook], iconsFolder, " Description:"));
            commandsTook++;
        }
        destinationContentTiles.Add(new VoiceCommandContentTile
        {
            ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText,
            Title = MoreVoiceCommands,
            AppLaunchArgument = MoreAppLaunchArgument,
            TextLine1 = moreCommands,
            Image = await iconsFolder.GetFileAsync("buildit_help.png")
        });
        commandsCountingNo += 4;
    }

    // Cortana will handle re-prompting if the user does not provide a valid response.
    var response = VoiceCommandResponse.CreateResponseForPrompt(msgback, msgRepeat, destinationContentTiles);

    // FIX: RequestDisambiguationAsync returns null when Cortana is dismissed; the
    // original dereferenced SelectedItem unconditionally and crashed in that case.
    var selectedRes = await VoiceServiceConnection.RequestDisambiguationAsync(response);
    if (selectedRes == null)
    {
        return response;
    }

    // Confirm the selection with the user (yes/no).
    msgback.DisplayMessage = msgback.SpokenMessage = "Are you sure you want select " + selectedRes.SelectedItem.Title + " ?";
    msgRepeat.DisplayMessage = msgRepeat.SpokenMessage = "Please select Yes or No";
    response = VoiceCommandResponse.CreateResponseForPrompt(msgback, msgRepeat);
    var result = await VoiceServiceConnection.RequestConfirmationAsync(response);

    if (result.Confirmed)
    {
        if (selectedRes.SelectedItem.AppLaunchArgument == MoreAppLaunchArgument)
        {
            // "More" tile selected: recurse into the next page before replying.
            await CortanaList(destinationContentTiles, cmdList, cmtList, commandsTook, commandsCountingNo);
        }
        msgback.DisplayMessage = msgback.SpokenMessage = $"Please speak to Cortana to select voice command.";
        msgRepeat.DisplayMessage = msgRepeat.SpokenMessage = $"Please speak to Cortana to select voice command.";
        response = VoiceCommandResponse.CreateResponseForPrompt(msgback, msgRepeat);
        return response;
    }

    // Declined: fall back to the help list after a short pause.
    await Task.Delay(3000);
    await CortanaHelpList();
    return response;
}

// Builds one content tile for a command element: title from its Example element,
// description from the first XML comment starting with the given prefix, icon from
// its Name attribute (falling back to the generic "buildit_customTile" icon).
private static async Task<VoiceCommandContentTile> BuildCommandTileAsync(XElement command, StorageFolder iconsFolder, string descriptionPrefix)
{
    var attributeName = command.Attribute("Name").Value;
    var descriptionComment = (from comment in command.DescendantNodes().OfType<XComment>()
                              where comment.Value.StartsWith(descriptionPrefix)
                              select comment.Value)
                             .FirstOrDefault()           // first matching description comment
                             ?.Replace("Description:", "")
                             ?? "";                       // empty when there is none
    if (attributeName.Contains("buildit") != true)
    {
        attributeName = "buildit_customTile";
    }
    return new VoiceCommandContentTile
    {
        AppLaunchArgument = command.Attribute("Name").Value,
        ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText,
        Title = command.Element(VoiceCommandSchema.Example).Value,
        TextLine1 = descriptionComment,
        Image = await iconsFolder.GetFileAsync($"{attributeName}.png")
    };
}
private List<VoiceCommandContentTile> GetLikelihoodForSelectedModel(double Lambda, double Mu, int model)
{
    // One title-only tile per k in [0, 9], holding P(k) for the selected model
    // (1 = M/M/1, 2 = M/M/inf; any other value leaves the titles unset).
    var tiles = new List<VoiceCommandContentTile>();
    for (int k = 0; k < 10; k++)
    {
        var tile = new VoiceCommandContentTile { ContentTileType = VoiceCommandContentTileType.TitleOnly };
        switch (model)
        {
            case 1:
                tile.Title = Models.MM1.CortanaCalkPk(Lambda, Mu, k);
                break;
            case 2:
                tile.Title = Models.MMinf.CortanaCalkPk(Lambda, Mu, k);
                break;
        }
        tiles.Add(tile);
    }
    return tiles;
}
private async void ShowResultsInCortana(string textSpoken)
{
    // Queries Wikipedia for the spoken text and, when something comes back,
    // shows a (currently placeholder) result tile in Cortana.
    var userFeedBackMessage = new VoiceCommandUserMessage
    {
        DisplayMessage = "Showing results",
        SpokenMessage = "Here are your results"
    };

    //QueryWikiPedia
    IKnowledgeBase knowledgeBase = new Wikipedia();
    var queryResult = await knowledgeBase.GetImagesByTitle(textSpoken);
    if (queryResult == null)
    {
        //TODO: Implement error
        return;
    }

    // The user can tap on the visual content to launch the app. The launch
    // argument enables the app to deep link to a page relevant to the tile.
    var resultTile = new VoiceCommandContentTile
    {
        AppLaunchArgument = "Add Argument to APP",
        Title = "Titulo do Tile",
        TextLine1 = "Texto 01 do Tile",
        TextLine2 = "Texto 02 do Tile",
        TextLine3 = "Texto 03 do Tile",
        ContentTileType = VoiceCommandContentTileType.TitleWithText
    };

    var tiles = new List<VoiceCommandContentTile> { resultTile };
    var response = VoiceCommandResponse.CreateResponse(userFeedBackMessage, tiles);
    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Search for, and show details related to a single trip, if the trip can be
/// found. This demonstrates a simple response flow in Cortana.
/// </summary>
/// <param name="destination">The destination, expected to be in the phrase list.</param>
/// <returns></returns>
private async Task SendCompletionMessageForDestination(string destination)
{
    // If this operation is expected to take longer than 0.5 seconds, the task must
    // provide a progress response to Cortana prior to starting the operation, and
    // provide updates at most every 5 seconds.
    string loadingTripToDestination = string.Format(
        cortanaResourceMap.GetValue("LoadingTripToDestination", cortanaContext).ValueAsString,
        destination);
    await ShowProgressScreen(loadingTripToDestination);

    Model.TripStore store = new Model.TripStore();
    await store.LoadTrips();

    // Look for the specified trip. The destination *should* be pulled from the grammar we
    // provided, and the subsequently updated phrase list, so it should be a 1:1 match, including case.
    // However, we might have multiple trips to the destination. For now, we just pick the first.
    IEnumerable<Model.Trip> trips = store.Trips.Where(p => p.Destination == destination);

    var userMessage = new VoiceCommandUserMessage();
    var destinationsContentTiles = new List<VoiceCommandContentTile>();
    if (trips.Count() == 0)
    {
        // In this scenario, perhaps someone has modified data on your service outside of your
        // control. If you're accessing a remote service, having a background task that
        // periodically refreshes the phrase list so it's likely to be in sync is ideal.
        // This is unlikely to occur for this sample app, however.
        string foundNoTripToDestination = string.Format(
            cortanaResourceMap.GetValue("FoundNoTripToDestination", cortanaContext).ValueAsString,
            destination);
        userMessage.DisplayMessage = foundNoTripToDestination;
        userMessage.SpokenMessage = foundNoTripToDestination;
    }
    else
    {
        // Set a title message for the page.
        string message = "";
        if (trips.Count() > 1)
        {
            message = cortanaResourceMap.GetValue("PluralUpcomingTrips", cortanaContext).ValueAsString;
        }
        else
        {
            message = cortanaResourceMap.GetValue("SingularUpcomingTrip", cortanaContext).ValueAsString;
        }
        userMessage.DisplayMessage = message;
        userMessage.SpokenMessage = message;

        // Fill in tiles for each destination, to display information about the trips
        // without launching the app.
        // FIX: 'i' was declared (and so reset to 1) inside the foreach, meaning the
        // fallback label for undated trips was always "<destination> 1". Declared
        // once before the loop so each tile gets a distinct number.
        int i = 1;
        foreach (Model.Trip trip in trips)
        {
            var destinationTile = new VoiceCommandContentTile();

            // To handle UI scaling, Cortana automatically looks up files with FileName.scale-<n>.ext formats based on the requested filename.
            // See the VoiceCommandService\Images folder for an example.
            destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
            destinationTile.Image = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///AdventureWorks.VoiceCommands/Images/GreyTile.png"));
            destinationTile.AppLaunchArgument = string.Format("destination={0}", trip.Destination);
            destinationTile.Title = trip.Destination;
            if (trip.StartDate != null)
            {
                destinationTile.TextLine1 = trip.StartDate.Value.ToString(dateFormatInfo.LongDatePattern);
            }
            else
            {
                destinationTile.TextLine1 = trip.Destination + " " + i;
            }
            destinationsContentTiles.Add(destinationTile);
            i++;
        }
    }

    var response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);
    if (trips.Count() > 0)
    {
        response.AppLaunchArgument = string.Format("destination={0}", destination);
    }
    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Demonstrates providing the user with a choice between multiple items. In this case, if a user
/// has two trips to the same destination with different dates, this will provide a way to differentiate
/// them. Provide a way to choose between the items.
/// </summary>
/// <param name="trips">The set of trips to choose between</param>
/// <param name="disambiguationMessage">The initial disambiguation message</param>
/// <param name="secondDisambiguationMessage">Repeat prompt retry message</param>
/// <returns>The selected trip, or null when Cortana was dismissed before a choice was made.</returns>
private async Task<Model.Trip> DisambiguateTrips(IEnumerable<Model.Trip> trips, string disambiguationMessage, string secondDisambiguationMessage)
{
    // Create the first prompt message.
    var userPrompt = new VoiceCommandUserMessage();
    userPrompt.DisplayMessage = userPrompt.SpokenMessage = disambiguationMessage;

    // Create a re-prompt message if the user responds with an out-of-grammar response.
    var userReprompt = new VoiceCommandUserMessage();
    userReprompt.DisplayMessage = userReprompt.SpokenMessage = secondDisambiguationMessage;

    // Create items for each item. Ideally, should be limited to a small number of items.
    var destinationContentTiles = new List<VoiceCommandContentTile>();
    int i = 1;  // distinct per-tile number used to label undated trips
    foreach (Model.Trip trip in trips)
    {
        var destinationTile = new VoiceCommandContentTile();

        // Use a generic background image. This can be fetched from a service call, potentially, but
        // be aware of network latencies and ensure Cortana does not time out.
        destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
        destinationTile.Image = await Package.Current.InstalledLocation.GetFileAsync("AdventureWorks.VoiceCommands\\Images\\GreyTile.png");

        // The AppContext can be any arbitrary object, and will be maintained for the
        // response.
        destinationTile.AppContext = trip;
        string dateFormat = "";
        if (trip.StartDate != null)
        {
            dateFormat = trip.StartDate.Value.ToString(dateFormatInfo.LongDatePattern);
        }
        else
        {
            // The app allows a trip to not have a date, but the choices must be unique
            // so they can be spoken aloud and be distinct, so add a number to identify them.
            dateFormat = string.Format("{0}", i);
        }

        // NOTE(review): the original sample put destination+date in Title and the
        // description in TextLine1; the active lines below swap that (see the
        // "REVERT lines above" marker) — confirm which presentation is intended.
        //destinationTile.Title = trip.Destination + " " + dateFormat;
        //destinationTile.TextLine1 = trip.Description;
        destinationTile.Title = trip.Description; //REVERT lines above
        destinationTile.TextLine1 = dateFormat;
        destinationContentTiles.Add(destinationTile);
        i++;
    }

    // Cortana will handle re-prompting if the user does not provide a valid response.
    var response = VoiceCommandResponse.CreateResponseForPrompt(userPrompt, userReprompt, destinationContentTiles);

    // If cortana is dismissed in this operation, null will be returned.
    var voiceCommandDisambiguationResult = await voiceServiceConnection.RequestDisambiguationAsync(response);
    if (voiceCommandDisambiguationResult != null)
    {
        // NOTE(review): 'xyz' appears to be a field stashing the raw selected tile for
        // use elsewhere — confirm it is still needed.
        xyz = voiceCommandDisambiguationResult.SelectedItem;
        return (Model.Trip)voiceCommandDisambiguationResult.SelectedItem.AppContext;
    }
    return null;
}
/// <summary>
/// Entry point for the Cortana voice-command background task. Handles the "what"
/// command (list open to-dos) and the "new" command (confirm, then add a to-do).
/// </summary>
/// <param name="taskInstance">The background task instance supplying the app-service trigger.</param>
protected override async void OnRun(IBackgroundTaskInstance taskInstance)
{
    // Hold the deferral for the lifetime of the Cortana conversation and watch for
    // cancellation so the deferral is always completed (see finally below).
    this.serviceDeferral = taskInstance.GetDeferral();
    taskInstance.Canceled += OnTaskCanceled;

    var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;
    VoiceCommandResponse response;
    try
    {
        voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
        voiceServiceConnection.VoiceCommandCompleted += VoiceCommandCompleted;

        VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();
        VoiceCommandUserMessage userMessage = new VoiceCommandUserMessage();
        List<VoiceCommandContentTile> contentTiles;

        switch (voiceCommand.CommandName)
        {
            case "what":
                // Read the open to-dos and speak/show the first few, ordered by due date.
                _todoItemRepository = TODOAdaptiveUISample.Repositories.TodoItemFileRepository.GetInstance();
                var data = await _todoItemRepository.RefreshTodoItemsAsync();
                contentTiles = new List<VoiceCommandContentTile>();
                userMessage.SpokenMessage = "Your Top To Do's are: ";
                foreach (var item in data.Where(x => x.IsComplete == false).OrderBy(x => x.DueDate).Take((int)VoiceCommandResponse.MaxSupportedVoiceCommandContentTiles))
                {
                    var tile = new VoiceCommandContentTile();
                    tile.ContentTileType = VoiceCommandContentTileType.TitleWithText;
                    tile.Title = item.Title;
                    contentTiles.Add(tile);
                    userMessage.SpokenMessage += item.Title + ", ";
                }
                userMessage.DisplayMessage = "Here are the top " + contentTiles.Count + " To Do's";
                response = VoiceCommandResponse.CreateResponse(userMessage, contentTiles);
                await voiceServiceConnection.ReportSuccessAsync(response);
                break;

            case "new":
                // Ask the user to confirm before adding the dictated to-do item.
                var todo = voiceCommand.Properties["todo"][0];
                var responseMessage = new VoiceCommandUserMessage()
                {
                    DisplayMessage = String.Format("Add \"{0}\" to your To Do's?", todo),
                    SpokenMessage = String.Format("Do you want me to add \"{0}\" to your To Do's?", todo)
                };
                var repeatMessage = new VoiceCommandUserMessage()
                {
                    DisplayMessage = String.Format("Are you sure you want me to add \"{0}\" to your To Do's?", todo),
                    SpokenMessage = String.Format("Are you sure you want me to add \"{0}\" to your To Do's?", todo)
                };

                bool confirmed = false;
                response = VoiceCommandResponse.CreateResponseForPrompt(responseMessage, repeatMessage);
                try
                {
                    var confirmation = await voiceServiceConnection.RequestConfirmationAsync(response);
                    confirmed = confirmation.Confirmed;
                }
                catch
                {
                    // If Cortana is dismissed mid-prompt the confirmation request can
                    // fail; treat that as "not confirmed" rather than failing the task.
                }

                if (confirmed)
                {
                    // Persist the new item, then show the refreshed top of the list.
                    _todoItemRepository = TODOAdaptiveUISample.Repositories.TodoItemFileRepository.GetInstance();
                    var i = _todoItemRepository.Factory(title: todo);
                    await _todoItemRepository.InsertTodoItem(i);

                    var todos = await _todoItemRepository.RefreshTodoItemsAsync();
                    contentTiles = new List<VoiceCommandContentTile>();
                    foreach (var itm in todos.Where(x => x.IsComplete == false).OrderBy(x => x.DueDate).Take((int)VoiceCommandResponse.MaxSupportedVoiceCommandContentTiles))
                    {
                        var tile = new VoiceCommandContentTile();
                        tile.ContentTileType = VoiceCommandContentTileType.TitleWithText;
                        tile.Title = itm.Title;
                        contentTiles.Add(tile);
                    }
                    userMessage.SpokenMessage = "Done and Done! Here are your top To Do's";
                    userMessage.DisplayMessage = "Here are your top " + contentTiles.Count + " To Do's";
                    response = VoiceCommandResponse.CreateResponse(userMessage, contentTiles);
                    await voiceServiceConnection.ReportSuccessAsync(response);
                }
                else
                {
                    userMessage.DisplayMessage = userMessage.SpokenMessage = "OK then";
                    response = VoiceCommandResponse.CreateResponse(userMessage);
                    await voiceServiceConnection.ReportSuccessAsync(response);
                }
                break;
        }
    }
    catch (Exception ex)
    {
        // BUG FIX: previously the exception was swallowed silently unless a debugger
        // happened to be attached; log it so field failures are diagnosable.
        Debug.WriteLine(ex);
        if (Debugger.IsAttached)
        {
            Debugger.Break();
        }
    }
    finally
    {
        if (this.serviceDeferral != null)
        {
            // Complete the service deferral
            this.serviceDeferral.Complete();
        }
    }
}
/// <summary>
/// Queries the Azure Mobile Service address table and shows nearby locations
/// (name, street, number) as Cortana content tiles.
/// </summary>
private async void SendCompletionMessageForLocations()
{
    // First, create the VoiceCommandUserMessage with the strings
    // that Cortana will show and speak.
    var userMessage = new VoiceCommandUserMessage();
    userMessage.DisplayMessage = "Hier sind ein paar Orte in Deiner Nähe.";
    userMessage.SpokenMessage = "Orte in Deiner Nähe.";

    // Keep Cortana engaged while the (possibly slow) table query runs.
    string loadingLocations = "Suche nach Orte";
    await ShowProgressScreen(loadingLocations);

    var locationsContentTiles = new List<VoiceCommandContentTile>();
    MobileServiceInvalidOperationException exception = null;
    try
    {
        // This code is querying the address table.
        items = await addressTable.ToCollectionAsync();
    }
    catch (MobileServiceInvalidOperationException e)
    {
        exception = e;
    }

    if (exception != null)
    {
        Debug.WriteLine("Error loading items");
    }
    else
    {
        if (!items.Any())
        {
            string foundNoTripToDestination = "Keine Orte gefunden";
            userMessage.DisplayMessage = foundNoTripToDestination;
            userMessage.SpokenMessage = foundNoTripToDestination;
        }
        else
        {
            // BUG FIX: removed a per-iteration counter (int i = 1; ... i++;) that was
            // re-declared on every loop pass and never read.
            foreach (var item in items)
            {
                var locationTile = new VoiceCommandContentTile();

                // To handle UI scaling, Cortana automatically looks up files with FileName.scale-<n>.ext
                // formats based on the requested filename. See the VoiceCommandService\Images folder for an example.
                locationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
                locationTile.Image = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///AlternaVoiceCommandService/Images/GreyTile.png"));
                locationTile.Title = item.Name;
                if (item.Street != null)
                {
                    locationTile.TextLine1 = item.Street.ToString();
                }
                if (item.Number != null)
                {
                    locationTile.TextLine2 = item.Number.ToString();
                }
                locationsContentTiles.Add(locationTile);
            }
        }
    }

    // Create the VoiceCommandResponse from the userMessage and list of content tiles.
    var response = VoiceCommandResponse.CreateResponse(userMessage, locationsContentTiles);

    // Ask Cortana to display the user message and content tile and
    // also speak the user message.
    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Queries the weather service over HTTP for the given condition and shows the
/// returned values (Celsius, Fahrenheit, humidity) as Cortana content tiles.
/// This demonstrates a simple response flow in Cortana.
/// </summary>
/// <param name="condition">The condition keyword to query, expected to be in the phrase list.</param>
/// <returns></returns>
private async Task ShowInfomation(string condition)
{
    // If this operation is expected to take longer than 0.5 seconds, the task must
    // provide a progress response to Cortana prior to starting the operation, and
    // provide updates at most every 5 seconds.
    string loadingTripToDestination = string.Format(
        cortanaResourceMap.GetValue("LoadingInsiderCondition", cortanaContext).ValueAsString, condition);
    await ShowProgressScreen(loadingTripToDestination);

    var userMessage = new VoiceCommandUserMessage();
    var destinationsContentTiles = new List<VoiceCommandContentTile>();

    // Set a title message for the page.
    string message = cortanaResourceMap.GetValue("currentCondition", cortanaContext).ValueAsString;
    userMessage.DisplayMessage = message;
    // BUG FIX: the spoken message was previously the literal string "message"
    // instead of the resource text held in the 'message' variable.
    userMessage.SpokenMessage = message;

    // POST the condition to the sensor endpoint and collect the raw reply.
    string result = null;
    using (HttpClient _httpClient = new HttpClient())
    {
        var _cts = new CancellationTokenSource();
        try
        {
            // Form data to POST to the service.
            var postData = new HttpFormUrlEncodedContent(
                new List<KeyValuePair<string, string>>
                {
                    new KeyValuePair<string, string>("param1", condition),
                });

            HttpResponseMessage httpresponse = await _httpClient.PostAsync(
                new Uri("http://" + "10.168.32.44" + ":0808/"), postData).AsTask(_cts.Token);

            // Anything other than HTTP 200 is prefixed as a connection failure message.
            if ((int)httpresponse.StatusCode != 200)
            {
                result = string.Format("连接失败,状态码为:{0}", ((int)httpresponse.StatusCode));
            }
            result += await httpresponse.Content.ReadAsStringAsync();
            Debug.WriteLine(result);
        }
        catch (TaskCanceledException)
        {
            // Request was cancelled via the CancellationTokenSource.
            result += "取消了";
        }
        catch (Exception)
        {
            // Any other failure (DNS, refused connection, timeout, ...).
            result += "连接失败";
        }
    }

    // Expected payload is "celsius;fahrenheit;humidity"; a single field means the
    // reply was an error string instead.
    char[] seprator = { ';' };
    string[] results = result.Split(seprator);

    if (results.Length == 1)
    {
        var destinationTile = new VoiceCommandContentTile();
        destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
        destinationTile.Image = await Package.Current.InstalledLocation.GetFileAsync("VoiceService\\Images\\weather.png");
        destinationTile.Title = "错误信息";
        destinationTile.TextLine1 = Trim100(result);
        destinationsContentTiles.Add(destinationTile);
    }
    else
    {
        // One tile per value; the switch over i in the original collapsed into a
        // title lookup table. Show at most three tiles.
        string[] titles = { "温度(摄氏度)", "温度(华氏度)", "湿度" };
        for (int i = 0; i < 3 && i < results.Length; i++)
        {
            var destinationTile = new VoiceCommandContentTile();
            destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
            destinationTile.Image = await Package.Current.InstalledLocation.GetFileAsync("VoiceService\\Images\\weather.png");
            destinationTile.Title = titles[i];
            destinationTile.TextLine1 = Trim100(results[i]);
            destinationsContentTiles.Add(destinationTile);
        }
    }

    var response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);
    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Queries the warehouse sensor service and shows the current temperature and
/// humidity as Cortana content tiles. If the temperature reaches 28°C the user
/// is asked whether the fan should be switched on.
/// </summary>
/// <param name="condition">The condition keyword, expected to be in the phrase list.</param>
/// <returns></returns>
private async Task ShowInfomation(string condition)
{
    // If this operation is expected to take longer than 0.5 seconds, the task must
    // provide a progress response to Cortana prior to starting the operation, and
    // provide updates at most every 5 seconds.
    string loadingTripToDestination = string.Format(
        cortanaResourceMap.GetValue("LoadingInsiderCondition", cortanaContext).ValueAsString, condition);
    await ShowProgressScreen(loadingTripToDestination);

    var userMessage = new VoiceCommandUserMessage();
    var destinationsContentTiles = new List<VoiceCommandContentTile>();

    // Expected payload is "celsius;fahrenheit;humidity"; a single field means the
    // service returned an error string instead.
    string result = await GetContent("hello");
    char[] seprator = { ';' };
    string[] results = result.Split(seprator);

    if (results.Length == 1)
    {
        var destinationTile = new VoiceCommandContentTile();
        destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
        destinationTile.Image = await Package.Current.InstalledLocation.GetFileAsync("VoiceService\\Images\\weather.png");
        destinationTile.Title = "错误信息";
        destinationTile.TextLine1 = result;
        destinationsContentTiles.Add(destinationTile);
    }
    else
    {
        // One tile per value with its unit suffix; at most three tiles.
        string[] titles = { "摄氏度", "华氏度", "湿度" };
        string[] units = { "℃", "℉", "%RH" };
        for (int i = 0; i < 3 && i < results.Length; i++)
        {
            var destinationTile = new VoiceCommandContentTile();
            destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
            destinationTile.Image = await Package.Current.InstalledLocation.GetFileAsync("VoiceService\\Images\\weather.png");
            destinationTile.Title = titles[i];
            destinationTile.TextLine1 = results[i] + units[i];
            destinationsContentTiles.Add(destinationTile);
        }
    }

    // BUG FIX: double.Parse(results[0]) previously threw on the error path
    // (results.Length == 1) or on malformed data, crashing the background task.
    double temp = 0;
    if (results.Length == 1 || !double.TryParse(results[0], out temp))
    {
        try
        {
            userMessage.DisplayMessage = "无法获取库房温湿度";
            userMessage.SpokenMessage = "无法获取库房温湿度";
            var errorResponse = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);
            await voiceServiceConnection.ReportSuccessAsync(errorResponse);
        }
        catch (Exception ex)
        {
            Debug.WriteLine(ex.Message);
        }
        return;
    }

    if (temp < 28)
    {
        try
        {
            userMessage.DisplayMessage = "库房状态正常";
            userMessage.SpokenMessage = "库房温湿度正常";
            var response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);
            await voiceServiceConnection.ReportSuccessAsync(response);
        }
        catch (Exception ex)
        {
            string meg = ex.Message;
            Debug.WriteLine(meg);
        }
    }
    else
    {
        VoiceCommandResponse response;

        // Prompt the user for confirmation before switching the fan on.
        var userPrompt = new VoiceCommandUserMessage();
        string cancelTripToDestination = "气温超过28度,是否打开风扇?";
        userPrompt.DisplayMessage = userPrompt.SpokenMessage = cancelTripToDestination;

        var userReprompt = new VoiceCommandUserMessage();
        string confirmCancelTripToDestination = "超过28度,是否打开风扇?";
        userReprompt.DisplayMessage = userReprompt.SpokenMessage = confirmCancelTripToDestination;

        var cancelledContentTiles = new List<VoiceCommandContentTile>();
        response = VoiceCommandResponse.CreateResponseForPrompt(userPrompt, userReprompt, cancelledContentTiles);

        string parameter = "fanopen";
        try
        {
            var voiceCommandConfirmation = await voiceServiceConnection.RequestConfirmationAsync(response);
            if (voiceCommandConfirmation != null)
            {
                if (voiceCommandConfirmation.Confirmed == true)
                {
                    userMessage = new VoiceCommandUserMessage();
                    string stropen = "正在打开风扇";
                    await ShowProgressScreen(stropen);

                    // Ask the device to switch the fan on; an empty reply means it failed.
                    string res = await GetContent(parameter);
                    if (string.IsNullOrEmpty(res))
                    {
                        string cancelledTripToDestination = "风扇好像有点问题请稍后再试";
                        userMessage.DisplayMessage = userMessage.SpokenMessage = cancelledTripToDestination;
                        response = VoiceCommandResponse.CreateResponse(userMessage, cancelledContentTiles);
                        response.AppLaunchArgument = "打开风扇";
                    }
                    else
                    {
                        string cancelledTripToDestination = "风扇已打开,请放心吧。";
                        userMessage.DisplayMessage = userMessage.SpokenMessage = cancelledTripToDestination;
                        response = VoiceCommandResponse.CreateResponse(userMessage, cancelledContentTiles);
                        response.AppLaunchArgument = "打开风扇";
                    }
                }
                else
                {
                    // User declined: acknowledge and do nothing.
                    userMessage = new VoiceCommandUserMessage();
                    string keepingTripToDestination = "好吧,那就随它去吧!";
                    userMessage.DisplayMessage = userMessage.SpokenMessage = keepingTripToDestination;
                    response = VoiceCommandResponse.CreateResponse(userMessage);
                    response.AppLaunchArgument = "cancel";
                }
                await voiceServiceConnection.ReportSuccessAsync(response);
            }
        }
        catch (Exception ex)
        {
            string meg = ex.Message;
            Debug.WriteLine(meg);
        }
    }
}
// NOTE(review): a large commented-out earlier draft of this method was removed here;
// it remains available in source-control history.
/// <summary>
/// Shows the voice commands declared in the VCD (for the current culture) as a
/// Cortana disambiguation list, lets the user pick one, and confirms the choice.
/// Selecting "More" shows the remaining commands instead.
/// </summary>
private async Task CortanaHelpList()
{
    // Messages Cortana shows/speaks for the list and for re-prompts.
    var msgback = new VoiceCommandUserMessage();
    var msgRepeat = new VoiceCommandUserMessage();

    var userMessage = new VoiceCommandUserMessage();
    userMessage.DisplayMessage = "Here is the help list for you";
    userMessage.SpokenMessage = "Here is the help list for you";

    var storageFile = await Package.Current.InstalledLocation.GetFileAsync("assets\\artwork.png");
    await ShowProgressScreen();

    // Load the temporary copy of the VCD file.
    var tempVoiceFile = await ApplicationData.Current.TemporaryFolder.GetFileAsync("_voices.xml");
    var randomAccessStream = await tempVoiceFile.OpenReadAsync();
    var stream = randomAccessStream.AsStreamForRead();
    var xml = XDocument.Load(stream);

    // Create items for each command. Ideally, should be limited to a small number of items.
    var destinationContentTiles = new List<VoiceCommandContentTile>();
    var ns = XNamespace.Get("http://schemas.microsoft.com/voicecommands/1.2");
    var xmlns = XNamespace.Get("http://www.w3.org/XML/1998/namespace");

    // Select the CommandSet matching the current user culture.
    var currentLocation = CultureInfo.CurrentCulture.Name.ToLower();
    var commandSet = (from c in xml.Descendants()
                      where ns.GetName("CommandSet") == c.Name
                      where c.Attribute(xmlns.GetName("lang")).Value == currentLocation
                      select c);

    // Collect every Command element in that set.
    var commandList = (from c in commandSet.Descendants()
                       where ns.GetName("Command") == c.Name
                       select c).ToList();
    var totalCommandNo = Math.Min(commandList.Count, 4);

    foreach (var command in commandList.Take(totalCommandNo))
    {
        destinationContentTiles.Add(new VoiceCommandContentTile
        {
            AppLaunchArgument = command.Attribute("Name").Value,
            ContentTileType = VoiceCommandContentTileType.TitleOnly,
            Title = command.Element(ns.GetName("Example")).Value,
        });
    }

    // If the list was truncated, offer a "More" tile.
    if (totalCommandNo == 4)
    {
        destinationContentTiles.Add(new VoiceCommandContentTile
        {
            ContentTileType = VoiceCommandContentTileType.TitleOnly,
            Title = "More voice commands",
            AppLaunchArgument = "More",
        });
    }

    // Loop replaces the previous 'goto TilesList': show the list, ask for a pick,
    // then ask for confirmation; repeat until the user confirms.
    VoiceCommandResponse response;
    VoiceCommandDisambiguationResult selectedRes;
    while (true)
    {
        // Cortana will handle re-prompting if the user does not provide a valid response.
        msgback.DisplayMessage = msgback.SpokenMessage = "Here is the help list for you";
        msgRepeat.DisplayMessage = msgRepeat.SpokenMessage = "Here is another help list for you";
        response = VoiceCommandResponse.CreateResponseForPrompt(msgback, msgRepeat, destinationContentTiles);

        // BUG FIX: if Cortana is dismissed, RequestDisambiguationAsync returns null;
        // this was previously dereferenced unconditionally and crashed the task.
        selectedRes = await voiceServiceConnection.RequestDisambiguationAsync(response);
        if (selectedRes == null)
        {
            return;
        }

        // Confirm the selection (yes/no).
        msgback.DisplayMessage = msgback.SpokenMessage = "Are you sure you want select " + selectedRes.SelectedItem.Title + " ?";
        msgRepeat.DisplayMessage = msgRepeat.SpokenMessage = "Please select Yes or No";
        response = VoiceCommandResponse.CreateResponseForPrompt(msgback, msgRepeat);

        var result = await voiceServiceConnection.RequestConfirmationAsync(response);
        if (result == null)
        {
            // Cortana dismissed during confirmation.
            return;
        }
        if (result.Confirmed)
        {
            break;
        }
        // Not confirmed: show the help list again.
    }

    if (selectedRes.SelectedItem.AppLaunchArgument == "More")
    {
        var moreTilesList = new List<VoiceCommandContentTile>();
        // BUG FIX: the loop bound was 'commandList.Count - 1', which always skipped
        // the last declared command.
        for (int i = totalCommandNo; i < commandList.Count; i++)
        {
            moreTilesList.Add(new VoiceCommandContentTile
            {
                AppLaunchArgument = commandList[i].Attribute("Name").Value,
                ContentTileType = VoiceCommandContentTileType.TitleOnly,
                Title = commandList[i].Element(ns.GetName("Example")).Value,
            });
        }
        // NOTE(review): this may exceed MaxSupportedVoiceCommandContentTiles for
        // large VCDs — confirm against the command count.
        await voiceServiceConnection.ReportSuccessAsync(VoiceCommandResponse.CreateResponse(userMessage, moreTilesList));
        return;
    }

    msgback.DisplayMessage = msgback.SpokenMessage = $"You've selected {selectedRes.SelectedItem.Title}";
    msgRepeat.DisplayMessage = msgRepeat.SpokenMessage = $"You've selected {selectedRes.SelectedItem.Title}";
    response = VoiceCommandResponse.CreateResponseForPrompt(msgback, msgRepeat);
    await voiceServiceConnection.ReportSuccessAsync(response);
}
/// <summary>
/// Demonstrates providing the user with a choice between multiple items. In this case, if a user
/// has two trips to the same destination with different dates, this will provide a way to differentiate
/// them. Provide a way to choose between the items.
/// </summary>
/// <param name="trips">The set of trips to choose between</param>
/// <param name="disambiguationMessage">The initial disambiguation message</param>
/// <param name="secondDisambiguationMessage">Repeat prompt retry message</param>
/// <returns>The selected trip, or null if Cortana was dismissed.</returns>
private async Task<Model.Trip> DisambiguateTrips(IEnumerable<Model.Trip> trips, string disambiguationMessage, string secondDisambiguationMessage)
{
    // Create the first prompt message.
    var userPrompt = new VoiceCommandUserMessage();
    userPrompt.DisplayMessage = userPrompt.SpokenMessage = disambiguationMessage;

    // Create a re-prompt message if the user responds with an out-of-grammar response.
    var userReprompt = new VoiceCommandUserMessage();
    // BUG FIX: this previously assigned the second message to userPrompt (copy/paste),
    // overwriting the first prompt and leaving the re-prompt message empty.
    userReprompt.DisplayMessage = userReprompt.SpokenMessage = secondDisambiguationMessage;

    // Create items for each trip. Ideally, should be limited to a small number of items.
    var destinationContentTiles = new List<VoiceCommandContentTile>();
    int i = 1;
    foreach (Model.Trip trip in trips)
    {
        var destinationTile = new VoiceCommandContentTile();

        // To handle UI scaling, Cortana automatically looks up files with FileName.scale-<n>.ext
        // formats based on the requested filename.
        // See the VoiceCommandService\Images folder for an example.
        destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
        destinationTile.Image = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///AdventureWorks.VoiceCommands/Images/GreyTile.png"));

        // The AppContext can be any arbitrary object, and will be maintained for the response.
        destinationTile.AppContext = trip;
        string dateFormat = "";
        if (trip.StartDate != null)
        {
            dateFormat = trip.StartDate.Value.ToString(dateFormatInfo.LongDatePattern);
        }
        else
        {
            // The app allows a trip to not have a date, but the choices must be unique
            // so they can be spoken aloud and be distinct, so add a number to identify them.
            dateFormat = string.Format("{0}", i);
        }
        destinationTile.Title = trip.Destination + " " + dateFormat;
        destinationTile.TextLine1 = trip.Description;

        destinationContentTiles.Add(destinationTile);
        i++;
    }

    // Cortana will handle re-prompting if the user does not provide a valid response.
    var response = VoiceCommandResponse.CreateResponseForPrompt(userPrompt, userReprompt, destinationContentTiles);

    // If Cortana is dismissed in this operation, null will be returned.
    var voiceCommandDisambiguationResult = await voiceServiceConnection.RequestDisambiguationAsync(response);
    if (voiceCommandDisambiguationResult != null)
    {
        return (Model.Trip)voiceCommandDisambiguationResult.SelectedItem.AppContext;
    }

    return null;
}
/// <summary>
/// Entry point for the Cortana voice-command background task. Handles "where"
/// (show a wide tile rendered with the city name) and "sendMessageInCanvas"
/// (relay a dictated message to the bot and speak its reply).
/// </summary>
/// <param name="taskInstance">The background task instance supplying the app-service trigger.</param>
protected override async void OnRun(IBackgroundTaskInstance taskInstance)
{
    // Keep the background task alive for the duration of the Cortana interaction.
    this.serviceDeferral = taskInstance.GetDeferral();
    taskInstance.Canceled += OnTaskCanceled;

    var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;
    VoiceCommandUserMessage userMessage;
    VoiceCommandResponse response;
    try
    {
        voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
        voiceServiceConnection.VoiceCommandCompleted += VoiceCommandCompleted;

        var voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();
        switch (voiceCommand.CommandName)
        {
            case "where":
            {
                // Render a wide icon containing the city name and show it on a tile.
                var city = voiceCommand.Properties["city"][0];
                var iconFileName = await GenerateWideIconWithCity(city);
                StorageFile cityIcon = await ApplicationData.Current.LocalFolder.GetFileAsync(iconFileName);

                var tiles = new List<VoiceCommandContentTile>
                {
                    new VoiceCommandContentTile
                    {
                        ContentTileType = VoiceCommandContentTileType.TitleWith280x140IconAndText,
                        AppLaunchArgument = city,
                        Image = cityIcon,
                    },
                };

                userMessage = new VoiceCommandUserMessage()
                {
                    DisplayMessage = "Here you go Best Friend, it's " + city,
                    SpokenMessage = "Here you go Best Friend, it's " + city
                };
                response = VoiceCommandResponse.CreateResponse(userMessage, tiles);
                await voiceServiceConnection.ReportSuccessAsync(response);
                break;
            }
            case "sendMessageInCanvas":
            {
                // Forward the dictated message to the bot and report its answer.
                var message = voiceCommand.Properties["message"][0];
                var bot = new Bot();
                string firstResponse = await bot.SendMessageAndGetResponseFromBot(message);

                var responseMessage = new VoiceCommandUserMessage();
                responseMessage.DisplayMessage = responseMessage.SpokenMessage =
                    "Your Best Friend says \"" + firstResponse + "\"";
                response = VoiceCommandResponse.CreateResponse(responseMessage);
                await voiceServiceConnection.ReportSuccessAsync(response);
                break;
            }
        }
    }
    catch (Exception ex)
    {
        Debug.WriteLine(ex.Message);
    }
    finally
    {
        // Complete the service deferral
        this.serviceDeferral?.Complete();
    }
}
private async Task ShowResults(List<Session> results, VoiceCommandUserMessage userMessage) { var destinationsContentTiles = new List<VoiceCommandContentTile>(); foreach (var kvp in results) { var destinationTile = new VoiceCommandContentTile(); destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText; destinationTile.AppLaunchArgument = kvp.Title.GetValidString(); destinationTile.TextLine1 = kvp.Title.GetValidString(); destinationTile.TextLine2 = kvp.Speakers[0].Name.GetValidString(); destinationTile.TextLine3 = kvp.Location.Room.GetValidString(); IRandomAccessStreamReference thumbnail = RandomAccessStreamReference.CreateFromUri(new Uri(kvp.Speakers[0].Photo)); destinationTile.Image = await StorageFile.CreateStreamedFileFromUriAsync(kvp.Title, new Uri(kvp.Speakers[0].Photo), thumbnail); destinationTile.AppLaunchArgument = kvp.Title; destinationsContentTiles.Add(destinationTile); } var response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles); response.AppLaunchArgument = "session"; await voiceServiceConnection.ReportSuccessAsync(response); }
private async void SendWeatherForecastIn(CitySettingsModel where) { // Take action and determine when the next trip to destination // Insert code here. // Replace the hardcoded strings used here with strings // appropriate for your application. // First, create the VoiceCommandUserMessage with the strings // that Cortana will show and speak. var userMessage = new VoiceCommandUserMessage(); userMessage.DisplayMessage = where.City + "的天气状况:"; userMessage.SpokenMessage = "这是" + where.City + "的天气状况"; // Optionally, present visual information about the answer. // For this example, create a VoiceCommandContentTile with an // icon and a string. var destinationsContentTiles = new List<VoiceCommandContentTile>(); var destinationTile = new VoiceCommandContentTile(); destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWithText; // The user can tap on the visual content to launch the app. // Pass in a launch argument to enable the app to deep link to a // page relevant to the item displayed on the content tile. destinationTile.AppLaunchArgument = where.Id; destinationTile.Title = where.City; destinationTile.TextLine1 = "August 3rd 2015"; destinationsContentTiles.Add(destinationTile); // Create the VoiceCommandResponse from the userMessage and list // of content tiles. var response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles); // Cortana will present a “Go to app_name” link that the user // can tap to launch the app. // Pass in a launch to enable the app to deep link to a page // relevant to the voice command. response.AppLaunchArgument = where.Id; // Ask Cortana to display the user message and content tile and // also speak the user message. await voiceServiceConnection.ReportSuccessAsync(response); }