Example #1
        async void taskcounter()
        {
            var x = TodoService.GetMustDoList().Count;
            VoiceCommandResponse response = null;

            if (x == 0)
            {
                var userMessage = new VoiceCommandUserMessage();
                userMessage.DisplayMessage = "No tasks on Denna";
                userMessage.SpokenMessage  = "You have no tasks ! Add one";
                response =
                    VoiceCommandResponse.CreateResponse(userMessage);
            }
            else
            {
                var userMessage = new VoiceCommandUserMessage();
                userMessage.DisplayMessage = "You have " + x + " tasks" + " on Denna";
                userMessage.SpokenMessage  = "You have " + x + " tasks";
                response =
                    VoiceCommandResponse.CreateResponse(userMessage);
            }

            // Cortana will present a “Go to app_name” link that the user
            // can tap to launch the app.
            // Pass in a launch argument so the app can deep link to a page
            // relevant to the voice command.
            response.AppLaunchArgument = "agsonCortana";

            // Ask Cortana to display the user message and content tile and
            // also speak the user message.
            await voiceServiceConnection.ReportSuccessAsync(response);
        }
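The AppLaunchArgument set above only matters if the foreground app reads it when the user taps Cortana's "Go to app" link. None of these examples show that side. A minimal sketch, assuming the conventional protocol activation Cortana uses for background voice command services (a "LaunchContext" query parameter) and a hypothetical MainPage to navigate to:

        // Sketch only (App.xaml.cs): handle the "Go to app" tap from Cortana's UI.
        // The LaunchContext parameter name and MainPage target are assumptions.
        protected override void OnActivated(IActivatedEventArgs args)
        {
            if (args.Kind == ActivationKind.Protocol)
            {
                var protocolArgs = args as ProtocolActivatedEventArgs;
                var decoder = new Windows.Foundation.WwwFormUrlDecoder(protocolArgs.Uri.Query);
                string launchContext = decoder.GetFirstValueByName("LaunchContext"); // e.g. "agsonCortana"

                var rootFrame = Window.Current.Content as Frame ?? new Frame();
                Window.Current.Content = rootFrame;
                rootFrame.Navigate(typeof(MainPage), launchContext);
                Window.Current.Activate();
            }
        }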
Example #2
        private async Task HandleLightsOnOrOff(string lightState)
        {
            await ShowProgressScreen("Hold on");

            VoiceCommandUserMessage userMessage = new VoiceCommandUserMessage();

            string defaultMessage = $"Turning your lights {lightState}";

            JObject on = new JObject();

            try
            {
                SetOnOrOff(lightState, on);
                // Await the PUT so failures are caught here and the request completes before we respond.
                await httpClient.PutAsync($"{baseUrl}/groups/0/action", new StringContent(on.ToString()));
                userMessage.DisplayMessage = defaultMessage;
                userMessage.SpokenMessage  = defaultMessage;
            }
            catch (Exception)
            {
                SetError(userMessage);
                VoiceCommandResponse errResponse = VoiceCommandResponse.CreateResponse(userMessage);
                await voiceServiceConnection.ReportFailureAsync(errResponse);

                // Don't also report success after reporting a failure.
                return;
            }

            VoiceCommandResponse response = VoiceCommandResponse.CreateResponse(userMessage);
            await voiceServiceConnection.ReportSuccessAsync(response);
        }
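Several of the examples above and below call a ShowProgressScreen helper that is not included in any snippet. A minimal sketch of what such a helper typically looks like, assuming the voiceServiceConnection field used throughout these examples:

        private async Task ShowProgressScreen(string message)
        {
            var progressMessage = new VoiceCommandUserMessage
            {
                DisplayMessage = message,
                SpokenMessage  = message
            };

            // ReportProgressAsync keeps Cortana's UI alive while the real work runs;
            // Cortana expects a response within 0.5 seconds and an update at least every 5 seconds.
            var progressResponse = VoiceCommandResponse.CreateResponse(progressMessage);
            await voiceServiceConnection.ReportProgressAsync(progressResponse);
        }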
Example #3
        private async Task SendCompletionMessageForFixedAmount(string amount, string subject)
        {
            var userMessage = new VoiceCommandUserMessage();
            int amountnumber;

            if (int.TryParse(amount, out amountnumber))
            {
                userMessage.DisplayMessage = String.Format("Das habe ich gespeichert.", amount, subject);
                userMessage.SpokenMessage  = String.Format("Ich habe {0} {1} gespeichert.", amount, subject);

                var contentTiles = new List <VoiceCommandContentTile>();
                contentTiles.Add(new VoiceCommandContentTile()
                {
                    ContentTileType = VoiceCommandContentTileType.TitleOnly,
                    Title           = String.Format("{0} {1} gespeichert.", amount, subject)
                });

                var response = VoiceCommandResponse.CreateResponse(userMessage, contentTiles);
                await voiceServiceConnection.ReportSuccessAsync(response);
            }
            else
            {
                // The amount could not be parsed; no Cortana response is sent in this case.
            }
            await Task.Delay(2000);
        }
Example #4
        private async Task CompleteMessage(string message)
        {
            // Provide a completion message to the user.
            var userMessage = new VoiceCommandUserMessage();

            userMessage.DisplayMessage = userMessage.SpokenMessage = message;
            var response = VoiceCommandResponse.CreateResponse(userMessage);
            await voiceServiceConnection.ReportSuccessAsync(response);
        }
        private async Task ProcessInterestingFactAsync(SpeechRecognitionSemanticInterpretation interpretation)
        {
            await Core.Helpers.BackgroundProgressHelper.ShowProgressScreen(voiceServiceConnection, "Okay, get ready");

            // The fact that Cortana will speak.
            string fact = await Core.Helpers.FactHelper.GetRandomFactAsync();
            var DestinationContentTiles = new List<VoiceCommandContentTile>();
            var destinationTile = new VoiceCommandContentTile();
            try
            {
                // The tile style (and therefore the size) Cortana will display.
                destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith280x140IconAndText;

                // The image shown inside the tile.
                destinationTile.Image = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///OfficePoint.Service.Background/Images/Fact_280.png"));

                // App-specific context passed back if the user taps the tile.
                destinationTile.AppContext = null;

                // Launch argument used to deep link into the app.
                destinationTile.AppLaunchArgument = "type=" + VoiceCommandType.InterestingQueryFact;

                destinationTile.Title = fact;

                // The text Cortana writes under the title.
                destinationTile.TextLine1 = "";

                DestinationContentTiles.Add(destinationTile);
            }

            catch (Exception)
            {
                // Ignore tile creation errors; the response below is still sent.
            }

            // Create the response.
            VoiceCommandResponse voiceResponse = VoiceCommandResponse.CreateResponse(new VoiceCommandUserMessage()
            {
                // The text Cortana writes.
                DisplayMessage = "Did you know...",
                // The randomly chosen fact that Cortana speaks.
                SpokenMessage = fact,
            }, DestinationContentTiles);

            // voiceServiceConnection is the connection to Cortana; use it to send the response.
            await voiceServiceConnection.ReportSuccessAsync(voiceResponse);
        }
        private async Task HandleChangeWallpaper(VoiceCommandServiceConnection connection, VoiceCommandUserMessage user_message)
        {
            //copy images to appdata
            var local_folder = ApplicationData.Current.LocalFolder;
            var install_path = Package.Current.InstalledLocation;
            var media_path   = await install_path.GetFolderAsync("media\\images");

            var images = await media_path.GetFilesAsync();

            foreach (var image in images)
            {
                try
                {
                    // Skip files that have already been copied to local storage.
                    await local_folder.GetFileAsync(image.Name);

                    continue;
                }
                catch { }
                await image.CopyAsync(local_folder, image.Name, NameCollisionOption.ReplaceExisting);
            }

            //change wallpaper and prepare response back to user

            var result = await UserSettings.ChangeWallpaperAsync();

            user_message.SpokenMessage = "Your wallpaper was modified, do you want me to change the lock screen as well?";

            var backup_message = new VoiceCommandUserMessage
            {
                SpokenMessage = "Change your lock screen",
            };

            var response       = VoiceCommandResponse.CreateResponseForPrompt(user_message, backup_message);
            var confirm_result = await connection.RequestConfirmationAsync(response);

            if (confirm_result.Confirmed)
            {
                await UserSettings.ChangeLockScreenAsync();

                user_message.SpokenMessage = "Your lock screen was also modified.";
                response = VoiceCommandResponse.CreateResponse(user_message);
                await connection.ReportSuccessAsync(response);
            }
            else
            {
                user_message.SpokenMessage = "okay, you're all set then";
                response = VoiceCommandResponse.CreateResponse(user_message);
                await connection.ReportSuccessAsync(response);
            }
        }
Example #7
        private async void ShowEndMyPresentation()
        {
            var userMessage = new VoiceCommandUserMessage();

            //string message = "Okay Oliver, ich starte jetzt deinen Vortrag und wünsch Dir viel Erfolg.";

            string message = "Oliver! Mein Name ist nicht Siri. Und du solltest mich lieber nicht noch mal so ansprechen, sonst bin ich echt sauer.";

            userMessage.SpokenMessage  = message;
            userMessage.DisplayMessage = message;

            var response = VoiceCommandResponse.CreateResponse(userMessage);
            await voiceServiceConnection.ReportSuccessAsync(response);
        }
Example #8
        async void taskcounter()
        {
            try
            {
                var x = _service.GetMustDoList().Count;
                VoiceCommandResponse response = null;
                if (x == 0)
                {
                    var userMessage = new VoiceCommandUserMessage();
                    userMessage.DisplayMessage = "No tasks on Denna";
                    userMessage.SpokenMessage  = "You have no tasks ! Add one";
                    response =
                        VoiceCommandResponse.CreateResponse(userMessage);
                }
                else
                {
                    var userMessage = new VoiceCommandUserMessage();
                    userMessage.DisplayMessage = "You have " + x + " tasks" + " on Denna";
                    userMessage.SpokenMessage  = "You have " + x + " tasks";
                    response =
                        VoiceCommandResponse.CreateResponse(userMessage);
                }
                // Cortana will present a “Go to app_name” link that the user
                // can tap to launch the app.
                // Pass in a launch argument so the app can deep link to a page
                // relevant to the voice command.
                response.AppLaunchArgument = "agsonCortana";

                // Ask Cortana to display the user message and content tile and
                // also speak the user message.
                await voiceServiceConnection.ReportSuccessAsync(response);
            }
            catch (Exception ex)
            {
                var userMessage = new VoiceCommandUserMessage();
                userMessage.SpokenMessage  = "You gotta screenshot report this to app developer";
                userMessage.DisplayMessage = ex.Message;

                var response = VoiceCommandResponse.CreateResponse(userMessage);

                // When launching the app in the foreground, pass an app
                // specific launch parameter to indicate what page to show.
                response.AppLaunchArgument = "agsonCortana";
                await voiceServiceConnection.ReportSuccessAsync(response);
            }
        }
        //Search for the requested data (population from the past) and give a response in cortana
        private async Task SendCompletionMessageForPastPopulation(string country, string year, string searchType)
        {
            // If this operation is expected to take longer than 0.5 seconds, the task must
            // provide a progress response to Cortana prior to starting the operation, and
            // provide updates at most every 5 seconds.
            string calculatingPopulation = string.Format(
                cortanaResourceMap.GetValue("CalculatingPopulation", cortanaContext).ValueAsString,
                country, year);

            await ShowProgressScreen(calculatingPopulation);

            //this var will be filled with the according response data from the following REST Call
            var result = await InvokeRequestResponseService(country, year, searchType);

            string population = Convert.ToDouble(result).ToString("#,##,, Million", CultureInfo.InvariantCulture);

            var userMessage         = new VoiceCommandUserMessage();
            var responseContentTile = new VoiceCommandContentTile();

            // Set the type of the content tile
            responseContentTile.ContentTileType = VoiceCommandContentTileType.TitleWithText;

            //fill the responseContentTile with the data we got
            responseContentTile.AppLaunchArgument = country;
            responseContentTile.Title             = country + " " + year;

            responseContentTile.TextLine1 = "Population: " + result;

            // The content tiles must be passed to the response as a list
            var tileList = new List <VoiceCommandContentTile>();

            tileList.Add(responseContentTile);

            // Set a message for the Response Cortana Page
            string message = String.Format(cortanaResourceMap.GetValue("ShowPopulation", cortanaContext).ValueAsString, country, year, population);

            userMessage.DisplayMessage = message;
            userMessage.SpokenMessage  = message;

            var response = VoiceCommandResponse.CreateResponse(userMessage, tileList);

            // General info (Power BI dashboard)
            await Launcher.LaunchUriAsync(new Uri(@"https://app.powerbi.com/groups/me/dashboards/1e13afdf-70f8-4d7c-b4f5-c95499802d44"));

            // Country-specific info (Power BI report)
            await Launcher.LaunchUriAsync(new Uri(@"https://app.powerbi.com/groups/me/reports/6ae73462-1d4b-4bb7-928f-75d23fc6bc84/ReportSection?filter=World/Country eq '" + country + "'"));

            await voiceServiceConnection.ReportSuccessAsync(response);
        }
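InvokeRequestResponseService is not shown in the snippet above. As a rough illustration only, it could be a thin HTTP wrapper along these lines; the URL, query shape, and response format are placeholders, not the original service's API:

        // Hypothetical sketch of the REST call referenced above.
        private static async Task<string> InvokeRequestResponseService(string country, string year, string searchType)
        {
            using (var client = new Windows.Web.Http.HttpClient())
            {
                // Placeholder endpoint; assumed to return the population value as plain text.
                var uri = new Uri($"https://example.com/population?country={Uri.EscapeDataString(country)}&year={year}&type={searchType}");
                return await client.GetStringAsync(uri);
            }
        }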
Example #10
        /// <summary>
        /// Handles the command to change the state of a specific light.
        /// </summary>
        private async Task ChangeSpecificLightStateAsync()
        {
            string name  = _voiceCommand.Properties["name"][0];
            string state = _voiceCommand.Properties["state"][0];
            Light  light = _lights.FirstOrDefault(x =>
                                                  x.Name.Equals(name, StringComparison.OrdinalIgnoreCase));

            if (null != light)
            {
                await ExecutePhrase(light, state);

                var response = CreateCortanaResponse($"Turned {name} {state}.");
                await _voiceServiceConnection.ReportSuccessAsync(response);
            }
        }
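The CreateCortanaResponse helper used here is not part of the snippet; its likely shape (an assumption, not the original code) is simply:

        // Assumed shape of the CreateCortanaResponse helper referenced above.
        private static VoiceCommandResponse CreateCortanaResponse(string message)
        {
            var userMessage = new VoiceCommandUserMessage
            {
                DisplayMessage = message,
                SpokenMessage  = message
            };

            return VoiceCommandResponse.CreateResponse(userMessage);
        }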
        private async Task HandleMakeSuggestions(VoiceCommandServiceConnection connection, VoiceCommand command, VoiceCommandUserMessage user_message)
        {
            var tiles    = new List <VoiceCommandContentTile>();
            var location = command.SpeechRecognitionResult.SemanticInterpretation.Properties["location"][0].ToLower();

            //find events
            var events = State.Events;

            events = (from evt in events
                      where evt.Address.ToLower().Contains(location)
                      select evt).ToList();

            //create tiles
            foreach (var evt in events)
            {
                tiles.Add(new VoiceCommandContentTile
                {
                    ContentTileType   = VoiceCommandContentTileType.TitleWithText,
                    AppLaunchArgument = $"event,event_id={evt.EventID}",
                    Title             = evt.EventTitle,
                    TextLine1         = evt.Description,
                });
            }

            //respond
            var response = VoiceCommandResponse.CreateResponse(user_message, tiles);

            response.AppLaunchArgument = "all_events";

            await connection.ReportSuccessAsync(response);
        }
        private async void SendCompletionMessageForDestination(string destination)
        {
            var userMessage = new VoiceCommandUserMessage();

            userMessage.DisplayMessage = "Here’s your cab details.";
            userMessage.SpokenMessage  = "Ola cab /Uber Cab.";


            var destinationsContentTiles = new List <VoiceCommandContentTile>();

            var destinationTile = new VoiceCommandContentTile();

            destinationTile.ContentTileType =
                VoiceCommandContentTileType.TitleWith68x68IconAndText;
            destinationTile.Image = await StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///ContosoCabs.VoiceCommands/Images/cablogo.png"));

            destinationTile.AppLaunchArgument = destination;
            destinationTile.Title             = "Hyderabad";
            destinationTile.TextLine1         = "you have been amazing";
            destinationsContentTiles.Add(destinationTile);

            // Create the VoiceCommandResponse from the userMessage and list
            // of content tiles.
            var response =
                VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);

            response.AppLaunchArgument =
                string.Format("destination={0}", "Hyderabad");
            await voiceServiceConnection.ReportSuccessAsync(response);
        }
        public async void Run(IBackgroundTaskInstance taskInstance)
        {
            // Create the deferral by requesting it from the task instance
            serviceDeferral = taskInstance.GetDeferral();

            AppServiceTriggerDetails triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

            if (triggerDetails != null && triggerDetails.Name.Equals("VoiceCommandService"))
            {
                voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);

                VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();

                // Perform the appropriate command depending on the operation defined in VCD
                switch (voiceCommand.CommandName)
                {
                case "CheckTemperature":
                    VoiceCommandUserMessage userMessage = new VoiceCommandUserMessage();
                    userMessage.DisplayMessage = "The current temperature is 23 degrees";
                    userMessage.SpokenMessage  = "The current temperature is 23 degrees";

                    VoiceCommandResponse response = VoiceCommandResponse.CreateResponse(userMessage, null);
                    await voiceServiceConnection.ReportSuccessAsync(response);

                    break;

                default:
                    break;
                }
            }

            // Once the asynchronous method(s) are done, close the deferral
            serviceDeferral.Complete();
        }
        public async void Run(IBackgroundTaskInstance taskInstance)
        {
            // Create the deferral by requesting it from the task instance
            serviceDeferral = taskInstance.GetDeferral();

            AppServiceTriggerDetails triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

            if (triggerDetails != null && triggerDetails.Name.Equals("IMCommandVoice"))
            {
                voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);

                VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();

                // Perform the appropriate command depending on the operation defined in VCD
                switch (voiceCommand.CommandName)
                {
                    case "oldback":
                        VoiceCommandUserMessage userMessage = new VoiceCommandUserMessage();
                        userMessage.DisplayMessage = "The current temperature is 23 degrees";
                        userMessage.SpokenMessage = "The current temperature is 23 degrees";

                        VoiceCommandResponse response = VoiceCommandResponse.CreateResponse(userMessage, null);
                        await voiceServiceConnection.ReportSuccessAsync(response);
                        break;

                    default:
                        break;
                }
            }

            // Once the asynchronous method(s) are done, close the deferral
            serviceDeferral.Complete();
        }
        private async Task ShowLatestNews()
        {
            string progress = "Getting the latest news...";

            await ShowProgressScreen(progress);

            RssService feedService = new RssService();
            var        news        = await feedService.GetNews("http://blog.qmatteoq.com/feed");

            List <VoiceCommandContentTile> contentTiles = new List <VoiceCommandContentTile>();

            VoiceCommandUserMessage message = new VoiceCommandUserMessage();
            string text = "Here is the latest news";

            message.DisplayMessage = text;
            message.SpokenMessage  = text;

            foreach (FeedItem item in news.Take(5))
            {
                VoiceCommandContentTile tile = new VoiceCommandContentTile();
                tile.ContentTileType = VoiceCommandContentTileType.TitleOnly;
                tile.Title           = item.Title;
                tile.TextLine1       = item.PublishDate.ToString("g");

                contentTiles.Add(tile);
            }

            VoiceCommandResponse response = VoiceCommandResponse.CreateResponse(message, contentTiles);
            await _voiceServiceConnection.ReportSuccessAsync(response);
        }
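RssService and FeedItem are app-specific types that are not included above. A minimal stand-in built on Windows.Web.Syndication (an assumption about how such a service might look) would be:

        // Minimal stand-in for the RssService/FeedItem types used above (assumption).
        public class FeedItem
        {
            public string Title { get; set; }
            public DateTimeOffset PublishDate { get; set; }
        }

        public class RssService
        {
            public async Task<List<FeedItem>> GetNews(string url)
            {
                var client = new Windows.Web.Syndication.SyndicationClient();
                var feed = await client.RetrieveFeedAsync(new Uri(url));

                return feed.Items
                           .Select(i => new FeedItem
                           {
                               Title       = i.Title.Text,
                               PublishDate = i.PublishedDate
                           })
                           .ToList();
            }
        }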
Example #16
        private async Task QueryBaikeByKeyword(string keyword)
        {
            var userProgressMessage = new VoiceCommandUserMessage();

            userProgressMessage.DisplayMessage = userProgressMessage.SpokenMessage = $"正在查询{keyword}";
            VoiceCommandResponse response = VoiceCommandResponse.CreateResponse(userProgressMessage);
            await voiceServiceConnection.ReportProgressAsync(response);

            var userMessage = new VoiceCommandUserMessage();

            var data = await QueryBaike.BaiduBaike.QueryByKeyword(keyword);

            userMessage.DisplayMessage = userMessage.SpokenMessage = data.Summary;

            VoiceCommandResponse queryResponse = null;

            if (data.Image != null)
            {
                queryResponse = VoiceCommandResponse.CreateResponse(userMessage, new List <VoiceCommandContentTile> {
                    new VoiceCommandContentTile()
                    {
                        Image = data.Image, ContentTileType = data.TileType
                    }
                });
            }
            else
            {
                queryResponse = VoiceCommandResponse.CreateResponse(userMessage);
            }

            queryResponse.AppLaunchArgument = keyword;

            await voiceServiceConnection.ReportSuccessAsync(queryResponse);
        }
        private static async Task HandleReadEventsCommandAsync(VoiceCommandServiceConnection connection)
        {
            try
            {
                ReadRepository ReadRepository = new ReadRepository();
                // Generate a waiting message for the user to see
                var userMessage = new VoiceCommandUserMessage();
                userMessage.DisplayMessage = "Buscando eventos próximos ..";
                userMessage.SpokenMessage  = "Buscando eventos próximos ";
                var response = VoiceCommandResponse.CreateResponse(userMessage);
                await connection.ReportProgressAsync(response);

                var today   = DateTime.Now.Date;
                var notices = await ReadRepository.GetNextEvents();


                if (notices.Count > 1)
                {
                    userMessage.SpokenMessage      =
                        userMessage.DisplayMessage =
                            $"El dia de hoy se realizan {notices.Count} eventos";

                    var titleList = new List <VoiceCommandContentTile>();
                    var count     = 0;
                    foreach (var noticeModel in notices)
                    {
                        if (count <= 5)
                        {
                            titleList.Add(new VoiceCommandContentTile
                            {
                                Title           = noticeModel.Title.ToString(),
                                ContentTileType = VoiceCommandContentTileType.TitleWithText,
                                TextLine1       = noticeModel.Date.ToString()
                            });
                            ++count;
                        }
                    }
                    response = VoiceCommandResponse.CreateResponse(userMessage, titleList);
                    await connection.ReportProgressAsync(response);
                }
                else
                {
                    if (notices != null && notices.Count > 0)
                    {
                        userMessage.SpokenMessage      =
                            userMessage.DisplayMessage =
                                $"Usted tiene {notices.First().Title} eventos próximos";
                        response = VoiceCommandResponse.CreateResponse(userMessage);
                    }
                }

                await connection.ReportSuccessAsync(response);
            }
            catch (Exception ex)
            {
                throw;
            }
        }
Example #18
        private async Task ShowNearestResults(List <Sight> nearest)
        {
            var userMessage = new VoiceCommandUserMessage
            {
                DisplayMessage = "Here are your closest Sights:",
                SpokenMessage  = "Here are your closest sights"
            };

            var sightsContentTiles = new List <VoiceCommandContentTile>();

            foreach (var sight in nearest)
            {
                var sightTile = new VoiceCommandContentTile();
                sightTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
                if (sight.ImagePath.StartsWith("ms-appx"))
                {
                    sightTile.Image =
                        await StorageFile.GetFileFromApplicationUriAsync(new Uri(sight.ImagePath));
                }
                else
                {
                    sightTile.Image = await StorageFile.GetFileFromPathAsync(sight.ImagePath);
                }
                sightTile.Title             = sight.Name;
                sightTile.TextLine1         = sight.Description;
                sightTile.AppContext        = sight.Id;
                sightTile.AppLaunchArgument = sight.Id.ToString("D");
                sightsContentTiles.Add(sightTile);
            }


            var response = VoiceCommandResponse.CreateResponse(userMessage, sightsContentTiles);
            await _voiceServiceConnection.ReportSuccessAsync(response);
        }
        private async void SendCompletionMessageForDestination(double probability, DSAVoiceCommand voiceCommand)
        {
            // Replace the hardcoded strings used here with strings
            // appropriate for your application.

            // First, create the VoiceCommandUserMessage with the strings
            // that Cortana will show and speak.
            var userMessage = new VoiceCommandUserMessage();

            userMessage.DisplayMessage = $"Die Chance auf Erfolg ist: {probability:P2}";
            userMessage.SpokenMessage  = $"Die Chance auf Erfolg ist: {probability:P2}";

            // Create the VoiceCommandResponse from the userMessage and list
            // of content tiles.
            var response =
                VoiceCommandResponse.CreateResponse(userMessage);

            // Cortana will present a “Go to app_name” link that the user
            // can tap to launch the app.
            // Pass in a launch argument to enable the app to deep link to a page
            // relevant to the voice command.
            response.AppLaunchArgument = $"{voiceCommand.Eigentschaft1};{voiceCommand.Eigentschaft2};{voiceCommand.Eigentschaft3}";

            // Ask Cortana to display the user message and content tile and
            // also speak the user message.
            await voiceServiceConnection.ReportSuccessAsync(response);
        }
        /// <summary>
        /// Search for, and show details related to a single trip, if the trip can be
        /// found. This demonstrates a simple response flow in Cortana.
        /// </summary>
        /// <param name="destination">The destination, expected to be in the phrase list.</param>
        /// <returns></returns>
        private async Task SendCompletionMessageForDestination(string destination)
        {
            // If this operation is expected to take longer than 0.5 seconds, the task must
            // provide a progress response to Cortana prior to starting the operation, and
            // provide updates at most every 5 seconds.
            string loadingTripToDestination = string.Format(
                cortanaResourceMap.GetValue("Loading", cortanaContext).ValueAsString,
                destination);

            await ShowProgressScreen(loadingTripToDestination);


            // Look for the specified trip. The destination *should* be pulled from the grammar we
            // provided, and the subsequently updated phrase list, so it should be a 1:1 match, including case.
            // However, we might have multiple trips to the destination. For now, we just pick the first.

            var userMessage = new VoiceCommandUserMessage();

            userMessage.DisplayMessage = "OK";
            userMessage.SpokenMessage  = "OK";


            var response = VoiceCommandResponse.CreateResponse(userMessage);



            await voiceServiceConnection.ReportSuccessAsync(response);
        }
        public async void Run(IBackgroundTaskInstance taskInstance)
        {
            serviceDeferral        = taskInstance.GetDeferral();
            taskInstance.Canceled += OnTaskCanceled;

            var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

            cortanaResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("Resources");

            cortanaContext = ResourceContext.GetForViewIndependentUse();

            dateFormatInfo = CultureInfo.CurrentCulture.DateTimeFormat;

            if (triggerDetails != null && triggerDetails.Name == "JeedomAppVoiceCommandService")
            {
                try
                {
                    voiceServiceConnection =
                        VoiceCommandServiceConnection.FromAppServiceTriggerDetails(
                            triggerDetails);

                    voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;

                    VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();

                    var    userMessage = new VoiceCommandUserMessage();
                    string message     = "";


                    // Add a Jeedom request to find the matching command
                    switch (voiceCommand.CommandName)
                    {
                    case "JeedomInteractList":
                        string CortanaVoiceCommande = voiceCommand.Properties["InteractList"][0];
                        await Jeedom.RequestViewModel.Instance.interactTryToReply(CortanaVoiceCommande);

                        message = Jeedom.RequestViewModel.Instance.InteractReply;
                        break;

                    default:
                        LaunchAppInForeground();
                        break;
                    }

                    userMessage.DisplayMessage = message;
                    userMessage.SpokenMessage  = message;


                    var response = VoiceCommandResponse.CreateResponse(userMessage);
                    response.AppLaunchArgument = message;


                    await voiceServiceConnection.ReportSuccessAsync(response);
                }
                catch (Exception ex)
                {
                    //System.Diagnostics.Debug.WriteLine("Handling Voice Command failed " + ex.ToString());
                }
            }
        }
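LaunchAppInForeground, OnTaskCanceled, and OnVoiceCommandCompleted are referenced by several Run methods in these examples but never shown. Sketches of the usual implementations, assuming the serviceDeferral and voiceServiceConnection fields used throughout:

        private async void LaunchAppInForeground()
        {
            var userMessage = new VoiceCommandUserMessage
            {
                SpokenMessage = "Launching the app"
            };

            var response = VoiceCommandResponse.CreateResponse(userMessage);
            response.AppLaunchArgument = "";

            // Hand control off to the foreground app.
            await voiceServiceConnection.RequestAppLaunchAsync(response);
        }

        private void OnTaskCanceled(IBackgroundTaskInstance sender, BackgroundTaskCancellationReason reason)
        {
            // Release the deferral so the host can shut the service down.
            serviceDeferral?.Complete();
        }

        private void OnVoiceCommandCompleted(VoiceCommandServiceConnection sender, VoiceCommandCompletedEventArgs args)
        {
            serviceDeferral?.Complete();
        }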
Example #22
        private async Task ProcessGenerateFactAsync(SpeechRecognitionSemanticInterpretation interpretation)
        {
            await Helpers.ProgressHelper.ShowProgressScreenAsync(voiceServiceConnection, "Okay, get ready...");

            string fact = await Helpers.FactHelper.GetFactAsync();

            var destinationsContentTiles = new List <VoiceCommandContentTile>();

            var destinationTile = new VoiceCommandContentTile();

            try
            {
                destinationTile.ContentTileType   = VoiceCommandContentTileType.TitleWithText;
                destinationTile.AppContext        = null;
                destinationTile.AppLaunchArgument = "fact=" + fact;
                destinationTile.Title             = fact;
                destinationTile.TextLine1         = "(tap to add to favorites)";

                destinationsContentTiles.Add(destinationTile);
            }
            catch (Exception)
            {
                // Ignore tile creation errors; the response below is still sent.
            }

            VoiceCommandResponse response = VoiceCommandResponse.CreateResponse(new VoiceCommandUserMessage()
            {
                DisplayMessage = "Did you know...",
                SpokenMessage  = fact
            }, destinationsContentTiles);

            await voiceServiceConnection.ReportSuccessAsync(response);
        }
Example #23
        private async Task ShowResults(List <Session> results, VoiceCommandUserMessage userMessage)
        {
            var destinationsContentTiles = new List <VoiceCommandContentTile>();

            foreach (var kvp in results)
            {
                var destinationTile = new VoiceCommandContentTile();
                destinationTile.ContentTileType   = VoiceCommandContentTileType.TitleWith68x68IconAndText;
                destinationTile.AppLaunchArgument = kvp.Title.GetValidString();
                destinationTile.TextLine1         = kvp.Title.GetValidString();
                destinationTile.TextLine2         = kvp.Speakers[0].Name.GetValidString();
                destinationTile.TextLine3         = kvp.Location.Room.GetValidString();
                IRandomAccessStreamReference thumbnail =
                    RandomAccessStreamReference.CreateFromUri(new Uri(kvp.Speakers[0].Photo));
                destinationTile.Image = await StorageFile.CreateStreamedFileFromUriAsync(kvp.Title,
                                                                                         new Uri(kvp.Speakers[0].Photo), thumbnail);

                destinationsContentTiles.Add(destinationTile);
            }
            var response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);

            response.AppLaunchArgument = "session";
            await voiceServiceConnection.ReportSuccessAsync(response);
        }
Example #24
        private async Task SendCompletionMessageForParkingPlace()
        {
            await ShowProgressScreen("I'm getting free parking places for Outlook");

            var places = await LocationHelper.GetParkingPlaces();

            var message = $"There are {places.Current} free places and this is {places.TrendString}.";

            if (places.Trend == ParkTrend.FillingFast || places.Trend == ParkTrend.FillingSlow)
            {
                if (places.RemainingMinutes < 60)
                {
                    var fillTime = DateTime.Now.AddMinutes(places.RemainingMinutes);
                    message += $" Parking place will be full at {fillTime.Hour}:{fillTime.Minute}.";
                }
                else
                {
                    message += $" It will take at least one hour before parking is full.";
                }
            }

            var userMessage = new VoiceCommandUserMessage();

            userMessage.DisplayMessage = message;
            userMessage.SpokenMessage  = message;

            var response = VoiceCommandResponse.CreateResponse(userMessage);
            await voiceServiceConnection.ReportSuccessAsync(response);
        }
Example #25
 private static async Task AsyncReportSuccess(VoiceCommandServiceConnection connection, string spokenMessage, string displayMessage, IEnumerable <VoiceCommandContentTile> contentTiles)
 {
     var responseMsg = new VoiceCommandUserMessage {
         SpokenMessage = spokenMessage, DisplayMessage = displayMessage
     };
     var response = VoiceCommandResponse.CreateResponse(responseMsg, contentTiles);
     await connection.ReportSuccessAsync(response);
 }
Example #26
        public async void ReportSuccess(VoiceCommandServiceConnection voiceCommandServiceConnection)
        {
            VoiceCommandUserMessage userMessage = new VoiceCommandUserMessage();

            userMessage.SpokenMessage = userMessage.DisplayMessage = "已成功";
            VoiceCommandResponse response = VoiceCommandResponse.CreateResponse(userMessage);
            await voiceCommandServiceConnection.ReportSuccessAsync(response);
        }
        /// <summary>
        /// The background task entrypoint.
        ///
        /// Background tasks must respond to activation by Cortana within 0.5 seconds, and must
        /// report progress to Cortana every 5 seconds (unless Cortana is waiting for user
        /// input). There is no execution time limit on the background task managed by Cortana,
        /// but developers should use plmdebug (https://msdn.microsoft.com/library/windows/hardware/jj680085%28v=vs.85%29.aspx)
        /// on the Cortana app package in order to prevent Cortana timing out the task during
        /// debugging.
        ///
        /// The Cortana UI is dismissed if Cortana loses focus.
        /// The background task is also dismissed even if being debugged.
        /// Use of Remote Debugging is recommended in order to debug background task behaviors.
        /// Open the project properties for the app package (not the background task project),
        /// and enable Debug -> "Do not launch, but debug my code when it starts".
        /// Alternatively, add a long initial progress screen, and attach to the background task process while it executes.
        /// </summary>
        /// <param name="taskInstance">Connection to the hosting background service process.</param>
        public async void Run(IBackgroundTaskInstance taskInstance)
        {
            // Create the deferral by requesting it from the task instance
            serviceDeferral = taskInstance.GetDeferral();

            AppServiceTriggerDetails triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

            if (triggerDetails != null && triggerDetails.Name.Equals("VitWifiVoiceCommandService"))
            {
                voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);

                VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();

                // Perform the appropriate command depending on the operation defined in VCD
                switch (voiceCommand.CommandName)
                {
                case "Login":
                    string x = NetworkNames.ToString();
                    VoiceCommandUserMessage userMessage = new VoiceCommandUserMessage();
                    userMessage.DisplayMessage = string.Format("The current networks is {0} ", x);
                    userMessage.SpokenMessage  = string.Format("The current networks is {0} ", x);

                    VoiceCommandResponse response = VoiceCommandResponse.CreateResponse(userMessage, null);
                    await voiceServiceConnection.ReportSuccessAsync(response);

                    break;

                case "Logout":
                    string logoutMessage = NetworkNames.ToString();
                    VoiceCommandUserMessage userLogoutMessage = new VoiceCommandUserMessage();
                    userLogoutMessage.DisplayMessage = string.Format("The current networks are {0} ", logoutMessage);
                    userLogoutMessage.SpokenMessage  = string.Format("The current networks are {0} ", logoutMessage);
                    VoiceCommandResponse logoutResponse = VoiceCommandResponse.CreateResponse(userLogoutMessage, null);
                    await voiceServiceConnection.ReportSuccessAsync(logoutResponse);

                    break;

                default:
                    break;
                }
            }

            // Once the asynchronous method(s) are done, close the deferral
            serviceDeferral.Complete();
        }
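The documentation comment above calls out the 0.5-second activation and 5-second progress requirements. For operations that can run long, one illustrative way (not code from these examples) to keep Cortana updated while the real work runs:

        // Illustrative sketch: report progress repeatedly while awaiting a long-running task.
        // Assumes the same voiceServiceConnection field as the surrounding examples.
        private async Task<T> RunWithProgressAsync<T>(Task<T> work, string progressText)
        {
            var progressMessage = new VoiceCommandUserMessage
            {
                DisplayMessage = progressText,
                SpokenMessage  = progressText
            };
            var progressResponse = VoiceCommandResponse.CreateResponse(progressMessage);

            // Re-send progress before Cortana's 5-second window elapses.
            while (await Task.WhenAny(work, Task.Delay(TimeSpan.FromSeconds(4))) != work)
            {
                await voiceServiceConnection.ReportProgressAsync(progressResponse);
            }

            return await work;
        }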
Example #28
        /// <summary>
        /// Shows a completion message to the user.
        /// </summary>
        /// <param name="message">
        /// The message to display (and speak).
        /// </param>
        /// <returns>
        /// A <see cref="Task"/> that represents the operation.
        /// </returns>
        private async Task ReportCompleteAsync(string message)
        {
            var userMessage = new VoiceCommandUserMessage();

            userMessage.DisplayMessage = userMessage.SpokenMessage = message;

            var response = VoiceCommandResponse.CreateResponse(userMessage);
            await voiceConnection.ReportSuccessAsync(response);
        }
        public async void Run(IBackgroundTaskInstance taskInstance)
        {
            serviceDeferral = taskInstance.GetDeferral();
            taskInstance.Canceled += OnTaskCanceled;

            var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

            cortanaResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("Resources");

            cortanaContext = ResourceContext.GetForViewIndependentUse();

            dateFormatInfo = CultureInfo.CurrentCulture.DateTimeFormat;

            if (triggerDetails != null && triggerDetails.Name == "DomojeeVoiceCommandService")
            {
                try
                {
                    voiceServiceConnection =
                        VoiceCommandServiceConnection.FromAppServiceTriggerDetails(
                            triggerDetails);

                    voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;

                    VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();
                    var userMessage = new VoiceCommandUserMessage();
                    string message = "";


                    // Add a Jeedom request to find the matching command
                    switch (voiceCommand.CommandName)
                    {
                        case "JeedomInteractList":
                            string CortanaVoiceCommande= voiceCommand.Properties["InteractList"][0];
                            await Jeedom.RequestViewModel.Instance.interactTryToReply(CortanaVoiceCommande);
                            message = Jeedom.RequestViewModel.Instance.InteractReply;
                            break;
                        default:
                            LaunchAppInForeground();
                            break;
                    }

                    userMessage.DisplayMessage = message;
                    userMessage.SpokenMessage  = message;

                    var response = VoiceCommandResponse.CreateResponse(userMessage);
                    response.AppLaunchArgument = message;

                    await voiceServiceConnection.ReportSuccessAsync(response);
                }
                catch (Exception ex)
                {
                    System.Diagnostics.Debug.WriteLine("Handling Voice Command failed " + ex.ToString());
                }
            }
        }
Example #30
        private async void RespondTouser(string text)
        {
            var    userMessage = new VoiceCommandUserMessage();
            string keepingTripToDestination = text; // e.g. "How can I help you?"

            userMessage.DisplayMessage = userMessage.SpokenMessage = keepingTripToDestination;

            VoiceCommandResponse response = VoiceCommandResponse.CreateResponse(userMessage);
            await voiceServiceConnection.ReportSuccessAsync(response);
        }
        private async Task HandleSearch(string searchQuery)
        {
            var loadingText = $"searching for {searchQuery} ...";

            await ShowProgressScreen(loadingText);

            var service = new YouTubeSvc();

            // query service to get items
            var searchItems = await service.ListItems(searchQuery, App.MainPageViewModel.MaxResults, "video");

            var userMessage = new VoiceCommandUserMessage();
            var destinationsContentTiles = new List <VoiceCommandContentTile>();

            if (searchItems.Count() == 0)
            {
                string foundNoItem = "Your search did not return any results.";

                userMessage.DisplayMessage = foundNoItem;
                userMessage.SpokenMessage  = foundNoItem;
            }
            else
            {
                string message = string.Empty;

                foreach (var foundItems in searchItems)
                {
                    var destinationTile = new VoiceCommandContentTile();

                    // To handle UI scaling, Cortana automatically looks up files with FileName.scale-<n>.ext formats based on the requested filename.
                    // See the VoiceCommandService\Images folder for an example.
                    destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
                    destinationTile.Image           = await StorageFile.GetFileFromApplicationUriAsync(new Uri(foundItems.Thumbnail));

                    // destinationTile.AppLaunchArgument = foundCanteen.Name;
                    destinationTile.Title     = foundItems.Title;
                    destinationTile.TextLine1 = foundItems.Description;

                    message += $"{foundItems.Title}.";

                    destinationsContentTiles.Add(destinationTile);
                }

                userMessage.DisplayMessage = message;
                userMessage.SpokenMessage  = message;
            }

            var response = VoiceCommandResponse.CreateResponse(userMessage, destinationsContentTiles);

            await voiceServiceConnection.ReportSuccessAsync(response);
        }
Example #32
        private async Task SendResponse(string textResponse)
        {
            var userMessage = new VoiceCommandUserMessage
            {
                DisplayMessage = textResponse,
                SpokenMessage  = textResponse
            };

            var response = VoiceCommandResponse.CreateResponse(userMessage);
            await voiceServiceConnection.ReportSuccessAsync(response);
        }
        public async void Run(IBackgroundTaskInstance taskInstance)
        {
            serviceDeferral = taskInstance.GetDeferral();

            taskInstance.Canceled += OnTaskCanceled;

            var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

            if (triggerDetails != null && triggerDetails.Name == "HolVoiceCommandService")
            {
                try
                {
                    voiceServiceConnection =
                                    VoiceCommandServiceConnection.FromAppServiceTriggerDetails(
                                        triggerDetails);

                    voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;


                    VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();

                    switch (voiceCommand.CommandName)
                    {
                        case "SayHello":

                            var userMessage = new VoiceCommandUserMessage();
                            userMessage.DisplayMessage = "お店で合言葉話してね。";
                            userMessage.SpokenMessage = "ごきげんよう。";

                            var response = VoiceCommandResponse.CreateResponse(userMessage);

                            await voiceServiceConnection.ReportSuccessAsync(response);

                            break;


                        default:
                            break;
                    }


                }
                catch (Exception ex)
                {
                    System.Diagnostics.Debug.WriteLine("Handling Voice Command failed " + ex.ToString());
                }
            }


        }
Example #34
        protected override async void OnRun(IBackgroundTaskInstance taskInstance)
        {
            this.serviceDeferral = taskInstance.GetDeferral();

            var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

            // Get the voiceCommandServiceConnection from the trigger details
            voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);

            VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();

            VoiceCommandResponse response;

            // switch statement to handle different commands
            switch (voiceCommand.CommandName)
            {
                case "sendMessage":
                    // get the message the user has spoken
                    var message = voiceCommand.Properties["message"][0];
                    //var bot = new Bot();

                    // get response from bot
                    string firstResponse = "";
                        //await bot.SendMessageAndGetResponseFromBot(message);

                    // create response messages for Cortana to respond
                    var responseMessage = new VoiceCommandUserMessage();
                    var responseMessage2 = new VoiceCommandUserMessage();
                    responseMessage.DisplayMessage =
                        responseMessage.SpokenMessage = firstResponse;
                    responseMessage2.DisplayMessage =
                        responseMessage2.SpokenMessage = "did you not hear me?";

                    // create a response and ask Cortana to respond with success
                    response = VoiceCommandResponse.CreateResponse(responseMessage);
                    await voiceServiceConnection.ReportSuccessAsync(response);

                    break;
            }

            if (this.serviceDeferral != null)
            {
                //Complete the service deferral
                this.serviceDeferral.Complete();
            }

        }
        public async void Run(IBackgroundTaskInstance taskInstance)
        {
            serviceDeferral = taskInstance.GetDeferral();

            taskInstance.Canceled += OnTaskCanceled;

            var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

            // Load localized resources for strings sent to Cortana to be displayed to the user.
            cortanaResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("Resources");

            // Select the system language, which is what Cortana should be running as.
            cortanaContext = ResourceContext.GetForViewIndependentUse();


            if (triggerDetails != null && triggerDetails.Name == "HolVoiceCommandService")
            {
                try
                {
                    voiceServiceConnection =
                        VoiceCommandServiceConnection.FromAppServiceTriggerDetails(
                            triggerDetails);

                    voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;

                    VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();

                    switch (voiceCommand.CommandName)
                    {
                        case "SayHello":
                            var userMessage = new VoiceCommandUserMessage();
                            userMessage.DisplayMessage = "Hello!";
                            userMessage.SpokenMessage = "Your app says hi. It is having a great time.";
                            var response = VoiceCommandResponse.CreateResponse(userMessage);
                            await voiceServiceConnection.ReportSuccessAsync(response);
                            break;
                        default:
                            break;
                    }
                }
                catch (Exception ex)
                {
                    System.Diagnostics.Debug.WriteLine("Handling Voice Command failed " + ex.ToString());
                }
            }

        }
Example #36
        protected override async void OnRun(IBackgroundTaskInstance taskInstance)
        {
            this.serviceDeferral = taskInstance.GetDeferral();
            taskInstance.Canceled += OnTaskCanceled;

            var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

            VoiceCommandUserMessage userMessage;
            VoiceCommandResponse response;
            try
            {
                voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
                voiceServiceConnection.VoiceCommandCompleted += VoiceCommandCompleted;
                VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();

                switch (voiceCommand.CommandName)
                {

                    case "getPatientData":
                        userMessage = new VoiceCommandUserMessage();
                        userMessage.SpokenMessage = "Here is the Patient Data";

                        var responseMessage = new VoiceCommandUserMessage();
                        responseMessage.DisplayMessage = responseMessage.SpokenMessage = "Patient Name: John Spartan\nAge: 47\nBlood Type: O+\nPatient ID: 000S00117";

                        response = VoiceCommandResponse.CreateResponse(responseMessage);
                        await voiceServiceConnection.ReportSuccessAsync(response);

                        break;

                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine(ex.Message);
            }
            finally
            {
                if (this.serviceDeferral != null)
                {
                    //Complete the service deferral
                    this.serviceDeferral.Complete();
                }
            }
        }
        public async void Run(IBackgroundTaskInstance taskInstance)
        {
            serviceDeferral = taskInstance.GetDeferral();
            taskInstance.Canceled += OnTaskCanceled;
            var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;
            if (triggerDetails != null && triggerDetails.Name == "CodecampSessionsVoiceCommandService")
            {
                voiceServiceConnection =
                        VoiceCommandServiceConnection.FromAppServiceTriggerDetails(
                            triggerDetails);

                voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;

                VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();

                await _agendaService.GetSessionsAsync();
                switch (voiceCommand.CommandName)
                {
                    case "sayPresentationDescription":
                        var userMessage = new VoiceCommandUserMessage();
                        userMessage.DisplayMessage = "You already forgot? You are going to talk about how I can help developers to create voice activated apps";
                        userMessage.SpokenMessage = "You already forgot? You are going to talk about how I can help developers to create voice activated apps. By the way...asshole, stop forcing me to help you with this stupid presentation. You're lucky I can't use curse words";
                        var response = VoiceCommandResponse.CreateResponse(userMessage);
                        await voiceServiceConnection.ReportSuccessAsync(response);
                        break;
                    case "findSessionsWithCortana":
                        var tags = voiceCommand.SpeechRecognitionResult.SemanticInterpretation.Properties["search"][0];
                        await FindSessionsByTag(tags);
                        break;
                    default:
                        // As with app activation VCDs, we need to handle the possibility that
                        // an app update may remove a voice command that is still registered.
                        // This can happen if the user hasn't run an app since an update.
                        LaunchAppInForeground();
                        break;
                }
            }
        }
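The default branch above falls back to a LaunchAppInForeground helper that is not included in this snippet. A minimal sketch of that helper, assuming the same voiceServiceConnection field, follows the usual RequestAppLaunchAsync pattern:

        private async Task LaunchAppInForeground()
        {
            // Hypothetical sketch: hand control back to the foreground app when the
            // voice command can no longer be handled in the background.
            var userMessage = new VoiceCommandUserMessage();
            userMessage.SpokenMessage = "Launching the app";

            var response = VoiceCommandResponse.CreateResponse(userMessage);
            response.AppLaunchArgument = "";

            await voiceServiceConnection.RequestAppLaunchAsync(response);
        }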
        private static async Task HandleReadNamedaysCommandAsync(VoiceCommandServiceConnection connection)
        {
            var userMessage = new VoiceCommandUserMessage();
            userMessage.DisplayMessage = "Fetching today's namedays for you";
            userMessage.SpokenMessage = "Fetching today's namedays for you";
            var response = VoiceCommandResponse.CreateResponse(userMessage);
            await connection.ReportProgressAsync(response);

            var today = DateTime.Now.Date;
            var namedays = await NamedayRepository.GetAllNamedaysAsync();
            var todaysNameday = namedays.Find(e => e.Day == today.Day && e.Month == today.Month);

            // Guard against days that have no nameday entry to avoid a null reference below.
            if (todaysNameday == null)
            {
                userMessage.SpokenMessage = userMessage.DisplayMessage = "There are no namedays today";
                await connection.ReportSuccessAsync(VoiceCommandResponse.CreateResponse(userMessage));
                return;
            }

            var namedaysAsString = todaysNameday.NamesAsString;

            if (todaysNameday.Names.Count() == 1)
            {
                userMessage.SpokenMessage =
                    userMessage.DisplayMessage =
                    $"It is {namedaysAsString}'s nameday today";

                response = VoiceCommandResponse.CreateResponse(userMessage);
            }
            else
            {
                userMessage.SpokenMessage = $"Today's namedays are: {namedaysAsString}";
                userMessage.DisplayMessage = "Here are today's namedays:";

                var tile = new VoiceCommandContentTile();
                tile.ContentTileType = VoiceCommandContentTileType.TitleOnly;
                tile.Title = namedaysAsString;

                response = VoiceCommandResponse.CreateResponse(userMessage,
                    new List<VoiceCommandContentTile> { tile });
            }

            await connection.ReportSuccessAsync(response);
        }
        public async void Run(IBackgroundTaskInstance taskInstance)
        {
            this.serviceDeferral = taskInstance.GetDeferral();
            try {
                var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;
                if (triggerDetails != null && triggerDetails.Name == "CortanaCommandService")
                {

                    voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
                    var voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();
                    Debug.WriteLine(voiceCommand.CommandName);

                    MainViewModel viewModel = new MainViewModel();
                    var vm = await DataLoadAsync();
                    if (vm != null)
                    {
                        viewModel = vm;
                    }

                    var cols = voiceCommand.CommandName.Split('_');
                    var commandName = cols[0];
                    var stateName = cols[1];

                    var commandViewModel = viewModel.CommandList.First(q => q.Name == commandName);
                    
                    commandViewModel.CurrentStateNum++;
                    var stateViewModel = commandViewModel.StateList.ElementAt(commandViewModel.CurrentStateNum - 1);
                    if (commandViewModel.CurrentStateNum>=commandViewModel.StateList.Count)
                    {
                        commandViewModel.CurrentStateNum = 0;
                    }

                    if(stateViewModel is SuccessStateViewModel)
                    {
                        var state = stateViewModel as SuccessStateViewModel;
                        if (string.IsNullOrEmpty(state.Utterance))
                        {
                            state.Utterance = "";
                        }
                        var message = new VoiceCommandUserMessage();
                        message.SpokenMessage = state.Utterance;
                        message.DisplayMessage = state.Utterance;
                        var response = VoiceCommandResponse.CreateResponse(message);
                        await voiceServiceConnection.ReportSuccessAsync(response);
                    }
                    else if(stateViewModel is ScriptStateViewModel)
                    {
                        var state = stateViewModel as ScriptStateViewModel;
                        if (!string.IsNullOrEmpty(state.Script))
                        {
                            try {
                                ConnectionData connectionData = new ConnectionData();
                                connectionData.AcceptPass = viewModel.PassCode;
                                connectionData.Script = state.Script.Replace("\n", ";").Replace("\r", "").Replace("\t", "");
                                string json = JsonConvert.SerializeObject(connectionData);
                                var byteData = Encoding.UTF8.GetBytes(json);
                                StreamSocket socket = new StreamSocket();

                                await socket.ConnectAsync(new HostName("127.0.0.1"), SettingManager.ServerPort);
                                var writer = new DataWriter(socket.OutputStream);
                                writer.WriteBytes(byteData);
                                await writer.StoreAsync();
                                await writer.FlushAsync();
                                writer.Dispose();
                                socket.Dispose();
                                
                            }
                            catch (Exception)
                            {
                                var errorMsg = new VoiceCommandUserMessage();
                                string msg = "Tried to run the script, but the server was not running";
                                errorMsg.SpokenMessage = msg;
                                errorMsg.DisplayMessage = msg;
                                var errorResponse = VoiceCommandResponse.CreateResponse(errorMsg);
                                await voiceServiceConnection.ReportFailureAsync(errorResponse);
                                return;
                            }
                        }


                        if (string.IsNullOrEmpty(state.Utterance))
                        {
                            state.Utterance = "";
                        }
                        var message = new VoiceCommandUserMessage();
                        message.SpokenMessage = state.Utterance;
                        message.DisplayMessage = state.Utterance;
                        var response = VoiceCommandResponse.CreateResponse(message);
                        await voiceServiceConnection.ReportSuccessAsync(response);
                    }

                    await DataSaveAsync(viewModel);
                }

            }
            catch (Exception e)
            {
                var message = new VoiceCommandUserMessage();
                message.SpokenMessage = "Some kind of error occurred";
                message.DisplayMessage = e.Message;
                var response = VoiceCommandResponse.CreateResponse(message);
                // Surface the error to Cortana as a failure rather than a success.
                await voiceServiceConnection.ReportFailureAsync(response);

                var toast = ToastNotificationManager.GetTemplateContent(ToastTemplateType.ToastImageAndText01);
                ToastNotificationManager.CreateToastNotifier().Show(new ToastNotification(toast));
            }
            

            this.serviceDeferral.Complete();
        }
        protected override async void OnRun(IBackgroundTaskInstance taskInstance)
        {
            serviceDeferral = taskInstance.GetDeferral();
            taskInstance.Canceled += OnTaskCanceled;
            var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

            VoiceCommandUserMessage userMessage;
            VoiceCommandResponse response;
            try
            {
                voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
                voiceServiceConnection.VoiceCommandCompleted += VoiceCommandCompleted;
                VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();

                switch (voiceCommand.CommandName)
                {
                    case "graphParams":
                        await ShowProgressScreen("Working on it...");
                        var modelnumber = voiceCommand.Properties["modelnumber"][0];
                        double lambda = 0;
                        double mu = 0;
                        int model = Models.Point.GetNumberByModel(Models.Point.GetModelByNumber(modelnumber));
                        
                        if (GetAllParameters(model, voiceCommand, ref lambda, ref mu))
                        {
                            bool allowed = false;
                            bool unsupported = false;
                            if (model.Equals(1) || model.Equals(2))
                            {
                                var responseMessage = new VoiceCommandUserMessage()
                                {
                                    DisplayMessage = String.Format("Get likelihood results for the model {0} with λ={1} and μ={2}?", modelnumber, lambda, mu),
                                    SpokenMessage = String.Format("Do you want me to get likelihood results for the model {0} with these input data?", modelnumber)
                                };
                                var repeatMessage = new VoiceCommandUserMessage()
                                {
                                    DisplayMessage = String.Format("Do you still want me to get likelihood results for the model {0} with λ={1} and μ={2}?", modelnumber, lambda, mu),
                                    SpokenMessage = String.Format("Do you still want me to get likelihood results for the model {0} with these input data?", modelnumber)
                                };

                                response = VoiceCommandResponse.CreateResponseForPrompt(responseMessage, repeatMessage);
                                try
                                {
                                    var confirmation = await voiceServiceConnection.RequestConfirmationAsync(response);
                                    allowed = confirmation.Confirmed;
                                }
                                catch
                                { }
                            }
                            else
                            {
                                unsupported = true;
                            }

                            if (allowed)
                            {
                                await ShowProgressScreen("Calculating...");
                                List<VoiceCommandContentTile> resultContentTiles = GetLikelihoodForSelectedModel(lambda, mu, model);
                                userMessage = new VoiceCommandUserMessage()
                                {
                                    DisplayMessage = String.Format("Here is your likelihood results for the model {0}", modelnumber),
                                    SpokenMessage = "Done and Done! Here is your results"
                                };
                                response = VoiceCommandResponse.CreateResponse(userMessage, resultContentTiles);
                                response.AppLaunchArgument = modelnumber;
                                await voiceServiceConnection.ReportSuccessAsync(response);
                            }
                            else if (unsupported)
                            {
                                userMessage = new VoiceCommandUserMessage()
                                {
                                    DisplayMessage = String.Format("Model {0} is not supported now", modelnumber),
                                    SpokenMessage = "Sorry, this model is not supported now"
                                };
                                response = VoiceCommandResponse.CreateResponse(userMessage);
                                response.AppLaunchArgument = modelnumber;
                                await voiceServiceConnection.ReportFailureAsync(response);
                            }
                            else
                            {
                                userMessage = new VoiceCommandUserMessage()
                                {
                                    DisplayMessage = "Okay then",
                                    SpokenMessage = "Okay, then"
                                };
                                response = VoiceCommandResponse.CreateResponse(userMessage);
                                await voiceServiceConnection.ReportSuccessAsync(response);
                            }
                        }
                        else
                        {
                            userMessage = new VoiceCommandUserMessage()
                            {
                                DisplayMessage = "The arguments is incorrect",
                                SpokenMessage = "Sorry, it seems the arguments is incorrect"
                            };
                            response = VoiceCommandResponse.CreateResponse(userMessage);
                            response.AppLaunchArgument = "";
                            await voiceServiceConnection.ReportFailureAsync(response);
                        }
                        break;
                    default:
                        LaunchAppInForeground();
                        break;
                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine(ex.Message);
            }
            finally
            {
                if (serviceDeferral != null)
                {
                    //Complete the service deferral
                    serviceDeferral.Complete();
                }
            }
        }
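The ShowProgressScreen helper called above is not part of this snippet. A minimal sketch, assuming the same voiceServiceConnection field, reports progress so Cortana keeps the session alive during the longer-running work:

        private async Task ShowProgressScreen(string message)
        {
            // Hypothetical sketch: Cortana expects periodic progress reports while
            // long-running work (such as the likelihood calculation) is in flight.
            var progressMessage = new VoiceCommandUserMessage();
            progressMessage.DisplayMessage = progressMessage.SpokenMessage = message;

            var response = VoiceCommandResponse.CreateResponse(progressMessage);
            await voiceServiceConnection.ReportProgressAsync(response);
        }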
Beispiel #41
0
        /// <summary>
        /// Makes Cortana speak the api.ai response.
        /// </summary>
        /// <param name="voiceServiceConnection"></param>
        /// <param name="aiResponse"></param>
        /// <returns></returns>
        public async Task SendResponseToCortanaAsync(VoiceCommandServiceConnection voiceServiceConnection, AIResponse aiResponse)
        {
            var textResponse = aiResponse.Result.Fulfillment?.Speech ?? string.Empty;
            var userMessage = new VoiceCommandUserMessage
            {
                DisplayMessage = textResponse,
                SpokenMessage = textResponse
            };

            var response = VoiceCommandResponse.CreateResponse(userMessage);

            // Cortana will present a “Go to app_name” link that the user 
            // can tap to launch the app. 
            // Pass in a launch to enable the app to deep link to a page 
            // relevant to the voice command.
            //response.AppLaunchArgument =
            //  string.Format("destination={0}", "Las Vegas");

            await voiceServiceConnection.ReportSuccessAsync(response);
        }
Beispiel #42
0
        protected override async void OnRun(IBackgroundTaskInstance taskInstance)
        {
            this.serviceDeferral = taskInstance.GetDeferral();
            taskInstance.Canceled += OnTaskCanceled;

            var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

            VoiceCommandUserMessage userMessage;
            VoiceCommandResponse response;
            try
            {
                voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
                voiceServiceConnection.VoiceCommandCompleted += VoiceCommandCompleted;
                VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();

                switch (voiceCommand.CommandName)
                {
                    case "where":

                        var city = voiceCommand.Properties["city"][0];

                        var imageFile = await GenerateWideIconWithCity(city);
                        var localFolder = ApplicationData.Current.LocalFolder;
                        StorageFile cityIcon = await localFolder.GetFileAsync(imageFile);

                        var contentTiles = new List<VoiceCommandContentTile>();
                        var tile1 = new VoiceCommandContentTile();
                        tile1.ContentTileType = VoiceCommandContentTileType.TitleWith280x140IconAndText;
                        tile1.AppLaunchArgument = city;
                        tile1.Image = cityIcon;
                        contentTiles.Add(tile1);

                        userMessage = new VoiceCommandUserMessage()
                        {
                            DisplayMessage = "Here you go Best Friend, it's " + city,
                            SpokenMessage = "Here you go Best Friend, it's " + city
                        };

                        response = VoiceCommandResponse.CreateResponse(userMessage, contentTiles);
                        await voiceServiceConnection.ReportSuccessAsync(response);

                        break;

                    
                    case "sendMessageInCanvas":
                        var message = voiceCommand.Properties["message"][0];
                        var bot = new Bot();
                        string firstResponse = await bot.SendMessageAndGetResponseFromBot(message);

                        var responseMessage = new VoiceCommandUserMessage();
                        responseMessage.DisplayMessage = responseMessage.SpokenMessage = "Your Best Friend says \"" + firstResponse + "\"";
                        
                        response = VoiceCommandResponse.CreateResponse(responseMessage);
                        await voiceServiceConnection.ReportSuccessAsync(response);

                        break;

                }
            }
            catch (Exception ex)
            {
                Debug.WriteLine(ex.Message);
            }
            finally
            {
                if (this.serviceDeferral != null)
                {
                    //Complete the service deferral
                    this.serviceDeferral.Complete();
                }
            }
        }
        public async void Run(IBackgroundTaskInstance taskInstance)
        {
            serviceDeferral = taskInstance.GetDeferral();
     
            taskInstance.Canceled += OnTaskCanceled;
            
            var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

            if (triggerDetails != null)
            {

                var config = new AIConfiguration("cb9693af-85ce-4fbf-844a-5563722fc27f",
                           "40048a5740a1455c9737342154e86946",
                           SupportedLanguage.English);

                apiAi = new ApiAi(config);
                apiAi.DataService.PersistSessionId();
                
                try
                {
                    voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
                    voiceServiceConnection.VoiceCommandCompleted += VoiceCommandCompleted;
                    var voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();
                    var recognizedText = voiceCommand.SpeechRecognitionResult?.Text;

                    switch (voiceCommand.CommandName)
                    {
                        case "type":
                            {
                                var aiResponse = await apiAi.TextRequestAsync(recognizedText);
                                await apiAi.LaunchAppInForegroundAsync(voiceServiceConnection, aiResponse);
                            }
                            break;
                        case "unknown":
                            {
                                if (!string.IsNullOrEmpty(recognizedText))
                                {
                                    var aiResponse = await apiAi.TextRequestAsync(recognizedText);
                                    if (aiResponse != null)
                                    {
                                        await apiAi.SendResponseToCortanaAsync(voiceServiceConnection, aiResponse);
                                    }
                                }
                            }
                            break;

                        case "greetings":
                            {
                                var aiResponse = await apiAi.TextRequestAsync(recognizedText);
                                
                                var repeatMessage = new VoiceCommandUserMessage
                                {
                                    DisplayMessage = "Repeat please",
                                    SpokenMessage = "Repeat please"
                                };

                                var processingMessage = new VoiceCommandUserMessage
                                {
                                    DisplayMessage = aiResponse?.Result?.Fulfillment?.Speech ?? "Pizza",
                                    SpokenMessage = ""
                                };

                                var resp = VoiceCommandResponse.CreateResponseForPrompt(processingMessage, repeatMessage);
                                await voiceServiceConnection.ReportSuccessAsync(resp);
                                break;
                            }

                        default:
                            if (!string.IsNullOrEmpty(recognizedText))
                            {
                                var aiResponse = await apiAi.TextRequestAsync(recognizedText);
                                if (aiResponse != null)
                                {
                                    await apiAi.SendResponseToCortanaAsync(voiceServiceConnection, aiResponse);
                                }
                            }
                            else
                            {
                                await SendResponse("Can't recognize");
                            }
                            
                            break;
                    }
                    
                }
                catch(Exception e)
                {
                    var message = e.ToString();
                    Debug.WriteLine(message);
                }
                finally
                {
                    serviceDeferral?.Complete();
                }
            }
        }
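The SendResponse helper used in the default branch is not shown here. A plausible sketch, assuming the same voiceServiceConnection field, simply reports a plain-text message back to Cortana:

        private async Task SendResponse(string text)
        {
            // Hypothetical sketch of the fallback reply used when nothing was recognized.
            var userMessage = new VoiceCommandUserMessage
            {
                DisplayMessage = text,
                SpokenMessage = text
            };

            var response = VoiceCommandResponse.CreateResponse(userMessage);
            await voiceServiceConnection.ReportSuccessAsync(response);
        }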
        private async Task ReportSuccess(string message, VoiceCommandServiceConnection voiceCommandServiceConnection)
        {
            var response = this.CreateResponse(message);
            await voiceCommandServiceConnection.ReportSuccessAsync(response);
        }
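CreateResponse here is an instance helper that is not included in the snippet; it presumably wraps the string into a VoiceCommandUserMessage, roughly as in this sketch:

        private VoiceCommandResponse CreateResponse(string message)
        {
            // Hypothetical sketch: use the same text for both the spoken and displayed message.
            var userMessage = new VoiceCommandUserMessage
            {
                DisplayMessage = message,
                SpokenMessage = message
            };

            return VoiceCommandResponse.CreateResponse(userMessage);
        }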
        public async void Run(IBackgroundTaskInstance taskInstance)
        {
            this.serviceDeferral = taskInstance.GetDeferral();


            var triggerDetails =
              taskInstance.TriggerDetails as AppServiceTriggerDetails;
            if (triggerDetails != null)
            {
                try
                {
                    voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
                    voiceServiceConnection.VoiceCommandCompleted += VoiceServiceConnection_VoiceCommandCompleted;
                    VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();
                    switch (voiceCommand.CommandName)
                    {
                        case "AutomationService":
                            {
                                var locationofaction =
                                  voiceCommand.Properties["location"][0];
                                var action =
                                    voiceCommand.Properties["action"][0];
                                var service =
                                    voiceCommand.Properties["service"][0];

                                Message = string.Format("Turned {0} {1} {2}", locationofaction, action, service);

                                var interfaces = new SharedClasses.GetInterfaces();

                                var context = interfaces.GetAutomationList();

                                SharedClasses.PowerCommand command;
                                if (action.ToLower() == "on")
                                    command = SharedClasses.PowerCommand.on;
                                else
                                    command = SharedClasses.PowerCommand.off;

                                var commandURI = GetCommandUri(context, locationofaction.ToLower(), service.ToLower(), command);

                                if (!String.IsNullOrEmpty(commandURI))
                                {
                                    HttpClient client = new HttpClient();
                                    var x = await client.GetAsync(new Uri(commandURI));
                                    if (x.IsSuccessStatusCode)
                                    {
                                        IsSuccessful = true;
                                    }
                                    else
                                    {
                                        Message = "Server reported status code " + x.StatusCode;
                                    }
                                }
                                else
                                {
                                    IsSuccessful = false;
                                    Message = "No command found";
                                }
                                break;
                            }

                        // As a last resort launch the app in the foreground
                        default:
                            LaunchAppInForeground();
                            break;
                    }
                }
                finally
                {
                    if (this.serviceDeferral != null)
                    {
                        if (IsSuccessful)
                        {
                            var userMessage = new VoiceCommandUserMessage();
                            userMessage.DisplayMessage = userMessage.SpokenMessage = Message;
                            var response = VoiceCommandResponse.CreateResponse(userMessage);
                            await voiceServiceConnection.ReportSuccessAsync(response);
                        }
                        else
                        {
                            if (String.IsNullOrEmpty(Message))
                                Message = "Something went wrong";
                            var userMessage = new VoiceCommandUserMessage();
                            userMessage.DisplayMessage = userMessage.SpokenMessage = Message;
                            var response = VoiceCommandResponse.CreateResponse(userMessage);
                            await voiceServiceConnection.ReportFailureAsync(response);
                        }
                        //Complete the service deferral
                        this.serviceDeferral.Complete();
                    }
                }
            }
        }
Beispiel #46
0
        protected override async void OnRun(IBackgroundTaskInstance taskInstance)
        {
            this.serviceDeferral = taskInstance.GetDeferral();
            taskInstance.Canceled += OnTaskCanceled;

            var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

            
            VoiceCommandResponse response;
            try
            {
                voiceServiceConnection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);
                voiceServiceConnection.VoiceCommandCompleted += VoiceCommandCompleted;
                VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();
                VoiceCommandUserMessage userMessage = new VoiceCommandUserMessage();

                List<VoiceCommandContentTile> contentTiles;

                switch (voiceCommand.CommandName)
                {
                    case "what":

                        _todoItemRepository = TODOAdaptiveUISample.Repositories.TodoItemFileRepository.GetInstance();
                        var data = await _todoItemRepository.RefreshTodoItemsAsync();

                        contentTiles = new List<VoiceCommandContentTile>();
                        
                        userMessage.SpokenMessage = "Your Top To Do's are: ";

                        foreach (var item in data.Where(x => x.IsComplete == false).OrderBy(x => x.DueDate).Take((int)VoiceCommandResponse.MaxSupportedVoiceCommandContentTiles))
                        {
                            var tile = new VoiceCommandContentTile();
                            tile.ContentTileType = VoiceCommandContentTileType.TitleWithText;
                            tile.Title = item.Title;
                            //tile.TextLine1 = item.Details;
                            contentTiles.Add(tile);

                            userMessage.SpokenMessage += item.Title + ", ";
                        }

                        userMessage.DisplayMessage = "Here are the top " + contentTiles.Count + " To Do's";

                        
                        
                        response = VoiceCommandResponse.CreateResponse(userMessage, contentTiles);
                        await voiceServiceConnection.ReportSuccessAsync(response);

                        break;


                    case "new":
                        var todo = voiceCommand.Properties["todo"][0];

                        var responseMessage = new VoiceCommandUserMessage()
                        {
                            DisplayMessage = String.Format("Add \"{0}\" to your To Do's?", todo),
                            SpokenMessage = String.Format("Do you want me to add \"{0}\" to your To Do's?", todo)
                        };

                        var repeatMessage = new VoiceCommandUserMessage()
                        {
                            DisplayMessage = String.Format("Are you sure you want me to add \"{0}\" to your To Do's?", todo),
                            SpokenMessage = String.Format("Are you sure you want me to add \"{0}\" to your To Do's?", todo)
                        };

                        bool confirmed = false;
                        response = VoiceCommandResponse.CreateResponseForPrompt(responseMessage, repeatMessage);
                        try
                        {
                            var confirmation = await voiceServiceConnection.RequestConfirmationAsync(response);
                            confirmed = confirmation.Confirmed;
                        }
                        catch
                        {

                        }
                        if (confirmed)
                        {
                            _todoItemRepository = TODOAdaptiveUISample.Repositories.TodoItemFileRepository.GetInstance();
                            var i = _todoItemRepository.Factory(title: todo);
                            await _todoItemRepository.InsertTodoItem(i);

                            var todos = await _todoItemRepository.RefreshTodoItemsAsync();

                            contentTiles = new List<VoiceCommandContentTile>();

                            foreach (var itm in todos.Where(x => x.IsComplete == false).OrderBy(x => x.DueDate).Take((int)VoiceCommandResponse.MaxSupportedVoiceCommandContentTiles))
                            {
                                var tile = new VoiceCommandContentTile();
                                tile.ContentTileType = VoiceCommandContentTileType.TitleWithText;
                                tile.Title = itm.Title;
                                contentTiles.Add(tile);
                            }

                            userMessage.SpokenMessage = "Done and Done! Here are your top To Do's";
                            userMessage.DisplayMessage = "Here are your top " + contentTiles.Count + " To Do's";

                            response = VoiceCommandResponse.CreateResponse(userMessage, contentTiles);
                            await voiceServiceConnection.ReportSuccessAsync(response);
                        }
                        else
                        {
                            userMessage.DisplayMessage = userMessage.SpokenMessage = "OK then";
                            response = VoiceCommandResponse.CreateResponse(userMessage);
                            await voiceServiceConnection.ReportSuccessAsync(response);
                        }



                        break;

                }
            }
            catch (Exception ex)
            {
                if (Debugger.IsAttached)
                {
                    Debugger.Break();
                }
            }
            finally
            {
                if (this.serviceDeferral != null)
                {
                    //Complete the service deferral
                    this.serviceDeferral.Complete();
                }
            }
        }
		public async void Run( IBackgroundTaskInstance taskInstance )
		{
			//Get a deferral
			_deferral = taskInstance.GetDeferral();

			taskInstance.Canceled += OnTaskCanceled;

			var triggerDetails =
				taskInstance.TriggerDetails as AppServiceTriggerDetails;

			if ( triggerDetails != null
				//Name == [Name used in the appxmanifest Delaration]
				&& triggerDetails.Name == "ComputerAssistantCortanaAppService" )
			{
				try
				{
					voiceServiceConnection =
						VoiceCommandServiceConnection.FromAppServiceTriggerDetails( triggerDetails );

					voiceServiceConnection.VoiceCommandCompleted += VoiceCommandCompleted;

					VoiceCommand voiceCommand =
						await voiceServiceConnection.GetVoiceCommandAsync();

					VoiceCommandResponse response = null;

					switch ( voiceCommand.CommandName )
					{
						case "currentLocation":
							var userMessage = new VoiceCommandUserMessage();

							//Illegal? Geoposition geoposition = await LocationWrapper.Instance.GetSingleShotLocationAsync();

							//http://www.directionsmag.com/site/latlong-converter/
							string message = "You're at That Conference, of course.";
							//string message = "Current location Decimal Degrees. "
							//	+ $"Latitude {geoposition.Coordinate.Point.Position.Latitude}. "
							//	+ $"Longitude {geoposition.Coordinate.Point.Position.Longitude}.";
							userMessage.DisplayMessage = message;
							userMessage.SpokenMessage = message;
							response = VoiceCommandResponse.CreateResponse( userMessage );
							break;

						default:
							break;
					}

					if ( response != null )
					{
						await voiceServiceConnection.ReportSuccessAsync( response );
					}
				}
				catch ( Exception ex )
				{
					Debug.WriteLine( ex );
					LaunchAppInForeground();
				}
				finally
				{
					if ( _deferral != null )
					{
						_deferral.Complete();
						_deferral = null;
					}
				}
			}

			//all done; complete the deferral if it was not already completed above
			_deferral?.Complete();
		}
        /// <summary>
        /// Background task entrypoint. Voice Commands using the <VoiceCommandService Target="...">
        /// tag will invoke this when they are recognized by Cortana, passing along details of the 
        /// invocation. 
        /// 
        /// Background tasks must respond to activation by Cortana within 0.5 seconds, and must 
        /// report progress to Cortana every 5 seconds (unless Cortana is waiting for user
        /// input). There is no execution time limit on the background task managed by Cortana,
        /// but developers should use plmdebug (https://msdn.microsoft.com/en-us/library/windows/hardware/jj680085%28v=vs.85%29.aspx)
        /// on the Cortana app package in order to prevent Cortana timing out the task during
        /// debugging.
        /// 
        /// Cortana dismisses its UI if it loses focus. This will cause it to terminate the background
        /// task, even if the background task is being debugged. Use of Remote Debugging is recommended
        /// in order to debug background task behaviors. In order to debug background tasks, open the
        /// project properties for the app package (not the background task project), and enable
        /// Debug -> "Do not launch, but debug my code when it starts". Alternatively, add a long
        /// initial progress screen, and attach to the background task process while it executes.
        /// </summary>
        /// <param name="taskInstance">Connection to the hosting background service process.</param>
        public async void Run(IBackgroundTaskInstance taskInstance)
        {
            serviceDeferral = taskInstance.GetDeferral();

            // Register to receive an event if Cortana dismisses the background task. This will
            // occur if the task takes too long to respond, or if Cortana's UI is dismissed.
            // Any pending operations should be cancelled or waited on to clean up where possible.
            taskInstance.Canceled += OnTaskCanceled;

            var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

            // Load localized resources for strings sent to Cortana to be displayed to the user.
            cortanaResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("Resources");

            // Select the system language, which is what Cortana should be running as.
            cortanaContext = ResourceContext.GetForViewIndependentUse();

            // Get the currently used system date format
            dateFormatInfo = CultureInfo.CurrentCulture.DateTimeFormat;

            VoiceCommandResponse response = null;

            // This should match the uap:AppService and RuleVoiceCommandService references from the 
            // package manifest and VCD files, respectively. Make sure we've been launched by
            // a Cortana Voice Command.
            if (triggerDetails != null && triggerDetails.Name == this.GetType().Name)
            {
                try
                {
                    voiceServiceConnection =
                        VoiceCommandServiceConnection.FromAppServiceTriggerDetails(
                            triggerDetails);

                    voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;

                    VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();

                    HttpClient client = new HttpClient();

                    switch (voiceCommand.CommandName)
                    {
                        case "turnOnLight":

                            // "turnOnLight" should request the on state.
                            string postBody = JsonConvert.SerializeObject(new Settings
                            {
                                IsOn = true
                            });
                            client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
                            var webResponse = await client.PostAsync("http://hiremotemeetcortana.azurewebsites.net/api/settings", new StringContent(postBody, Encoding.UTF8, "application/json"));

                            if (webResponse.IsSuccessStatusCode)
                            {
                                var turnOnLightMessage = new VoiceCommandUserMessage
                                {
                                    DisplayMessage = "Wakeup Light has been turned on ",
                                    SpokenMessage = "Wakeup Light has been turned on "
                                };

                                response = VoiceCommandResponse.CreateResponse(turnOnLightMessage);
                                await voiceServiceConnection.ReportSuccessAsync(response);
                            } else
                            {
                                var turnOnLightMessage = new VoiceCommandUserMessage
                                {
                                    DisplayMessage = "Something went wrong",
                                    SpokenMessage = "Something went wrong"
                                };

                                response = VoiceCommandResponse.CreateResponse(turnOnLightMessage);
                                await voiceServiceConnection.ReportFailureAsync(response);
                            }
                            break;
                        case "turnOffLight":

                            string turnOffLightBody = JsonConvert.SerializeObject(new Settings
                            {
                                IsOn = false
                            });
                            client.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
                            var saveTurnOffLight = await client.PostAsync("http://hiremotemeetcortana.azurewebsites.net/api/settings", new StringContent(turnOffLightBody, Encoding.UTF8, "application/json"));

                            if (saveTurnOffLight.IsSuccessStatusCode)
                            {
                                var turnOffLightMessage = new VoiceCommandUserMessage
                                {
                                    DisplayMessage = "Wakeup Light has been turned off",
                                    SpokenMessage = "Wakeup Light has been turned off"
                                };

                                response = VoiceCommandResponse.CreateResponse(turnOffLightMessage);
                                await voiceServiceConnection.ReportSuccessAsync(response);
                            }
                            else
                            {
                                var turnOffLightMessage = new VoiceCommandUserMessage
                                {
                                    DisplayMessage = "Something went wrong",
                                    SpokenMessage = "Something went wrong"
                                };

                                response = VoiceCommandResponse.CreateResponse(turnOffLightMessage);
                                await voiceServiceConnection.ReportFailureAsync(response);
                            }
                            break;
                        default:
                            // As with app activation VCDs, we need to handle the possibility that
                            // an app update may remove a voice command that is still registered.
                            // This can happen if the user hasn't run an app since an update.
                            LaunchAppInForeground();
                            break;
                    }
                }
                catch (Exception ex)
                {
                    System.Diagnostics.Debug.WriteLine("Handling Voice Command failed " + ex.ToString());
                }
            }
        }
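This Run method never completes the deferral itself; in samples structured like this one, completion usually happens in the OnVoiceCommandCompleted and OnTaskCanceled handlers it registers. A minimal sketch of those handlers, assuming the same serviceDeferral field:

        private void OnVoiceCommandCompleted(VoiceCommandServiceConnection sender, VoiceCommandCompletedEventArgs args)
        {
            // Cortana has finished presenting the response; release the background task.
            if (this.serviceDeferral != null)
            {
                this.serviceDeferral.Complete();
            }
        }

        private void OnTaskCanceled(IBackgroundTaskInstance sender, BackgroundTaskCancellationReason reason)
        {
            // Cortana dismissed the task (timeout or UI dismissal); clean up and release it.
            if (this.serviceDeferral != null)
            {
                this.serviceDeferral.Complete();
            }
        }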
    static async Task ProcessShowLightsCommandAsync(
      List<ServiceInfoWithLocation> serviceInfoList, 
      VoiceCommandServiceConnection voiceConnection)
    {
      var onImageFile = await StorageFile.GetFileFromApplicationUriAsync(
        new Uri("ms-appx:///Assets/Cortana68x68On.png"));
      var offImageFile = await StorageFile.GetFileFromApplicationUriAsync(
        new Uri("ms-appx:///Assets/Cortana68x68Off.png"));

      var message = new VoiceCommandUserMessage();
      var tiles = new List<VoiceCommandContentTile>();

      if ((serviceInfoList == null) || (serviceInfoList.Count == 0))
      {
        message.SpokenMessage = "Either something went wrong, or there are no lights";
        message.DisplayMessage = "I didn't find any lights, sorry";
      }
      else
      {
        message.SpokenMessage = "Yay! I found some lights. Here you go";
        message.DisplayMessage = "Lights found in following places...";

        foreach (var light in serviceInfoList)
        {
          tiles.Add(
            new VoiceCommandContentTile()
            {
              Title = "Light",
              TextLine1 = $"located in {light.Location}",
              ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText,
              Image = light.IsOn ? onImageFile : offImageFile
            });
        }
      }
      var response = VoiceCommandResponse.CreateResponse(message, tiles);

      await voiceConnection.ReportSuccessAsync(response);
    }
    async Task ProcessSwitchLightCommandAsync(
      List<ServiceInfoWithLocation> serviceInfoList,
      VoiceCommandServiceConnection voiceConnection)
    {
      var message = new VoiceCommandUserMessage();
      var tiles = new List<VoiceCommandContentTile>();
      bool worked = false;

      if ((serviceInfoList == null) || (serviceInfoList.Count == 0))
      {
        message.SpokenMessage = "I couldn't find any lights at all, sorry";
        message.DisplayMessage = "No lights could be found at any location";
      }
      else
      {
        var voiceCommand = await voiceConnection.GetVoiceCommandAsync();

        var location = ExtractPropertyFromVoiceCommand(voiceCommand, VOICE_COMMAND_LOCATION_KEY);
        var onOff = ExtractPropertyFromVoiceCommand(voiceCommand, VOICE_COMMAND_ON_OFF_KEY);

        if (string.IsNullOrEmpty(location))
        {
          message.SpokenMessage = "I couldn't find a location in what you said, sorry";
          message.DisplayMessage = "Interpreted text did not contain an audible location";
        }
        else if (string.IsNullOrEmpty(onOff))
        {
          message.SpokenMessage = "I couldn't figure out whether you said on or off, sorry";
          message.DisplayMessage = "Not clear around on/off status";
        }
        else
        {
          var serviceInfo = serviceInfoList.SingleOrDefault(
            sinfo => string.Compare(sinfo.Location.Trim(), location.Trim(), true) == 0);

          if (serviceInfo == null)
          {
            message.SpokenMessage = $"I couldn't find any lights in the location {location}, sorry";
            message.DisplayMessage = $"No lights in the {location}";
          }
          else
          {
            // It may just work...
            await serviceInfo.Consumer.SwitchAsync(string.Compare(onOff, "on", true) == 0);
            worked = true;

            message.SpokenMessage = $"I think I did it! The light should now be {onOff}";
            message.DisplayMessage = $"The light is now {onOff}";
          }
        }
      }
      var response = VoiceCommandResponse.CreateResponse(message);

      if (worked)
      {
        await voiceConnection.ReportSuccessAsync(response);
      }
      else
      {
        await voiceConnection.ReportFailureAsync(response);
      }
    }
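ExtractPropertyFromVoiceCommand is not included in this snippet. A plausible sketch reads the recognized value for a phrase list or phrase topic out of the semantic interpretation, returning null when the key is missing:

    static string ExtractPropertyFromVoiceCommand(VoiceCommand voiceCommand, string propertyKey)
    {
      // Hypothetical sketch: SemanticInterpretation.Properties maps each PhraseList/
      // PhraseTopic name from the VCD to the list of recognized values.
      IReadOnlyList<string> values;

      if (voiceCommand.SpeechRecognitionResult.SemanticInterpretation.Properties.TryGetValue(
        propertyKey, out values))
      {
        return values.Count > 0 ? values[0] : null;
      }
      return null;
    }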
        /// <summary>
        /// Background task entrypoint. Voice Commands using the <VoiceCommandService Target="...">
        /// tag will invoke this when they are recognized by Cortana, passing along details of the 
        /// invocation. 
        /// 
        /// Background tasks must respond to activation by Cortana within 0.5 seconds, and must 
        /// report progress to Cortana every 5 seconds (unless Cortana is waiting for user
        /// input). There is no execution time limit on the background task managed by Cortana,
        /// but developers should use plmdebug (https://msdn.microsoft.com/en-us/library/windows/hardware/jj680085%28v=vs.85%29.aspx)
        /// on the Cortana app package in order to prevent Cortana timing out the task during
        /// debugging.
        /// 
        /// Cortana dismisses its UI if it loses focus. This will cause it to terminate the background
        /// task, even if the background task is being debugged. Use of Remote Debugging is recommended
        /// in order to debug background task behaviors. In order to debug background tasks, open the
        /// project properties for the app package (not the background task project), and enable
        /// Debug -> "Do not launch, but debug my code when it starts". Alternatively, add a long
        /// initial progress screen, and attach to the background task process while it executes.
        /// </summary>
        /// <param name="taskInstance">Connection to the hosting background service process.</param>
        public async void Run(IBackgroundTaskInstance taskInstance)
        {
            serviceDeferral = taskInstance.GetDeferral();

            // Register to receive an event if Cortana dismisses the background task. This will
            // occur if the task takes too long to respond, or if Cortana's UI is dismissed.
            // Any pending operations should be cancelled or waited on to clean up where possible.
            taskInstance.Canceled += OnTaskCanceled;

            var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;

            // Load localized resources for strings sent to Cortana to be displayed to the user.
            cortanaResourceMap = ResourceManager.Current.MainResourceMap.GetSubtree("Resources");

            // Select the system language, which is what Cortana should be running as.
            cortanaContext = ResourceContext.GetForViewIndependentUse();

            // Get the currently used system date format
            dateFormatInfo = CultureInfo.CurrentCulture.DateTimeFormat;

            // This should match the uap:AppService and VoiceCommandService references from the 
            // package manifest and VCD files, respectively. Make sure we've been launched by
            // a Cortana Voice Command.
            if (triggerDetails != null && triggerDetails.Name == "BandOnTheRunVoiceCommandService")
            {
                try
                {
                    voiceServiceConnection =
                        VoiceCommandServiceConnection.FromAppServiceTriggerDetails(
                            triggerDetails);

                    voiceServiceConnection.VoiceCommandCompleted += OnVoiceCommandCompleted;

                    VoiceCommand voiceCommand = await voiceServiceConnection.GetVoiceCommandAsync();
                    VoiceCommandUserMessage userMessage = new VoiceCommandUserMessage();

                    // Depending on the operation (defined in AdventureWorks:AdventureWorksCommands.xml)
                    // perform the appropriate command.
                    switch (voiceCommand.CommandName)
                    {
                        case "showbandinformation":
                            //hardcoded - needs to be hooked into real data flow.
                            userMessage.DisplayMessage = "Band 1 \n" +
                                                         "status: connected\n" +
                                                          "Motion: Jogging\n" +
                                                          "Speed: 10kph\n" +
                                                          "Skin Temp: 37\n" +
                                                           "UV: medium";
                            userMessage.SpokenMessage = "Showing band information";
;
                            var response = VoiceCommandResponse.CreateResponse(userMessage);
                            await voiceServiceConnection.ReportSuccessAsync(response);
                    
                            break;
                 
                        default:
                     
                            break;
                    }
                }
                catch (Exception ex)
                {
                    System.Diagnostics.Debug.WriteLine("Handling Voice Command failed " + ex.ToString());
                }
                finally
                {
                    if (this.serviceDeferral != null)
                    {
                        this.serviceDeferral.Complete();
                      
                    }
                }
            }
        }