Example #1
        private async Task ProcessGenerateFactAsync(SpeechRecognitionSemanticInterpretation interpretation)
        {
            await Helpers.ProgressHelper.ShowProgressScreenAsync(voiceServiceConnection, "Okay, get ready...");

            string fact = await Helpers.FactHelper.GetFactAsync();

            var destinationsContentTiles = new List<VoiceCommandContentTile>();

            var destinationTile = new VoiceCommandContentTile();

            try
            {
                destinationTile.ContentTileType   = VoiceCommandContentTileType.TitleWithText;
                destinationTile.AppContext        = null;
                destinationTile.AppLaunchArgument = "fact=" + fact;
                destinationTile.Title             = fact;
                destinationTile.TextLine1         = "(tap to add to favorites)";

                destinationsContentTiles.Add(destinationTile);
            }
            catch (Exception ex)
            {
                // Log instead of silently swallowing, so tile failures are visible while debugging.
                System.Diagnostics.Debug.WriteLine(ex.Message);
            }

            VoiceCommandResponse response = VoiceCommandResponse.CreateResponse(new VoiceCommandUserMessage()
            {
                DisplayMessage = "Did you know...",
                SpokenMessage  = fact
            }, destinationsContentTiles);

            await voiceServiceConnection.ReportSuccessAsync(response);
        }
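Helpers.ProgressHelper.ShowProgressScreenAsync above is project code that isn't shown. A minimal sketch of what such a helper might look like, assuming it simply forwards a progress message to Cortana:

        // Hypothetical sketch of the ShowProgressScreenAsync helper; the real
        // wrapper may differ, but the standard Cortana progress pattern is:
        private static async Task ShowProgressScreenAsync(VoiceCommandServiceConnection connection, string message)
        {
            var userMessage = new VoiceCommandUserMessage
            {
                DisplayMessage = message,
                SpokenMessage  = message
            };

            // Cortana expects progress reports while the background task works.
            var response = VoiceCommandResponse.CreateResponse(userMessage);
            await connection.ReportProgressAsync(response);
        }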
        private async Task ServiceCommandHandleAsync(SpeechRecognitionSemanticInterpretation interpretation)
        {
            var progressMessage = "Get ready";
            await Helpers.VoiceCommandResponseHelper.ReportProgressAsync(voiceServiceConn, progressMessage, progressMessage);

            var randomInt = new Random().Next(1, 7); // uniform die roll, 1 to 6

            System.Diagnostics.Debug.WriteLine(randomInt);
            var contentTiles = new List<VoiceCommandContentTile>();
            var tile         = new VoiceCommandContentTile();

            try
            {
                tile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
                tile.Image           = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(new Uri($"ms-appx:///ControlPanel.BackgroundServices/Assets/68_Dice_{randomInt}.png"));

                tile.AppContext        = randomInt;
                tile.AppLaunchArgument = "DiceResult=" + randomInt;
                tile.Title             = $"The dice result is {randomInt}";
                contentTiles.Add(tile);
            }
            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine(ex.Message);
            }

            var successMessage = $"You got {randomInt}";
            await Helpers.VoiceCommandResponseHelper.ReportSuccessAsync(voiceServiceConn, successMessage, successMessage, contentTiles);
        }
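Helpers.VoiceCommandResponseHelper is another app-level wrapper that isn't shown. A plausible sketch, assuming it builds a VoiceCommandUserMessage from the two strings and forwards any tiles:

        // Hypothetical sketch of the ReportSuccessAsync wrapper used above.
        private static async Task ReportSuccessAsync(VoiceCommandServiceConnection connection, string spokenMessage, string displayMessage, List<VoiceCommandContentTile> contentTiles = null)
        {
            var userMessage = new VoiceCommandUserMessage
            {
                SpokenMessage  = spokenMessage,
                DisplayMessage = displayMessage
            };

            var response = contentTiles == null
                ? VoiceCommandResponse.CreateResponse(userMessage)
                : VoiceCommandResponse.CreateResponse(userMessage, contentTiles);
            await connection.ReportSuccessAsync(response);
        }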
        public static string GetInterpretation(this SpeechRecognitionSemanticInterpretation interpretation, string key)
        {
            if (interpretation.Properties.ContainsKey(key))
            {
                return(interpretation.Properties[key][0]);
            }

            return(null);
        }
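With the GetInterpretation extension in place, reading a semantic key collapses to one null-checked call. A short usage example ("destination" is a hypothetical key name):

        // Returns the first value for the key, or null when the grammar did not produce it.
        private static void LogDestination(SpeechRecognitionSemanticInterpretation interpretation)
        {
            string destination = interpretation.GetInterpretation("destination");
            if (destination != null)
            {
                System.Diagnostics.Debug.WriteLine("Recognized destination: " + destination);
            }
        }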
        private async Task ProcessInterestingFactAsync(SpeechRecognitionSemanticInterpretation interpretation)
        {
            await Core.Helpers.BackgroundProgressHelper.ShowProgressScreen(voiceServiceConnection, "Okay, get ready");

            // This is the fact we want Cortana to tell the user.
            string fact = await Core.Helpers.FactHelper.GetRandomFactAsync();
            var destinationContentTiles = new List<VoiceCommandContentTile>();
            var destinationTile = new VoiceCommandContentTile();
            try
            {

                // Tile style: which size/layout Cortana uses to display the tile.
                destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWith280x140IconAndText;

                // The image shown inside the tile.
                destinationTile.Image = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(new Uri("ms-appx:///OfficePoint.Service.Background/Images/Fact_280.png"));

                // AppContext rides along with the tile and comes back when the app handles the response.
                destinationTile.AppContext = null;

                // Argument handed to the foreground app when the user taps the tile.
                destinationTile.AppLaunchArgument = "type=" + VoiceCommandType.InterestingQueryFact;

                destinationTile.Title = fact;

                // Extra line Cortana writes under the title.
                destinationTile.TextLine1 = "";

                destinationContentTiles.Add(destinationTile);
            }

            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine(ex.Message);
            }

            // Create the response Cortana will present.
            VoiceCommandResponse voiceResponse = VoiceCommandResponse.CreateResponse(new VoiceCommandUserMessage()
            {
                // What Cortana writes on screen.
                DisplayMessage = "Did you know...",
                // The previously chosen random fact, which Cortana speaks aloud.
                SpokenMessage = fact,
            }, destinationContentTiles);

            // voiceServiceConnection is the connection to Cortana; use it to send our response.
            await voiceServiceConnection.ReportSuccessAsync(voiceResponse);


        }
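Core.Helpers.FactHelper.GetRandomFactAsync is also project code that isn't shown. A self-contained stand-in, assuming facts come from a local list rather than a web service:

        // Hypothetical stand-in for FactHelper.GetRandomFactAsync: picks a random
        // entry from a local list. The real helper may query a web service instead.
        private static readonly string[] Facts =
        {
            "Honey never spoils.",
            "Octopuses have three hearts.",
            "Bananas are berries."
        };

        private static Task<string> GetRandomFactAsync()
        {
            var fact = Facts[new Random().Next(Facts.Length)];
            return Task.FromResult(fact);
        }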
        // Very similar to the methods above.
        private async Task ProcessWeekOfYearAsync(SpeechRecognitionSemanticInterpretation interpretation)
        {

            DateTimeFormatInfo dfi = DateTimeFormatInfo.CurrentInfo;

            Calendar cal = dfi.Calendar;

            var firstDay = cal.GetWeekOfYear(DateTime.Today, CalendarWeekRule.FirstDay, dfi.FirstDayOfWeek);

            var firstFourDay = cal.GetWeekOfYear(DateTime.Today, CalendarWeekRule.FirstFourDayWeek, dfi.FirstDayOfWeek);

            var firstFullDay = cal.GetWeekOfYear(DateTime.Today, CalendarWeekRule.FirstFullWeek, dfi.FirstDayOfWeek);


            string fullDayLabel = "Today is week " + firstDay + " by the first day rule and week " + firstFourDay + " by the first four day rule";

            var destinationContentTiles = new List<VoiceCommandContentTile>();

            var destinationTile = new VoiceCommandContentTile();

            try
            {

                destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWithText;
                destinationTile.AppContext = null;
                destinationTile.AppLaunchArgument = "type=" + VoiceCommandType.WeekOfYearQueryCommand;
                destinationTile.Title = DateTime.Today.ToString(dfi.LongDatePattern);

                destinationTile.TextLine1 = "Today is week #" + firstDay + " by the first day rule";
                destinationTile.TextLine2 = "Week #" + firstFourDay + " by the first four day rule";
                destinationTile.TextLine3 = "Week #" + firstFullDay + " by the first full week rule";

                destinationContentTiles.Add(destinationTile);
            }

            catch (Exception ex)
            {
                System.Diagnostics.Debug.WriteLine(ex.Message);
            }

            VoiceCommandResponse response2 = VoiceCommandResponse.CreateResponse(new VoiceCommandUserMessage
            {
                DisplayMessage = "Week of the year",
                SpokenMessage = fullDayLabel,
            }, destinationContentTiles);

            await voiceServiceConnection.ReportSuccessAsync(response2);
        }
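The three CalendarWeekRule values can genuinely disagree near a year boundary, which is why the method reports all of them. A small check (en-US culture chosen only for reproducibility):

        // Illustrates how FirstDay, FirstFourDayWeek, and FirstFullWeek diverge
        // for a date early in the year.
        private static void CompareWeekRules()
        {
            var dfi  = new CultureInfo("en-US").DateTimeFormat;
            var cal  = dfi.Calendar;
            var date = new DateTime(2016, 1, 1); // a Friday

            foreach (var rule in new[] { CalendarWeekRule.FirstDay, CalendarWeekRule.FirstFourDayWeek, CalendarWeekRule.FirstFullWeek })
            {
                System.Diagnostics.Debug.WriteLine(rule + ": week " + cal.GetWeekOfYear(date, rule, dfi.FirstDayOfWeek));
            }
        }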
        private async Task FindCommandHandleAsync(SpeechRecognitionSemanticInterpretation interpretation)
        {
            var searchQuery = string.Empty;

            if (interpretation.Properties.ContainsKey("DiceNum"))
            {
                searchQuery = interpretation.Properties["DiceNum"].FirstOrDefault();
            }
            if (!string.IsNullOrWhiteSpace(searchQuery))
            {
                var response = VoiceCommandResponse.CreateResponse(new VoiceCommandUserMessage()
                {
                    SpokenMessage = "Get ready", DisplayMessage = "Get ready"
                });
                await voiceServiceConn.ReportProgressAsync(response);

                //await DisambiguateAsync("Select a result", "Please select a result");
                var promptStr    = "Select a result";
                var repromptStr  = "Please select a result";
                var contentTiles = new List<VoiceCommandContentTile>();
                for (var i = 1; i < 7; i++)
                {
                    var tile = new VoiceCommandContentTile();
                    tile.ContentTileType = VoiceCommandContentTileType.TitleWith68x68IconAndText;
                    tile.Image           = await Windows.Storage.StorageFile.GetFileFromApplicationUriAsync(new Uri($"ms-appx:///ControlPanel.BackgroundServices/Assets/68_Dice_{i}.png"));

                    tile.AppContext        = i;
                    tile.AppLaunchArgument = $"type={i}";
                    tile.Title             = $"The dice result is {i}";
                    contentTiles.Add(tile);
                }
                var result = await Helpers.VoiceCommandResponseHelper.RequestDisambiguationAsync(voiceServiceConn, promptStr, repromptStr, contentTiles);

                if (result != null)
                {
                    contentTiles.Clear();
                    contentTiles.Add(result.SelectedItem);
                    var successStr = "You selected a die";
                    await Helpers.VoiceCommandResponseHelper.ReportSuccessAsync(voiceServiceConn, successStr, successStr, contentTiles);
                }
            }
        }
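The RequestDisambiguationAsync helper (and the commented-out DisambiguateAsync) wrap Cortana's built-in disambiguation API. A minimal sketch, assuming the helper returns the raw result so callers can read SelectedItem:

        // Hypothetical sketch of the RequestDisambiguationAsync wrapper: builds a
        // prompt response and asks Cortana to let the user pick one tile.
        private static async Task<VoiceCommandDisambiguationResult> RequestDisambiguationAsync(VoiceCommandServiceConnection connection, string prompt, string reprompt, List<VoiceCommandContentTile> contentTiles)
        {
            var promptMessage   = new VoiceCommandUserMessage { DisplayMessage = prompt,   SpokenMessage = prompt };
            var repromptMessage = new VoiceCommandUserMessage { DisplayMessage = reprompt, SpokenMessage = reprompt };

            var response = VoiceCommandResponse.CreateResponseForPrompt(promptMessage, repromptMessage, contentTiles);
            return await connection.RequestDisambiguationAsync(response);
        }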
        // This method uses the Bing location search API to find a business based on dictated text;
        // for instance, we can find a coffee shop nearby.
        private async Task ProcessFindBusinessAsync(SpeechRecognitionSemanticInterpretation interpretation)
        {
            string searchQuery = null;

            // The interpretation contains everything the user dictated; the value under
            // "dictatedFindBusinessText" tells us what to search for. This key must match
            // the label used in the phrase list.
            if (interpretation.Properties.ContainsKey("dictatedFindBusinessText") && interpretation.Properties["dictatedFindBusinessText"].Count > 0)
            {
                searchQuery = interpretation.Properties["dictatedFindBusinessText"].First().Trim();

            }
            if(!string.IsNullOrWhiteSpace(searchQuery))
            {
                await Core.Helpers.BackgroundProgressHelper.ShowProgressScreen(voiceServiceConnection, "Searching for " + searchQuery + " near you");


                // Get the current location, making sure it is available, and pass it to the search.
                var currentLocation = await Core.Helpers.LocationHelper.GetCurrentLocationAsync();

                LocationResultInformation selectedLocationResult = null;

                if (currentLocation != null)
                {
                    double latitude = currentLocation.Point.Position.Latitude;
                    double longitude = currentLocation.Point.Position.Longitude;

                    // This section is where we call the Bing API.
                    var locationResults = await Core.Helpers.SearchHelper.SearchLocation(searchQuery, latitude, longitude);

                   

                    // Cortana adapts to the device; on Windows Phone we only take the first five results.
                    if (Helpers.PlatformHelper.IsWindowsPhone)
                    {
                        locationResults = locationResults.Take(5).ToList();
                    }

                    // If we got more than one result, use the disambiguation flow.
                    if (locationResults.Count > 1)
                    {
                        // DisambiguateLocations shows the list of found locations and lets the user pick one.
                        selectedLocationResult = await DisambiguateLocations(locationResults, searchQuery);

                        var userMessage = new VoiceCommandUserMessage();
                        userMessage.DisplayMessage = "Loading " + selectedLocationResult.DisplayName + "...";
                        userMessage.SpokenMessage = "Loading " + selectedLocationResult.DisplayName + "...";

                        // NOTE: the original call here was incomplete; the helper signature below is assumed.
                        Core.Helpers.BackgroundProgressHelper.LaunchAppInForeground(voiceServiceConnection, userMessage);
                    }
                    else if (locationResults.Count == 1)
                    {
                        var destinationContentTiles = new List<VoiceCommandContentTile>();

                        selectedLocationResult = locationResults.FirstOrDefault();

                        var destinationTile = new VoiceCommandContentTile();
                        destinationTile.ContentTileType = VoiceCommandContentTileType.TitleWithText;
                        destinationTile.Title = selectedLocationResult.DisplayName;
                        destinationTile.TextLine1 = selectedLocationResult.StreetAddress;
                        destinationTile.TextLine2 = ((double)selectedLocationResult.DistanceAway).ToString("N2") + " miles away";
                        destinationTile.AppContext = selectedLocationResult;
                        destinationTile.AppLaunchArgument = "type=" + VoiceCommandType.FindBusinessQueryCommand + "&searchQuery=" + searchQuery;

                        destinationContentTiles.Add(destinationTile);

                        var response = VoiceCommandResponse.CreateResponse(new VoiceCommandUserMessage()
                        {
                            DisplayMessage = searchQuery.ToUpper() + " found",
                            SpokenMessage = "I found " + searchQuery,
                        }, destinationContentTiles);

                        await voiceServiceConnection.ReportSuccessAsync(response);
                    }
                }
            }
        }
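Core.Helpers.LocationHelper.GetCurrentLocationAsync is project code that isn't shown. A plausible sketch using the standard Geolocator API, assuming the helper returns a Geocoordinate (which exposes the Point.Position used above):

        // Hypothetical sketch of GetCurrentLocationAsync: requests the current
        // position and returns its coordinate, or null when access is denied.
        private static async Task<Windows.Devices.Geolocation.Geocoordinate> GetCurrentLocationAsync()
        {
            var accessStatus = await Windows.Devices.Geolocation.Geolocator.RequestAccessAsync();
            if (accessStatus != Windows.Devices.Geolocation.GeolocationAccessStatus.Allowed)
            {
                return null;
            }

            var geolocator = new Windows.Devices.Geolocation.Geolocator();
            var position = await geolocator.GetGeopositionAsync();
            return position.Coordinate; // Coordinate.Point.Position exposes Latitude/Longitude
        }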
        private static Message getSpeechInputMessage(SpeechRecognitionSemanticInterpretation speechRecognitionSemanticInterpretation, RecognizedSpeech recognizedSpeech)
        {
            string home            = speechRecognitionSemanticInterpretation.GetInterpretation("home");
            string help            = speechRecognitionSemanticInterpretation.GetInterpretation("help");
            string time            = speechRecognitionSemanticInterpretation.GetInterpretation("time");
            string light           = speechRecognitionSemanticInterpretation.GetInterpretation("light");
            string weather         = speechRecognitionSemanticInterpretation.GetInterpretation("weather");
            string weatherforecast = speechRecognitionSemanticInterpretation.GetInterpretation("weatherforecast");
            string news            = speechRecognitionSemanticInterpretation.GetInterpretation("news");
            string quote           = speechRecognitionSemanticInterpretation.GetInterpretation("quote");
            string scroll          = speechRecognitionSemanticInterpretation.GetInterpretation("scroll");
            string navigate        = speechRecognitionSemanticInterpretation.GetInterpretation("navigate");
            string reload          = speechRecognitionSemanticInterpretation.GetInterpretation("reload");
            string speech          = speechRecognitionSemanticInterpretation.GetInterpretation("speech");
            string power           = speechRecognitionSemanticInterpretation.GetInterpretation("power");

            if (home != null)
            {
                recognizedSpeech.SemanticText = home;
                return(Message.HOME);
            }

            if (help != null)
            {
                recognizedSpeech.SemanticText = help;
                return(Message.HELP);
            }

            if (time != null)
            {
                recognizedSpeech.SemanticText = time;
                return(Message.TIME);
            }

            if (light != null)
            {
                recognizedSpeech.SemanticText = light;
                return(Message.LIGHT);
            }

            if (weather != null)
            {
                recognizedSpeech.SemanticText = weather;
                return(Message.WEATHER);
            }

            if (weatherforecast != null)
            {
                recognizedSpeech.SemanticText = weatherforecast;
                return(Message.WEATHERFORECAST);
            }

            if (news != null)
            {
                // ReSharper disable once ConvertIfStatementToSwitchStatement
                if (news == "sport")
                {
                    recognizedSpeech.SemanticText = news;
                    return(Message.NEWS_SPORTS);
                }

                if (news == "business")
                {
                    recognizedSpeech.SemanticText = news;
                    return(Message.NEWS_BUSINESS);
                }

                if (news == "entertainment")
                {
                    recognizedSpeech.SemanticText = news;
                    return(Message.NEWS_ENTERTAINMENT);
                }

                if (news == "health")
                {
                    recognizedSpeech.SemanticText = news;
                    return(Message.NEWS_HEALTH);
                }

                if (news == "science")
                {
                    recognizedSpeech.SemanticText = news;
                    return(Message.NEWS_SCIENCE);
                }

                if (news == "technology")
                {
                    recognizedSpeech.SemanticText = news;
                    return(Message.NEWS_TECHNOLOGY);
                }
            }

            if (quote != null)
            {
                recognizedSpeech.SemanticText = quote;
                return(Message.QUOTE);
            }

            if (scroll != null)
            {
                // ReSharper disable once ConvertIfStatementToSwitchStatement
                if (scroll == "up")
                {
                    recognizedSpeech.SemanticText = scroll;
                    return(Message.SCROLL_UP);
                }

                if (scroll == "down")
                {
                    recognizedSpeech.SemanticText = scroll;
                    return(Message.SCROLL_DOWN);
                }
            }

            if (navigate != null)
            {
                // ReSharper disable once ConvertIfStatementToSwitchStatement
                if (navigate == "back")
                {
                    recognizedSpeech.SemanticText = navigate;
                    return(Message.NAVIGATE_BACKWARDS);
                }

                if (navigate == "forward")
                {
                    recognizedSpeech.SemanticText = navigate;
                    return(Message.NAVIGATE_FOREWARDS);
                }
            }

            if (reload != null)
            {
                recognizedSpeech.SemanticText = reload;
                return(Message.RELOAD);
            }

            if (speech != null)
            {
                // ReSharper disable once ConvertIfStatementToSwitchStatement
                if (speech == "clock")
                {
                    recognizedSpeech.SemanticText = speech;
                    return(Message.SPEECH_TIME);
                }

                if (speech == "weather")
                {
                    recognizedSpeech.SemanticText = speech;
                    return(Message.SPEECH_WEATHER);
                }

                if (speech.Contains("weatherforecast"))
                {
                    recognizedSpeech.SemanticText = speech;
                    return(Message.SPEECH_WEATHERFORECAST);
                }

                if (speech == "temperature")
                {
                    recognizedSpeech.SemanticText = speech;
                    return(Message.SPEECH_WEATHER_TEMPERATURE);
                }

                if (speech == "sunrise")
                {
                    recognizedSpeech.SemanticText = speech;
                    return(Message.SPEECH_SUNRISE);
                }

                if (speech == "sunset")
                {
                    recognizedSpeech.SemanticText = speech;
                    return(Message.SPEECH_SUNSET);
                }

                if (speech == "name")
                {
                    recognizedSpeech.SemanticText = speech;
                    return(Message.SPEECH_NAME);
                }

                if (speech == "look")
                {
                    recognizedSpeech.SemanticText = speech;
                    return(Message.SPEECH_LOOK);
                }

                if (speech == "gender")
                {
                    recognizedSpeech.SemanticText = speech;
                    return(Message.SPEECH_GENDER);
                }

                if (speech == "mirror")
                {
                    recognizedSpeech.SemanticText = speech;
                    return(Message.SPEECH_MIRROR);
                }

                if (speech == "quote")
                {
                    recognizedSpeech.SemanticText = speech;
                    return(Message.SPEECH_QUOTE);
                }

                if (speech == "joke")
                {
                    recognizedSpeech.SemanticText = speech;
                    return(Message.SPEECH_JOKE);
                }

                if (speech == "creator")
                {
                    recognizedSpeech.SemanticText = speech;
                    return(Message.SPEECH_CREATOR);
                }
            }

            if (power != null)
            {
                recognizedSpeech.SemanticText = power;
                return(Message.POWER);
            }

            return(Message.UNKNOWN);
        }
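A typical call site for getSpeechInputMessage reads the SemanticInterpretation off a recognition result and maps it to a Message. A short, hypothetical example:

        // Hypothetical caller: map a recognition result to a Message and keep the
        // matched semantic text alongside it.
        private static Message HandleRecognitionResult(Windows.Media.SpeechRecognition.SpeechRecognitionResult result)
        {
            var recognizedSpeech = new RecognizedSpeech();
            Message message = getSpeechInputMessage(result.SemanticInterpretation, recognizedSpeech);

            System.Diagnostics.Debug.WriteLine(message + ": " + recognizedSpeech.SemanticText);
            return message;
        }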