Example #1
        public override SkillResponse Process(IntentRequest request)
        {
            var responseText = new StringBuilder();

            //We will use SSML as the response format
            //https://developer.amazon.com/docs/custom-skills/speech-synthesis-markup-language-ssml-reference.html
            responseText.Append("<speak>");

            var helper = new QueueHelper<Appointment>(
                System.Environment.GetEnvironmentVariable("AppointmentQueueConnection", System.EnvironmentVariableTarget.Process));
            var appointment = helper.Receive();

            if (appointment != null)
            {
                responseText.Append("<audio src = 'https://s3.amazonaws.com/ask-soundlibrary/ui/gameshow/amzn_ui_sfx_gameshow_positive_response_01.mp3'/>");
                responseText.Append(Ssml.SayAs(appointment.Subject, "interjection"));
                responseText.Append(Ssml.SayAs(appointment.Start, true));
                responseText.Append("<break/>");
            }
            else
            {
                responseText.Append("<audio src='soundbank://soundlibrary/cartoon/amzn_sfx_boing_long_1x_01'/>");
                responseText.Append(Ssml.SayAs("You got all appointments! Get to work!", "interjection"));
                responseText.Append("<break/>");
            }

            responseText.Append("</speak>");
            SsmlOutputSpeech speech = new SsmlOutputSpeech();

            speech.Ssml = responseText.ToString();
            return(ResponseBuilder.Tell(speech));
        }
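
The QueueHelper<T> and Appointment types used above are not part of this listing. Below is a minimal sketch of the contract the Process method relies on; the in-memory queue is a stand-in and every member name here is an assumption (the real helper presumably reads a storage queue via the connection string).

using System;
using System.Collections.Concurrent;
using System.Text.Json;

public class Appointment
{
    public string Subject { get; set; }
    public DateTime Start { get; set; }
}

public class QueueHelper<T> where T : class
{
    //Assumption: the real helper uses this connection string to reach the actual queue.
    private readonly string _connectionString;
    private static readonly ConcurrentQueue<string> InMemoryQueue = new ConcurrentQueue<string>();

    public QueueHelper(string connectionString) => _connectionString = connectionString;

    //Returns the next queued item, or null when the queue is empty,
    //matching the null check performed in the Process method above.
    public T Receive()
    {
        return InMemoryQueue.TryDequeue(out var json)
            ? JsonSerializer.Deserialize<T>(json)
            : null;
    }
}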
Example #2
        // ReSharper disable once FlagArgument
        public async Task <string> BuildAlexaResponseAsync(IResponse response, IAlexaSession session)
        {
            if (!(response.outputSpeech is null))
            {
                var outputSpeech = response.outputSpeech;

                var speech = new StringBuilder();

                speech.Append(outputSpeech.sound);
                speech.Append(Ssml.InsertStrengthBreak(StrengthBreak.strong));
                speech.Append(outputSpeech.phrase);

                outputSpeech.ssml  = "<speak>";
                outputSpeech.ssml += speech.ToString();
                outputSpeech.ssml += "</speak>";
            }

            response.reprompt = new Reprompt
            {
                outputSpeech = new OutputSpeech()
                {
                    ssml = "<speak>Can I help you with anything? You can ask to show a movie, or to show a tv series.</speak>"
                }
            };

            // Remove the APL directive if the device doesn't handle APL.
            if (!session.supportsApl)
            {
                if (response.directives.Any(d => d.type == "Alexa.Presentation.APL.RenderDocument"))
                {
                    response.directives.RemoveAll(d => d.type == "Alexa.Presentation.APL.RenderDocument");
                }
            }

            return(await Task.FromResult(JsonSerializer.SerializeToString(new AlexaResponse()
            {
                version = "1.2",
                response = response
            })));
        }
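
The Ssml helper used throughout these examples (SayAs, InsertStrengthBreak, SayWithEmotion) is project code that is not shown here. As a rough sketch of what InsertStrengthBreak presumably produces, assuming it simply emits an SSML break tag:

public enum StrengthBreak { weak, medium, strong }

public static class Ssml
{
    //Presumed behaviour: wrap the requested strength in an SSML <break> tag,
    //e.g. InsertStrengthBreak(StrengthBreak.strong) => "<break strength='strong'/>".
    public static string InsertStrengthBreak(StrengthBreak strength)
    {
        return $"<break strength='{strength}'/>";
    }
}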
Example #3
        public override SkillResponse Process(IntentRequest request)
        {
            var responseText = new StringBuilder();

            //We will use SSML as the response format
            //https://developer.amazon.com/docs/custom-skills/speech-synthesis-markup-language-ssml-reference.html
            responseText.Append("<speak>");
            JokesRepository repo         = new JokesRepository();
            var             categorySlot = request.Intent.Slots["JokeCategory"];
            var             category     = string.Empty;
            string          jokeCategory = categorySlot.Value;

            if (jokeCategory != null)
            {
                //Performing similarity search!
                jokeCategory = JokesRepository.SelectProperCategory(jokeCategory);
            }
            var joke = JokesRepository.NextJoke(jokeCategory);

            if (joke != null)
            {
                responseText.Append("<audio src = 'https://s3.amazonaws.com/ask-soundlibrary/ui/gameshow/amzn_ui_sfx_gameshow_positive_response_01.mp3'/>");
                responseText.Append(joke.JokeText);
                responseText.Append("<break/>");
            }
            else
            {
                responseText.Append("<audio src='soundbank://soundlibrary/cartoon/amzn_sfx_boing_long_1x_01'/>");
                responseText.Append(Ssml.SayAs("Ops! No Joke found!", "interjection"));
                responseText.Append("<break/>");
            }

            responseText.Append("</speak>");
            SsmlOutputSpeech speech = new SsmlOutputSpeech();

            speech.Ssml = responseText.ToString();
            return(ResponseBuilder.Tell(speech));
        }
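
JokesRepository is another project type not included in the listing. The sketch below only illustrates the contract the Process method depends on (a static category matcher and a NextJoke lookup that returns null when nothing fits); the member implementations and the in-memory data are assumptions.

using System;
using System.Collections.Generic;
using System.Linq;

public class Joke
{
    public string Category { get; set; }
    public string JokeText { get; set; }
}

public class JokesRepository
{
    private static readonly List<Joke> Jokes = new List<Joke>
    {
        new Joke { Category = "programming", JokeText = "There are 10 kinds of people: those who understand binary and those who don't." }
    };

    //Maps the spoken slot value to the closest known category
    //(the real code presumably uses a similarity/fuzzy-string search).
    public static string SelectProperCategory(string spokenCategory) =>
        Jokes.Select(j => j.Category)
             .FirstOrDefault(c => c.StartsWith(spokenCategory, StringComparison.OrdinalIgnoreCase));

    //Returns a joke for the category (or any joke when the category is null), or null if none match.
    public static Joke NextJoke(string category) =>
        Jokes.FirstOrDefault(j => category == null ||
                                  j.Category.Equals(category, StringComparison.OrdinalIgnoreCase));
}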
Example #4
        public override SkillResponse Process(IntentRequest request)
        {
            var responseText = new StringBuilder();

            //We will use SSML as the response format
            //https://developer.amazon.com/docs/custom-skills/speech-synthesis-markup-language-ssml-reference.html
            responseText.Append("<speak>");

            try
            {
                //let's connect as a guest to the TeamCity public instance!
                var client = new TeamCitySharp.TeamCityClient("teamcity.jetbrains.com", true);
                client.ConnectAsGuest();

                //By default, when a list of entities is requested, only basic fields are included into the response.
                //When a single entry is requested, all the fields are returned.
                //The complex field values can be returned in full or basic form, depending on a specific entity.
                //https://confluence.jetbrains.com/display/TCD18/REST+API#RESTAPI-FullandPartialResponses
                string projectNameToSearch = request.Intent.Slots["ProjectName"].Value;

                //We will do a similarity search to tolerate differences in the spoken project name!
                //Cool Project: https://github.com/tylerjensen/duovia-fuzzystrings
                //To do so, let us get all available projects from TeamCity
                var project = client.Projects.All().OrderByDescending(item => item.Name.FuzzyMatch(projectNameToSearch)).FirstOrDefault();

                if (project != null)
                {
                    responseText.Append("<audio src = 'https://s3.amazonaws.com/ask-soundlibrary/ui/gameshow/amzn_ui_sfx_gameshow_positive_response_01.mp3'/>");
                    responseText.AppendFormat("Here is the Status of the {0} Project:", Ssml.SayAs(project.Name, "interjection"));
                    responseText.Append("<break/>");
                    #region Build Information
                    //Last Build information (we get only 1)
                    //state: <queued/running/finished>
                    //https://confluence.jetbrains.com/display/TCD18/REST+API
                    var lastBuilds = client.Builds.AffectedProject(project.Id, 1, new List <string>()
                    {
                        "state:finished"
                    });
                    if (lastBuilds.Any())
                    {
                        //We get by ID to load the full information!
                        var lastBuild = client.Builds.ById(lastBuilds.First().Id);

                        var triggeredBy = string.Empty;
                        if (lastBuild.Triggered.Type.Equals("schedule"))
                        {
                            triggeredBy = "automatically";
                        }
                        else if (lastBuild.Triggered.Type.Equals("vcs"))
                        {
                            triggeredBy = "by the " + Ssml.SayAs("Version Control System", "interjection");
                        }
                        else
                        {
                            triggeredBy = "by " + Ssml.SayAs(lastBuild.Triggered.User.Name, "interjection");
                        }

                        //let us find how long ago was the build
                        responseText.AppendFormat("Last Build, {0}, triggered {1}, happened {2}, {3}, with {4} status",
                                                  Ssml.SayAs(lastBuild.BuildType.Name, "interjection"),
                                                  triggeredBy,
                                                  Ssml.SayAs(lastBuild.FinishDate, true),
                                                  lastBuild.FinishDate.TimeAgo(),
                                                  Ssml.SayAs(lastBuild.Status, "interjection"));

                        responseText.Append("<break/>");

                        //Let's collect statistics!
                        var buildStatistics = client.Statistics.GetByBuildId(lastBuild.Id);
                        //Default Statistics Values Provided by TeamCity
                        //https://confluence.jetbrains.com/display/TCD18/Custom+Chart#CustomChart-listOfDefaultStatisticValues
                        var failedTestCount = buildStatistics.FirstOrDefault(item => item.Name.Equals("FailedTestCount"));
                        var totalTestCount  = buildStatistics.FirstOrDefault(item => item.Name.Equals("TotalTestCount"));
                        if (totalTestCount != null)
                        {
                            if (failedTestCount != null)
                            {
                                //The statistic entries expose their numeric value as a string (assumed here), so convert before comparing.
                                int failed = Convert.ToInt32(failedTestCount.Value);
                                int total  = Convert.ToInt32(totalTestCount.Value);
                                if (failed > 0)
                                {
                                    responseText.AppendFormat("{0} of {1} tests did not pass", Ssml.SayAs(Ssml.SayAs(failed), "interjection"), Ssml.SayAs(total));
                                }
                                else
                                {
                                    responseText.AppendFormat("All {0} tests passed!", Ssml.SayAs(total));
                                }
                            }
                        }
                        else
                        {
                            responseText.Append("No automated Tests were executed");
                        }
                    }
                    else
                    {
                        responseText.AppendFormat("No Build Information found for {0}", projectNameToSearch);
                    }
                    #endregion
                }
                else
                {
                    responseText.AppendFormat("{0} not found", projectNameToSearch);
                }
                responseText.Append("<break/>");
                responseText.Append("That´s all. Bye.");
                responseText.Append("<audio src='https://s3.amazonaws.com/ask-soundlibrary/ui/gameshow/amzn_ui_sfx_gameshow_neutral_response_03.mp3'/>");
            }
            catch
            {
                responseText.Append("<audio src='https://s3.amazonaws.com/ask-soundlibrary/ui/gameshow/amzn_ui_sfx_gameshow_negative_response_02.mp3'/>");
                responseText.Append("<break/>");
                responseText.Append("ATTENTION: No connection to teamcity.");
                responseText.Append("<break/>");
            }

            responseText.Append("</speak>");
            SsmlOutputSpeech speech = new SsmlOutputSpeech();
            speech.Ssml = responseText.ToString();
            return(ResponseBuilder.Tell(speech));
        }
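
The TimeAgo() call on lastBuild.FinishDate is not a BCL method; it is presumably a small extension method in the project. A hedged sketch, under the assumption that it renders a rough spoken "x minutes/hours/days ago" phrase:

using System;

public static class DateTimeExtensions
{
    //Hypothetical helper: the real extension may format the elapsed time differently.
    public static string TimeAgo(this DateTime value)
    {
        var span = DateTime.Now - value;
        if (span.TotalMinutes < 60) return $"{(int)span.TotalMinutes} minutes ago";
        if (span.TotalHours < 24) return $"{(int)span.TotalHours} hours ago";
        return $"{(int)span.TotalDays} days ago";
    }
}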
Example #5
        private async Task <string> OnIntentRequest(IAlexaRequest alexaRequest)
        {
            IAlexaSession session = null;

            var request = alexaRequest.request;
            var intent  = request.intent;
            var context = alexaRequest.context;
            var system  = context.System;
            var person  = system.person;

            if (!IsVoiceAuthenticationAccountLinkRequest(intent)) // create a session
            {
                if (!(person is null))
                {
                    if (!SpeechAuthorization.Instance.UserPersonalizationProfileExists(person))
                    {
                        return(await AlexaResponseClient.Instance.BuildAlexaResponseAsync(new Response()
                        {
                            shouldEndSession = true,
                            outputSpeech = new OutputSpeech()
                            {
                                phrase = "You are not a recognized user. Please take moment to register your voice profile.",
                            }
                        }, null));
                    }
                }

                var user = SpeechAuthorization.Instance.GetRecognizedPersonalizationProfileResult(person);

                session = AlexaSessionManager.Instance.GetSession(alexaRequest, user);

                //There cannot be a room intent request without prior session context data.
                if (session.PersistedRequestData is null && IsRoomNameIntentRequest(intent))
                {
                    //end the session.
                    return(await new NotUnderstood(alexaRequest, session).Response());
                }
            }

            try
            {
                /*
                 * The Amazon Alexa Custom Skill Console does not allow "." in intent names,
                 * which would otherwise make building namespace paths easier.
                 * Instead, the intent names are saved with "_", which stands in for the "." of the reflected path to the corresponding .cs file.
                 * Replace each "_" (underscore) with a "." (period) to build the proper reflection path to the corresponding IntentRequest class.
                 */
                var intentName = intent.name.Replace("_", ".");
                ServerController.Instance.Log.Info($"Intent Name Route to {intentName}");
                return(await GetResponseResult(Type.GetType($"AlexaController.Api.IntentRequest.{intentName}"), alexaRequest, session));
            }
            catch (Exception exception)
            {
                var dataSource = await DataSourcePropertiesManager.Instance.GetGenericViewPropertiesAsync(exception.Message, "/particles");

                return(await AlexaResponseClient.Instance.BuildAlexaResponseAsync(new Response()
                {
                    shouldEndSession = true,
                    outputSpeech = new OutputSpeech()
                    {
                        phrase = $"{Ssml.SayWithEmotion($"Sorry, I was unable to do that. {exception.Message}", Emotion.excited, Intensity.low)}",
                    },

                    directives = new List <IDirective>()
                    {
                        await RenderDocumentDirectiveManager.Instance.RenderVisualDocumentDirectiveAsync(dataSource, session)
                    }
                }, session));
            }
        }
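
GetResponseResult, which receives the reflected intent handler type, is not shown in this example. The sketch below sits alongside OnIntentRequest and assumes every IntentRequest class exposes a (IAlexaRequest, IAlexaSession) constructor and a Response() method, mirroring the "new NotUnderstood(alexaRequest, session).Response()" usage above; the real implementation may differ.

        //Hypothetical helper; names and error handling are assumptions.
        private static async Task<string> GetResponseResultSketch(Type intentType, IAlexaRequest alexaRequest, IAlexaSession session)
        {
            if (intentType is null)
            {
                //Unknown intent name: surface an error so the caller's catch block can respond gracefully.
                throw new ArgumentException("No handler type was found for this intent name.");
            }

            //Instantiate the reflected handler and invoke its Response() method dynamically.
            dynamic handler = Activator.CreateInstance(intentType, alexaRequest, session);
            return await handler.Response();
        }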