public void ProcessOutput(ApiAiSDK.Model.AIResponse res)
{
    string outText = JsonConvert.SerializeObject(res);
    Debug.Log(outText);

    string result = res.Result.Fulfillment.Speech; //getStringInBetween(outText, targetBegin, targetEnd);
    Debug.Log("Result: " + result);

    WindowsVoice.speak(result);
}
void AiService_OnResult(AIResponse response)
{
    InvokeOnMainThread(() =>
    {
        if (!response.IsError)
        {
            var jsonSettings = new JsonSerializerSettings
            {
                NullValueHandling = NullValueHandling.Ignore
            };

            var responseString = JsonConvert.SerializeObject(response, Formatting.Indented, jsonSettings);
            //resultTextView.Text = responseString;

            DismissViewController(true, null);
        }
        else
        {
            //resultTextView.Text = response.Status.ErrorDetails;
        }
    });
}
public AIServiceException(AIResponse response)
{
    Response = response;
}
public void ProcessOutput(ApiAiSDK.Model.AIResponse res)
{
    string action = res.Result.Action;
    string speech = res.Result.Fulfillment.Speech;

    if (!speech.Contains("?"))
    {
        switch (action)
        {
            case "show_dataflow":
                barGraphObject.SetActive(false);
                timeGraphObject.SetActive(false);
                dataflowObject.SetActive(true);
                break;
            case "show_bar_graph_realtime":
                barGraphThing.BarCommand = "realtime:" + res.Result.GetStringParameter("data-group");
                barGraphObject.SetActive(true);
                timeGraphObject.SetActive(false);
                dataflowObject.SetActive(false);
                break;
            case "show_bar_graph_analysis_results":
                barGraphThing.BarCommand = "analysis:null";
                barGraphObject.SetActive(true);
                timeGraphObject.SetActive(false);
                dataflowObject.SetActive(false);
                break;
            case "show_time_graph":
                string sensorType = res.Result.GetStringParameter("sensor-type");
                JObject durationObj = res.Result.GetJsonParameter("duration");

                // SelectToken returns a JToken, so convert explicitly before use.
                int durationAmount = (int)durationObj.SelectToken("amount");
                string durationUnit = (string)durationObj.SelectToken("unit");

                utilsPlugin.ShowToastMessage(action + " => " + sensorType + " => " + durationAmount + " " + durationUnit);
                utilsPlugin.ShowToastMessage(speech);

                timeGraphThing.SensorType = sensorType;

                // Append the missing letters so an abbreviated trailing unit in the
                // speech ("h" / "min" / "s") is spoken in full by the TTS engine.
                switch (durationUnit)
                {
                    case "h":
                        speech += "ours";
                        timeGraphThing.TimeFromNow = durationAmount + ":00:00";
                        break;
                    case "min":
                        speech += "utes";
                        timeGraphThing.TimeFromNow = "00:" + durationAmount + ":00";
                        break;
                    case "s":
                        speech += "econds";
                        timeGraphThing.TimeFromNow = "00:00:" + durationAmount;
                        break;
                }

                timeGraphObject.SetActive(true);
                barGraphObject.SetActive(false);
                dataflowObject.SetActive(false);
                timeGraphThing.GetSensorData();
                break;
            case "hide_everything":
                barGraphObject.SetActive(false);
                timeGraphObject.SetActive(false);
                dataflowObject.SetActive(false);
                break;
            default:
                utilsPlugin.ShowToastMessage(action);
                break;
        }
    }

    SpeakTTS(speech);
}
static void CheckForErrors(AIResponse aiResponse)
{
    if (aiResponse == null)
    {
        throw new AIServiceException("API.AI response parsed as null. Check debug log for details.");
    }

    if (aiResponse.IsError)
    {
        throw new AIServiceException(aiResponse);
    }
}
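A short usage sketch for the guard above; GetResponseSomehow is a hypothetical stand-in for whatever call actually produced the AIResponse, only CheckForErrors and AIServiceException come from the snippets in this section.

// Hypothetical caller: validate the response before reading the fulfillment.
AIResponse aiResponse = GetResponseSomehow();
try
{
    CheckForErrors(aiResponse);
    Console.WriteLine(aiResponse.Result?.Fulfillment?.Speech);
}
catch (AIServiceException ex)
{
    Console.WriteLine("API.AI error: " + ex.Message);
}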
protected virtual void FireOnResult(AIResponse response)
{
    OnResult.InvokeSafely(response);
}
/// <summary>
/// Launch app and pass the appropriate parameters to it
/// </summary>
/// <param name="voiceServiceConnection"></param>
/// <param name="aiResponse"></param>
/// <returns></returns>
public async Task LaunchAppInForegroundAsync(VoiceCommandServiceConnection voiceServiceConnection, AIResponse aiResponse)
{
    var textMessage = aiResponse?.Result?.Fulfillment?.Speech ?? string.Empty;

    var userMessage = new VoiceCommandUserMessage
    {
        SpokenMessage = textMessage,
        DisplayMessage = textMessage
    };

    var response = VoiceCommandResponse.CreateResponse(userMessage);
    response.AppLaunchArgument = JsonConvert.SerializeObject(aiResponse, Formatting.Indented);

    await voiceServiceConnection.RequestAppLaunchAsync(response);
}
private void NavigateToMain(AIResponse aiResponse)
{
    Frame rootFrame = Window.Current.Content as Frame;

    // Do not repeat app initialization when the Window already has content,
    // just ensure that the window is active
    if (rootFrame == null)
    {
        // Create a Frame to act as the navigation context and navigate to the first page
        rootFrame = new Frame();
        rootFrame.NavigationFailed += OnNavigationFailed;

        // Place the frame in the current Window
        Window.Current.Content = rootFrame;
    }

    rootFrame.Navigate(typeof(MainPage), aiResponse);

    // Ensure the current window is active
    Window.Current.Activate();
}
private void OutputJson(AIResponse aiResponse)
{
    resultTextBlock.Text = JsonConvert.SerializeObject(aiResponse, Formatting.Indented);
}
/// <summary>
/// Make Cortana speak the api.ai response.
/// </summary>
/// <param name="voiceServiceConnection"></param>
/// <param name="aiResponse"></param>
/// <returns></returns>
public async Task SendResponseToCortanaAsync(VoiceCommandServiceConnection voiceServiceConnection, AIResponse aiResponse)
{
    var textResponse = aiResponse.Result.Fulfillment?.Speech ?? string.Empty;

    var userMessage = new VoiceCommandUserMessage
    {
        DisplayMessage = textResponse,
        SpokenMessage = textResponse
    };

    var response = VoiceCommandResponse.CreateResponse(userMessage);

    // Cortana will present a "Go to app_name" link that the user
    // can tap to launch the app.
    // Pass in a launch argument to enable the app to deep link to a page
    // relevant to the voice command.
    //response.AppLaunchArgument =
    //    string.Format("destination={0}", "Las Vegas");

    await voiceServiceConnection.ReportSuccessAsync(response);
}
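For orientation, a hedged sketch of the Cortana background task that would typically call the two helpers above. The class name and the GetAiResponseAsync helper are assumptions; only the standard UWP voice-command APIs and the SendResponseToCortanaAsync method shown above are taken as given, and both helpers are assumed to be accessible from this class.

using Windows.ApplicationModel.AppService;
using Windows.ApplicationModel.Background;
using Windows.ApplicationModel.VoiceCommands;
using ApiAiSDK.Model;

public sealed class CortanaCommandService : IBackgroundTask
{
    public async void Run(IBackgroundTaskInstance taskInstance)
    {
        var deferral = taskInstance.GetDeferral();

        var triggerDetails = taskInstance.TriggerDetails as AppServiceTriggerDetails;
        var connection = VoiceCommandServiceConnection.FromAppServiceTriggerDetails(triggerDetails);

        // Read the recognized voice command text.
        var voiceCommand = await connection.GetVoiceCommandAsync();
        var recognizedText = voiceCommand.SpeechRecognitionResult.Text;

        // GetAiResponseAsync is a hypothetical helper that sends the text to api.ai
        // and returns the parsed AIResponse.
        AIResponse aiResponse = await GetAiResponseAsync(recognizedText);

        // Hand the result back to Cortana using the helper shown above.
        await SendResponseToCortanaAsync(connection, aiResponse);

        deferral.Complete();
    }
}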
private void OutputParams(AIResponse aiResponse)
{
    var contextsParams = new Dictionary<string, string>();

    if (aiResponse.Result?.Contexts != null)
    {
        foreach (var context in aiResponse.Result.Contexts)
        {
            if (context.Parameters != null)
            {
                foreach (var parameter in context.Parameters)
                {
                    if (!contextsParams.ContainsKey(parameter.Key))
                    {
                        contextsParams.Add(parameter.Key, parameter.Value);
                    }
                }
            }
        }
    }

    var resultBuilder = new StringBuilder();
    foreach (var contextsParam in contextsParams)
    {
        resultBuilder.AppendLine(contextsParam.Key + ": " + contextsParam.Value);
    }

    parametersTextBlock.Text = resultBuilder.ToString();
}
private async void ProcessResult(AIResponse aiResponse)
{
    RunInUIThread(() =>
    {
        listenButton.Content = "Listen";
        OutputJson(aiResponse);
        OutputParams(aiResponse);
    });

    var speechText = aiResponse.Result?.Fulfillment?.Speech;
    if (!string.IsNullOrEmpty(speechText))
    {
        var speechStream = await speechSynthesizer.SynthesizeTextToStreamAsync(speechText);
        mediaElement.SetSource(speechStream, speechStream.ContentType);
        mediaElement.Play();
    }
}
public AIResponseEventArgs(AIResponse response)
{
    this.response = response;
}
private void ProcessResult(AIResponse aiResponse)
{
    if (aiResponse != null)
    {
        FireOnResult(aiResponse);
    }
    else
    {
        FireOnError(new Exception("API.AI Service returns null"));
    }
}
private void FireOnResult(AIResponse aiResponse)
{
    var onResult = OnResult;
    if (onResult != null)
    {
        onResult(this, new AIResponseEventArgs(aiResponse));
    }
}
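To round out the event-based pieces above, a minimal subscriber sketch; the aiService host object and a Response property on AIResponseEventArgs (the constructor above only shows the backing field) are assumptions, not part of the snippets.

// Minimal subscriber sketch (assumptions noted above).
aiService.OnResult += (sender, e) =>
{
    // e.Response is assumed to expose the AIResponse stored by the constructor above.
    var speech = e.Response?.Result?.Fulfillment?.Speech;
    System.Diagnostics.Debug.WriteLine(speech ?? "(no speech)");
};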