/// <summary>
/// Entry point of the IVR job for a new incoming audio/video call: accepts the invite,
/// waits for the media flow to connect, subscribes to DTMF tones, and plays the main prompt.
/// </summary>
/// <param name="e">Event args carrying the incoming <see cref="IAudioVideoInvitation"/>.</param>
/// <returns>void; it is an <i>async</i> method.</returns>
private async Task StartAudioVideoIVRFlowAsync(IncomingInviteEventArgs<IAudioVideoInvitation> e)
{
    Logger.Instance.Information(string.Format("[StartAudioVideoIVRFlowAsync]: LoggingContext: {0}", m_loggingContext));
    m_pstnCallConversation = null;

    // Step 1: accept the incoming call and wait until the invite transaction completes.
    Logger.Instance.Information(string.Format("[StartAudioVideoIVRFlowAsync] Step 1: Accept incoming av call: LoggingContext: {0}", m_loggingContext));
    await e.NewInvite.AcceptAsync(m_loggingContext).ConfigureAwait(false);
    await e.NewInvite.WaitForInviteCompleteAsync().ConfigureAwait(false);

    // If everything is fine, the invite now exposes the related conversation; track it so
    // we get notified when the remote side hangs up.
    m_pstnCallConversation = e.NewInvite.RelatedConversation;
    m_pstnCallConversation.HandleResourceRemoved += HandlePSTNCallConversationRemoved;

    // Step 2: wait for the AV flow to reach Connected before any media operation.
    IAudioVideoCall pstnAv = m_pstnCallConversation.AudioVideoCall;
    IAudioVideoFlow pstnFlow = await pstnAv.WaitForAVFlowConnected().ConfigureAwait(false);
    // NOTE(review): this handler is detached by CleanupEventHandlers when the call is
    // terminated; if the flow drops any other way, presumably the handler stays attached —
    // verify against the conversation-removed path.
    pstnFlow.ToneReceivedEvent += ToneReceivedEvent;

    // Step 3: play the main menu prompt to the caller.
    await PlayPromptAsync(pstnFlow, AudioVideoIVRAction.PlayMainPrompt).ConfigureAwait(false);
}
/// <summary>
/// Plays the wav prompt mapped to <paramref name="action"/> on the given media flow.
/// </summary>
/// <param name="flow">Connected <see cref="IAudioVideoFlow"/> on which the prompt is played.</param>
/// <param name="action">IVR action whose prompt file is looked up in <c>promptMap</c>.</param>
/// <returns>void; it is an <i>async</i> method.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="flow"/> is null.</exception>
private async Task PlayPromptAsync(IAudioVideoFlow flow, AudioVideoIVRAction action)
{
    // Fix: guard against a null flow, matching the sibling PlayPromptAsync(IAudioVideoFlow)
    // overload; previously this surfaced later as a NullReferenceException.
    if (flow == null)
    {
        throw new ArgumentNullException(nameof(flow));
    }

    // NOTE(review): GetOrNull may return null for an unmapped action, producing a
    // ".../resources/" URI — presumably callers only pass mapped actions; verify.
    string wavFile = promptMap.GetOrNull(action);
    Logger.Instance.Information("[AudioVideoIVRJob] playing prompt: {0}.", wavFile);

    // Prompts are served from this application's own callback host under /resources.
    var resourceUri = new Uri(string.Format("{0}://{1}/resources/{2}", m_callbackUri.Scheme, m_callbackUri.Host, wavFile));
    try
    {
        await flow.PlayPromptAsync(resourceUri, m_loggingContext).ConfigureAwait(false);
    }
    catch (CapabilityNotAvailableException ex)
    {
        // Best-effort: the service does not expose PlayPrompt right now; log and continue.
        Logger.Instance.Error("[AudioVideoIVRJob] PlayPrompt api is not available!", ex);
    }
    catch (RemotePlatformServiceException ex)
    {
        ErrorInformation error = ex.ErrorInformation;
        if (error != null && error.Code == ErrorCode.Informational && error.Subcode == ErrorSubcode.CallTerminated)
        {
            // Caller hung up mid-prompt: expected, not an error.
            Logger.Instance.Information("[AudioVideoIVRJob] Call terminated while playing prompt.");
        }
        else
        {
            throw;
        }
    }
}
/// <summary>
/// Plays the prompt configured on <see cref="currentMenu"/> over the supplied media flow.
/// A terminated call or an unavailable PlayPrompt capability is logged and swallowed;
/// any other platform error propagates to the caller.
/// </summary>
/// <param name="flow"><see cref="AudioVideoFlow"/> on which the prompt is to be played.</param>
/// <returns>void; it is an <i>async</i> method.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="flow"/> is null.</exception>
private async Task PlayPromptAsync(IAudioVideoFlow flow)
{
    if (flow == null)
    {
        throw new ArgumentNullException(nameof(flow));
    }

    string promptResource = currentMenu.Prompt;
    Logger.Instance.Information("[AudioVideoIVRJob] Playing prompt : {0}.", promptResource);

    try
    {
        var promptUri = new Uri(string.Format(azureApplication.ResourceUriFormat, promptResource));
        await flow.PlayPromptAsync(promptUri, loggingContext).ConfigureAwait(false);
    }
    catch (CapabilityNotAvailableException ex)
    {
        Logger.Instance.Error("[AudioVideoIVRJob] PlayPrompt api is not available!", ex);
    }
    catch (RemotePlatformServiceException ex)
    {
        ErrorInformation errorInfo = ex.ErrorInformation;
        bool callTerminated = errorInfo != null
            && errorInfo.Code == ErrorCode.Informational
            && errorInfo.Subcode == ErrorSubcode.CallTerminated;

        if (!callTerminated)
        {
            throw;
        }

        // Caller hung up while the prompt was playing; nothing more to do.
        Logger.Instance.Information("[AudioVideoIVRJob] Call terminated while playing prompt.");
    }
}
/// <summary>
/// Reacts to a DTMF tone: terminates the call for <see cref="AudioVideoIVRAction.TerminateCall"/>,
/// otherwise plays the prompt mapped to the tone; unmapped tones are ignored.
/// </summary>
/// <param name="avFlow">Flow that raised the tone event.</param>
/// <param name="e">Tone event args; the tone value maps directly onto <see cref="AudioVideoIVRAction"/>.</param>
/// <returns>void; it is an <i>async</i> method.</returns>
private async Task HandleToneEventAsync(IAudioVideoFlow avFlow, ToneReceivedEventArgs e)
{
    AudioVideoIVRAction action = (AudioVideoIVRAction)e.Tone;
    Logger.Instance.Information("[AudioVideoIVRJob] ToneReceivedEvent received : {0}.", action);

    if (!promptMap.ContainsKey(action))
    {
        Logger.Instance.Information("[AudioVideoIVRJob] No action defined for this tone.");
        return;
    }

    if (action == AudioVideoIVRAction.TerminateCall)
    {
        Logger.Instance.Information("[AudioVideoIVRJob] Terminating the call.");

        // Fix: guard the cast — previously a non-IAudioVideoCall parent caused a
        // NullReferenceException here and event handlers were never cleaned up.
        var avCall = avFlow.Parent as IAudioVideoCall;
        if (avCall != null)
        {
            await avCall.TerminateAsync(m_loggingContext).ConfigureAwait(false);
        }
        else
        {
            Logger.Instance.Warning("[AudioVideoIVRJob] Flow parent is not an audio video call; cannot terminate.");
        }
        CleanupEventHandlers(avFlow);
    }
    else
    {
        await PlayPromptAsync(avFlow, action).ConfigureAwait(false);
    }
}
/// <summary>
/// Dispatches a DTMF tone against the current menu's key map: play/repeat/previous prompt,
/// transfer to a user, or terminate the call. Unrecognized or unmapped tones are ignored.
/// </summary>
/// <param name="flow">Flow that raised the tone event.</param>
/// <param name="e">Tone event args.</param>
/// <returns>void; it is an <i>async</i> method.</returns>
private async Task HandleToneEventAsync(IAudioVideoFlow flow, ToneReceivedEventArgs e)
{
    string tone = ToneValueToString(e.Tone);
    if (tone == null)
    {
        Logger.Instance.Warning("[AudioVideoIVRJob] Tone could not be identified : {0}.", e.Tone.ToString());
        return;
    }

    Logger.Instance.Information("[AudioVideoIVRJob] ToneReceivedEvent received : {0}.", tone);
    if (currentMenu?.KeyMap == null || !currentMenu.KeyMap.ContainsKey(tone))
    {
        Logger.Instance.Information("[AudioVideoIVRJob] No action defined for this tone.");
        return;
    }

    var keyAction = currentMenu.KeyMap[tone];
    switch (keyAction.Action)
    {
        case AudioVideoIVRActions.PlayPrompt:
            Logger.Instance.Information("[AudioVideoIVRJob] Playing prompt.");
            // Descend into the selected sub-menu before playing its prompt.
            currentMenu = keyAction;
            await PlayPromptAsync(flow).ConfigureAwait(false);
            break;

        case AudioVideoIVRActions.RepeatPrompt:
            Logger.Instance.Information("[AudioVideoIVRJob] Repeating prompt.");
            await PlayPromptAsync(flow).ConfigureAwait(false);
            break;

        case AudioVideoIVRActions.GoToPreviousPrompt:
            Logger.Instance.Information("[AudioVideoIVRJob] Going to previous prompt.");
            // At the root menu there is no parent, so stay where we are.
            currentMenu = currentMenu.ParentInput ?? currentMenu;
            await PlayPromptAsync(flow).ConfigureAwait(false);
            break;

        case AudioVideoIVRActions.TransferToUser:
        {
            Logger.Instance.Information("[AudioVideoIVRJob] Transferring the call to {0}.", keyAction.User);
            currentMenu = keyAction;
            await PlayPromptAsync(flow).ConfigureAwait(false);

            // Fix: guard the cast — previously a non-IAudioVideoCall parent caused a
            // NullReferenceException and handlers were never detached.
            var audioVideoCall = flow.Parent as IAudioVideoCall;
            if (audioVideoCall != null)
            {
                await audioVideoCall.TransferAsync(keyAction.User, null, loggingContext).ConfigureAwait(false);
            }
            else
            {
                Logger.Instance.Warning("[AudioVideoIVRJob] Flow parent is not an audio video call; cannot transfer.");
            }
            CleanupEventHandlers(flow);
            break;
        }

        case AudioVideoIVRActions.TerminateCall:
        {
            Logger.Instance.Information("[AudioVideoIVRJob] Terminating the call.");

            // Fix: same unchecked-cast hazard as the transfer path.
            var audioVideoCall = flow.Parent as IAudioVideoCall;
            if (audioVideoCall != null)
            {
                await audioVideoCall.TerminateAsync(loggingContext).ConfigureAwait(false);
            }
            else
            {
                Logger.Instance.Warning("[AudioVideoIVRJob] Flow parent is not an audio video call; cannot terminate.");
            }
            CleanupEventHandlers(flow);
            break;
        }
    }
}
/// <summary>
/// Removes all event handlers from <paramref name="audioVideoFlow"/> and its parent call.
/// </summary>
/// <param name="audioVideoFlow">The <see cref="AudioVideoFlow"/> object on which we are currently listening for events</param>
private void CleanupEventHandlers(IAudioVideoFlow audioVideoFlow)
{
    if (audioVideoFlow == null)
    {
        return;
    }

    audioVideoFlow.ToneReceivedEvent -= ToneReceivedEvent;

    // Fix: guard the cast — previously a non-IAudioVideoCall parent caused a
    // NullReferenceException during cleanup.
    var audioVideoCall = audioVideoFlow.Parent as IAudioVideoCall;
    if (audioVideoCall != null)
    {
        audioVideoCall.AudioVideoFlowConnected -= AudioVideoFlowConnected;
    }
}
/// <summary>
/// Waits for <see cref="AudioVideoFlow"/> to be connected.
/// </summary>
/// <param name="timeoutInSeconds">The timeout in seconds.</param>
/// <returns>A task that completes with the connected <see cref="IAudioVideoFlow"/>,
/// or faults if the flow does not connect within the timeout.</returns>
public Task<IAudioVideoFlow> WaitForAVFlowConnected(int timeoutInSeconds = 30)
{
    IAudioVideoFlow flow = AudioVideoFlow;

    // Fix: RunContinuationsAsynchronously prevents the event-raising thread from
    // synchronously running awaiter continuations inside TrySetResult (reentrancy /
    // deadlock hazard).
    var tcs = new TaskCompletionSource<IAudioVideoFlow>(TaskCreationOptions.RunContinuationsAsynchronously);
    AudioVideoFlowConnected += (sender, args) => tcs.TrySetResult(AudioVideoFlow);

    // The flow may already have connected before the handler above was attached;
    // complete immediately in that case (TrySetResult tolerates a duplicate from the event).
    if (flow?.State == FlowState.Connected)
    {
        tcs.TrySetResult(AudioVideoFlow);
    }

    // NOTE(review): the lambda is never unsubscribed from AudioVideoFlowConnected, so each
    // call leaves a handler (and its TCS) attached for the lifetime of this object —
    // consider detaching once the returned task completes.
    return tcs.Task.TimeoutAfterAsync(TimeSpan.FromSeconds(timeoutInSeconds));
}
/// <summary>
/// Routes an incoming platform event to the child resource it belongs to: creates/updates/
/// removes the <see cref="AudioVideoFlow"/> for flow events, handles transfer events, and
/// otherwise forwards the event to the existing flow for further dispatch.
/// </summary>
/// <param name="eventContext">The event to process.</param>
/// <returns>true if the event was handled by this resource or a child; false otherwise.</returns>
internal override bool ProcessAndDispatchEventsToChild(EventContext eventContext)
{
    bool processed = false;
    if (eventContext.EventEntity.Link.Token == TokenMapper.GetTokenName(typeof(AudioVideoFlowResource)))
    {
        if (eventContext.EventEntity.Relationship == EventOperation.Added)
        {
            var audioVideoFlowResource = ConvertToPlatformServiceResource<AudioVideoFlowResource>(eventContext);
            AudioVideoFlow = new AudioVideoFlow(RestfulClient, audioVideoFlowResource, BaseUri, UriHelper.CreateAbsoluteUri(BaseUri, audioVideoFlowResource.SelfUri), this);
            // Raise event when flow state changes to connected
            AudioVideoFlow.HandleResourceUpdated += RaiseAudioVideoFlowConnectedEventIfConnected;
            // Raise event if the flow is already connected (Added may arrive post-connect)
            RaiseAudioVideoFlowConnectedEventIfConnected(null, null);
        }
        // Let the flow apply the event to its own state before we possibly drop it below.
        // NOTE(review): assumes AudioVideoFlow is non-null here for Updated/Deleted events,
        // i.e. an Added event always arrived first — verify against the event contract.
        ((AudioVideoFlow)AudioVideoFlow).HandleResourceEvent(eventContext);
        if (eventContext.EventEntity.Relationship == EventOperation.Deleted)
        {
            AudioVideoFlow = null;
        }
        processed = true;
    }
    else if (eventContext.EventEntity.Link.Token == TokenMapper.GetTokenName(typeof(TransferResource)))
    {
        this.HandleTransferEvent(eventContext);
        processed = true;
    }

    // Snapshot the property once: it can be nulled by a concurrent Deleted event above.
    var flow = AudioVideoFlow;
    if (!processed && flow != null)
    {
        processed = ((AudioVideoFlow)flow).ProcessAndDispatchEventsToChild(eventContext);
    }
    //add any new child resource under audioVideo processing here
    return(processed);
}
/// <summary>
/// Per-test setup: builds a mock application endpoint, initializes it, simulates an
/// incoming audio call plus connected-flow events, and captures the resulting flow
/// under test in <c>m_audioVideoFlow</c>.
/// </summary>
/// <returns>A task the test framework awaits before running the test.</returns>
// Fix: async void setup made failures unobservable and let tests start before setup
// finished; returning Task lets the framework await it (MSTest/NUnit/xUnit all support
// async Task setup methods).
public async Task TestSetup()
{
    m_loggingContext = new LoggingContext(Guid.NewGuid());
    var data = TestHelper.CreateApplicationEndpoint();
    m_mockEventChannel = data.EventChannel;
    m_restfulClient = data.RestfulClient;

    ApplicationEndpoint applicationEndpoint = data.ApplicationEndpoint;
    await applicationEndpoint.InitializeAsync(m_loggingContext).ConfigureAwait(false);
    await applicationEndpoint.InitializeApplicationAsync(m_loggingContext).ConfigureAwait(false);

    // Capture the invite raised by the incoming-call event below.
    IAudioVideoInvitation invitation = null;
    applicationEndpoint.HandleIncomingAudioVideoCall += (sender, args) => invitation = args.NewInvite;

    // Replay canned events: incoming call, call connected, flow added.
    TestHelper.RaiseEventsFromFile(m_mockEventChannel, "Event_IncomingAudioCall.json");
    TestHelper.RaiseEventsFromFile(m_mockEventChannel, "Event_AudioVideoConnected.json");
    TestHelper.RaiseEventsFromFile(m_mockEventChannel, "Event_AudioVideoFlowAdded.json");

    // NOTE(review): assumes the canned events always produce an invitation with a related
    // conversation/flow — a null here means the fixture files are out of sync.
    m_audioVideoFlow = invitation.RelatedConversation.AudioVideoCall.AudioVideoFlow;
}
/// <summary>
/// Detaches the DTMF tone handler from <paramref name="audioVideoFlow"/> so this job
/// stops receiving tone events once the call is over (prevents a handler leak).
/// </summary>
/// <param name="audioVideoFlow">Flow this job was listening on; assumed non-null by callers.</param>
private void CleanupEventHandlers(IAudioVideoFlow audioVideoFlow)
{
    audioVideoFlow.ToneReceivedEvent -= ToneReceivedEvent;
}
/// <summary>
/// Entry point of the hunt-group job for an incoming call. Accepts the call, plays a
/// prompt, then transfers the caller to an agent in one of two modes:
/// (A) InviteTargetUris configured — call all agents in parallel and transfer to whoever
/// accepts; (B) no targets configured — read the agent choice from a DTMF tone first.
/// </summary>
/// <param name="e">Event args carrying the incoming <see cref="IAudioVideoInvitation"/>.</param>
/// <returns>void; it is an <i>async</i> method.</returns>
private async Task StartHuntGroupFlowAsync(IncomingInviteEventArgs<IAudioVideoInvitation> e)
{
    Logger.Instance.Information(string.Format("[StartHuntGroupFlow] StartHuntGroupFlow: LoggingContext: {0}", LoggingContext));
    m_pstnCallConversation = null;
    m_outboundAVConversations = new List<IConversation>();

    // Step1: accept incoming call
    Logger.Instance.Information(string.Format("[StartHuntGroupFlow] Step 1: Accept incoming av call: LoggingContext: {0}", LoggingContext));
    await e.NewInvite.AcceptAsync(LoggingContext).ConfigureAwait(false);
    await e.NewInvite.WaitForInviteCompleteAsync().ConfigureAwait(false);

    // if everything is fine, you will be able to get the related conversation
    m_pstnCallConversation = e.NewInvite.RelatedConversation;
    m_pstnCallConversation.HandleResourceRemoved += HandlePSTNCallConversationRemoved;

    // Step 2 : wait AV flow connected and play Promt
    IAudioVideoCall pstnAv = m_pstnCallConversation.AudioVideoCall;
    IAudioVideoFlow pstnFlow = await pstnAv.WaitForAVFlowConnected().ConfigureAwait(false);
    pstnFlow.ToneReceivedEvent += HandleToneReceived;

    // Step 3 : play prompt
    // We support two modes in this job.
    // A : InviteTargetUris are provided in the configuration; we will send invites to all of them and transfer the incoming call
    //     to whoever accepts it.
    // B : InviteTargetUris not provided in the configuration; we will provide the user with a list of agents and let the user
    //     pick the target transfer using DTMF tones.
    bool skipDTMF = m_jobInput.InviteTargetUris != null && m_jobInput.InviteTargetUris.Length > 0;
    // Each mode has its own pre-recorded announcement.
    string wavFile = skipDTMF ? "HuntGroupA.wav" : "HuntGroupB.wav";
    var resourceUri = new Uri(string.Format(this.AzureApplication.ResourceUriFormat, wavFile));
    try
    {
        await pstnFlow.PlayPromptAsync(resourceUri, LoggingContext).ConfigureAwait(false);
    }
    catch (CapabilityNotAvailableException ex)
    {
        // Best-effort prompt: log and carry on with the transfer flow.
        Logger.Instance.Error("[HuntGroupJob] PlayPrompt api is not available!", ex);
    }
    catch (RemotePlatformServiceException ex)
    {
        ErrorInformation error = ex.ErrorInformation;
        if (error != null && error.Code == ErrorCode.Informational && error.Subcode == ErrorSubcode.CallTerminated)
        {
            // Caller hung up mid-prompt; expected, not an error.
            Logger.Instance.Information("[HuntGroupJob] Call terminated while playing prompt.");
        }
        else
        {
            throw;
        }
    }

    // The call context identifies the incoming call when transferring it to an agent.
    string callContext = pstnAv.CallContext;
    if (string.IsNullOrEmpty(callContext))
    {
        throw new PlatformserviceApplicationException("No valid callcontext in audioVideo resource ");
    }

    //Step 4 : Make out bound call to agents and do transfer
    // NOTE(review): the 'as' cast below is not null-checked; a non-ICommunication parent
    // would flow a null into StartAgentCallAndTransferFlowAsync — verify upstream guarantees.
    ICommunication communication = m_pstnCallConversation.Parent as ICommunication;
    bool transferFlowSuccess = false;
    if (skipDTMF)
    {
        // Mode A: fan out to every configured agent; first accepted transfer wins.
        List<Task> TasksWithAgent = new List<Task>();
        foreach (string to in m_jobInput.InviteTargetUris)
        {
            // Continuations only log/record the outcome so one failed agent leg does not
            // fault the WhenAll below. transferFlowSuccess is a benign last-writer-wins bool.
            Task a = this.StartAgentCallAndTransferFlowAsync(communication, to, callContext).ContinueWith
            (
                pTask =>
                {
                    if (pTask.IsFaulted)
                    {
                        Logger.Instance.Warning("[HuntGroupJob] Transfer flow failed." + pTask.Exception);
                    }
                    else
                    {
                        Logger.Instance.Information("[HuntGroupJob] Transfer flow complete.");
                        transferFlowSuccess = true;
                    }
                }
            );
            TasksWithAgent.Add(a);
        }
        await Task.WhenAll(TasksWithAgent.ToArray()).ConfigureAwait(false);
    }
    else //Upgraded version, with DTMF recognize
    {
        //wait tone
        // m_toneReceived completes with the agent URI chosen via DTMF (see HandleToneReceived).
        string target = await m_toneReceived.Task.ConfigureAwait(false);
        try
        {
            await this.StartAgentCallAndTransferFlowAsync(communication, target, callContext).ConfigureAwait(false);
            transferFlowSuccess = true;
        }
        catch (CapabilityNotAvailableException ex)
        {
            Logger.Instance.Warning("[HuntGroupJob] Transfer flow failed.", ex);
        }
        catch (RemotePlatformServiceException ex)
        {
            Logger.Instance.Warning("[HuntGroupJob] Transfer flow failed.", ex);
        }
    }

    // Reset the transfer gate so a future call can attempt a transfer again.
    m_outboundCallTransferLock = 0;
    if (transferFlowSuccess)
    {
        Logger.Instance.Information("TransferFlow success");
    }
    else
    {
        Logger.Instance.Error("TransferFlow Failed, see above trace for error info");
    }
}
/// <summary>
/// Entry point of the call-center job for an incoming call: accepts the call, plays a
/// prompt, then invites every configured agent in parallel and transfers the incoming
/// call to whichever agent accepts.
/// </summary>
/// <param name="e">Event args carrying the incoming <see cref="IAudioVideoInvitation"/>.</param>
/// <returns>void; it is an <i>async</i> method.</returns>
/// <exception cref="InvalidOperationException">Thrown when the accepted call exposes no call context.</exception>
private async Task StartCallCenterFlowAsync(IncomingInviteEventArgs<IAudioVideoInvitation> e)
{
    Logger.Instance.Information(string.Format("[StartCallCenterFlowAsync] StartCallCenterFlowAsync: LoggingContext: {0}", m_loggingContext));
    m_pstnCallConversation = null;
    m_outboundAVConversations = new List<IConversation>();

    // Step1: accept incoming call
    Logger.Instance.Information(string.Format("[StartCallCenterFlowAsync] Step 1: Accept incoming av call: LoggingContext: {0}", m_loggingContext));
    await e.NewInvite.AcceptAsync(m_loggingContext).ConfigureAwait(false);
    await e.NewInvite.WaitForInviteCompleteAsync().ConfigureAwait(false);

    // if everything is fine, you will be able to get the related conversation
    m_pstnCallConversation = e.NewInvite.RelatedConversation;
    m_pstnCallConversation.HandleResourceRemoved += HandlePSTNCallConversationRemoved;

    // Step 2 : wait AV flow connected and play Promt
    IAudioVideoCall pstnAv = m_pstnCallConversation.AudioVideoCall;
    IAudioVideoFlow pstnFlow = await pstnAv.WaitForAVFlowConnected().ConfigureAwait(false);

    // Step 3 : play prompt
    // InviteTargetUris are provided in the configuration; we will send invites to all of them and transfer the incoming call
    // to whoever accepts it.
    string wavFile = "CallTransferSample.wav";
    var resourceUri = new Uri(string.Format("{0}://{1}/resources/{2}", m_callbackUri.Scheme, m_callbackUri.Host, wavFile));
    try
    {
        await pstnFlow.PlayPromptAsync(resourceUri, m_loggingContext).ConfigureAwait(false);
    }
    catch (CapabilityNotAvailableException ex)
    {
        // Best-effort prompt: log and carry on with the transfer flow.
        Logger.Instance.Error("[CallCenterJob] PlayPrompt api is not available!", ex);
    }
    catch (RemotePlatformServiceException ex)
    {
        ErrorInformation error = ex.ErrorInformation;
        if (error != null && error.Code == ErrorCode.Informational && error.Subcode == ErrorSubcode.CallTerminated)
        {
            // Caller hung up mid-prompt; expected, not an error.
            Logger.Instance.Information("[CallCenterJob] Call terminated while playing prompt.");
        }
        else
        {
            throw;
        }
    }

    // The call context identifies the incoming call when transferring it to an agent.
    string callContext = pstnAv.CallContext;
    if (string.IsNullOrEmpty(callContext))
    {
        // Fix: was `throw new Exception(...)` — throw a specific exception type instead of
        // the base class (still caught by existing `catch (Exception)` callers).
        throw new InvalidOperationException("No valid callcontext in audioVideo resource ");
    }

    //Step 4 : Make out bound call to agents and do transfer
    // NOTE(review): the 'as' cast is not null-checked; a non-ICommunication parent would
    // flow a null into StartAgentCallAndTransferFlowAsync — verify upstream guarantees.
    ICommunication communication = m_pstnCallConversation.Parent as ICommunication;
    bool transferFlowSuccess = false;
    List<Task> TasksWithAgent = new List<Task>();
    foreach (string to in m_inviteTargetUris)
    {
        // Continuations only log/record the outcome so one failed agent leg does not fault
        // the WhenAll below. transferFlowSuccess is a benign last-writer-wins bool.
        Task a = this.StartAgentCallAndTransferFlowAsync(communication, to, callContext).ContinueWith
        (
            pTask =>
            {
                if (pTask.IsFaulted)
                {
                    Logger.Instance.Warning("[CallCenterJob] Transfer flow failed." + pTask.Exception);
                }
                else
                {
                    Logger.Instance.Information("[CallCenterJob] Transfer flow complete.");
                    transferFlowSuccess = true;
                }
            }
        );
        TasksWithAgent.Add(a);
    }
    await Task.WhenAll(TasksWithAgent.ToArray()).ConfigureAwait(false);

    // Reset the transfer gate so a future call can attempt a transfer again.
    m_outboundCallTransferLock = 0;
    if (transferFlowSuccess)
    {
        Logger.Instance.Information("TransferFlow success");
    }
    else
    {
        Logger.Instance.Error("TransferFlow Failed, see above trace for error info");
    }
}