private void EndTransferCall(IAsyncResult ar)
{
    // The call being transferred was passed through as the async state.
    AudioVideoCall transferredCall = ar.AsyncState as AudioVideoCall;
    try
    {
        // Complete the asynchronous transfer of the incoming call.
        transferredCall.EndTransfer(ar);
    }
    catch (OperationFailureException operationFailure)
    {
        // Raised when the far-end transfer does not complete successfully,
        // usually because the transferee failed to pick up.
        Console.WriteLine(operationFailure.ToString());
    }
    catch (RealTimeException realTimeException)
    {
        // Raised on a link-layer or transport failure
        // (for example, a dead link or a failure response).
        Console.WriteLine(realTimeException.ToString());
    }
    finally
    {
        // Signal the waiting thread that the transfer attempt finished.
        _waitForTransferComplete.Set();
    }
}
/// <summary>
/// Terminates the conversation's audio/video call if present; otherwise moves
/// straight to terminating the back-to-back call. Any unexpected exception is
/// surfaced to the pending terminate-conversation operation as a fault.
/// </summary>
private void TerminateAudioVideoCall()
{
    // Flag pattern: stays true unless the try body runs to completion, so the
    // finally block can report any exception without catching/rethrowing it.
    bool exceptionEncountered = true;
    try
    {
        AudioVideoCall avCall = null;
        if (m_webConversation.WebAvCall != null)
        {
            avCall = m_webConversation.WebAvCall.AvCall;
        }
        if (avCall != null)
        {
            avCall.BeginTerminate(this.AudioVideoCallTerminated, avCall);
        }
        else
        {
            //Go to next step of terminating b2b call.
            this.TerminateBackToBackCall();
        }
        exceptionEncountered = false;
    }
    finally
    {
        if (exceptionEncountered)
        {
            // Fail the pending operation with a generic server fault.
            OperationFault operationFault = FaultHelper.CreateServerOperationFault(FailureStrings.GenericFailures.UnexpectedException, null /*innerException*/);
            this.CompleteTerminateConversationOperationWithException(new FaultException <OperationFault>(operationFault));
        }
    }
}
private void OnIncomingAudioVideoCallReceived(object sender, CallReceivedEventArgs<AudioVideoCall> e)
{
    _avCall = e.Call;
    try
    {
        // Begin accepting the incoming call; the lambda completes the
        // operation and logs the outcome.
        _avCall.BeginAccept(
            result =>
            {
                try
                {
                    _avCall.EndAccept(result);
                    _logger.Log("Accepted incoming call.");
                }
                catch (RealTimeException rtex)
                {
                    _logger.Log("Failed accepting incoming A/V call.", rtex);
                }
            },
            null);
    }
    catch (InvalidOperationException ioex)
    {
        // BeginAccept throws if the call is no longer in a state to be accepted.
        _logger.Log("Failed accepting incoming A/V call.", ioex);
    }
}
/// <summary>
/// Initialize Parameters for the activity.
/// </summary>
/// <param name="parameters">Named activity parameters; reads the optional "Call" entry.</param>
public override void InitializeParameters(Dictionary <string, object> parameters)
{
    // Single dictionary lookup (TryGetValue) instead of ContainsKey + indexer,
    // and a null guard so a missing dictionary cannot throw.
    object callValue;
    if (parameters != null && parameters.TryGetValue("Call", out callValue))
    {
        this.AudioVideoCall = callValue as AudioVideoCall;
    }
}
/// <summary>
/// Establishes the service channel call into the conference as part of the
/// given async task; the task's final step completes the establish operation.
/// </summary>
private void EstablishServiceChannel(AsyncTask task, object state)
{
    task.DoOneStep(
        delegate()
        {
            m_serviceChannelCall = new AudioVideoCall(m_serviceHub.Conversation);
            var options = new AudioVideoCallEstablishOptions();

            // We need to use generated user identity for the call as this is hidden participant
            // of the conference for service purpose.
            options.UseGeneratedIdentityForTrustedConference = !this.IsPrimaryServiceChannel;
            if (!this.IsPrimaryServiceChannel)
            {
                this.RegisterServiceCallHandlers();

                // Service call does not need to be in default mix of the conference. The purpose is to service a specific target user in the conference.
                options.AudioVideoMcuDialInOptions.RemoveFromDefaultRouting = true;
            }
            m_serviceChannelCall.BeginEstablish(
                options,
                delegate(IAsyncResult ar)
                {
                    // Completion runs as the task's final step so the task
                    // observes any exception from EndEstablish.
                    task.DoFinalStep(
                        delegate()
                        {
                            m_serviceChannelCall.EndEstablish(ar);
                        });
                },
                null);
        });
}
/// <summary>
/// Checks if call is put on hold, or call is resumed from hold, or call is disconnected.
/// </summary>
/// <param name="call">AudioVideo call instance.</param>
/// <returns>The communication event implied by the call's current audio channel direction.</returns>
private AvCallCommunicationEvents GetCallConfiguration(AudioVideoCall call)
{
    Microsoft.Rtc.Collaboration.AudioVideo.AudioChannel audioChannel = null;
    AudioVideoFlow avFlow = call.Flow;
    if (!avFlow.Audio.GetChannels().TryGetValue(ChannelLabel.AudioMono, out audioChannel))
    {
        // if we were not able to retrieve the current audio channel it is
        // because the call has already been disconnected
        return(AvCallCommunicationEvents.Disconnected);
    }
    MediaChannelDirection direction = audioChannel.Direction;

    // Media flowing toward us (SendReceive / ReceiveOnly) means the call is active again.
    if (direction == MediaChannelDirection.SendReceive || direction == MediaChannelDirection.ReceiveOnly)
    {
        return(AvCallCommunicationEvents.Retrieved);
    }
    // Inactive or SendOnly means the far end has put the call on hold.
    else if (direction == MediaChannelDirection.Inactive || direction == MediaChannelDirection.SendOnly)
    {
        return(AvCallCommunicationEvents.OnHold);
    }
    return(AvCallCommunicationEvents.None);
}
// Callback for BeginTerminate on a call.
private void TerminateCallCB(IAsyncResult ar)
{
    // The call was handed through as the async state; complete its termination.
    var call = ar.AsyncState as AudioVideoCall;
    call.EndTerminate(ar);
}
// The delegate to be called when the inbound call arrives (the call from a customer).
private void inboundAVCall_CallReceived(object sender, CallReceivedEventArgs <AudioVideoCall> e)
{
    _inboundAVCall = e.Call;

    // Register for notification of the StateChanged event on the incoming call.
    _inboundAVCall.StateChanged += new EventHandler <CallStateChangedEventArgs>(inboundAVCall_StateChanged);

    // Create a new conversation for the incoming call leg.
    _inboundConversation = new Conversation(_userEndpoint);
    _inboundCallLeg = new BackToBackCallSettings(_inboundAVCall);

    // Create the back-to-back call instance.
    // Note that you need a Destination URI for the outgoing call leg, but not for the incoming call leg.
    _b2bCall = new BackToBackCall(_inboundCallLeg, _outboundCallLeg);

    // Begin the back-to-back session; BeginEstablishCB completes the operation.
    // (Previously the IAsyncResult was stored in an unused local; removed, along
    // with dead commented-out code.)
    try
    {
        _b2bCall.BeginEstablish(BeginEstablishCB, _b2bCall);
    }
    catch (InvalidOperationException ioe)
    {
        Console.WriteLine("_b2bCall must be in the Idle state." + ioe.Message.ToString());
    }

    // Block this handler until the back-to-back call is fully established.
    _waitForB2BCallToEstablish.WaitOne();
}
private void EstablishAvCallAndAudioRouteForNewAttendee(ParticipantEndpoint newAttendeeParticipantEndpoint)
{
    // Dial a trusted call toward the new attendee on the shared conversation.
    var attendeeCall = new AudioVideoCall(_trustedParticipantConversation);

    // Stash the attendee's endpoint on the call so the establish-completed
    // callback can retrieve it.
    attendeeCall.ApplicationContext = newAttendeeParticipantEndpoint;

    // Custom audio routes are applied after the call is established, so keep
    // this call out of the MCU's default mix.
    var establishOptions = new AudioVideoCallEstablishOptions();
    establishOptions.AudioVideoMcuDialInOptions.RemoveFromDefaultRouting = true;

    // Attach the tone handler once the flow configuration is requested.
    attendeeCall.AudioVideoFlowConfigurationRequested +=
        new EventHandler<AudioVideoFlowConfigurationRequestedEventArgs>(NewAttendeeCall_AudioVideoFlowConfigurationRequested);

    attendeeCall.BeginEstablish(establishOptions, NewAttendeeCallEstablishCompleted, attendeeCall);

    // Track the call so it can be retrieved later by the attendee's URI.
    _trustedParticipantCalls.Add(newAttendeeParticipantEndpoint.Uri, attendeeCall);
}
// Bridges the MCU session's APM BeginTransfer/EndTransfer pair into an awaitable Task.
public static Task TransferAsync(this AudioVideoMcuSession mcuSession, AudioVideoCall call, McuTransferOptions options)
{
    return Task.Factory.FromAsync<AudioVideoCall, McuTransferOptions>(
        mcuSession.BeginTransfer,
        mcuSession.EndTransfer,
        call,
        options,
        null);
}
/// <summary>
/// Sets up the B2B sample: establishes the endpoint, registers for incoming
/// A/V calls, prepares the outbound (agent) conversation and call leg, then
/// blocks until one side hangs up.
/// </summary>
public void Run()
{
    // Initialize and register the endpoint, using the credentials of the user the application will be acting as.
    _helper = new UCMASampleHelper();
    _userEndpoint = _helper.CreateEstablishedUserEndpoint("B2BCall Sample User" /*endpointFriendlyName*/);

    // Route incoming A/V calls (the customer leg) to inboundAVCall_CallReceived.
    _userEndpoint.RegisterForIncomingCall <AudioVideoCall>(inboundAVCall_CallReceived);

    // Conversation settings for the outbound call (to the agent).
    ConversationSettings outConvSettings = new ConversationSettings();
    outConvSettings.Priority = _conversationPriority;
    outConvSettings.Subject = _outConversationSubject;

    // Create the Conversation instance between UCMA and the agent.
    _outboundConversation = new Conversation(_userEndpoint, outConvSettings);

    // Create the outbound call between UCMA and the agent.
    _outboundAVCall = new AudioVideoCall(_outboundConversation);

    // Register for notification of the StateChanged event on the outbound call.
    _outboundAVCall.StateChanged += new EventHandler <CallStateChangedEventArgs>(outboundAVCall_StateChanged);

    // Prompt for called party - the agent.
    _calledParty = UCMASampleHelper.PromptUser("Enter the URI of the called party, in sip:User@Host form or tel:+1XXXYYYZZZZ form => ", "RemoteUserURI1");
    _outboundCallLeg = new BackToBackCallSettings(_outboundAVCall, _calledParty);

    // Pause the main thread until both calls, the BackToBackCall, both conversations,
    // and the platform are shut down.
    _waitUntilOneUserHangsUp.WaitOne();

    // Pause the console to allow for easier viewing of logs.
    Console.WriteLine("Press any key to end the sample.");
    Console.ReadKey();
}
// Attaches the music-on-hold player to the channel's A/V call flow,
// completing this operation with success or a descriptive failure.
internal void Process()
{
    // The channel must carry an A/V call for music on hold to apply.
    var avCall = _channel.Call as AudioVideoCall;
    if (avCall == null)
    {
        this.SetAsCompleted(new OperationFailureException("AcdMusicOnHoldServer cannot establish a MoH channel because the call is not of the correct type"), false);
        return;
    }

    // The media flow must exist before a player can be attached.
    AudioVideoFlow mediaFlow = avCall.Flow;
    if (mediaFlow == null)
    {
        this.SetAsCompleted(new OperationFailureException("AcdMusicOnHoldServer cannot establish a MoH channel because the channel is not established"), false);
        return;
    }

    try
    {
        _mohServer._mohPlayer.AttachFlow(mediaFlow);
        this.SetAsCompleted(null, false);
    }
    catch (InvalidOperationException ivoex)
    {
        this.SetAsCompleted(new OperationFailureException("AcdMusicOnHoldServer failed attaching the player to the flow", ivoex), false);
    }
    catch (OperationFailureException ofex)
    {
        this.SetAsCompleted(ofex, false);
    }
}
// Handles a recognized voice command and starts a self-transfer of the call.
void _speechRecognitionEngine_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
{
    Log("_speechRecognitionEngine_SpeechRecognized " +
        "Confidence=" + e.Result.Confidence + " " +
        "Text=" + e.Result.Text);

    // Map the recognized command onto the transfer direction.
    switch (e.Result.Text)
    {
        case "next":
            _userCallTransferPath = UserCallTransferPath.Next;
            break;
        case "previous":
            _userCallTransferPath = UserCallTransferPath.Previous;
            break;
    }

    // Performing a self-transfer
    AudioVideoCall avCall = (AudioVideoCall)_b2bCall.Call1;
    avCall.BeginTransfer(
        avCall,
        result =>
        {
            try
            {
                avCall.EndTransfer(result);
            }
            catch (Exception ex)
            {
                Log(ex.ToString());
            }
        },
        null);
}
// Bridges the call's APM BeginPark/EndPark pair into an awaitable Task.
public static Task <CallParkResponseData> ParkAsync(this AudioVideoCall call, CallParkOptions options)
{
    return Task <CallParkResponseData> .Factory.FromAsync<CallParkOptions>(
        call.BeginPark,
        call.EndPark,
        options,
        null);
}
// Bridges the call's APM early-media establish pair into an awaitable Task.
public static Task EstablishEarlyMediaAsync(this AudioVideoCall call, int responseCode, CallProvisionalResponseOptions options)
{
    return Task.Factory.FromAsync<int, CallProvisionalResponseOptions>(
        call.BeginEstablishEarlyMedia,
        call.EndEstablishEarlyMedia,
        responseCode,
        options,
        null);
}
void On_AudioVideoCall_Received(object sender, CallReceivedEventArgs <AudioVideoCall> e)
{
    // The platform already verified the call type for us.
    AudioVideoCall incomingCall = e.Call;

    // Hook StateChanged purely for logging; only bound on the incoming side
    // so the transitions are not printed twice.
    incomingCall.StateChanged += this.Call_StateChanged;

    // Announce the caller, the caller's toast ('greet') message — which Lync
    // shows in the lower-right of the screen — and the conversation ID.
    Console.WriteLine("");
    Console.WriteLine(" Audio Video Call Received! From: " + e.RemoteParticipant.Uri);
    Console.WriteLine(" Toast is: " + e.ToastMessage.Message);
    Console.WriteLine(" Conversation ID is: " + e.Call.Conversation.Id);
    Console.WriteLine("");

    try
    {
        // Accept the call. AcceptCallCompleted is raised on this same thread,
        // so blocking here would wait forever.
        incomingCall.BeginAccept(AcceptCallCompleted, incomingCall);
    }
    catch (InvalidOperationException exception)
    {
        // The call was disconnected before it could be accepted.
        Console.WriteLine(exception.ToString());
    }
}
/// <summary>
/// Repeatedly runs the wrapped action until it succeeds or cancellation is
/// requested, throttled to at most one attempt per second.
/// </summary>
public override async Task<AcdActionResult> Execute(LocalEndpoint localEndpoint, AudioVideoCall call, CancellationToken cancellationToken)
{
    while (!cancellationToken.IsCancellationRequested)
    {
        try
        {
            // record start time
            var t = DateTime.UtcNow;

            // execute delegate
            if (await base.Execute(localEndpoint, call, cancellationToken))
                return true;

            // do not execute more than once a second
            // NOTE(review): this waits a full extra second rather than topping
            // the attempt up to one second total — confirm that pacing is intended.
            if (DateTime.UtcNow - t < TimeSpan.FromSeconds(1))
                await Task.Delay(1000, cancellationToken);
        }
        catch (OperationCanceledException)
        {
            // ignore — the loop condition re-checks the token and exits
        }
    }

    return false;
}
/// <summary>
/// Completion callback for BeginEstablish: finishes the establish operation,
/// records an establishment message in the transcript, then marks the
/// recorder active and releases any waiter.
/// </summary>
void AudioVideoCall_EstablishCompleted(IAsyncResult result)
{
    try
    {
        // The call was passed through BeginEstablish as the async state.
        AudioVideoCall avCall = result.AsyncState as AudioVideoCall;
        avCall.EndEstablish(result);

        // NOTE(review): the message below reads the _audioVideoCall field rather
        // than the avCall local — confirm they always refer to the same call.
        Message m = new Message("AudioVideoCall Established. Call state: " + _audioVideoCall.State.ToString() + ". CallId: " + _audioVideoCall.CallId + ".",
            _audioVideoCall.RemoteEndpoint.Participant.DisplayName, _audioVideoCall.RemoteEndpoint.Participant.UserAtHost,
            _audioVideoCall.RemoteEndpoint.Participant.Uri, MessageType.Audio,
            _transcriptRecorder.Conversation.Id, MessageDirection.Incoming);
        _transcriptRecorder.OnMessageReceived(m);
        _transcriptRecorder.OnRemoteParticipantAdded(null, avCall.RemoteEndpoint);
    }
    catch (RealTimeException ex)
    {
        NonBlockingConsole.WriteLine("Error: avCall.EndEstablish failed. Exception: {0}", ex.ToString());
        // TODO: Error message
    }
    finally
    {
        // Always transition to Active and release the waiting thread,
        // even if establishment failed.
        _state = TranscriptRecorderState.Active;
        _waitForAudioVideoCallEstablished.Set();
    }
}
/// <summary>
/// Accepts the incoming A/V call, subscribes to flow state changes, and greets
/// the caller by name and SIP URI once the call is accepted.
/// </summary>
/// <param name="call">The incoming audio/video call to accept.</param>
public void AcceptAVCall(AudioVideoCall call)
{
    try
    {
        call.BeginAccept(
            ar =>
            {
                try
                {
                    // NOTE(review): call.Flow is assumed to be non-null by the time
                    // this accept callback runs — confirm against the flow
                    // configuration event; otherwise this line can throw.
                    call.Flow.StateChanged += new EventHandler<MediaFlowStateChangedEventArgs>(Flow_StateChanged);
                    call.EndAccept(ar);
                    SpeakMessage(call.Flow,
                                 string.Format("Hello, {0}. Thanks for calling. " +
                                               "Your SIP URI is {1}",
                                               call.RemoteEndpoint.Participant.DisplayName,
                                               call.RemoteEndpoint.Participant.Uri));
                }
                catch (RealTimeException ex)
                {
                    // Fix: log message previously read "Failed tp accept call."
                    Console.WriteLine("Failed to accept call.", ex);
                }
            },
            null);
    }
    catch (InvalidOperationException ex)
    {
        // Fix: log message previously read "Failed tp accept call."
        Console.WriteLine("Failed to accept call.", ex);
    }
}
/// <summary>
/// Rings all child actions in parallel; the first to succeed cancels the
/// rest. Returns whether any action completed the call.
/// </summary>
/// <param name="localEndpoint">Endpoint the actions run against.</param>
/// <param name="call">The caller's A/V call.</param>
/// <param name="cancellationToken">Cancels all child actions.</param>
public override async Task<AcdActionResult> Execute(LocalEndpoint localEndpoint, AudioVideoCall call, CancellationToken cancellationToken)
{
    // Fix: dispose both cancellation sources (previously leaked). Disposal is
    // safe here because all child tasks are awaited before the using exits.
    using (var winnerCancellation = new CancellationTokenSource())
    using (var linked = CancellationTokenSource.CreateLinkedTokenSource(winnerCancellation.Token, cancellationToken))
    {
        var ct = linked.Token;

        // initiate agent calls
        var ops = this
            .Select(async i =>
            {
                try
                {
                    // we were canceled
                    if (ct.IsCancellationRequested)
                        return AcdActionResult.Continue;

                    // initiate delegate, cancel the rest on success
                    var result = await i.Execute(localEndpoint, call, ct);
                    if (result)
                        winnerCancellation.Cancel();

                    return result;
                }
                catch (OperationCanceledException)
                {
                    // ignore
                }

                return AcdActionResult.Continue;
            })
            .ToArray();

        // did any complete the call
        return (await Task.WhenAll(ops))
            .Any(i => i);
    }
}
/// <summary>
/// Initializes a dial-up dialog bound to the given call and configuration,
/// with empty speech and DTMF grammar lists ready to be populated.
/// </summary>
/// <param name="avCall">The audio/video call the dialog runs on.</param>
/// <param name="configuration">Dial-up settings for the dialog.</param>
public DialupDialog(AudioVideoCall avCall, DialupConfiguration configuration)
{
    this.AudioVideoCall = avCall;
    this.Configuration = configuration;
    this.speechGrammar = new List<Grammar>();
    this.dtmfGrammar = new List<Grammar>();
}
/// <summary>
/// Handles an incoming dial-out A/V call from the AVMCU. If no call anchor is
/// waiting for the call, it is declined on a best-effort basis.
/// </summary>
/// <param name="avCall">The incoming audio/video call.</param>
public void HandleIncomingDialOutCall(AudioVideoCall avCall)
{
    lock (syncRoot)
    {
        if (this.callAnchor != null)
        {
            this.callAnchor.ProcessIncomingDialOutCall(avCall);
        }
        else
        {
            try
            {
                // Fix: message previously misspelled "estalblishment".
                Console.WriteLine("No pending establishment process. Declining call");
                this.logger.Log("No pending establishment process. Declining call");
                avCall.Decline();
            }
            catch (RealTimeException rte)
            {
                Console.WriteLine("Decline failed with {0}", rte);
                this.logger.Log("Decline failed with {0}", rte);
            }
            catch (InvalidOperationException ioe)
            {
                // The call may have terminated before we could decline it.
                Console.WriteLine("Decline failed with {0}", ioe);
                this.logger.Log("Decline failed with {0}", ioe);
            }
        }
    }
}
// Runs the wrapped action only when inside the configured schedule;
// otherwise signals the dispatcher to continue past this action.
public override Task<AcdActionResult> Execute(LocalEndpoint localEndpoint, AudioVideoCall call, CancellationToken cancellationToken)
{
    var withinSchedule = Schedule == null || Schedule.InSchedule(DateTime.Now);
    return withinSchedule
        ? base.Execute(localEndpoint, call, cancellationToken)
        : AcdActionResult.ContinueTask;
}
private void OnIncomingAudioVideoCallReceived(object sender, CallReceivedEventArgs<AudioVideoCall> e)
{
    _avCall = e.Call;

    // Track call state transitions for the lifetime of the call.
    _avCall.StateChanged += new EventHandler<CallStateChangedEventArgs>(OnCallStateChanged);
    try
    {
        // Accept the call; once accepted, kick off the attended transfer.
        _avCall.BeginAccept(
            result =>
            {
                try
                {
                    _avCall.EndAccept(result);
                    _logger.Log("Accepted incoming call.");
                    PerformAttendedTransfer();
                }
                catch (RealTimeException rtex)
                {
                    _logger.Log("Failed accepting incoming A/V call.", rtex);
                }
            },
            null);
    }
    catch (InvalidOperationException ioex)
    {
        _logger.Log("Failed accepting incoming A/V call.", ioex);
    }
}
/// <summary>
/// Routes the call to this contact's endpoint, unless the contact is already
/// busy or (optionally) not presence-available. Falls through with Continue
/// on any gate failure or cancellation.
/// </summary>
public override async Task<AcdActionResult> Execute(LocalEndpoint localEndpoint, AudioVideoCall call, CancellationToken cancellationToken)
{
    // No endpoint configured — nothing to ring.
    if (Endpoint == null)
        return AcdActionResult.Continue;

    // only allow one call through to a contact at a time
    // NOTE(review): this check-then-set on Busy is not atomic; two concurrent
    // executions could both pass the gate — confirm callers serialize access
    // or accept the race.
    if (Busy)
        return AcdActionResult.Continue;

    try
    {
        Busy = true;

        // Skip the presence check when configured to ignore it.
        var available = IgnorePresence || await GetAvailableAsync(localEndpoint, cancellationToken);
        if (available)
            return await base.Execute(localEndpoint, call, cancellationToken);
    }
    catch (OperationCanceledException)
    {
        // ignore
    }
    finally
    {
        // Always release the busy gate, even on exception or cancellation.
        Busy = false;
    }

    return AcdActionResult.Continue;
}
// Stores the incoming call and accepts it asynchronously, logging the outcome.
private void AcceptCall(AudioVideoCall call)
{
    _avCall = call;
    try
    {
        _avCall.BeginAccept(
            result =>
            {
                try
                {
                    _avCall.EndAccept(result);
                    _logger.Log("Accepted incoming call.");
                }
                catch (RealTimeException rtex)
                {
                    _logger.Log("Failed accepting incoming A/V call.", rtex);
                }
            },
            null);
    }
    catch (InvalidOperationException ioex)
    {
        // The call was no longer in a state to be accepted.
        _logger.Log("Failed accepting incoming A/V call.", ioex);
    }
}
// Bridges the incoming call into the given location's conversation via a
// back-to-back call, then establishes the control call on success.
public void JoinLocation(AudioVideoCall incomingCall, Location location)
{
    _location = location;

    // Leg 1: the caller's incoming call.
    var callerLeg = new BackToBackCallSettings(incomingCall);

    // Leg 2: a fresh trusted call into the location's conversation.
    var locationLeg = new BackToBackCallSettings(new AudioVideoCall(location.Conversation));
    locationLeg.CallEstablishOptions = new AudioVideoCallEstablishOptions()
    {
        UseGeneratedIdentityForTrustedConference = true,
        SupportsReplaces = CapabilitySupport.Supported
    };

    // Bridge the two legs and establish the back-to-back call.
    _b2bCall = new BackToBackCall(callerLeg, locationLeg);
    _b2bCall.StateChanged += new EventHandler<BackToBackCallStateChangedEventArgs>(_b2bCall_StateChanged);
    _b2bCall.BeginEstablish(
        result =>
        {
            try
            {
                _b2bCall.EndEstablish(result);
                EstablishControlCall();
            }
            catch (RealTimeException ex)
            {
                Log(ex.ToString());
            }
        },
        null);
}
// Establishes an outbound A/V call to the configured destination SIP URI.
private void EstablishCall()
{
    // Create a new Conversation.
    Conversation conversation = new Conversation(_appEndpoint);

    // Create a new A/V call (original comments said "IM call" — this is audio/video).
    _avCall = new AudioVideoCall(conversation);
    try
    {
        // Establish the A/V call to the destination.
        _avCall.BeginEstablish(_destinationSipUri, new CallEstablishOptions(),
            result =>
            {
                try
                {
                    // Finish the asynchronous operation.
                    _avCall.EndEstablish(result);
                }
                catch (RealTimeException ex)
                {
                    // Catch and log exceptions.
                    _logger.Log("Failed establishing A/V call", ex);
                }
            },
            null
        );
    }
    catch (InvalidOperationException ioex)
    {
        // The call was not in a valid state to begin establishing.
        _logger.Log("Failed establishing A/V call", ioex);
    }
}
/// <summary>
/// Creates a new web A/V call wrapper over the given call and web conversation.
/// </summary>
/// <param name="avCall">Av call.</param>
/// <param name="conversation">Web conversation.</param>
internal WebAvCall(AudioVideoCall avCall, WebConversation conversation)
{
    Debug.Assert(conversation != null, "conversation is null");
    Debug.Assert(avCall != null, "av call is null");
    this.WebConversation = conversation;
    this.m_avCall = avCall;
}
// Joins the incoming call to the location's conversation by bridging the two
// legs in a back-to-back call; on success the control call is established.
public void JoinLocation(AudioVideoCall incomingCall, Location location)
{
    _location = location;

    // First leg: the incoming caller.
    BackToBackCallSettings inboundLeg = new BackToBackCallSettings(incomingCall);

    // Second leg: a new trusted call into the target location's conversation.
    BackToBackCallSettings outboundLeg = new BackToBackCallSettings(new AudioVideoCall(location.Conversation))
    {
        CallEstablishOptions = new AudioVideoCallEstablishOptions()
        {
            UseGeneratedIdentityForTrustedConference = true,
            SupportsReplaces = CapabilitySupport.Supported
        }
    };

    // Create and establish the back to back call.
    _b2bCall = new BackToBackCall(inboundLeg, outboundLeg);
    _b2bCall.StateChanged += new EventHandler<BackToBackCallStateChangedEventArgs>(_b2bCall_StateChanged);
    _b2bCall.BeginEstablish(
        ar =>
        {
            try
            {
                _b2bCall.EndEstablish(ar);
                EstablishControlCall();
            }
            catch (RealTimeException ex)
            {
                Log(ex.ToString());
            }
        },
        null);
}
/// <summary>
/// Handle incoming audio call from the customer.
/// </summary>
/// <param name="audioCall">Audio call.</param>
public void HandleIncomingAudioCall(AudioVideoCall audioCall)
{
    lock (syncRoot)
    {
        // First call in: adopt its conversation and wire up conversation events.
        if (this.conversation == null)
        {
            this.conversation = audioCall.Conversation;
            this.RegisterConversationEventHandlers(this.conversation);
        }

        // Route the caller into the IVR.
        this.audioIvr = new AudioIVR(audioCall, this.application.XmlParser, this.logger);

        try
        {
            audioCall.BeginAccept(
                acceptResult =>
                {
                    try
                    {
                        audioCall.EndAccept(acceptResult);
                    }
                    catch (RealTimeException rte)
                    {
                        Console.WriteLine("Error accepting incoming AV call {0}", rte);
                        this.logger.Log("Error accepting incoming AV call {0}", rte);
                    }
                },
                null);
        }
        catch (InvalidOperationException ioe)
        {
            Console.WriteLine("Error accepting incoming AV call {0}", ioe);
            this.logger.Log("Error accepting incoming AV call {0}", ioe);
        }
    }
}
// Handles an incoming transfer request by accepting it and establishing the
// resulting call.
void AVCall_TransferReceived(object sender, AudioVideoCallTransferReceivedEventArgs e)
{
    // Accept the transfer; passing null means no specialized signaling
    // headers are added to the acceptance.
    AudioVideoCall transferredCall = e.Accept(null);
    transferredCall.BeginEstablish(CallEstablishCompleted, transferredCall);
}
// Bridges the call's APM BeginSendMessage/EndSendMessage pair into an
// awaitable Task that yields the message data.
public static Task <CallMessageData> SendMessageAsync(
    this AudioVideoCall call,
    MessageType messageType,
    ContentDescription contentDescription,
    CallSendMessageRequestOptions options)
{
    // Pin the Begin overload via a typed delegate before handing it to FromAsync.
    Func<MessageType, ContentDescription, CallSendMessageRequestOptions, AsyncCallback, object, IAsyncResult> begin = call.BeginSendMessage;
    return Task <CallMessageData> .Factory.FromAsync(begin, call.EndSendMessage, messageType, contentDescription, options, null);
}
// Debug stand-in for an agent: waits a random 3–30 seconds, then "answers"
// with roughly 20% probability unless canceled in the meantime.
public override async Task<AcdActionResult> Execute(LocalEndpoint localEndpoint, AudioVideoCall call, CancellationToken cancellationToken)
{
    Trace.TraceInformation("Debug: {0} start", Name);

    // Simulate the agent taking a while to respond.
    await Task.Delay(TimeSpan.FromSeconds(random.Next(3, 30)), cancellationToken);

    bool answered;
    if (cancellationToken.IsCancellationRequested)
        answered = false;
    else
        answered = random.Next(1, 100) <= 20;

    Trace.TraceInformation("Debug: {0} stop {1}", Name, answered);
    return answered;
}
/// <summary>
/// Initializes a callback dialog bound to the given call, with empty speech
/// and DTMF grammar lists ready to be populated.
/// </summary>
/// <param name="avCall">audio video call</param>
/// <param name="configuration">call back configuration</param>
/// <param name="contactInformation">information of contact</param>
/// <param name="getContactService">instance of GetContactService</param>
public CallbackDialog(AudioVideoCall avCall, SetupCallbackConfiguration configuration, ContactInformation contactInformation, Microsoft.Rtc.Collaboration.Samples.VoiceCompanion.VoiceServices.GetContactService getContactService)
{
    this.AudioVideoCall = avCall;
    this.Configuration = configuration;
    this.objGetContactService = getContactService;
    this.ContactInfo = contactInformation;
    this.speechGrammar = new List<Grammar>();
    this.dtmfGrammar = new List<Grammar>();
}
//call received event handler
// Accepts a newly received A/V call for transcript recording: tears down any
// previous call on the conversation, wires up state/flow/conversation events,
// records the arrival in the transcript, and begins accepting the call.
public void AudioVideoCall_Received(CallReceivedEventArgs <AudioVideoCall> e)
{
    // Ignore calls that arrive after this recorder has been shut down.
    if (_state == TranscriptRecorderState.Terminated)
    {
        NonBlockingConsole.WriteLine("Error: AVTranscriptRecorder is shutdown.");
        // TODO: Error message
        return;
    }

    // Only one A/V call per conversation; drop any previous one first.
    if (_audioVideoCall != null)
    {
        NonBlockingConsole.WriteLine("Warn: AVCall already exists for this Conversation. Shutting down previous call...");
        // TODO: Info message
        TerminateCall();
    }

    _state = TranscriptRecorderState.Initialized;
    _waitForAudioVideoCallTerminated.Reset();

    //Type checking was done by the platform; no risk of this being any
    // type other than the type expected.
    _audioVideoCall = e.Call;

    // Call: StateChanged: Only hooked up for logging, to show the call
    // state transitions.
    _audioVideoCall.StateChanged += new EventHandler <CallStateChangedEventArgs>(AudioVideoCall_StateChanged);

    // Subscribe for the flow configuration requested event; the flow will be used to send the media.
    // Ultimately, as a part of the callback, the media will be sent/recieved.
    _audioVideoCall.AudioVideoFlowConfigurationRequested += new EventHandler <AudioVideoFlowConfigurationRequestedEventArgs>(AudioVideoCall_FlowConfigurationRequested);
    _audioVideoCall.ConversationChanged += new EventHandler <ConversationChangedEventArgs>(AudioVideoCall_ConversationChanged);

    // Remote Participant URI represents the far end (caller) in this
    // conversation. Toast is the message set by the caller as the 'greet'
    // message for this call. In Microsoft Lync, the toast will
    // show up in the lower-right of the screen.
    // TODO: change this to preserve confidentiality in the video demo
    //NonBlockingConsole.WriteLine("Call Received! From: " + e.RemoteParticipant.Uri + " Toast is: " + e.ToastMessage.Message);
    NonBlockingConsole.WriteLine("Call Received! From: " + e.RemoteParticipant.Uri);
    //NonBlockingConsole.WriteLine("Call Received!");

    // Record the arrival of the call in the transcript.
    Message m = new Message("AudioVideoCall Received. 
Inbound call state: " + _audioVideoCall.State.ToString(),
        e.RemoteParticipant.DisplayName, e.RemoteParticipant.UserAtHost,
        e.RemoteParticipant.Uri, MessageType.Audio,
        _transcriptRecorder.Conversation.Id, MessageDirection.Incoming);
    _transcriptRecorder.OnMessageReceived(m);

    // Accept the call. Before transferring the call, it must be in the Established state.
    // Note that the docs are wrong in the state machine for the AVCall. BeginEstablish
    // should be called on outgoing calls, not incoming calls.
    _audioVideoCall.BeginAccept(AudioVideoCallAccepted, _audioVideoCall);

    // Wait for a few seconds to give time for the call to get to the Established state.
    //_waitForAudioVideoCallAccepted.WaitOne(2000);
    NonBlockingConsole.WriteLine("Inbound call state is {0}\n", _audioVideoCall.State.ToString());
}
/// <summary>
/// Gets the action to be used to execute the specified call.
/// </summary>
/// <param name="call">The call being dispatched (unused by the default action).</param>
/// <returns>A parallel action that rings all configured contacts at once.</returns>
public virtual AcdAction GetAction(AudioVideoCall call)
{
    // Ring all three contacts in parallel; the first to answer wins.
    var parallel = new AcdParallel();
    parallel.Add(new AcdContactTransfer(new AcdContact("sip:[email protected]")));
    parallel.Add(new AcdContactTransfer(new AcdContact("sip:[email protected]")));
    parallel.Add(new AcdContactTransfer(new AcdContact("tel:+112146410503")));
    return parallel;
}
// Bridges the MCU session's APM BeginTransfer/EndTransfer pair into an
// awaitable Task.
public static Task TransferAsync(this AudioVideoMcuSession self, AudioVideoCall call, McuTransferOptions mcuTransferOptions)
{
    // Pin the Begin overload via a typed delegate before handing it to FromAsync.
    Func<AudioVideoCall, McuTransferOptions, AsyncCallback, object, IAsyncResult> begin = self.BeginTransfer;
    return Task.Factory.FromAsync(begin, self.EndTransfer, call, mcuTransferOptions, null);
}
// Detaches the music-on-hold player from the call, if the call has media.
public void StopMusic(AudioVideoCall audioVideoCall)
{
    AudioVideoFlow flow = audioVideoCall.Flow;
    if (flow != null)
    {
        m_mohPlayer.DetachFlow(flow);
    }
}
// Callback for BeginTerminate on the incoming call.
private void EndTerminateCall(IAsyncResult ar)
{
    AudioVideoCall audioVideoCall = ar.AsyncState as AudioVideoCall;
    try
    {
        // End terminating the incoming call.
        audioVideoCall.EndTerminate(ar);
    }
    finally
    {
        // Fix: always release the waiter (as the transfer callback does with
        // its finally); otherwise an exception from EndTerminate would leave
        // the main thread blocked on this event forever.
        _waitForCallToTerminate.Set();
    }
}
// Declines an unsolicited MCU dial-out call; this component does not
// service such calls.
internal void HandleIncomingMcuDialOut(AudioVideoCall incomingCall)
{
    try
    {
        incomingCall.Decline();
    }
    catch (InvalidOperationException)
    {
        // Best-effort decline: the call may already have been terminated or
        // answered elsewhere, in which case Decline throws and we ignore it.
    }
}
// Callback referenced in the BeginTerminate method on the call.
private void CallTerminateCB(IAsyncResult ar)
{
    AudioVideoCall AVCall = ar.AsyncState as AudioVideoCall;

    // Complete the termination of the incoming call.
    AVCall.EndTerminate(ar);

    // Terminate the conversation. (Fix: the IAsyncResult was previously stored
    // in an unused local; removed.)
    // NOTE(review): this reads the _audioVideoCall field rather than the call
    // passed via AsyncState — confirm they always refer to the same call.
    _audioVideoCall.Conversation.BeginTerminate(ConversationTerminateCB, _audioVideoCall.Conversation);
    Console.WriteLine("Waiting for the conversation to be terminated...");
}
/// <summary>
/// Runs the AVCall sample end to end: establishes the endpoint, places an
/// outbound A/V call, waits for it to establish, then terminates the call,
/// the conversation, and finally the platform.
/// </summary>
public void Run()
{
    //Initialize and register the endpoint, using the credentials of the user the application will be acting as.
    _helper = new UCMASampleHelper();
    _userEndpoint = _helper.CreateEstablishedUserEndpoint("AVCall Sample User" /*endpointFriendlyName*/);

    //Set up the conversation and place the call.
    ConversationSettings convSettings = new ConversationSettings();
    convSettings.Priority = _conversationPriority;
    convSettings.Subject = _conversationSubject;

    //Conversation represents a collection of modalities in the context of a dialog with one or multiple callees.
    Conversation conversation = new Conversation(_userEndpoint, convSettings);
    _audioVideoCall = new AudioVideoCall(conversation);

    //Call: StateChanged: Only hooked up for logging.
    _audioVideoCall.StateChanged += new EventHandler <CallStateChangedEventArgs>(audioVideoCall_StateChanged);

    //Subscribe for the flow configuration requested event; the flow will be used to send the media.
    //Ultimately, as a part of the callback, the media will be sent/received.
    _audioVideoCall.AudioVideoFlowConfigurationRequested += this.audioVideoCall_FlowConfigurationRequested;

    // Prompt for called party
    _calledParty = UCMASampleHelper.PromptUser("Enter the URI for the user logged onto Microsoft Lync, in the sip:User@Host format or tel:+1XXXYYYZZZZ format => ", "RemoteUserURI");

    //Place the call to the remote party.
    _audioVideoCall.BeginEstablish(_calledParty, null, EndCallEstablish, _audioVideoCall);

    //Sync; wait for the call to complete.
    Console.WriteLine("Calling the remote user...");
    _waitForCallToEstablish.WaitOne();

    // Terminate the call, and then the conversation.
    // Terminating these additional objects individually is made redundant by shutting down the platform right after, but in the multiple call case,
    // this is needed for object hygene. Terminating a Conversation terminates all it's associated calls, and terminating an endpoint will terminate
    // all conversations on that endpoint.
    _audioVideoCall.BeginTerminate(EndTerminateCall, _audioVideoCall);
    Console.WriteLine("Waiting for the call to get terminated...");
    _waitForCallToTerminate.WaitOne();
    _audioVideoCall.Conversation.BeginTerminate(EndTerminateConversation, _audioVideoCall.Conversation);
    Console.WriteLine("Waiting for the conversation to get terminated...");
    _waitForConversationToTerminate.WaitOne();

    //Now, cleanup by shutting down the platform.
    Console.WriteLine("Shutting down the platform...");
    _helper.ShutdownPlatform();

    // Pause the console to allow for easier viewing of logs.
    Console.WriteLine("Please hit any key to end the sample.");
    Console.ReadKey();
}
/// <summary>
/// Disconnects the call from the existing BackToBack if present.
/// </summary>
/// <returns></returns>
async Task DisconnectB2B()
{
    if (b2bCall == null)
        return;

    // wait for termination of the b2b leg, then drop our references
    await b2bCall.TerminateAsync();
    b2bCall = null;
    b2b = null;
}
/// <summary>
/// Waits for the configured delay, then puts the call on hold.
/// Cancellation quietly abandons the pending hold.
/// </summary>
/// <param name="hold">Hold context used to place the call on hold.</param>
/// <param name="call">The call to hold.</param>
/// <param name="cancellationToken">Cancels the pending hold.</param>
async Task Hold(AcdHoldContext hold, AudioVideoCall call, CancellationToken cancellationToken)
{
    try
    {
        await Task.Delay(Delay, cancellationToken);
        await hold.HoldAsync(call);
    }
    catch (OperationCanceledException)
    {
        // canceled before the hold fired — nothing to do
    }
}
// Pulls the caller into the conference via the conference's A/V MCU session.
public async Task AddCallerToConference(AudioVideoCall call)
{
    try
    {
        var mcuSession = _confConversation.ConferenceSession.AudioVideoMcuSession;
        await mcuSession.TransferAsync(call, null);
    }
    catch (Exception ex)
    {
        // Best-effort: log and continue if the transfer fails.
        Console.WriteLine(ex.ToString());
    }
}
/// <summary>
/// Initializes a new instance.
/// </summary>
/// <param name="application"></param>
/// <param name="call"></param>
public QueueCall(QueueApplication application, AudioVideoCall call)
{
    this.id = Guid.NewGuid();
    this.application = application;
    this.call = call;

    // Build a unique SIP identity for this queued call.
    this.uri = "sip:" + this.id.ToString("N") + "@" + call.Conversation.Endpoint.DefaultDomain;

    // Prefer the caller's primary phone URI, falling back to the alternate.
    var participant = call.RemoteEndpoint.Participant;
    this.phoneUri = participant.PhoneUri.TrimToNull() ?? participant.OtherPhoneUri.TrimToNull();
    this.displayName = participant.DisplayName.TrimToNull();

    this.call.StateChanged += call_StateChanged;
}
public override async Task<AcdActionResult> Execute(LocalEndpoint localEndpoint, AudioVideoCall call, CancellationToken cancellationToken)
{
    // Pause for the configured duration; early cancellation is treated
    // as a normal wake-up rather than an error.
    try
    {
        await Task.Delay(Duration, cancellationToken);
    }
    catch (OperationCanceledException)
    {
        // cancelled — fall through and continue the pipeline
    }

    // This action never handles the call itself; always pass it on.
    return AcdActionResult.Continue;
}
/// <summary>
/// Runs the wrapped action while a delayed hold operation is pending, and
/// guarantees the pending hold is cancelled and awaited even if the wrapped
/// action throws, so the call is never left (or later put) on hold by accident.
/// </summary>
/// <param name="localEndpoint"></param>
/// <param name="call"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public override async Task<AcdActionResult> Execute(LocalEndpoint localEndpoint, AudioVideoCall call, CancellationToken cancellationToken)
{
    using (var hold = new AcdHoldContext(Audio))
    using (var holdCancellation = new CancellationTokenSource()) // was never disposed before
    {
        // kick off the delayed hold in the background
        var holdTask = Hold(hold, call, holdCancellation.Token);

        try
        {
            // invoke the next action while the hold timer is pending
            return await base.Execute(localEndpoint, call, cancellationToken);
        }
        finally
        {
            // stop the pending hold (previously skipped when base.Execute threw,
            // leaving the abandoned task free to hold the call later) and wait
            // for it to finish before leaving the using scope
            holdCancellation.Cancel();
            await holdTask;
        }
    }
}
public override async Task<AcdActionResult> Execute(LocalEndpoint localEndpoint, AudioVideoCall call, CancellationToken cancellationToken)
{
    foreach (var dispatcher in this)
    {
        // stop trying dispatchers once the caller has asked us to give up
        if (cancellationToken.IsCancellationRequested)
            return AcdActionResult.Continue;

        try
        {
            var handled = await dispatcher.Execute(localEndpoint, call, cancellationToken);
            if (handled)
                return AcdActionResult.Complete;
        }
        catch (OperationCanceledException)
        {
            // a cancelled dispatcher counts as "not handled"; try the next one
        }
    }

    // no dispatcher handled the call
    return AcdActionResult.Continue;
}
/// <summary>
/// Places the call in the call queue.
/// </summary>
/// <returns></returns>
public async Task Queue()
{
    // tear down any previous back-to-back leg first
    await DisconnectB2B();

    // resolve the conference that backs the queue
    var queueAddress = await application.Conference.GetConferenceUri();

    // build the conference-facing leg, impersonating the original caller
    b2bCall = new AudioVideoCall(new Conversation(call.Conversation.Endpoint));
    b2bCall.Conversation.Impersonate(uri, phoneUri, displayName);
    await b2bCall.Conversation.ConferenceSession.JoinAsync(queueAddress.Uri, null);

    // bridge the caller's leg with the conference leg
    var callerLeg = new BackToBackCallSettings(call);
    var conferenceLeg = new BackToBackCallSettings(b2bCall);
    b2b = new BackToBackCall(callerLeg, conferenceLeg);
    await b2b.EstablishAsync();
}
internal void HandleIncomingCall()
{
    Console.WriteLine("Handling incoming call.");

    // Build the back-end conversation (the leg that will join the conference)
    // on the same endpoint that received the front-end call.
    LocalEndpoint localEndpoint = _frontEndCallLeg.Conversation.Endpoint;
    Conversation backEndConversation = new Conversation(localEndpoint);

    // Impersonate the caller so the conference shows the caller as the
    // participant, not this application.
    string callerSipUri = _frontEndCallLeg.RemoteEndpoint.Participant.Uri;
    backEndConversation.Impersonate(callerSipUri, null, null);
    Console.WriteLine("Caller SIP Uri: " + callerSipUri);

    try
    {
        backEndConversation.ConferenceSession.BeginJoin(
            default(ConferenceJoinOptions),
            joinResult =>
            {
                try
                {
                    backEndConversation.ConferenceSession.EndJoin(joinResult);
                    Console.WriteLine("Joined conference.");

                    // Now that the conference is joined, create the back-end
                    // call leg and stitch the two legs together.
                    _backEndCallLeg = new AudioVideoCall(backEndConversation);
                    CreateBackToBack();
                }
                catch (RealTimeException ex)
                {
                    // EndJoin failed — the conference leg could not be set up.
                    Console.WriteLine(ex);
                }
            },
            null);
    }
    catch (InvalidOperationException ex)
    {
        // BeginJoin rejected the request (e.g. conversation already terminated).
        Console.WriteLine(ex);
    }
}
/// <summary>
/// Runs the base action under an optional timeout. A timeout or cancellation
/// is treated as "not handled" so the pipeline can continue.
/// </summary>
/// <param name="localEndpoint"></param>
/// <param name="call"></param>
/// <param name="cancellationToken"></param>
/// <returns></returns>
public override async Task<AcdActionResult> Execute(LocalEndpoint localEndpoint, AudioVideoCall call, CancellationToken cancellationToken)
{
    CancellationTokenSource timeoutSource = null;
    CancellationTokenSource linkedSource = null;

    try
    {
        // if we have a timeout, race it against the caller's token
        if (Length.TotalMilliseconds > 0)
        {
            timeoutSource = new CancellationTokenSource(Length);
            linkedSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, timeoutSource.Token);
            cancellationToken = linkedSource.Token;
        }

        // send to base dispatcher
        try
        {
            return await base.Execute(localEndpoint, call, cancellationToken);
        }
        catch (OperationCanceledException)
        {
            // timed out or cancelled — the call was not handled
        }

        // previously `return false` via implicit conversion; Continue is the
        // explicit equivalent used by the sibling Execute overrides
        return AcdActionResult.Continue;
    }
    finally
    {
        // release the timeout timer and the linked-token registration,
        // which the original code leaked until GC
        linkedSource?.Dispose();
        timeoutSource?.Dispose();
    }
}
private void OnIncomingAudioVideoCallReceived(object sender, CallReceivedEventArgs<AudioVideoCall> e)
{
    _avCall = e.Call;

    try
    {
        // Accept the incoming call, then kick off the selected transfer flow.
        _avCall.BeginAccept(
            acceptResult =>
            {
                try
                {
                    _avCall.EndAccept(acceptResult);
                    _logger.Log("Accepted incoming call.");

                    // Dispatch to the transfer flavor the user selected.
                    if (_transferType == TransferTypeSelection.Attended)
                    {
                        PerformAttendedTransfer();
                    }
                    else if (_transferType == TransferTypeSelection.Unattended)
                    {
                        PerformUnattendedTransfer();
                    }
                    else if (_transferType == TransferTypeSelection.Supervised)
                    {
                        PerformSupervisedTransfer();
                    }
                }
                catch (RealTimeException rtex)
                {
                    _logger.Log("Failed accepting incoming A/V call.", rtex);
                }
            },
            null);
    }
    catch (InvalidOperationException ioex)
    {
        _logger.Log("Failed accepting incoming A/V call.", ioex);
    }
}
private void OnTransferReceived(object sender, AudioVideoCallTransferReceivedEventArgs e)
{
    // Detach our handlers from the original call; it is being replaced.
    _avCall.TransferReceived -= OnTransferReceived;
    _avCall.Forwarded -= OnCallForwarded;

    // Accept the incoming REFER without adding any custom headers.
    e.Accept(null);

    // The event args carry a pre-initialized Conversation for the new leg;
    // build the replacement A/V call on top of it.
    AudioVideoCall transferCall = new AudioVideoCall(e.NewConversation);

    try
    {
        // Dial out to the transfer target.
        transferCall.BeginEstablish(
            e.TransferDestination,
            null,
            establishResult =>
            {
                try
                {
                    transferCall.EndEstablish(establishResult);
                }
                catch (RealTimeException rtex)
                {
                    _logger.Log("Failed establishing new call following transfer.", rtex);
                }
            },
            null);
    }
    catch (InvalidOperationException ioex)
    {
        _logger.Log("Failed establishing new call following transfer.", ioex);
    }
}
/// <summary>
/// Ensures we have a trusted-participant join to the current conference,
/// creating a new conversation and call when we have none or the conference changed.
/// </summary>
/// <returns></returns>
async Task EnsureTrustedParticipant()
{
    // Reuse the existing trusted join when it already targets this conference.
    if (trustedConversation != null &&
        trustedConversation.ConferenceSession.ConferenceUri == conference.ConferenceUri)
        return;

    // Join the conference as a trusted participant.
    trustedConversation = new Conversation(endpoint);
    var joinOptions = new ConferenceJoinOptions() { JoinMode = JoinMode.TrustedParticipant };
    await trustedConversation.ConferenceSession.JoinAsync(conference.ConferenceUri, joinOptions);

    // Establish the audio call into the conference.
    trustedCall = new AudioVideoCall(trustedConversation);
    await trustedCall.EstablishAsync();
}
public override Task<AcdActionResult> Execute(LocalEndpoint localEndpoint, AudioVideoCall call, CancellationToken cancellationToken)
{
    // With no wrapped action configured, signal "continue" without doing any work.
    if (Action == null)
        return AcdActionResult.ContinueTask;

    // Otherwise delegate to the wrapped action.
    return Action.Execute(localEndpoint, call, cancellationToken);
}
/// <summary>
/// Attempts to handle the call. The returned <see cref="AcdActionResult"/>
/// indicates whether the call was successfully handled (converts to <c>true</c>,
/// i.e. Complete) or should be passed to the next action (Continue).
/// </summary>
/// <param name="localEndpoint">Local endpoint on which the call was received.</param>
/// <param name="call">The audio/video call to handle.</param>
/// <param name="cancellationToken">Token used to abandon the attempt; implementations may treat cancellation as "not handled".</param>
/// <returns></returns>
public abstract Task<AcdActionResult> Execute(LocalEndpoint localEndpoint, AudioVideoCall call, CancellationToken cancellationToken);
/// <summary>
/// Wraps the MCU's Begin/End transfer APM pair into an awaitable <see cref="Task"/>.
/// </summary>
public static Task TransferAsync(this AudioVideoMcuSession mcuSession, AudioVideoCall call, McuTransferOptions options)
{
    // Bind the call and options into the begin delegate; EndTransfer completes the task.
    return Task.Factory.FromAsync(
        (callback, state) => mcuSession.BeginTransfer(call, options, callback, state),
        mcuSession.EndTransfer,
        null);
}
/// <summary>
/// Creates a location backed by an audio file, schedules a five-year conference
/// for it on the given endpoint, joins that conference as a trusted participant,
/// and establishes the A/V call that will carry the audio.
/// All network operations run asynchronously via Begin/End callbacks; any
/// failure at each stage is logged via Log() and stops the chain.
/// </summary>
/// <param name="id">Numeric identifier for this location.</param>
/// <param name="name">Display name; also used as conference description and conversation subject.</param>
/// <param name="fileName">Path to the audio file; must exist.</param>
/// <param name="endpoint">Endpoint used to schedule and join the conference.</param>
public Location(int id, string name, string fileName, LocalEndpoint endpoint)
{
    // Fail fast if the audio file is missing.
    if (!File.Exists(fileName))
        throw new FileNotFoundException(fileName);

    _id = id;
    _Name = name;
    _FileName = fileName;
    _Endpoint = endpoint;

    // Schedule a long-lived, open-access conference with an A/V MCU.
    ConferenceScheduleInformation csi = new ConferenceScheduleInformation()
    {
        AccessLevel = ConferenceAccessLevel.Everyone,
        Description = _Name,
        ExpiryTime = DateTime.Now.AddYears(5),
        AutomaticLeaderAssignment = AutomaticLeaderAssignment.Everyone
    };
    csi.Mcus.Add(new ConferenceMcuInformation(McuType.AudioVideo));

    _Endpoint.ConferenceServices.BeginScheduleConference(csi, ar =>
    {
        try
        {
            _conference = _Endpoint.ConferenceServices.EndScheduleConference(ar);
            Log("Conference " + _conference.ConferenceId + " scheduled. Starting music...");
            Log(_conference.ConferenceUri);

            // Join the scheduled conference as a trusted participant.
            ConversationSettings cs = new ConversationSettings() { Subject = _Name };
            _conversation = new Conversation(_Endpoint, cs);
            ConferenceJoinOptions cjo = new ConferenceJoinOptions() { JoinMode = JoinMode.TrustedParticipant };
            _conversation.ConferenceSession.BeginJoin(_conference.ConferenceUri, cjo, ar1 =>
            {
                try
                {
                    _conversation.ConferenceSession.EndJoin(ar1);

                    // Create the A/V call and hook flow configuration so the
                    // audio can be wired up once the flow is available.
                    _avCall = new AudioVideoCall(_conversation);
                    _avCall.AudioVideoFlowConfigurationRequested += new EventHandler<AudioVideoFlowConfigurationRequestedEventArgs>(_avCall_AudioVideoFlowConfigurationRequested);

                    AudioVideoCallEstablishOptions options = new AudioVideoCallEstablishOptions()
                    {
                        UseGeneratedIdentityForTrustedConference = true,
                        SupportsReplaces = CapabilitySupport.Supported
                    };
                    _avCall.BeginEstablish(
                        options,
                        ar2 =>
                        {
                            try
                            {
                                _avCall.EndEstablish(ar2);
                            }
                            catch (Exception ex)
                            {
                                // Establishing the A/V call failed.
                                Log(ex.ToString());
                            }
                        },
                        null);
                }
                catch (Exception ex)
                {
                    // Joining the conference failed.
                    Log(ex.ToString());
                }
            }, null);
        }
        catch (Exception ex)
        {
            // Scheduling the conference failed.
            Log(ex.ToString());
        }
    }, null);
}