/// <summary>
/// Establishes the endpoint, waits for an inbound AV call, drives the
/// VoiceXML browser over that call, and then tears everything down.
/// </summary>
public void Run()
{
    // Create and establish the endpoint, using the credentials of the
    // user the application will be acting as.
    _helper = new UCMASampleHelper();
    _userEndpoint = _helper.CreateEstablishedUserEndpoint("VoiceXML Sample User" /*endpointFriendlyName*/);
    _userEndpoint.RegisterForIncomingCall<AudioVideoCall>(inboundAVCall_CallReceived);

    // Block the main thread until a call has been received and then accepted.
    _waitForCallReceived.WaitOne();
    _waitForCallAccepted.WaitOne();

    InitializeVoiceXmlBrowser();
    _voiceXmlBrowser.SetAudioVideoCall(_audioVideoCall);

    // Point the browser at the start page and run the VoiceXML session.
    var startPage = new Uri(startPageURL);
    Console.WriteLine("Browser state: " + _voiceXmlBrowser.State.ToString());
    _voiceXmlBrowser.RunAsync(startPage, null);
    _waitForSessionCompleted.WaitOne();

    _collabPlatform = _conversation.Endpoint.Platform;

    // Terminate the call.
    _audioVideoCall.BeginTerminate(CallTerminateCB, _audioVideoCall);
    _waitForPlatformShutdownCompleted.WaitOne();

    // Pause the console to allow the user to view logs.
    Console.WriteLine("Press any key to end the sample.");
    Console.ReadKey();
}
/// <summary>
/// Terminate av call.
/// </summary>
private void TerminateAudioVideoCall()
{
    // Fault barrier: if anything below throws, the flag stays true and the
    // finally block completes the operation with a server fault.
    bool exceptionEncountered = true;
    try
    {
        AudioVideoCall callToTerminate =
            (m_webConversation.WebAvCall == null) ? null : m_webConversation.WebAvCall.AvCall;

        if (callToTerminate == null)
        {
            // No AV call exists; go to next step of terminating the b2b call.
            this.TerminateBackToBackCall();
        }
        else
        {
            callToTerminate.BeginTerminate(this.AudioVideoCallTerminated, callToTerminate);
        }

        exceptionEncountered = false;
    }
    finally
    {
        if (exceptionEncountered)
        {
            OperationFault operationFault = FaultHelper.CreateServerOperationFault(
                FailureStrings.GenericFailures.UnexpectedException, null /*innerException*/);
            this.CompleteTerminateConversationOperationWithException(
                new FaultException<OperationFault>(operationFault));
        }
    }
}
/// <summary>
/// Waits for an inbound AV call, attaches a speech recognition connector
/// to its flow, runs continuous recognition against a small command
/// grammar, and shuts the platform down once recognition completes.
/// </summary>
public void Run()
{
    // A helper class to take care of platform and endpoint setup and cleanup.
    _helper = new UCMASampleHelper();

    // Create a user endpoint using the network credential object.
    _userEndpoint = _helper.CreateEstablishedUserEndpoint("Broadcast User");

    // Register a delegate to be called when an incoming audio-video call arrives.
    _userEndpoint.RegisterForIncomingCall<AudioVideoCall>(AudioVideoCall_Received);

    // Wait for the incoming call to be accepted.
    Console.WriteLine("Waiting for incoming call...");
    _waitForCallToBeAccepted.WaitOne();

    // Create a speech recognition connector, attach the AV flow, and start
    // it to obtain the audio stream that feeds the recognizer.
    var recoConnector = new SpeechRecognitionConnector();
    recoConnector.AttachFlow(_audioVideoFlow);
    SpeechRecognitionStream recoStream = recoConnector.Start();

    // Create a speech recognition engine and hook the recognized event.
    var recoEngine = new SpeechRecognitionEngine();
    recoEngine.SpeechRecognized +=
        new EventHandler<SpeechRecognizedEventArgs>(SpeechRecognitionEngine_SpeechRecognized);

    // Add a grammar built from the fixed command vocabulary.
    string[] recoString = { "buy", "sell", "Fabrikam", "Contoso", "maximum", "minimum", "one", "ten", "twenty", "send" };
    recoEngine.LoadGrammar(new Grammar(new GrammarBuilder(new Choices(recoString))));

    // Attach the audio stream to the SR engine (8 kHz, 16-bit mono PCM).
    var formatInfo = new SpeechAudioFormatInfo(
        8000, AudioBitsPerSample.Sixteen, Microsoft.Speech.AudioFormat.AudioChannel.Mono);
    recoEngine.SetInputToAudioStream(recoStream, formatInfo);
    Console.WriteLine("\r\nGrammar loaded, say send to send IM.");

    // Prepare the SR engine to perform multiple asynchronous recognitions.
    recoEngine.RecognizeAsync(RecognizeMode.Multiple);

    // Pause the main thread until recognition completes.
    _waitForConnectorToStop.WaitOne();
    recoConnector.Stop();
    Console.WriteLine("connector stopped");

    // Detach the flow from the speech recognition connector, to prevent
    // the flow from being kept in memory.
    recoConnector.DetachFlow();

    // Terminate the call, the conversation, and then unregister the
    // endpoint from receiving an incoming call.
    _audioVideoCall.BeginTerminate(CallTerminateCB, _audioVideoCall);
    _waitForConversationToBeTerminated.WaitOne();

    // Shut down the platform.
    _helper.ShutdownPlatform();
}
/// <summary>
/// Establishes the endpoint, places an outbound AV call to a prompted
/// URI, then terminates the call, the conversation, and the platform.
/// </summary>
public void Run()
{
    // Initialize and register the endpoint, using the credentials of the
    // user the application will be acting as.
    _helper = new UCMASampleHelper();
    _userEndpoint = _helper.CreateEstablishedUserEndpoint("AVCall Sample User" /*endpointFriendlyName*/);

    // Set up the conversation and place the call. A Conversation represents
    // a collection of modalities in the context of a dialog with one or
    // multiple callees.
    var settings = new ConversationSettings();
    settings.Priority = _conversationPriority;
    settings.Subject = _conversationSubject;
    var conversation = new Conversation(_userEndpoint, settings);

    _audioVideoCall = new AudioVideoCall(conversation);

    // Call: StateChanged: Only hooked up for logging.
    _audioVideoCall.StateChanged +=
        new EventHandler<CallStateChangedEventArgs>(audioVideoCall_StateChanged);

    // Subscribe for the flow configuration requested event; the flow will be
    // used to send the media. Ultimately, as a part of the callback, the
    // media will be sent/received.
    _audioVideoCall.AudioVideoFlowConfigurationRequested +=
        this.audioVideoCall_FlowConfigurationRequested;

    // Prompt for called party.
    _calledParty = UCMASampleHelper.PromptUser(
        "Enter the URI for the user logged onto Microsoft Lync, in the sip:User@Host format or tel:+1XXXYYYZZZZ format => ",
        "RemoteUserURI");

    // Place the call to the remote party and wait for it to complete.
    _audioVideoCall.BeginEstablish(_calledParty, null, EndCallEstablish, _audioVideoCall);
    Console.WriteLine("Calling the remote user...");
    _waitForCallToEstablish.WaitOne();

    // Terminate the call, and then the conversation. Terminating these
    // additional objects individually is made redundant by shutting down the
    // platform right after, but in the multiple call case, this is needed for
    // object hygene. Terminating a Conversation terminates all it's associated
    // calls, and terminating an endpoint will terminate all conversations on
    // that endpoint.
    _audioVideoCall.BeginTerminate(EndTerminateCall, _audioVideoCall);
    Console.WriteLine("Waiting for the call to get terminated...");
    _waitForCallToTerminate.WaitOne();

    _audioVideoCall.Conversation.BeginTerminate(
        EndTerminateConversation, _audioVideoCall.Conversation);
    Console.WriteLine("Waiting for the conversation to get terminated...");
    _waitForConversationToTerminate.WaitOne();

    // Now, cleanup by shutting down the platform.
    Console.WriteLine("Shutting down the platform...");
    _helper.ShutdownPlatform();

    // Pause the console to allow for easier viewing of logs.
    Console.WriteLine("Please hit any key to end the sample.");
    Console.ReadKey();
}
// Handler for the StateChanged event on the inbound call.
// When the remote party terminates, unhooks both calls' state handlers,
// terminates both calls, and finishes shutdown.
void inboundAVCall_StateChanged(object sender, CallStateChangedEventArgs e)
{
    Console.WriteLine("Inbound call - state change.\nPrevious state: " + e.PreviousState + "\nCurrent state: " + e.State + "\nTransitionReason: " + e.TransitionReason + "\n");
    if (e.TransitionReason == CallStateTransitionReason.TerminatedRemotely)
    {
        // If one call has been terminated remotely, unregister for
        // notification of the StateChanged event.
        // BUG FIX: the outbound call may never have been created; the original
        // unsubscribed from _outboundAVCall unconditionally, which would throw
        // NullReferenceException before reaching the null-checked block below.
        if (_outboundAVCall != null)
        {
            _outboundAVCall.StateChanged -= outboundAVCall_StateChanged;
        }
        _inboundAVCall.StateChanged -= inboundAVCall_StateChanged;
        _inboundAVCall.BeginTerminate(TerminateCallCB, _inboundAVCall);
        if (_outboundAVCall != null)
        {
            // BUG FIX: the original message said "inbound" while terminating
            // the outbound call.
            Console.WriteLine("Terminating the outbound call...");
            _outboundAVCall.BeginTerminate(TerminateCallCB, _outboundAVCall);
        }
        FinishShutdown();
    }
}
/// <summary>
/// Synchronously terminates the audio/video call. Termination failures are
/// logged rather than propagated, since this runs during teardown.
/// </summary>
private void TerminateCall()
{
    this.logger.Log("Call terminating for user {0}", audioVideoCall.OriginalDestinationUri);
    try
    {
        // EndTerminate blocks until the asynchronous terminate completes.
        audioVideoCall.EndTerminate(
            audioVideoCall.BeginTerminate(result => { }, null));
    }
    catch (Exception ex)
    {
        // BUG FIX: the original empty catch swallowed every exception silently,
        // hiding termination failures. Termination is still best-effort, but
        // the failure is now recorded in the log.
        this.logger.Log("Call termination failed: {0}", ex);
    }
}
/// <summary>
/// Indicates what to do when a new attendee is detected and when an
/// attendee departs.
/// </summary>
/// <param name="sender">The Av Mcu session raising the event.</param>
/// <param name="e">The AV Mcu Participant endpoint attendance changed
/// event arguments object.</param>
private void AudioVideoMcuSession_ParticipantEndpointAttendanceChanged(object sender,
    ParticipantEndpointAttendanceChangedEventArgs<AudioVideoMcuParticipantEndpointProperties> e)
{
    foreach (var joiningParticipant in e.Joined)
    {
        var joiningParticipantEndpoint = joiningParticipant.Key;

        // If this participant is hidden on the roster, and therefore a
        // trusted application, move onto the next joining participant.
        if (joiningParticipantEndpoint.Participant.RosterVisibility == ConferencingRosterVisibility.Hidden)
        {
            continue;
        }

        if (!_trustedParticipantCalls.ContainsKey(joiningParticipantEndpoint.Uri))
        {
            Console.WriteLine("Detected a new participant on the AVMCU Session with the displayname " + "{0}.", joiningParticipantEndpoint.Participant.DisplayName);
            EstablishAvCallAndAudioRouteForNewAttendee(joiningParticipantEndpoint);
        }
    }

    foreach (var departingParticipant in e.Left)
    {
        // PERF FIX: TryGetValue replaces the original ContainsKey + indexer
        // pair, avoiding a second dictionary lookup for the same key.
        AudioVideoCall departingParticipantAvCall;
        if (_trustedParticipantCalls.TryGetValue(departingParticipant.Key.Uri, out departingParticipantAvCall))
        {
            Console.WriteLine("Detected a departing participant on the AVMCU Session with the " + "displayname {0}.", departingParticipant.Key.Participant.DisplayName);

            // Terminate the call that the trusted app has listening for
            // DTMF from the user, unless it is already terminating/terminated.
            if (CallState.Terminating != departingParticipantAvCall.State &&
                CallState.Terminated != departingParticipantAvCall.State)
            {
                departingParticipantAvCall.BeginTerminate(CallTerminationCompleted, departingParticipantAvCall);
            }

            // Remove the call from the collection.
            _trustedParticipantCalls.Remove(departingParticipant.Key.Uri);
            _trustedParticipantCallIDToParticipantUriStore.Remove(departingParticipantAvCall.CallId);
        }
    }
}
/// <summary>
/// Method to clean up callback call.
/// </summary>
private void CleanupCallbackCallIfNeeded()
{
    // In exception cases terminate the temporary callback AV call.
    // Capture the field once so the callback closes over a stable reference.
    AudioVideoCall pendingCallback = m_callbackCall;
    if (pendingCallback == null)
    {
        return;
    }

    pendingCallback.BeginTerminate(
        result => pendingCallback.EndTerminate(result),
        pendingCallback);
}
/// <summary>
/// Clean-up the call's EventHandlers
/// </summary>
private void CleanupCall()
{
    lock (callCleanupLock)
    {
        if (currentCall == null)
        {
            return;
        }

        // Unhook the flow handler first (the flow may not exist yet).
        var flow = currentCall.Flow;
        if (flow != null)
        {
            flow.StateChanged -= Flow_StateChanged;
        }

        currentCall.StateChanged -= HandleCallStateChanged;
        currentCall.AudioVideoFlowConfigurationRequested -= Call_AudioVideoFlowConfigurationRequested;

        if (currentCall.State == CallState.Established)
        {
            // Synchronous terminate: block until the call is fully torn down
            // before dropping the reference.
            currentCall.EndTerminate(currentCall.BeginTerminate(null, null));
        }

        currentCall = null;
    }
}
/// <summary>
/// Terminates the service channel.
/// </summary>
/// <param name="task">The task for this operation.</param>
private void TerminateServiceChannelCall(AsyncTask task, object state)
{
    try
    {
        Logger.Log(Logger.LogLevel.Info, "Terminating service channel call.");
        m_serviceChannelCall.BeginTerminate(
            ar =>
            {
                Exception completionError = null;
                try
                {
                    m_serviceChannelCall.EndTerminate(ar);
                    Logger.Log(Logger.LogLevel.Info, "Terminated service channel call.");
                }
                catch (RealTimeException rte)
                {
                    completionError = rte;
                }
                finally
                {
                    // Always complete the task, successfully or with the
                    // platform exception captured above.
                    task.Complete(completionError);
                }
            },
            null);
    }
    catch (InvalidOperationException ioe)
    {
        // BeginTerminate can throw if the call is in an invalid state;
        // surface the failure through the task instead of propagating.
        task.Complete(ioe);
    }
}
/// <summary>
/// Stops speech recognition, unhooks all event handlers, terminates the
/// AV call (if any), and resets the wait handles for reuse. If there is no
/// call, signals termination immediately so waiters are released.
/// </summary>
public void TerminateCall()
{
    if (_speechRecognizer != null)
    {
        _speechRecognizer.StopSpeechRecognition();
    }

    if (_audioVideoFlow != null)
    {
        _audioVideoFlow.StateChanged -= AudioVideoFlow_StateChanged;
        _audioVideoFlow = null;
    }

    if (_audioVideoCall != null)
    {
        // Unhook handlers BEFORE starting termination so no events fire into
        // handlers during teardown.
        // BUG FIX: Flow is null when the call never established media; the
        // original dereferenced _audioVideoCall.Flow unconditionally and
        // threw NullReferenceException in that case.
        if (_audioVideoCall.Flow != null)
        {
            _audioVideoCall.Flow.StateChanged -= AudioVideoFlow_StateChanged;
        }
        _audioVideoCall.StateChanged -= AudioVideoCall_StateChanged;
        _audioVideoCall.AudioVideoFlowConfigurationRequested -= AudioVideoCall_FlowConfigurationRequested;
        _audioVideoCall.ConversationChanged -= AudioVideoCall_ConversationChanged;
        _audioVideoCall.BeginTerminate(AudioVideoCallTerminated, _audioVideoCall);
        _audioVideoCall = null;
    }
    else
    {
        // No call to terminate: release anyone waiting on termination.
        _waitForAudioVideoCallTerminated.Set();
    }

    if (_subConversation != null)
    {
        _transcriptRecorder.OnSubConversationRemoved(_subConversation, this);
        _subConversation = null;
    }

    // Reset wait handles so the next call can reuse them.
    _waitForAudioVideoCallAccepted.Reset();
    _waitForAudioVideoFlowStateChangedToActiveCompleted.Reset();
}