/// <summary>
/// Handles AV modality state changes: records a newly connected audio call (and
/// whether it involves an external, non-SIP participant) and refreshes call
/// statuses on disconnect. Always raises ExternalStatusChanged afterwards.
/// </summary>
/// <param name="sender">The AVModality whose state changed.</param>
/// <param name="e">State-change details.</param>
private void Conversation_modalityChanged(object sender, ModalityStateChangedEventArgs e)
{
    var modality = sender as Microsoft.Lync.Model.Conversation.AudioVideo.AVModality;
    if (modality == null)
    {
        // Defensive: 'as' yields null for an unexpected sender type; the
        // original code would have thrown a NullReferenceException here.
        return;
    }

    var conversationID = modality.Conversation.Properties[ConversationProperty.Id].ToString();
    //Console.WriteLine("Modality Changed ID:" + conversationID + ": Name :" + e.NewState.ToString());

    if (modality.State == ModalityState.Connected)
    {
        Console.WriteLine("Conversation ID : " + conversationID + " : Is now an Audio Call");
        ActiveAudioCalls.Add(conversationID);

        // A participant whose URI does not begin with "sip" is treated as external.
        bool external = false;
        foreach (var participant in modality.Conversation.Participants)
        {
            var uri = participant.Contact.Uri;

            // StartsWith avoids the ArgumentOutOfRangeException that
            // Substring(0, 3) would throw for a URI shorter than 3 characters.
            if (uri == null || !uri.StartsWith("sip", StringComparison.Ordinal))
            {
                external = true;
                break; // one external participant is enough
            }
        }

        if (external)
        {
            ActiveExternalAudioCalls.Add(conversationID);
        }
    }
    else if (modality.State == ModalityState.Disconnected)
    {
        updateCallStatuses();
    }

    ExternalStatusChanged();
}
/// <summary>
/// Notifies the user when the IM modality reaches the Connected state.
/// All other transitions are ignored.
/// </summary>
void MainWindow_ModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    if (e.NewState != ModalityState.Connected)
    {
        return;
    }

    MessageBox.Show("IM Modality Connected");
}
/// <summary>
/// Handles the event raised when the IM modality state changes. Updates the
/// activity label while the conversation's IM modality is Joining or Connected;
/// ConnectingToCaller and Disconnected were deliberate no-ops in the original.
/// </summary>
/// <param name="sender">The modality whose state changed.</param>
/// <param name="e">State-change details.</param>
void myInstantMessageModality_ModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    // Only these two transitions drive the UI.
    bool reportable = e.NewState == ModalityState.Connected
                   || e.NewState == ModalityState.Joining;
    if (!reportable)
    {
        return;
    }

    // Ignore state changes from any modality other than this conversation's IM modality.
    if ((Modality)sender != _Conversation.Modalities[ModalityTypes.InstantMessage])
    {
        return;
    }

    this.Dispatcher.Invoke(
        FormActor,
        new object[] { FormActions.UpdateLabel, ActivityText_Label, "Modality is " + e.NewState.ToString() });
}
/// <summary>
/// Shows every app-sharing highlighter when the sharing modality connects and
/// hides/closes them all when it disconnects. All UI work is marshalled onto
/// the dispatcher thread.
/// </summary>
void sharingModality_ModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    switch (e.NewState)
    {
        case ModalityState.Connected:
            // Reveal all the highlighters once application sharing is up.
            foreach (Highlighter hl in _appsharingHighlighters)
            {
                this.Dispatcher.BeginInvoke(new Action(() =>
                {
                    hl.Highlight();
                    hl.Show();
                }), null);
            }
            break;

        case ModalityState.Disconnected:
            // Tear all the highlighters down once sharing ends.
            foreach (Highlighter hl in _appsharingHighlighters)
            {
                this.Dispatcher.Invoke(new Action(() =>
                {
                    hl.Hide();
                    hl.Close();
                }), null);
            }
            break;
    }
}
// NOTE(review): a commented-out ConversationManager_ConversationAdded handler that
// wired this method to new conversations' IM modalities was removed as dead code;
// re-add that subscription where conversations are created if this handler
// stops firing.

/// <summary>
/// Notifies the user when the IM modality reaches the Connected state.
/// </summary>
void IMModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    if (e.NewState == ModalityState.Connected)
    {
        MessageBox.Show("IM Modality Connected");
    }
}
/// <summary>
/// Once the AV modality is connected, watches the video channel so video can be
/// started when the channel becomes ready.
/// </summary>
private static void AVModality_ModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    if (e.NewState != ModalityState.Connected)
    {
        return;
    }

    // We can't start video until the modality is connected.
    var vc = ((AVModality)sender).VideoChannel;

    // A modality can transition to Connected more than once (e.g. after
    // hold/resume); unsubscribing first prevents the same handler from being
    // registered multiple times and firing repeatedly.
    vc.StateChanged -= VideoChannel_StateChanged;
    vc.StateChanged += VideoChannel_StateChanged;
}
//*****************************************************************************************
// Modality event handling
//*****************************************************************************************

/// <summary>
/// Reflects the AV modality's new state in the status bar.
/// </summary>
void avModality_ModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    // Posts the execution into the UI thread before touching the status strip.
    this.BeginInvoke(new MethodInvoker(delegate()
    {
        // Updates the status bar with the modality (not video channel) state.
        toolStripStatusLabelModality.Text = e.NewState.ToString();
    }));
}
/// <summary>
/// Stores a conversation the first time its AV modality reports activity.
/// In this case any state change will be from Disconnected and therefore
/// indicates some A/V activity.
/// </summary>
static void Program_ModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    var avModality = sender as Microsoft.Lync.Model.Conversation.AudioVideo.AVModality;

    // Nothing to do when the sender is not an AV modality or the conversation
    // is already tracked.
    if (avModality == null)
    {
        return;
    }
    if (FindContainerOf(avModality.Conversation) != null)
    {
        return;
    }

    StoreConversation(avModality.Conversation);

    // The conversation is now tracked; stop listening to avoid duplicate stores.
    avModality.ModalityStateChanged -= Program_ModalityStateChanged;
}
/// <summary>
/// Handles the event raised when the state of an application sharing modality
/// changes. The modality becomes Connected for each participant whether or not
/// they have accepted the sharing invite.
/// </summary>
/// <param name="sender">The application sharing modality.</param>
/// <param name="e">State-change details.</param>
void _sharingModality_ModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    if (e.NewState != ModalityState.Connected)
    {
        return;
    }

    var sharingModality = (ApplicationSharingModality)sender;

    // Marshal both UI updates onto the form's thread.
    this.Invoke(new AddAContactDelegate(AddAContact),
        new object[] { sharingModality.Participant.Contact.Uri });
    this.Invoke(new ChangeLabelTextDelegate(ChangeLabelText),
        new object[] { ContactList_Label, "5) Pick a participant" });
}
/// <summary>
/// Placeholder handler for application-sharing modality state changes.
/// </summary>
/// <remarks>
/// The original body was a try/catch/finally whose try block contained no
/// statements, so the catch (and its LogManage.WriteLog call) was unreachable
/// dead scaffolding. It has been removed; wrap any real logic added here in
/// try/catch again, logging via LogManage.WriteLog(typeof(LyncHelper), ex).
/// </remarks>
static void applicationSharingModality_ModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    // Intentionally no-op.
}
/// <summary>
/// Stores a conversation the first time its AV modality reports activity.
/// In this case any state change will be from Disconnected and therefore
/// indicates some A/V activity.
/// </summary>
static void Program_ModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    var modality = sender as Microsoft.Lync.Model.Conversation.AudioVideo.AVModality;
    if (modality == null)
    {
        // 'as' yields null for a non-AV sender; the original dereferenced the
        // result unconditionally and would have thrown a NullReferenceException.
        return;
    }

    string ConversationID = modality.Conversation.Properties[ConversationProperty.Id].ToString();

    if (!ActiveConversations.ContainsKey(ConversationID))
    {
        StoreConversation(modality.Conversation, ConversationID);

        // The conversation is now tracked; stop listening to avoid duplicate stores.
        modality.ModalityStateChanged -= Program_ModalityStateChanged;
    }
}
/// <summary>
/// Once the AV modality is connected, wires up the video channel's StateChanged
/// event — we can't start video until the modality is connected.
/// </summary>
static void AVModality_ModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    if (e.NewState != ModalityState.Connected)
    {
        return;
    }

    // The original declared a local 'vc = null' and immediately null-checked
    // it, a condition that was always true; the vacuous guard is removed.
    VideoChannel vc = ((AVModality)sender).VideoChannel;
    vc.StateChanged += new EventHandler<ChannelStateChangedEventArgs>(VideoChannel_StateChanged);
}
/// <summary>
/// Once the AV modality connects, wires up the video channel and starts video
/// as soon as the Start action is available.
/// </summary>
static void avModality_ModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    if (e.NewState != ModalityState.Connected)
    {
        return;
    }

    var videoChannel = ((AVModality)_conversation.Modalities[ModalityTypes.AudioVideo]).VideoChannel;
    videoChannel.StateChanged += VideoChannel_StateChanged;

    // The original blocked this thread with Thread.Sleep(5000) hoping the
    // channel became startable (a TODO'd race). Instead: start immediately if
    // possible, otherwise wait for the SDK to signal that ChannelAction.Start
    // has become available.
    if (videoChannel.CanInvoke(ChannelAction.Start))
    {
        videoChannel.BeginStart(videoChannelEndStart, videoChannel);
        return;
    }

    EventHandler<ChannelActionAvailabilityEventArgs> onAvailability = null;
    onAvailability = (s, args) =>
    {
        if (args.Action == ChannelAction.Start && args.IsAvailable)
        {
            // One-shot: unhook before starting so video isn't started twice.
            videoChannel.ActionAvailabilityChanged -= onAvailability;
            videoChannel.BeginStart(videoChannelEndStart, videoChannel);
        }
    };
    videoChannel.ActionAvailabilityChanged += onAvailability;
}
/// <summary>
/// Pushes an AV modality state change into the conversation tracker, keyed by
/// the conversation's participant set and labelled with the sender's display name.
/// </summary>
private void av_ModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    var avModality = sender as AVModality;
    if (avModality == null)
    {
        return;
    }

    var conversationParticipants = avModality.Conversation.Participants;
    var displayName = (string)avModality.Participant.Contact
        .GetContactInformation(ContactInformationType.DisplayName);

    update_Conversation(
        calculateKey(conversationParticipants),
        displayName,
        e.NewState.ToString());
}
/// <summary>
/// When the AV modality connects, hooks the video channel's state and
/// action-availability events so the UI can react once the channel is ready.
/// Exceptions are written to the debugger rather than propagated.
/// </summary>
private static void avModality_ModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    try
    {
        if (e.NewState != ModalityState.Connected)
        {
            return;
        }

        var channel = ((AVModality)_conversation.Modalities[ModalityTypes.AudioVideo]).VideoChannel;

        // State changes control the UI display of the control; action
        // availability tells us when the channel is ready to be started.
        channel.StateChanged += VideoChannel_StateChanged;
        channel.ActionAvailabilityChanged += videoChannel_ActionAvailabilityChanged;
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine(ex);
    }
}
/// <summary>
/// Starts audio capture on a background thread when the call connects and
/// stops the recorder when the call disconnects.
/// </summary>
private void AvModality_ModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    Console.WriteLine($"****modality state changed from {e.OldState} to {e.NewState} ****");

    if (e.NewState == ModalityState.Connected)
    {
        Console.WriteLine("Call started");

        // Start recording from the call's audio render device.
        // (The original comments had start/stop swapped.)
        var audioProperties = _currentConversation.Modalities[ModalityTypes.AudioVideo].Properties;
        var device = audioProperties[ModalityProperty.AVModalityAudioRenderDevice];

        _recordingThread = new Thread(() => { _audioRecorder.Record(device.ToString()); });
        _recordingThread.Start();
    }
    else if (e.NewState == ModalityState.Disconnected)
    {
        Console.WriteLine("Call stopped");

        // Stop the recording that began when the call connected.
        _audioRecorder.Stop();
    }
}
/// <summary>
/// Logs AV modality state transitions. The video-channel wiring that used to
/// happen on Connected is currently disabled.
/// </summary>
private void OnAvModalityModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    try
    {
        _log.Debug("OnAvModalityModalityStateChanged NewState:{0}", e.NewState.ToString());

        if (e.NewState == ModalityState.Connected)
        {
            // NOTE(review): the original fetched the VideoChannel here but both
            // subscriptions on it were commented out, leaving an unused local.
            // The dead fetch was removed; restore it together with the
            // subscriptions if video support is re-enabled:
            //   var videoChannel = ((AVModality)Conversation.Modalities[ModalityTypes.AudioVideo]).VideoChannel;
            //   videoChannel.StateChanged += OnVideoChannelStateChanged;
            //   videoChannel.ActionAvailabilityChanged += OnVideoChannelActionAvailabilityChanged;
        }
    }
    catch (Exception ex)
    {
        // Original logged with an empty message, which makes log triage harder.
        _log.ErrorException("OnAvModalityModalityStateChanged failed", ex);
    }
}
/// <summary>
/// Handles application-sharing modality state changes: surfaces the shared view
/// when sharing connects and hides it when sharing ends. The modality becomes
/// Connected for each participant whether or not they have accepted the
/// sharing invite. All UI work runs via RunAtUI.
/// </summary>
void OnSharingModalityModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    _log.Debug("OnSharingModalityModalityStateChanged ModalityState:{0}", e.NewState.ToString());

    RunAtUI(() =>
    {
        var thisModality = sender as ApplicationSharingModality;
        if (thisModality == null)
        {
            // 'as' yields null for an unexpected sender; the original would
            // have thrown a NullReferenceException below.
            return;
        }

        if (e.NewState == ModalityState.Connected)
        {
            // View is non-null when a remote user is the sharer — dock it so
            // the local user can see the shared resource. When the local user
            // is the sharer there is nothing to display (the original's empty
            // else branch).
            if (thisModality.View != null)
            {
                SharingView = thisModality.View;
                if (ShowApplicationSharingView != null)
                {
                    ShowApplicationSharingView(SharingView);
                }
            }
        }

        if (e.NewState == ModalityState.Disconnected)
        {
            // (An empty comparison against the conversation's sharing modality
            // was removed as dead code.)
            if (HideApplicationSharingView != null)
            {
                HideApplicationSharingView();
            }
        }
    });
}
/// <summary>
/// Tracks the active audio call. A modality reaching Connected becomes the
/// active call; leaving Connected (disconnect or hold) releases it. Mute-change
/// notifications follow the active call, and listeners are told either way.
/// </summary>
private void Modality_ModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    Conversation conversation = ((Modality)sender).Conversation;

    if (e.NewState == ModalityState.Connected)
    {
        // Audio call connected — this is now the active call.
        latestCall = conversation;
        latestCall.SelfParticipant.IsMutedChanged += Participant_IsMutedChanged;
        OnCallStateChanged(new CallStateEventArgs(ActiveCallName));
        return;
    }

    if (e.OldState != ModalityState.Connected)
    {
        return;
    }

    // Just disconnected, or the active audio call was put on hold.
    if (latestCall == conversation)
    {
        latestCall = null;
    }
    conversation.SelfParticipant.IsMutedChanged -= Participant_IsMutedChanged;
    OnCallStateChanged(new CallStateEventArgs(ActiveCallName));
}
/// <summary>
/// Handles the ModalityStateChanged event of the audio/video modality.
/// Deliberately a no-op: state changes are not acted upon here.
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="Microsoft.Lync.Model.Conversation.ModalityStateChangedEventArgs"/> instance containing the event data.</param>
private void AudioVideoModality_ModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    // Intentionally empty.
}
/// <summary>
/// Handles a participant's modality state change. Deliberately a no-op: the
/// subscription exists but no action is taken on transitions.
/// </summary>
private void OnParticipantModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    // Intentionally empty.
}
/// <summary>
/// Logs that the AV modality changed state and broadcasts the current state.
/// </summary>
void AVModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    Console.WriteLine("AVModalityStateChanged");

    // Push the updated state to listeners.
    SendState();
}
/// <summary>
/// Logs the start and end of a call to <paramref name="fileLog"/> and records
/// the conversation audio while the call is connected.
/// (ModalityStateChanged callback.)
/// </summary>
/// <param name="e">State-change details.</param>
/// <param name="fileLog">Path of the log / recording file.</param>
static void callImModality_ModalityStateChanged(ModalityStateChangedEventArgs e, String fileLog)
{
    bool connected = e.NewState == ModalityState.Connected;
    bool disconnected = e.NewState == ModalityState.Disconnected;

    // Write log only on connection or disconnection.
    if (connected || disconnected)
    {
        // Append a started/ended line stamped with the current time.
        using (FileStream stream = File.Open(fileLog, FileMode.Append, FileAccess.Write, FileShare.ReadWrite))
        using (StreamWriter writer = new StreamWriter(stream))
        {
            writer.WriteLine(String.Format(
                LOG_AUDIO,
                connected ? "started" : "ended",
                DateTime.Now.ToString("HH:mm:ss")));
        }
    }

    // Record conversation while the call is up.
    if (connected)
    {
        _log.Info("Start recording to " + fileLog);
        AudioLogger.Instance.Start(fileLog);
    }

    // End recording when the call drops.
    if (disconnected)
    {
        AudioLogger.Instance.Stop();
    }
}
/// <summary>
/// Once the AV modality connects, wires up the video channel and starts video
/// as soon as the Start action is available.
/// </summary>
static void avModality_ModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    if (e.NewState != ModalityState.Connected)
    {
        return;
    }

    var videoChannel = ((AVModality)_conversation.Modalities[ModalityTypes.AudioVideo]).VideoChannel;
    videoChannel.StateChanged += VideoChannel_StateChanged;

    // The original blocked this thread with Thread.Sleep(5000) hoping the
    // channel became startable (a TODO'd race). Instead: start immediately if
    // possible, otherwise wait for the SDK to signal that ChannelAction.Start
    // has become available.
    if (videoChannel.CanInvoke(ChannelAction.Start))
    {
        videoChannel.BeginStart(videoChannelEndStart, videoChannel);
        return;
    }

    EventHandler<ChannelActionAvailabilityEventArgs> onAvailability = null;
    onAvailability = (s, args) =>
    {
        if (args.Action == ChannelAction.Start && args.IsAvailable)
        {
            // One-shot: unhook before starting so video isn't started twice.
            videoChannel.ActionAvailabilityChanged -= onAvailability;
            videoChannel.BeginStart(videoChannelEndStart, videoChannel);
        }
    };
    videoChannel.ActionAvailabilityChanged += onAvailability;
}
/// <summary>
/// Forwards the modality's new state to the status display.
/// </summary>
private void AvModalityModalityStateChanged(object sender, ModalityStateChangedEventArgs e)
{
    string stateText = e.NewState.ToString();
    this.SetModalityStatus(stateText);
}