// Handler for the InstantMessagingFlow.StateChanged event. Once the flow
// reaches the Active state, media operations may begin, so the queued
// message text is sent on the flow.
 void ImFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
 {
     if (e.State != MediaFlowState.Active)
     {
         return;
     }

     // Send the pending text; the flow itself is passed as the async state.
     ImFlow.BeginSendInstantMessage(messageText, SendMessageCompleted, ImFlow);
 }
示例#2
0
 /// <summary>
 /// Handles the AudioVideoFlow state change event; launches the browser
 /// once the media flow becomes active.
 /// </summary>
 /// <param name="sender">The flow raising the event.</param>
 /// <param name="e">State-change details (previous and new state).</param>
 private void Flow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
 {
     if (e.State != MediaFlowState.Active)
     {
         return;
     }

     RunBrowser();
 }
示例#3
0
        // Handles AudioVideoFlow.StateChanged: wires up speech recognition when
        // the flow becomes active, and detaches the recognition connector for
        // any other state transition.
        void Flow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            Log("ControlAVCall Flow_StateChanged PreviousState=" + e.PreviousState + " State=" + e.State);

            AudioVideoFlow flow = (AudioVideoFlow)sender;

            if (flow.State == MediaFlowState.Active)
            {
                // Route the flow's audio into a speech recognition stream.
                SpeechRecognitionConnector recoConnector = new SpeechRecognitionConnector();
                recoConnector.AttachFlow(flow);
                SpeechRecognitionStream audioStream = recoConnector.Start();

                // Build the recognizer and subscribe to its events.
                _speechRecognitionEngine = new SpeechRecognitionEngine();
                _speechRecognitionEngine.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(_speechRecognitionEngine_SpeechRecognized);
                _speechRecognitionEngine.LoadGrammarCompleted += new EventHandler<LoadGrammarCompletedEventArgs>(_speechRecognitionEngine_LoadGrammarCompleted);

                // Only "previous"/"next" are valid utterances for navigation.
                Grammar navigationGrammar = new Grammar(new GrammarBuilder(new Choices(new string[] { "previous", "next" })));
                _speechRecognitionEngine.LoadGrammarAsync(navigationGrammar);

                // 8 kHz / 16-bit / mono — presumably matches the flow's audio
                // format; confirm against the connector's output.
                SpeechAudioFormatInfo formatInfo = new SpeechAudioFormatInfo(8000, AudioBitsPerSample.Sixteen, Microsoft.Speech.AudioFormat.AudioChannel.Mono);
                _speechRecognitionEngine.SetInputToAudioStream(audioStream, formatInfo);
                _speechRecognitionEngine.RecognizeAsync(RecognizeMode.Multiple);
            }
            else if (flow.SpeechRecognitionConnector != null)
            {
                // Flow is no longer active; stop feeding audio to the recognizer.
                flow.SpeechRecognitionConnector.DetachFlow();
            }
        }
        // Callback that handles when the state of an AudioVideoFlow changes:
        // starts speech recognition on Active, stops it on Terminated, and
        // records the transition in the transcript either way.
        private void AudioVideoFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            NonBlockingConsole.WriteLine("AV flow state changed from " + e.PreviousState + " to " + e.State);

            string messageText = "";
            if (e.State == MediaFlowState.Active)
            {
                NonBlockingConsole.WriteLine("Starting speech recognition");
                messageText = "Starting speech recognition";

                // Media operations may begin now; unblock any waiter first.
                _waitForAudioVideoFlowStateChangedToActiveCompleted.Set();
                StartSpeechRecognition();
            }
            else if (e.State == MediaFlowState.Terminated)
            {
                NonBlockingConsole.WriteLine("Stopping speech recognition");
                messageText = "Stopping speech recognition";
                // The AV flow will not work anymore; tear recognition down.
                this.StopSpeechRecognition();
            }

            // Record the transition, but only when a recorder is attached and
            // something noteworthy happened (other states leave messageText empty).
            if (!String.IsNullOrEmpty(messageText) && (this._transcriptRecorder != null))
            {
                Conversation conv = _audioVideoFlow.Call.Conversation;
                ConversationParticipant speaker = _audioVideoFlow.Call.RemoteEndpoint.Participant;
                Message m = new Message(messageText, speaker.DisplayName, speaker.UserAtHost,
                                        speaker.Uri, DateTime.Now, conv.Id,
                                        conv.ConferenceSession.ConferenceUri, MessageType.Info, MessageDirection.Outgoing);
                this._transcriptRecorder.OnMessageReceived(m);
            }
        }
示例#5
0
        // Flow state-change handler: once media is active, processes the
        // message that initiated this conversation.
        private void Flow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            if (e.State != MediaFlowState.Active)
            {
                return;
            }

            // Deal with the first (queued) message now that media can flow.
            ProcessMessage(_initialMessage);
        }
示例#6
0
File: AudioIVR.cs  Project: mujiansu/Lync
 // Callback that handles when the state of an AudioVideoFlow changes.
 private void AudioVideoFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
 {
     Console.WriteLine("Flow Handler Call state changed from " + e.PreviousState + " to " + e.State);

     // Unblock whoever is waiting for the flow to become usable.
     if (e.State == MediaFlowState.Active)
     {
         waitForAudioVideoFlowStateChangedToActiveCompleted.Set();
     }
 }
示例#7
0
        // Logs AudioVideoFlow state transitions. This sample performs no media
        // operations; they would be started here once the flow became active.
        private void audioVideoFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            Console.WriteLine("Flow state changed from " + e.PreviousState + " to " + e.State);

            if (e.State != MediaFlowState.Active)
            {
                return;
            }

            // Flow-related media operations normally begin here.
        }
示例#8
0
        // Logs AudioVideoFlow state transitions; intentionally performs no
        // media operations (other samples demonstrate uses for an active flow).
        private void audioVideoFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            Console.WriteLine("Flow state changed from " + e.PreviousState + " to " + e.State);

            if (e.State != MediaFlowState.Active)
            {
                return;
            }

            // An active flow is where media operations would be performed.
        }
示例#9
0
        // Caches the second IM flow once it reaches the Active state, so media
        // operations (sending IMs) can be performed on it later.
        private void instantMessagingFlow2_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            Console.WriteLine("Flow state changed from " + e.PreviousState + " to " + e.State);

            if (e.State == MediaFlowState.Active)
            {
                // Remember the now-usable flow for later sends.
                _IMFlow2 = sender as InstantMessagingFlow;
            }
        }
示例#10
0
        // Sends the queued IM as soon as the flow transitions to Active
        // (media operations are only legal on an active flow).
        private void InstantMessagingFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            Console.WriteLine("Flow state changed from " + e.PreviousState + " to " + e.State);

            if (e.State != MediaFlowState.Active)
            {
                return;
            }

            // Send the message on the InstantMessagingFlow, passing the flow
            // itself as the async state for the completion callback.
            _instantMessagingFlow.BeginSendInstantMessage(_messageToSend, SendMessageCompleted,
                                                          _instantMessagingFlow);
        }
示例#11
0
 // Handles the StateChanged event on the audio-video flow.
 private void AudioVideoFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
 {
     // Once the flow has terminated it can no longer carry media, so the
     // speech recognition connector (if any) must be detached.
     // (The original comment said "active", but the code checks Terminated.)
     if (e.State != MediaFlowState.Terminated)
     {
         return;
     }

     AudioVideoFlow avFlow = (AudioVideoFlow)sender;
     if (avFlow.SpeechRecognitionConnector != null)
     {
         avFlow.SpeechRecognitionConnector.DetachFlow();
     }
 }
示例#12
0
 // Callback that handles when the state of an AudioVideoFlow changes.
 private void audioVideoFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
 {
     // A terminated flow cannot carry media anymore, so release the recorder.
     if (e.State != MediaFlowState.Terminated)
     {
         return;
     }

     AudioVideoFlow avFlow = (AudioVideoFlow)sender;
     if (avFlow.Recorder != null)
     {
         avFlow.Recorder.DetachFlow();
     }
 }
 // Handles media flow state changes for the connectivity tester: traces the
 // transition and signals the media-established event once the flow is active.
 private void OnMediaStateChanged(object sender, MediaFlowStateChangedEventArgs args)
 {
     this.LockAndExecuteOotyCallback(delegate
     {
         this.DebugTrace("Inside BaseUMconnectivityTester MediaFlowStateChangedEventArgs, new state ={0}, previous state = {1}", new object[]
         {
             args.State,
             args.PreviousState
         });

         // Fix: compare against the enum member instead of the raw integer 1
         // (a decompiler artifact — C# does not allow comparing an enum to a
         // non-zero int literal). MediaFlowState.Active marks the flow usable.
         if (args.State == MediaFlowState.Active)
         {
             this.mediaEstablishedEvent.Set();
         }
     });
 }
示例#14
0
        // Handles AudioVideoFlow state changes: on Active, attaches a Player
        // fed from a WMA file and starts playback; on Terminated, closes the
        // source and detaches the player from the flow.
        void Flow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            Log("Flow_StateChanged PreviousState=" + e.PreviousState + " State=" + e.State);

            AudioVideoFlow flow = (AudioVideoFlow)sender;

            if (flow.State == MediaFlowState.Active)
            {
                Player wmaPlayer = new Player();
                wmaPlayer.StateChanged += new EventHandler<PlayerStateChangedEventArgs>(player_StateChanged);
                wmaPlayer.AttachFlow(flow);

                // Open the WMA file asynchronously; playback starts in the callback.
                WmaFileSource fileSource = new WmaFileSource(_FileName);
                fileSource.BeginPrepareSource(
                    MediaSourceOpenMode.Buffered,
                    asyncResult =>
                    {
                        try
                        {
                            fileSource.EndPrepareSource(asyncResult);
                            wmaPlayer.SetSource(fileSource);

                            // For some reason, PlayerMode.Automatic does not loop audio
                            wmaPlayer.SetMode(PlayerMode.Manual);
                            wmaPlayer.Start();

                            Log("Playing \"" + _FileName + "\"");
                        }
                        catch (Exception ex)
                        {
                            // Preparation/playback failures are logged, not rethrown.
                            Log(ex.ToString());
                        }
                    },
                    null);
            }
            else if (flow.State == MediaFlowState.Terminated)
            {
                // Release the media source and detach the player; the flow is gone.
                if (flow.Player != null)
                {
                    if (flow.Player.Source != null)
                    {
                        flow.Player.Source.Close();
                    }

                    flow.Player.DetachFlow(flow);
                }
            }
        }
示例#15
0
        // When the IM flow becomes active, caches it, prompts the console user
        // for a message, and sends that message on the flow.
        private void instantMessagingFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            InstantMessagingFlow imFlow = sender as InstantMessagingFlow;

            Console.WriteLine("Flow state changed from " + e.PreviousState + " to " + e.State);

            if (e.State != MediaFlowState.Active)
            {
                return;
            }

            _IMFlow = imFlow;
            Console.WriteLine("Please type the message to send...");
            string messageText = Console.ReadLine();

            // Send the message on the InstantMessagingFlow, with the flow as async state.
            imFlow.BeginSendInstantMessage(messageText, EndSendMessage, imFlow);
        }
示例#16
0
        // Callback that handles when the state of an AudioVideoFlow changes.
        // Unblocks waiters on Active and forwards the event to the sample-level
        // handler, when one is registered.
        private void audioVideoFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            Console.WriteLine("Flow state changed from " + e.PreviousState + " to " + e.State);

            // Media operations can begin once the flow is active; release the
            // thread blocked on that transition.
            if (e.State == MediaFlowState.Active)
            {
                _waitForAudioVideoFlowStateChangedToActiveCompleted.Set();
            }

            // Forward to the sample event handler, if any.
            if (_audioVideoFlowStateChangedEventHandler != null)
            {
                _audioVideoFlowStateChangedEventHandler(sender, e);
            }
        }
示例#17
0
        // Handles AudioVideoFlow.StateChanged: when the flow terminates, the
        // speech synthesis connector is detached (it cannot be used on a dead
        // flow), and the handler unsubscribes itself in all cases.
        private void Flow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            try
            {
                // Fix: the `as` cast result was dereferenced without a null
                // check; a non-AV sender would have thrown NullReferenceException.
                AudioVideoFlow flow = sender as AudioVideoFlow;
                if (flow == null)
                {
                    return;
                }

                // Fix: null-guard the connector before detaching, consistent
                // with the other handlers in this file.
                if (e.State == MediaFlowState.Terminated && flow.SpeechSynthesisConnector != null)
                {
                    flow.SpeechSynthesisConnector.DetachFlow();
                }

                // One-shot handler: unsubscribe after the first observed transition.
                flow.StateChanged -= Flow_StateChanged;
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex);
            }
        }
示例#18
0
 // Handles media flow state changes: when the flow terminates, unregisters
 // event handlers and faults the pending task so awaiting callers observe
 // the call termination as an InvalidOperationException.
 private void Flow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
 {
     if (e.State != MediaFlowState.Terminated)
     {
         return;
     }

     try
     {
         this.UnRegisterEvents();
     }
     catch (InvalidOperationException exception)
     {
         // Preserve prior behavior: an unregister failure also faults the task.
         if (m_tcs != null)
         {
             m_tcs.TrySetException(exception);
         }
         return;
     }

     // Fix: fault the task directly instead of throwing an exception merely
     // to catch it in the same method (exceptions are not control flow).
     if (m_tcs != null)
     {
         m_tcs.TrySetException(new InvalidOperationException("Call is terminated"));
     }
 }
示例#19
0
        // Sends the pending message as HTML-typed UTF-8 bytes once the IM flow
        // becomes active (media operations are only legal on an active flow).
        private void InstantMessagingFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            Console.WriteLine("Flow state changed from " + e.PreviousState + " to " + e.State);

            InstantMessagingFlow imFlow = sender as InstantMessagingFlow;

            if (e.State != MediaFlowState.Active)
            {
                return;
            }

            // Encode the message body and send it with an explicit HTML
            // content type; the flow is passed as the async state.
            byte[] payload = Encoding.UTF8.GetBytes(_messageToSend.ToString());
            imFlow.BeginSendInstantMessage(new ContentType("text/html"), payload, SendMessageCompleted,
                                           imFlow);
        }
示例#20
0
        /// <summary>
        /// Im flow state changed event handler. When the flow becomes active,
        /// sends the appropriate menu prompt to the customer.
        /// </summary>
        /// <param name="sender">The IM flow that raised the event.</param>
        /// <param name="e">State-change details.</param>
        private void ImFlowStateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            if (e.State == MediaFlowState.Active)
            {
                // Fix: removed the unused local cast of sender to
                // InstantMessagingFlow; the handler only uses the menu.
                MimePartContentDescription package;
                if (this.menu.Level == MenuLevel.None)
                {
                    // No menu shown yet: a blank input requests the top-level menu.
                    package = this.menu.HandleUserInput(" ");
                }
                else
                {
                    package = this.menu.GetMessage();
                }

                if (package != null)
                {
                    this.SendStatusMessageToCustomer(package.ContentType, package.GetBody());
                }
            }
        }
示例#21
0
        // Callback that handles when the state of an AudioVideoFlow changes.
        // Records the transition in the transcript, starts/stops speech
        // recognition on Active/Terminated, and forwards to the top-level handler.
        private void AudioVideoFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            NonBlockingConsole.WriteLine("AV flow state changed from " + e.PreviousState + " to " + e.State);

            if (_transcriptRecorder != null)
            {
                Message m = new Message("AudioVideoFlow changed from " + e.PreviousState + " to " + e.State + ".",
                                        MessageType.Audio,
                                        _transcriptRecorder.Conversation.Id);
                _transcriptRecorder.OnMessageReceived(m);
            }

            bool becameActive = (e.State == MediaFlowState.Active);
            if (becameActive)
            {
                // Media operations can begin; release the waiting thread.
                _waitForAudioVideoFlowStateChangedToActiveCompleted.Set();
            }

            if (_speechRecognizer != null)
            {
                if (becameActive && !_speechRecognizer.IsActive)
                {
                    // Start recognition only when the recognizer is idle.
                    _speechRecognizer.AttachAndStartSpeechRecognition(_audioVideoFlow);
                }
                else if ((e.State == MediaFlowState.Terminated) && _speechRecognizer.IsActive)
                {
                    // The flow is gone; shut recognition down if running.
                    _speechRecognizer.StopSpeechRecognition();
                }
            }

            // call top level event handler
            if (_audioVideoFlowStateChangedEventHandler != null)
            {
                _audioVideoFlowStateChangedEventHandler(sender, e);
            }
        }
示例#22
0
        // Handles InstantMessagingFlow.StateChanged: records the transition in
        // the transcript, unblocks waiters once the flow is active, and
        // forwards the event to the top-level handler.
        private void InstantMessagingFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            NonBlockingConsole.WriteLine("IM flow state changed from " + e.PreviousState + " to " + e.State);

            // Fix: guard against a null recorder, consistent with
            // AudioVideoFlow_StateChanged; previously this dereference threw a
            // NullReferenceException when no transcript recorder was attached.
            if (_transcriptRecorder != null)
            {
                Message m = new Message("InstantMessagingFlow changed from " + e.PreviousState + " to " + e.State + ".",
                                        MessageType.InstantMessage,
                                        _transcriptRecorder.Conversation.Id);

                _transcriptRecorder.OnMessageReceived(m);
            }

            // When flow is active, media operations (here, sending an IM)
            // may begin; release the waiting thread.
            if (e.State == MediaFlowState.Active)
            {
                _waitForIMFlowStateChangedToActiveCompleted.Set();
            }

            // call top level event handler
            if (_imFlowStateChangedEventHandler != null)
            {
                _imFlowStateChangedEventHandler(sender, e);
            }
        }
        // Handles InstantMessagingFlow.StateChanged: records the transition in
        // the transcript, unblocks waiters once the flow is active, and
        // forwards the event to the top-level handler.
        private void InstantMessagingFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            NonBlockingConsole.WriteLine("IM flow state changed from " + e.PreviousState + " to " + e.State);

            // Fix: guard against a null recorder, consistent with
            // AudioVideoFlow_StateChanged; previously this dereference threw a
            // NullReferenceException when no transcript recorder was attached.
            if (_transcriptRecorder != null)
            {
                Message m = new Message("InstantMessagingFlow changed from " + e.PreviousState + " to " + e.State + ".",
                    MessageType.InstantMessage,
                    _transcriptRecorder.Conversation.Id);
                _transcriptRecorder.OnMessageReceived(m);
            }

            // When flow is active, media operations (here, sending an IM)
            // may begin; release the waiting thread.
            if (e.State == MediaFlowState.Active)
            {
                _waitForIMFlowStateChangedToActiveCompleted.Set();
            }

            // call top level event handler
            if (_imFlowStateChangedEventHandler != null)
            {
                _imFlowStateChangedEventHandler(sender, e);
            }
        }
示例#24
0
        // AudioVideoFlow state-change handler: on Active, attaches a Player fed
        // from a WMA file and starts playback; on Terminated, closes the source
        // and detaches the player from the flow.
        void Flow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            Log("Flow_StateChanged PreviousState=" + e.PreviousState + " State=" + e.State);

            AudioVideoFlow avFlow = (AudioVideoFlow)sender;

            if (avFlow.State == MediaFlowState.Terminated)
            {
                // Release the media source and detach the player; the flow is gone.
                if (avFlow.Player != null)
                {
                    if (avFlow.Player.Source != null)
                    {
                        avFlow.Player.Source.Close();
                    }

                    avFlow.Player.DetachFlow(avFlow);
                }
                return;
            }

            if (avFlow.State != MediaFlowState.Active)
            {
                return;
            }

            Player audioPlayer = new Player();
            audioPlayer.StateChanged += new EventHandler<PlayerStateChangedEventArgs>(player_StateChanged);
            audioPlayer.AttachFlow(avFlow);

            // Open the WMA file asynchronously; playback begins in the callback.
            WmaFileSource wmaSource = new WmaFileSource(_FileName);
            wmaSource.BeginPrepareSource(MediaSourceOpenMode.Buffered,
                prepareResult =>
                {
                    try
                    {
                        wmaSource.EndPrepareSource(prepareResult);

                        audioPlayer.SetSource(wmaSource);

                        // For some reason, PlayerMode.Automatic does not loop audio
                        audioPlayer.SetMode(PlayerMode.Manual);
                        audioPlayer.Start();

                        Log("Playing \"" + _FileName + "\"");
                    }
                    catch (Exception ex)
                    {
                        // Preparation/playback failures are logged, not rethrown.
                        Log(ex.ToString());
                    }
                },
                null);
        }
        // Callback that handles when the state of an AudioVideoFlow changes.
        // Records the transition, starts/stops speech recognition on
        // Active/Terminated, and forwards to the top-level handler.
        private void AudioVideoFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            NonBlockingConsole.WriteLine("AV flow state changed from " + e.PreviousState + " to " + e.State);

            if (_transcriptRecorder != null)
            {
                Message m = new Message("AudioVideoFlow changed from " + e.PreviousState + " to " + e.State + ".",
                    MessageType.Audio,
                    _transcriptRecorder.Conversation.Id);
                _transcriptRecorder.OnMessageReceived(m);
            }

            bool isNowActive = (e.State == MediaFlowState.Active);
            if (isNowActive)
            {
                // Media operations can begin; release the waiting thread.
                _waitForAudioVideoFlowStateChangedToActiveCompleted.Set();
            }

            if (_speechRecognizer != null)
            {
                if (isNowActive && !_speechRecognizer.IsActive)
                {
                    // Start recognition only when the recognizer is idle.
                    _speechRecognizer.AttachAndStartSpeechRecognition(_audioVideoFlow);
                }
                else if ((e.State == MediaFlowState.Terminated) && _speechRecognizer.IsActive)
                {
                    // The flow is gone; shut recognition down if running.
                    _speechRecognizer.StopSpeechRecognition();
                }
            }

            // call top level event handler
            if (_audioVideoFlowStateChangedEventHandler != null)
            {
                _audioVideoFlowStateChangedEventHandler(sender, e);
            }
        }
        // Callback that handles when the state of an AudioVideoFlow changes:
        // starts speech recognition on Active, stops it on Terminated, and
        // records the transition in the transcript either way.
        private void AudioVideoFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            NonBlockingConsole.WriteLine("AV flow state changed from " + e.PreviousState + " to " + e.State);

            string messageText = "";
            if (e.State == MediaFlowState.Active)
            {
                NonBlockingConsole.WriteLine("Starting speech recognition");
                messageText = "Starting speech recognition";

                // Media operations may begin now; unblock any waiter first.
                _waitForAudioVideoFlowStateChangedToActiveCompleted.Set();
                StartSpeechRecognition();
            }
            else if (e.State == MediaFlowState.Terminated)
            {
                NonBlockingConsole.WriteLine("Stopping speech recognition");
                messageText = "Stopping speech recognition";
                // The AV flow will not work anymore; tear recognition down.
                this.StopSpeechRecognition();
            }

            // Record the transition, but only when a recorder is attached and
            // something noteworthy happened (other states leave messageText empty).
            if (!String.IsNullOrEmpty(messageText) && (this._transcriptRecorder != null))
            {
                Conversation conv = _audioVideoFlow.Call.Conversation;
                ConversationParticipant speaker = _audioVideoFlow.Call.RemoteEndpoint.Participant;
                Message m = new Message(messageText, speaker.DisplayName, speaker.UserAtHost,
                    speaker.Uri, DateTime.Now, conv.Id,
                    conv.ConferenceSession.ConferenceUri, MessageType.Info, MessageDirection.Outgoing);
                this._transcriptRecorder.OnMessageReceived(m);
            }
        }
 // Handler for the StateChanged event on an AudioVideoFlow instance.
 // Only logs the transition; media operations would begin once the flow is active.
 private void audioVideoFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
 {
     string transition = "Previous flow state: " + e.PreviousState.ToString() + "\nNew flow state: " + e.State.ToString();
     Console.WriteLine(transition);
 }
示例#28
0
        // AudioVideoFlow state-change handler: wires up speech recognition when
        // the flow becomes active, and detaches the recognition connector for
        // any other state transition.
        void Flow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            Log("ControlAVCall Flow_StateChanged PreviousState=" + e.PreviousState + " State=" + e.State);

            AudioVideoFlow audioFlow = (AudioVideoFlow)sender;

            if (audioFlow.State != MediaFlowState.Active)
            {
                // Flow no longer active; stop feeding audio to the recognizer.
                if (audioFlow.SpeechRecognitionConnector != null)
                {
                    audioFlow.SpeechRecognitionConnector.DetachFlow();
                }
                return;
            }

            // Route the flow's audio into a speech recognition stream.
            SpeechRecognitionConnector recognitionConnector = new SpeechRecognitionConnector();
            recognitionConnector.AttachFlow(audioFlow);
            SpeechRecognitionStream recognitionStream = recognitionConnector.Start();

            // Build the recognizer and subscribe to its events.
            _speechRecognitionEngine = new SpeechRecognitionEngine();
            _speechRecognitionEngine.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(_speechRecognitionEngine_SpeechRecognized);
            _speechRecognitionEngine.LoadGrammarCompleted += new EventHandler<LoadGrammarCompletedEventArgs>(_speechRecognitionEngine_LoadGrammarCompleted);

            // Only "previous"/"next" are valid utterances for navigation.
            Choices navigationChoices = new Choices(new string[] { "previous", "next" });
            _speechRecognitionEngine.LoadGrammarAsync(new Grammar(new GrammarBuilder(navigationChoices)));

            // 8 kHz / 16-bit / mono — presumably matches the flow's audio
            // format; confirm against the connector's output.
            SpeechAudioFormatInfo audioFormat = new SpeechAudioFormatInfo(8000, AudioBitsPerSample.Sixteen, Microsoft.Speech.AudioFormat.AudioChannel.Mono);
            _speechRecognitionEngine.SetInputToAudioStream(recognitionStream, audioFormat);
            _speechRecognitionEngine.RecognizeAsync(RecognizeMode.Multiple);
        }