Example #1
        public void SpeakMessage(AudioVideoFlow flow, string message)
        {
            try
            {
                SpeechSynthesizer synth = new SpeechSynthesizer();
                SpeechAudioFormatInfo formatInfo = new SpeechAudioFormatInfo(16000, AudioBitsPerSample.Sixteen, Microsoft.Speech.AudioFormat.AudioChannel.Mono);
                SpeechSynthesisConnector connector = new SpeechSynthesisConnector();

                synth.SetOutputToAudioStream(connector.Stream, formatInfo);

                connector.AttachFlow(flow);
                connector.Start();

                synth.SpeakCompleted += new EventHandler<SpeakCompletedEventArgs>(
                    (sender, args) =>
                    {
                        connector.Stop();
                        synth.Dispose();
                    });

                synth.SpeakAsync(message);
            }
            catch (Exception ex)
            {
                Console.WriteLine("Failed to play the message. {0}", ex);
            }

        }
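The handler above stops the connector but never detaches it from the flow. A fuller completion handler might also call DetachFlow, so a rooted connector cannot keep the flow in memory (a sketch only; it reuses the sample's connector and synth variables):

                synth.SpeakCompleted += new EventHandler<SpeakCompletedEventArgs>(
                    (sender, args) =>
                    {
                        connector.Stop();
                        // Hypothetical addition: release the flow reference held by the connector.
                        connector.DetachFlow();
                        synth.Dispose();
                    });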
Example #2
 // Callback that handles when the state of an AudioVideoFlow changes
 private void audioVideoFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
 {
     //When the flow is terminated, media operations are no longer possible
     if (e.State == MediaFlowState.Terminated)
     {
         // Detach the SpeechSynthesisConnector since the AVFlow will not work anymore
         AudioVideoFlow avFlow = (AudioVideoFlow)sender;
         if (avFlow.SpeechSynthesisConnector != null)
         {
             avFlow.SpeechSynthesisConnector.DetachFlow();
         }
     }
 }
Example #3
 // Callback that handles when the state of an AudioVideoFlow changes
 private void audioVideoFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
 {
     //When the flow is terminated, media operations are no longer possible
     if (e.State == MediaFlowState.Terminated)
     {
         // Detach Player since AVFlow will not work anymore
         AudioVideoFlow avFlow = (AudioVideoFlow)sender;
         if (avFlow.Player != null)
         {
             avFlow.Player.DetachFlow(avFlow);
         }
     }
 }
Example #4
 // Handles the StateChanged event on the audio-video flow.
 private void AudioVideoFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
 {
     // When the flow is terminated, media operations are no longer possible.
     if (e.State == MediaFlowState.Terminated)
     {
         // Detach the speech recognition connector, because the state of the flow is now Terminated.
         AudioVideoFlow avFlow = (AudioVideoFlow)sender;
         if (avFlow.SpeechRecognitionConnector != null)
         {
             avFlow.SpeechRecognitionConnector.DetachFlow();
         }
     }
 }
Example #5
        // The delegate to be called when the inbound call arrives (the call from a customer).
        private void inboundAVCall_CallReceived(object sender, CallReceivedEventArgs<AudioVideoCall> e)
        {
            _waitForCallReceived.Set();
            _audioVideoCall = e.Call;

            _audioVideoCall.AudioVideoFlowConfigurationRequested += this.audioVideoCall_FlowConfigurationRequested;
            _audioVideoCall.StateChanged += new EventHandler<CallStateChangedEventArgs>(audioVideoCall_StateChanged);

            // Create a new conversation instance.
            _conversation = new Conversation(_userEndpoint);
            // Accept the call.
            _audioVideoCall.BeginAccept(CallAcceptCB, _audioVideoCall);
            // Note: the flow may still be null at this point; it becomes available once flow configuration is requested.
            _audioVideoFlow = _audioVideoCall.Flow;
        }
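The CallAcceptCB callback passed to BeginAccept above is not shown. A minimal sketch (only the name comes from the sample; the body is an assumption) completes the accept operation:

        private void CallAcceptCB(IAsyncResult ar)
        {
            AudioVideoCall audioVideoCall = (AudioVideoCall)ar.AsyncState;
            try
            {
                // Complete the asynchronous accept operation started by BeginAccept.
                audioVideoCall.EndAccept(ar);
            }
            catch (RealTimeException ex)
            {
                Console.WriteLine("Failed to accept the call. {0}", ex);
            }
        }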
Example #6
        private void audioVideoFlow_ApplyChangesCompleted(IAsyncResult result)
        {
            try
            {
                AudioVideoFlow avFlow = (AudioVideoFlow)result.AsyncState;
                avFlow.EndApplyChanges(result);
            }
            catch (RealTimeException e)
            {
                // Rethrow, preserving the original stack trace.
                throw;
            }

            _waitForApplyChangesCompleted.Set();
        }
Example #7
        private void audioVideoFlow_HoldCompleted(IAsyncResult result)
        {
            try
            {
                AudioVideoFlow avFlow = (AudioVideoFlow)result.AsyncState;
                avFlow.EndHold(result);
            }
            catch (RealTimeException e)
            {
                // Rethrow, preserving the original stack trace.
                throw;
            }

            _waitForHoldRetrieveCompleted.Set();
        }
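Both callbacks above bridge a Begin/End pair to a ManualResetEvent. The same pairs can also be wrapped as awaitable tasks, in the style of the RetrieveAsync extension in Example #27 below (a sketch, not part of the original samples; it would live in a static extension class):

        public static Task HoldAsync(this AudioVideoFlow flow, HoldType holdType)
        {
            // Task.Factory.FromAsync adapts the Begin/End pattern to a Task.
            return Task.Factory.FromAsync(flow.BeginHold, flow.EndHold, holdType, null);
        }

        public static Task ApplyChangesAsync(this AudioVideoFlow flow, AudioVideoFlowTemplate template)
        {
            return Task.Factory.FromAsync(flow.BeginApplyChanges, flow.EndApplyChanges, template, null);
        }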
Example #8
        /// <summary>
        /// The constructor for the class.
        /// </summary>
        /// <param name="specifiedMediaSource">
        /// The media source that is to be stored.
        /// </param>
        /// <param name="specifiedAvFlow">
        /// The Audio Video flow that is to be stored.
        /// </param>
        public MediaSourceAndAvFlowContainer(MediaSource specifiedMediaSource, AudioVideoFlow specifiedAvFlow)
        {
            if (null == specifiedMediaSource)
            {
                throw new ArgumentNullException("specifiedMediaSource");
            }

            if (null == specifiedAvFlow)
            {
                throw new ArgumentNullException("specifiedAvFlow");
            }

            _storedMediaSource = specifiedMediaSource;
            _storedAvFlow      = specifiedAvFlow;
        }
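The two fields assigned by this constructor are not shown in the example. Presumably the container also exposes them to callers, along these lines (a sketch; everything beyond the constructor is an assumption):

        private MediaSource _storedMediaSource;
        private AudioVideoFlow _storedAvFlow;

        /// <summary>
        /// Gets the stored media source.
        /// </summary>
        public MediaSource StoredMediaSource
        {
            get { return _storedMediaSource; }
        }

        /// <summary>
        /// Gets the stored audio-video flow.
        /// </summary>
        public AudioVideoFlow StoredAvFlow
        {
            get { return _storedAvFlow; }
        }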
Example #9
        //The FlowConfigurationRequested event indicates that a flow is present (no longer null) and is ready to be configured, so that media operations can begin.
        public void audioVideoCall_FlowConfigurationRequested(object sender, AudioVideoFlowConfigurationRequestedEventArgs e)
        {
            Console.WriteLine("Flow Configuration Requested.");
            _audioVideoFlow = e.Flow;

            //Now that the flow is non-null, bind the event handler for StateChanged.
            // When the flow goes active (as indicated by the StateChanged event), the program will perform media-related actions.
            _audioVideoFlow.StateChanged += new EventHandler<MediaFlowStateChangedEventArgs>(audioVideoFlow_StateChanged);

            // call sample event handler
            if (_audioVideoFlowConfigurationRequestedEventHandler != null)
            {
                _audioVideoFlowConfigurationRequestedEventHandler(sender, e);
            }
        }
Example #10
        void Flow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            Log("Flow_StateChanged PreviousState=" + e.PreviousState + " State=" + e.State);

            AudioVideoFlow avFlow = (AudioVideoFlow)sender;

            if (avFlow.State == MediaFlowState.Active)
            {
                Player player = new Player();
                player.StateChanged += new EventHandler<PlayerStateChangedEventArgs>(player_StateChanged);
                player.AttachFlow(avFlow);

                WmaFileSource src = new WmaFileSource(_FileName);
                src.BeginPrepareSource(MediaSourceOpenMode.Buffered,
                                       ar =>
                {
                    try
                    {
                        src.EndPrepareSource(ar);

                        player.SetSource(src);

                        // For some reason, PlayerMode.Automatic does not loop audio
                        player.SetMode(PlayerMode.Manual);
                        player.Start();

                        Log("Playing \"" + _FileName + "\"");
                    }
                    catch (Exception ex)
                    {
                        Log(ex.ToString());
                    }
                },
                                       null);
            }
            else if (avFlow.State == MediaFlowState.Terminated)
            {
                if (avFlow.Player != null)
                {
                    if (avFlow.Player.Source != null)
                    {
                        avFlow.Player.Source.Close();
                    }

                    avFlow.Player.DetachFlow(avFlow);
                }
            }
        }
Example #11
        public void Run()
        {
            // Create AudioVideoFlow
            AudioVideoFlowHelper audioVideoFlowHelper = new AudioVideoFlowHelper();

            _audioVideoFlow = audioVideoFlowHelper.CreateAudioVideoFlow(
                null,
                audioVideoFlow_StateChanged);

            // Create a speech recognition connector and attach it to an AudioVideoFlow
            SpeechRecognitionConnector speechRecognitionConnector = new SpeechRecognitionConnector();

            speechRecognitionConnector.AttachFlow(_audioVideoFlow);

            //Start recognizing
            SpeechRecognitionStream stream = speechRecognitionConnector.Start();

            // Create speech recognition engine and start recognizing by attaching connector to engine
            SpeechRecognitionEngine speechRecognitionEngine = new SpeechRecognitionEngine();

            speechRecognitionEngine.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(speechRecognitionEngine_SpeechRecognized);


            string[] recognizedString = { "zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten", "exit" };
            Choices  numberChoices    = new Choices(recognizedString);

            speechRecognitionEngine.LoadGrammar(new Grammar(new GrammarBuilder(numberChoices)));

            SpeechAudioFormatInfo speechAudioFormatInfo = new SpeechAudioFormatInfo(8000, AudioBitsPerSample.Sixteen, Microsoft.Speech.AudioFormat.AudioChannel.Mono);

            speechRecognitionEngine.SetInputToAudioStream(stream, speechAudioFormatInfo);
            Console.WriteLine("\r\nGrammar loaded from zero to ten, say exit to shutdown.");

            speechRecognitionEngine.RecognizeAsync(RecognizeMode.Multiple);

            _waitForXXXCompleted.WaitOne();
            //Stop the connector
            speechRecognitionConnector.Stop();
            Console.WriteLine("Stopping the speech recognition connector");

            //The speech recognition connector must be detached from the flow; otherwise, if the connector is rooted, it will keep the flow in memory.
            speechRecognitionConnector.DetachFlow();

            // Shutdown the platform
            ShutdownPlatform();

            _waitForShutdownEventCompleted.WaitOne();
        }
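The speechRecognitionEngine_SpeechRecognized handler is referenced but not shown. A plausible sketch, which unblocks Run() when the caller says "exit" (only the handler name and the wait handle come from the sample; the body is an assumption):

        void speechRecognitionEngine_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            RecognitionResult result = e.Result;
            if (result != null)
            {
                Console.WriteLine("Speech recognized: " + result.Text);
                if (result.Text == "exit")
                {
                    // Unblock Run() so the connector can be stopped and the platform shut down.
                    _waitForXXXCompleted.Set();
                }
            }
        }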
Example #12
        public void Run()
        {
            // Create AudioVideoFlow
            AudioVideoFlowHelper audioVideoFlowHelper = new AudioVideoFlowHelper();

            _audioVideoFlow = audioVideoFlowHelper.CreateAudioVideoFlow(
                null,
                audioVideoFlow_StateChanged);

            // This event is raised whenever the flow's configuration changes as a result of media negotiation.
            _audioVideoFlow.ConfigurationChanged += new EventHandler<AudioVideoFlowConfigurationChangedEventArgs>(audioVideoFlow_ConfigurationChanged);

            // Attaches a player with a source and starts it in constant loop.
            audioVideoFlowHelper.AttachAndStartPlayer(_audioVideoFlow, true);

            // Check Hold Status.
            Console.WriteLine("AudioVideoFlow's HoldStatus: " + _audioVideoFlow.HoldStatus);

            Thread.Sleep(10000);

            // Put both endpoints on hold, then wait for the operation to complete.
            _audioVideoFlow.BeginHold(HoldType.BothEndpoints, audioVideoFlow_HoldCompleted, _audioVideoFlow);
            _waitForHoldRetrieveCompleted.WaitOne();

            // Check Hold Status.
            Console.WriteLine("AudioVideoFlow's HoldStatus: " + _audioVideoFlow.HoldStatus);

            Thread.Sleep(10000);

            // Retrieve the AudioVideoFlow from hold, then wait for the operation to complete.
            _audioVideoFlow.BeginRetrieve(audioVideoFlow_RetrieveCompleted, _audioVideoFlow);
            _waitForHoldRetrieveCompleted.WaitOne();

            // Check Hold Status.
            Console.WriteLine("AudioVideoFlow's HoldStatus: " + _audioVideoFlow.HoldStatus);

            Thread.Sleep(10000);

            // Shutdown the platform
            ShutdownPlatform();

            //Wait for shutdown to occur.
            _waitForShutdownEventCompleted.WaitOne();
        }
Example #13
        public void StopSpeechRecognition()
        {
            if (!_isActive)
            {
                NonBlockingConsole.WriteLine("Warn: StopSpeechRecognition() called on an inactive SpeechRecognizer.");
                return;
            }

            _isActive = false;

            if (_isRecognizing)
            {
                _isRecognizing = false;
                if (_speechRecognitionEngine != null)
                {
                    _speechRecognitionEngine.RecognizeAsyncCancel();
                }

                if (_speechRecognitionConnector != null)
                {
                    // Stop the connector
                    _speechRecognitionConnector.Stop();

                    // The speech recognition connector must be detached from the flow; otherwise, if the connector is rooted, it will keep the flow in memory.
                    _speechRecognitionConnector.DetachFlow();
                }

                if (_speechRecognitionStream != null)
                {
                    _speechRecognitionStream.Dispose();
                    _speechRecognitionStream = null;
                }
            }

            if ((_audioVideoFlow != null) && (_audioVideoFlow.SpeechRecognitionConnector != null))
            {
                _audioVideoFlow.SpeechRecognitionConnector.Stop();
                _audioVideoFlow.SpeechRecognitionConnector.DetachFlow();
                _audioVideoFlow.StateChanged -= AudioVideoFlow_StateChanged;
                _audioVideoFlow = null;
            }

            _waitForAudioVideoFlowStateChangedToActiveCompleted.Reset();
        }
Example #14
        public void Run()
        {
            // Create AudioVideoFlow
            AudioVideoFlowHelper audioVideoFlowHelper = new AudioVideoFlowHelper();

            _audioVideoFlow = audioVideoFlowHelper.CreateAudioVideoFlow(
                null,
                audioVideoFlow_StateChanged);

            // This event is raised whenever the flow's configuration changes as a result of media negotiation.
            _audioVideoFlow.ConfigurationChanged += new EventHandler<AudioVideoFlowConfigurationChangedEventArgs>(audioVideoFlow_ConfigurationChanged);

            // Attaches a player with a source and starts it in constant loop.
            audioVideoFlowHelper.AttachAndStartPlayer(_audioVideoFlow, true);

            // Check direction muted
            Console.WriteLine("AudioVideoFlow's direction muted: " + _audioVideoFlow.Audio.DirectionMuted);

            Thread.Sleep(10000);

            // Mute both directions
            _audioVideoFlow.Audio.Mute(MuteDirection.SendReceive);

            // Check direction muted
            Console.WriteLine("AudioVideoFlow's direction muted: " + _audioVideoFlow.Audio.DirectionMuted);

            Thread.Sleep(10000);

            // Unmute both directions
            _audioVideoFlow.Audio.Unmute(MuteDirection.SendReceive);

            // Check direction muted
            Console.WriteLine("AudioVideoFlow's direction muted: " + _audioVideoFlow.Audio.DirectionMuted);

            Thread.Sleep(10000);

            // Shutdown the platform
            ShutdownPlatform();

            //Wait for shutdown to occur.
            _waitForShutdownEventCompleted.WaitOne();
        }
Example #15
        public void StartWithWelcome(AudioVideoFlow flow, LyncServer server)
        {
            _flow = flow;
            _server = server;
            //attach speech synthesis to the audio flow
            _speechSynthesisConnector.AttachFlow(_flow);

            _speechSynthesizer = new SpeechSynthesizer();
            _speechSynthesizer.SetOutputToAudioStream(_speechSynthesisConnector, audioformat);
            _speechSynthesizer.SelectVoice("Microsoft Hazel Desktop");  //slightly more English

            var toneController = new ToneController(); //this is for the DTMF tones
            toneController.AttachFlow(_flow);
           
            _speechSynthesisConnector.Start();

            _speechSynthesizer.Speak("Welcome to the UCMA IVR Demo!");
            SpeakMenuOptions();
            toneController.ToneReceived += toneController_ToneReceived;
        }
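The toneController_ToneReceived handler is not shown. A minimal sketch of how the menu might dispatch on the DTMF digit (the switch body is an assumption):

        private void toneController_ToneReceived(object sender, ToneControllerEventArgs e)
        {
            // e.Tone carries the DTMF digit the caller pressed.
            switch ((ToneId)e.Tone)
            {
                case ToneId.Tone1:
                    // Handle menu option 1 here.
                    break;
                case ToneId.Tone2:
                    // Handle menu option 2 here.
                    break;
                default:
                    // Unrecognized digit: repeat the menu.
                    SpeakMenuOptions();
                    break;
            }
        }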
Example #16
        public void StartMusic(AudioVideoCall audioVideoCall)
        {
            AudioVideoFlow flow = audioVideoCall.Flow;

            if (null != flow && flow.State == MediaFlowState.Active)
            {
                try
                {
                    m_mohPlayer.AttachFlow(flow);
                }
                catch (InvalidOperationException ioe)
                {
                    this.Logger.Log(Logger.LogLevel.Error, ioe);
                }
                catch (OperationFailureException ofe)
                {
                    this.Logger.Log(Logger.LogLevel.Error, ofe);
                }
            }
        }
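A matching StopMusic counterpart is not shown; presumably it detaches the player so that a rooted player cannot keep the flow in memory (a sketch under that assumption):

        public void StopMusic(AudioVideoCall audioVideoCall)
        {
            AudioVideoFlow flow = audioVideoCall.Flow;

            if (null != flow)
            {
                try
                {
                    // Detach the music-on-hold player from the flow.
                    m_mohPlayer.DetachFlow(flow);
                }
                catch (InvalidOperationException ioe)
                {
                    this.Logger.Log(Logger.LogLevel.Error, ioe);
                }
            }
        }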
Example #17
        /// <summary>
        /// Runs the activity.
        /// </summary>
        private void Run()
        {
            //Check if call is established
            try
            {
                if (AudioVideoCall.State != CallState.Established)
                {
                    throw new InvalidOperationException("Call is not established");
                }

                //Register flow configuration changed event.
                AudioVideoFlow avFlow = AudioVideoCall.Flow;
                avFlow.ConfigurationChanged += new EventHandler<AudioVideoFlowConfigurationChangedEventArgs>(HandleAudioVideoConfigurationChanged);

                AudioVideoCall.StateChanged += new EventHandler<CallStateChangedEventArgs>(HandleAudioVideoCallStateChanged);

                //Checks if call is on hold.
                if (this.GetCallConfiguration(AudioVideoCall) == AvCallCommunicationEvents.OnHold)
                {
                    m_callOnHold = true;

                    if (m_tcs != null)
                    {
                        m_tcs.TrySetResult(this.GetActivityResult());
                    }
                }
                else
                {
                    m_callOnHold = false;
                    // start Speaking.
                    StartSpeakAsync();
                }
            }
            catch (InvalidOperationException exception)
            {
                if (m_tcs != null)
                {
                    m_tcs.TrySetException(exception);
                }
            }
        }
Example #18
        public void Run()
        {
            // Create AudioVideoFlow
            AudioVideoFlowHelper audioVideoFlowHelper = new AudioVideoFlowHelper();

            _audioVideoFlow = audioVideoFlowHelper.CreateAudioVideoFlow(
                null,
                audioVideoFlow_StateChanged);

            // This event is raised whenever the flow's configuration changes as a result of media negotiation.
            _audioVideoFlow.ConfigurationChanged += new EventHandler<AudioVideoFlowConfigurationChangedEventArgs>(audioVideoFlow_ConfigurationChanged);

            // Attaches a player with a source and starts it in constant loop.
            audioVideoFlowHelper.AttachAndStartPlayer(_audioVideoFlow, true);

            // Check the current sampling rate.
            Console.WriteLine("AudioVideoFlow using sampling rate: " + _audioVideoFlow.Audio.GetChannels()[ChannelLabel.AudioMono].SamplingRate);

            Thread.Sleep(10000);

            Console.WriteLine("Call ApplyChanges changing sampling rate from 8Khz or 16Khz to only 8Khz.");

            AudioVideoFlowTemplate template             = new AudioVideoFlowTemplate(_audioVideoFlow);
            AudioChannelTemplate   audioChannelTemplate = template.Audio.GetChannels()[ChannelLabel.AudioMono];

            audioChannelTemplate.SamplingRate = AudioSamplingRate.EightKhz;

            // Apply the sampling rate change.
            _audioVideoFlow.BeginApplyChanges(template, audioVideoFlow_ApplyChangesCompleted, _audioVideoFlow);
            _waitForApplyChangesCompleted.WaitOne();

            Console.WriteLine("AudioVideoFlow using sampling rate: " + _audioVideoFlow.Audio.GetChannels()[ChannelLabel.AudioMono].SamplingRate);

            Thread.Sleep(10000);

            // Shutdown the platform
            ShutdownPlatform();

            //Wait for shutdown to occur.
            _waitForShutdownEventCompleted.WaitOne();
        }
Example #19
        public void Run()
        {
            // Create AudioVideoFlow
            AudioVideoFlowHelper audioVideoFlowHelper = new AudioVideoFlowHelper();

            _audioVideoFlow = audioVideoFlowHelper.CreateAudioVideoFlow(
                audioVideoCall_FlowConfigurationRequested,
                audioVideoFlow_StateChanged);

            // This event is raised whenever the flow's configuration changes as a result of media negotiation.
            _audioVideoFlow.ConfigurationChanged += new EventHandler<AudioVideoFlowConfigurationChangedEventArgs>(audioVideoFlow_ConfigurationChanged);

            // Attaches a player with a source and starts it in constant loop.
            audioVideoFlowHelper.AttachAndStartPlayer(_audioVideoFlow, true);

            // Check allowed direction.
            Console.WriteLine("AudioVideoFlow audio channel direction: " + _audioVideoFlow.Audio.GetChannels()[ChannelLabel.AudioMono].Direction);

            Thread.Sleep(10000);

            Console.WriteLine("Call ApplyChanges changing audio direcion to send and receive.");

            AudioVideoFlowTemplate template             = new AudioVideoFlowTemplate(_audioVideoFlow);
            AudioChannelTemplate   audioChannelTemplate = template.Audio.GetChannels()[ChannelLabel.AudioMono];

            audioChannelTemplate.AllowedDirection = MediaChannelDirection.SendReceive;

            // Change the allowed direction to SendReceive.
            _audioVideoFlow.BeginApplyChanges(template, audioVideoFlow_ApplyChangesCompleted, _audioVideoFlow);
            _waitForApplyChangesCompleted.WaitOne();

            Console.WriteLine("AudioVideoFlow audio channel direction: " + _audioVideoFlow.Audio.GetChannels()[ChannelLabel.AudioMono].Direction);

            Thread.Sleep(5000);

            // Shutdown the platform
            ShutdownPlatform();

            //Wait for shutdown to occur.
            _waitForShutdownEventCompleted.WaitOne();
        }
Example #20
        public void StartWithWelcome(AudioVideoFlow flow, LyncServer server)
        {
            _flow   = flow;
            _server = server;
            //attach speech synthesis to the audio flow
            _speechSynthesisConnector.AttachFlow(_flow);

            _speechSynthesizer = new SpeechSynthesizer();
            _speechSynthesizer.SetOutputToAudioStream(_speechSynthesisConnector, audioformat);
            _speechSynthesizer.SelectVoice("Microsoft Hazel Desktop"); //slightly more English

            var toneController = new ToneController();                 //this is for the DTMF tones

            toneController.AttachFlow(_flow);

            _speechSynthesisConnector.Start();

            _speechSynthesizer.Speak("Welcome to the UC Day IVR!");  //TEXT TO SPEEECH!
            SpeakMenuOptions();
            toneController.ToneReceived += toneController_ToneReceived;
        }
Example #21
        //The FlowConfigurationRequested event indicates that a flow is present (no longer null) and is ready to be configured, so that media operations can begin.
        public void AudioVideoCall_FlowConfigurationRequested(object sender, AudioVideoFlowConfigurationRequestedEventArgs e)
        {
            NonBlockingConsole.WriteLine("AV Flow Configuration Requested.");
            _audioVideoFlow = e.Flow;

            Message m = new Message("AV Flow Configuration Requested.",
                                    MessageType.Audio,
                                    _transcriptRecorder.Conversation.Id);

            _transcriptRecorder.OnMessageReceived(m);

            //Now that the flow is non-null, bind the event handler for StateChanged.
            // When the flow goes active (as indicated by the StateChanged event), the program will perform media-related actions.
            _audioVideoFlow.StateChanged += new EventHandler<MediaFlowStateChangedEventArgs>(AudioVideoFlow_StateChanged);

            // call top level event handler
            if (_audioVideoFlowConfigurationRequestedEventHandler != null)
            {
                _audioVideoFlowConfigurationRequestedEventHandler(sender, e);
            }
        }
Example #22
        /// <summary>
        /// Plays a WMA file using the specified Audio Video Flow.
        /// </summary>
        /// <param name="flowToPlayUsing">
        /// The flow to use when playing the file.
        /// </param>
        /// <param name="fileToPlay">
        /// The path to the wma file to be played.
        /// </param>
        private void PlayWmaFileAudio(AudioVideoFlow flowToPlayUsing, string fileToPlay)
        {
            // Set the media source that the player will use.
            WmaFileSource wmaSource = new WmaFileSource(fileToPlay);

            MediaSourceAndAvFlowContainer sourceAndFlowContainer = new MediaSourceAndAvFlowContainer(
                wmaSource, flowToPlayUsing);

            try
            {
                wmaSource.BeginPrepareSource(MediaSourceOpenMode.Unbuffered,
                                             WmaSourcePreparationCompleted,
                                             sourceAndFlowContainer);
            }
            catch (ArgumentOutOfRangeException argOOREx)
            {
                // TODO (Left to the reader): Write actual handling code for the
                // occurrence.
                Console.WriteLine("An ArgumentOutOfRangeException occured when preparing the media source: "
                                  + "{0}", argOOREx.ToString());
            }
        }
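The WmaSourcePreparationCompleted callback is referenced but not shown. A sketch of what it presumably does, using the hypothetical container properties sketched after Example #8 (the player setup is an assumption):

        private void WmaSourcePreparationCompleted(IAsyncResult result)
        {
            MediaSourceAndAvFlowContainer sourceAndFlowContainer =
                (MediaSourceAndAvFlowContainer)result.AsyncState;
            WmaFileSource wmaSource = (WmaFileSource)sourceAndFlowContainer.StoredMediaSource;

            // Complete the preparation of the media source.
            wmaSource.EndPrepareSource(result);

            // Attach a player to the stored flow and start playback.
            Player player = new Player();
            player.AttachFlow(sourceAndFlowContainer.StoredAvFlow);
            player.SetSource(wmaSource);
            player.Start();
        }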
Example #23
        public void TerminateCall()
        {
            if (_speechRecognizer != null)
            {
                _speechRecognizer.StopSpeechRecognition();
            }

            if (_audioVideoFlow != null)
            {
                _audioVideoFlow.StateChanged -= AudioVideoFlow_StateChanged;
                _audioVideoFlow = null;
            }

            if (_audioVideoCall != null)
            {
                _audioVideoCall.BeginTerminate(AudioVideoCallTerminated, _audioVideoCall);
                _audioVideoCall.Flow.StateChanged -= AudioVideoFlow_StateChanged;
                _audioVideoCall.StateChanged      -= AudioVideoCall_StateChanged;
                _audioVideoCall.AudioVideoFlowConfigurationRequested -= AudioVideoCall_FlowConfigurationRequested;
                _audioVideoCall.ConversationChanged -= AudioVideoCall_ConversationChanged;
                _audioVideoCall = null;
            }
            else
            {
                _waitForAudioVideoCallTerminated.Set();
            }

            if (_subConversation != null)
            {
                _transcriptRecorder.OnSubConversationRemoved(_subConversation, this);
                _subConversation = null;
            }

            _waitForAudioVideoCallAccepted.Reset();
            _waitForAudioVideoFlowStateChangedToActiveCompleted.Reset();
        }
Example #24
        public void Run()
        {
            // Create AudioVideoFlow
            AudioVideoFlowHelper audioVideoFlowHelper = new AudioVideoFlowHelper();

            _audioVideoFlow = audioVideoFlowHelper.CreateAudioVideoFlow(
                null,
                audioVideoFlow_StateChanged);

            //Prompt for the destination wma file to record to
            Console.WriteLine();
            Console.Write("Please enter the destination wma file => ");
            string filename = Console.ReadLine();

            //setup a recorder to record the audio received from the remote side by attaching it to the AudioVideoFlow
            Recorder recorder = new Recorder();

            recorder.AttachFlow(_audioVideoFlow);

            //Subscribe to the recorder's state changed event
            recorder.StateChanged += new EventHandler<RecorderStateChangedEventArgs>(recorder_StateChanged);

            //Subscribe to voice activity changed event
            recorder.VoiceActivityChanged += new EventHandler<VoiceActivityChangedEventArgs>(recorder_VoiceActivityChanged);

            //Create the sink and give it to the recorder so the recorder knows where to record to
            WmaFileSink sink = new WmaFileSink(filename);

            recorder.SetSink(sink);

            //Start to record
            recorder.Start();
            Console.WriteLine("\r\nRecording for 10 seconds.");

            //Wait 10 seconds while the recorder records
            Thread.Sleep(10000);

            //Pauses recorder
            recorder.Pause();
            Console.WriteLine("\r\nPausing for 2 seconds.");

            //Wait 2 seconds
            Thread.Sleep(2000);

            //Resume recording from where we paused the recorder previously
            recorder.Start();
            Console.WriteLine("\r\nResume recording for 5 seconds.");

            //Wait 5 seconds
            Thread.Sleep(5000);

            //Stop the recording
            recorder.Stop();
            Console.WriteLine("\r\nRecording stopped.");

            //Detach the recorder from the AudioVideoFlow
            recorder.DetachFlow();

            // Shutdown the platform
            ShutdownPlatform();

            //Wait for shutdown to occur.
            _waitForShutdownEventCompleted.WaitOne();
        }
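The recorder_StateChanged and recorder_VoiceActivityChanged handlers are not shown. Minimal sketches (the bodies are assumptions) might simply log the transitions:

        void recorder_StateChanged(object sender, RecorderStateChangedEventArgs e)
        {
            Console.WriteLine("Recorder state changed from " + e.PreviousState + " to " + e.State);
        }

        void recorder_VoiceActivityChanged(object sender, VoiceActivityChangedEventArgs e)
        {
            // IsVoice indicates whether the remote party is currently speaking.
            Console.WriteLine("Voice activity changed: IsVoice=" + e.IsVoice);
        }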
Example #27
 public static Task RetrieveAsync(this AudioVideoFlow flow)
 {
     return Task.Factory.FromAsync(
         flow.BeginRetrieve, flow.EndRetrieve,
         null);
 }
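A hypothetical usage, combining this extension with the HoldAsync sketch shown after Example #7:

 public static async Task HoldForAsync(AudioVideoFlow flow, TimeSpan duration)
 {
     // Put the flow on hold, wait, then retrieve it.
     await flow.HoldAsync(HoldType.BothEndpoints);
     await Task.Delay(duration);
     await flow.RetrieveAsync();
 }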
Example #28
        public void Run()
        {
            // Create AudioVideoFlow
            AudioVideoFlowHelper audioVideoFlowHelper = new AudioVideoFlowHelper();

            _audioVideoFlow = audioVideoFlowHelper.CreateAudioVideoFlow(
                null,
                audioVideoFlow_StateChanged);

            // Create a player and attach it to an AudioVideoFlow
            Player player = new Player();

            player.AttachFlow(_audioVideoFlow);

            //Subscribe to the player's state changed event, including the play completed event.
            player.StateChanged += new EventHandler<PlayerStateChangedEventArgs>(player_StateChanged);

            //Load the file into memory
            WmaFileSource source = new WmaFileSource("music.wma");

            source.BeginPrepareSource(MediaSourceOpenMode.Buffered, source_PrepareSourceCompleted, source);
            _waitForPrepareSourceCompleted.WaitOne();

            //in automatic mode, player will start playing only when the flow is in the active state.
            //in manual mode, player will start playing immediately.
            player.SetMode(PlayerMode.Automatic);

            player.SetSource(source);

            //Start playing the file
            player.Start();
            Console.WriteLine("Start playing for 10 seconds");

            //Allow the player to play for 10 seconds by waiting for 10 seconds
            Thread.Sleep(10000);

            //Pauses player
            player.Pause();
            Console.WriteLine("Pausing for 5 seconds");

            //Wait 5 seconds
            Thread.Sleep(5000);

            //Change playback speed to half of the regular speed
            player.PlaybackSpeed = PlaybackSpeed.Half;
            Console.WriteLine("Playback speed change to half of the regular speed");

            //Resume playing from where the player was paused previously
            player.Start();
            Console.WriteLine("Resume playing for 10 seconds");

            Thread.Sleep(10000);

            //Stop the player and reset position to the beginning
            player.Stop();
            Console.WriteLine("Stopping the player");

            // Source must be closed after it is no longer needed, otherwise memory will not be released even after garbage collection.
            source.Close();

            //The player must be detached from the flow; otherwise, if the player is rooted, it will keep the flow in memory.
            player.DetachFlow(_audioVideoFlow);

            // Shutdown the platform
            ShutdownPlatform();

            //Wait for shutdown to occur.
            _waitForShutdownEventCompleted.WaitOne();
        }
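The source_PrepareSourceCompleted callback follows the same Begin/End-plus-wait-handle pattern as the other completion handlers in these examples; a sketch under that assumption:

        private void source_PrepareSourceCompleted(IAsyncResult result)
        {
            WmaFileSource source = (WmaFileSource)result.AsyncState;

            // Complete the preparation of the media source.
            source.EndPrepareSource(result);

            _waitForPrepareSourceCompleted.Set();
        }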
Example #29
        public void AttachAndStartSpeechRecognition(AudioVideoFlow avFlow)
        {
            if (avFlow == null)
            {
                throw new InvalidOperationException("Cannot recognize speech of inactive AudioVideoFlow");
            }
            if (_isActive)
            {
                NonBlockingConsole.WriteLine("Warn: SpeechRecognizer already active on an AudioFlow. Stopping current recognition session.");
                StopSpeechRecognition();
            }

            _waitForAudioVideoFlowStateChangedToActiveCompleted.Reset();
            _speechTranscript.Clear();
            //StopSpeechRecognition();

            _isActive = true;
            _audioVideoFlow = avFlow;
            _audioVideoFlow.StateChanged += new EventHandler<MediaFlowStateChangedEventArgs>(AudioVideoFlow_StateChanged);

            if (_audioVideoFlow.State == MediaFlowState.Active)
            {
                _waitForAudioVideoFlowStateChangedToActiveCompleted.Set();
                StartSpeechRecognition();
            }

            // Else, Speech Recognition will start when AudioVideoFlow state becomes active
        }
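The final comment says recognition starts when the flow becomes active. The AudioVideoFlow_StateChanged handler that would do this is not shown; presumably it resembles the following sketch:

        private void AudioVideoFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            if (e.State == MediaFlowState.Active)
            {
                _waitForAudioVideoFlowStateChangedToActiveCompleted.Set();
                if (_isActive && !_isRecognizing)
                {
                    StartSpeechRecognition();
                }
            }
            else if (e.State == MediaFlowState.Terminated)
            {
                // The flow is gone; tear down any active recognition session.
                StopSpeechRecognition();
            }
        }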