Example #1
        public void Run()
        {
            // Create AudioVideoFlow
            AudioVideoFlowHelper audioVideoFlowHelper = new AudioVideoFlowHelper();

            _audioVideoFlow = audioVideoFlowHelper.CreateAudioVideoFlow(
                null,
                audioVideoFlow_StateChanged);

            // Create a speech synthesis connector and attach it to an AudioVideoFlow
            SpeechSynthesisConnector speechSynthesisConnector = new SpeechSynthesisConnector();

            speechSynthesisConnector.AttachFlow(_audioVideoFlow);

            // Create a speech synthesizer and set its output to the connector
            SpeechSynthesizer speechSynthesis = new SpeechSynthesizer();
            SpeechAudioFormatInfo audioFormat = new SpeechAudioFormatInfo(16000, AudioBitsPerSample.Sixteen, Microsoft.Speech.AudioFormat.AudioChannel.Mono);

            speechSynthesis.SetOutputToAudioStream(speechSynthesisConnector, audioFormat);

            //Prompt for the text file to use as the source
            Console.WriteLine();
            Console.Write("Please enter the source file => ");
            string filename = Console.ReadLine();

            string msg = "";

            try
            {
                using (StreamReader objReader = new StreamReader(filename))
                {
                    msg = objReader.ReadToEnd();
                }
            }
            catch (FileNotFoundException)
            {
                Console.WriteLine("\r\nFile doesn't exist.");
                ShutdownPlatform();
                _waitForShutdownEventCompleted.WaitOne();
                return;
            }

            //Start connector
            speechSynthesisConnector.Start();
            Console.WriteLine("\r\nStreaming source file for 15 seconds.");

            //Start streaming from speech synthesis.
            speechSynthesis.SpeakAsync(new Prompt(msg));

            //Let the connector stream for 15 seconds
            Thread.Sleep(15000);

            //Stop the connector
            speechSynthesisConnector.Stop();
            Console.WriteLine("\r\nSpeech synthesis connector stopped.");

            //The speech synthesis connector must be detached from the flow; otherwise, if the connector is rooted, it will keep the flow in memory.
            speechSynthesisConnector.DetachFlow();

            // Shutdown the platform
            ShutdownPlatform();

            _waitForShutdownEventCompleted.WaitOne();
        }
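
Each of these Run methods relies on members the snippet does not show, such as _audioVideoFlow, _waitForShutdownEventCompleted, and audioVideoFlow_StateChanged. A minimal sketch of one plausible shape follows; the field types and handler body are assumptions, not part of the sample:

        private AudioVideoFlow _audioVideoFlow;
        private ManualResetEvent _waitForShutdownEventCompleted = new ManualResetEvent(false);

        // Log flow state transitions; ShutdownPlatform() is expected to set
        // _waitForShutdownEventCompleted once platform shutdown finishes.
        private void audioVideoFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            Console.WriteLine("Flow state changed from " + e.PreviousState + " to " + e.State);
        }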
Example #2
        public void Run()
        {
            // Create AudioVideoFlow
            AudioVideoFlowHelper audioVideoFlowHelper = new AudioVideoFlowHelper();

            _audioVideoFlow = audioVideoFlowHelper.CreateAudioVideoFlow(
                null,
                audioVideoFlow_StateChanged);

            // Create a ToneController and attach it to the AudioVideoFlow
            ToneController toneController = new ToneController();

            toneController.AttachFlow(_audioVideoFlow);

            // Subscribe to callback to receive tones
            toneController.ToneReceived += new EventHandler<ToneControllerEventArgs>(toneController_ToneReceived);

            // Subscribe to callback to receive fax tones
            toneController.IncomingFaxDetected += new EventHandler<IncomingFaxDetectedEventArgs>(toneController_IncomingFaxDetected);

            Console.WriteLine("ToneController attached. Send Zero or a Fax Tone to disconnect the call.");

            //Block until a tone or fax tone is received.
            _waitForToneReceivedEventCompleted.WaitOne();

            // Shutdown the platform
            ShutdownPlatform();

            //Wait for shutdown to occur.
            _waitForShutdownEventCompleted.WaitOne();
        }
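
The tone and fax callbacks are not shown above. One plausible implementation, assuming the sample releases _waitForToneReceivedEventCompleted when the "0" key or a fax tone arrives:

        private ManualResetEvent _waitForToneReceivedEventCompleted = new ManualResetEvent(false);

        private void toneController_ToneReceived(object sender, ToneControllerEventArgs e)
        {
            Console.WriteLine("Tone received: " + e.Tone);

            // e.Tone is the numeric tone value; 0 corresponds to the "0" key.
            if (e.Tone == 0)
            {
                _waitForToneReceivedEventCompleted.Set();
            }
        }

        private void toneController_IncomingFaxDetected(object sender, IncomingFaxDetectedEventArgs e)
        {
            Console.WriteLine("Incoming fax tone detected.");
            _waitForToneReceivedEventCompleted.Set();
        }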
Example #3
        public void Run()
        {
            // Create AudioVideoFlow
            AudioVideoFlowHelper audioVideoFlowHelper = new AudioVideoFlowHelper();

            _audioVideoFlow = audioVideoFlowHelper.CreateAudioVideoFlow(
                null,
                audioVideoFlow_StateChanged);

            // Create a speech recognition connector and attach it to an AudioVideoFlow
            SpeechRecognitionConnector speechRecognitionConnector = new SpeechRecognitionConnector();

            speechRecognitionConnector.AttachFlow(_audioVideoFlow);

            //Start recognizing
            SpeechRecognitionStream stream = speechRecognitionConnector.Start();

            // Create speech recognition engine and start recognizing by attaching connector to engine
            SpeechRecognitionEngine speechRecognitionEngine = new SpeechRecognitionEngine();

            speechRecognitionEngine.SpeechRecognized += new EventHandler<SpeechRecognizedEventArgs>(speechRecognitionEngine_SpeechRecognized);

            string[] recognizedString = { "zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten", "exit" };
            Choices numberChoices = new Choices(recognizedString);

            speechRecognitionEngine.LoadGrammar(new Grammar(new GrammarBuilder(numberChoices)));

            SpeechAudioFormatInfo speechAudioFormatInfo = new SpeechAudioFormatInfo(8000, AudioBitsPerSample.Sixteen, Microsoft.Speech.AudioFormat.AudioChannel.Mono);

            speechRecognitionEngine.SetInputToAudioStream(stream, speechAudioFormatInfo);
            Console.WriteLine("\r\nGrammar loaded from zero to ten, say exit to shutdown.");

            speechRecognitionEngine.RecognizeAsync(RecognizeMode.Multiple);

            //Block until "exit" is recognized.
            _waitForXXXCompleted.WaitOne();
            //Stop the connector
            speechRecognitionConnector.Stop();
            Console.WriteLine("Stopping the speech recognition connector");

            //The speech recognition connector must be detached from the flow; otherwise, if the connector is rooted, it will keep the flow in memory.
            speechRecognitionConnector.DetachFlow();

            // Shutdown the platform
            ShutdownPlatform();

            _waitForShutdownEventCompleted.WaitOne();
        }
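
The SpeechRecognized handler referenced above presumably sets _waitForXXXCompleted when the word "exit" is recognized, matching the prompt printed to the console. A sketch under that assumption:

        private ManualResetEvent _waitForXXXCompleted = new ManualResetEvent(false);

        private void speechRecognitionEngine_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
        {
            Console.WriteLine("Recognized: " + e.Result.Text);

            // Release Run() once the caller says "exit".
            if (e.Result.Text == "exit")
            {
                _waitForXXXCompleted.Set();
            }
        }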
Example #4
        public void Run()
        {
            // Create AudioVideoFlow
            AudioVideoFlowHelper audioVideoFlowHelper = new AudioVideoFlowHelper();

            _audioVideoFlow = audioVideoFlowHelper.CreateAudioVideoFlow(
                null,
                audioVideoFlow_StateChanged);

            // This event is raised whenever media negotiation changes the flow's configuration.
            _audioVideoFlow.ConfigurationChanged += new EventHandler<AudioVideoFlowConfigurationChangedEventArgs>(audioVideoFlow_ConfigurationChanged);

            // Attach a player with a source and start it in a constant loop.
            audioVideoFlowHelper.AttachAndStartPlayer(_audioVideoFlow, true);

            // Check Hold Status.
            Console.WriteLine("AudioVideoFlow's HoldStatus: " + _audioVideoFlow.HoldStatus);

            Thread.Sleep(10000);

            // Put both endpoints on hold and wait for the operation to complete.
            _audioVideoFlow.BeginHold(HoldType.BothEndpoints, audioVideoFlow_HoldCompleted, _audioVideoFlow);
            _waitForHoldRetrieveCompleted.WaitOne();

            // Check Hold Status.
            Console.WriteLine("AudioVideoFlow's HoldStatus: " + _audioVideoFlow.HoldStatus);

            Thread.Sleep(10000);

            // Retrieve the flow from hold and wait for the operation to complete.
            _audioVideoFlow.BeginRetrieve(audioVideoFlow_RetrieveCompleted, _audioVideoFlow);
            _waitForHoldRetrieveCompleted.WaitOne();

            // Check Hold Status.
            Console.WriteLine("AudioVideoFlow's HoldStatus: " + _audioVideoFlow.HoldStatus);

            Thread.Sleep(10000);

            // Shutdown the platform
            ShutdownPlatform();

            //Wait for shutdown to occur.
            _waitForShutdownEventCompleted.WaitOne();
        }
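
The hold and retrieve callbacks complete their asynchronous operations and release the wait handle. Since Run() waits on _waitForHoldRetrieveCompleted twice, an AutoResetEvent fits better than a ManualResetEvent; the bodies below are assumptions:

        private AutoResetEvent _waitForHoldRetrieveCompleted = new AutoResetEvent(false);

        private void audioVideoFlow_HoldCompleted(IAsyncResult result)
        {
            // The flow was passed as the async state in BeginHold.
            AudioVideoFlow avFlow = (AudioVideoFlow)result.AsyncState;
            avFlow.EndHold(result);
            _waitForHoldRetrieveCompleted.Set();
        }

        private void audioVideoFlow_RetrieveCompleted(IAsyncResult result)
        {
            AudioVideoFlow avFlow = (AudioVideoFlow)result.AsyncState;
            avFlow.EndRetrieve(result);
            _waitForHoldRetrieveCompleted.Set();
        }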
Example #5
        public void Run()
        {
            // Create AudioVideoFlow
            AudioVideoFlowHelper audioVideoFlowHelper = new AudioVideoFlowHelper();

            _audioVideoFlow = audioVideoFlowHelper.CreateAudioVideoFlow(
                null,
                audioVideoFlow_StateChanged);

            // This event is raised whenever media negotiation changes the flow's configuration.
            _audioVideoFlow.ConfigurationChanged += new EventHandler<AudioVideoFlowConfigurationChangedEventArgs>(audioVideoFlow_ConfigurationChanged);

            // Attach a player with a source and start it in a constant loop.
            audioVideoFlowHelper.AttachAndStartPlayer(_audioVideoFlow, true);

            // Check direction muted
            Console.WriteLine("AudioVideoFlow's direction muted: " + _audioVideoFlow.Audio.DirectionMuted);

            Thread.Sleep(10000);

            // Mute both directions
            _audioVideoFlow.Audio.Mute(MuteDirection.SendReceive);

            // Check direction muted
            Console.WriteLine("AudioVideoFlow's direction muted: " + _audioVideoFlow.Audio.DirectionMuted);

            Thread.Sleep(10000);

            // Unmute both directions
            _audioVideoFlow.Audio.Unmute(MuteDirection.SendReceive);

            // Check direction muted
            Console.WriteLine("AudioVideoFlow's direction muted: " + _audioVideoFlow.Audio.DirectionMuted);

            Thread.Sleep(10000);

            // Shutdown the platform
            ShutdownPlatform();

            //Wait for shutdown to occur.
            _waitForShutdownEventCompleted.WaitOne();
        }
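
Examples 4 through 7 also subscribe to the ConfigurationChanged event; a minimal handler (body assumed) only needs to log the notification:

        private void audioVideoFlow_ConfigurationChanged(object sender, AudioVideoFlowConfigurationChangedEventArgs e)
        {
            // Raised after renegotiation, e.g. following hold, retrieve, or ApplyChanges.
            Console.WriteLine("AudioVideoFlow configuration changed.");
        }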
Example #6
        public void Run()
        {
            // Create AudioVideoFlow
            AudioVideoFlowHelper audioVideoFlowHelper = new AudioVideoFlowHelper();

            _audioVideoFlow = audioVideoFlowHelper.CreateAudioVideoFlow(
                null,
                audioVideoFlow_StateChanged);

            // This event is raised whenever media negotiation changes the flow's configuration.
            _audioVideoFlow.ConfigurationChanged += new EventHandler<AudioVideoFlowConfigurationChangedEventArgs>(audioVideoFlow_ConfigurationChanged);

            // Attach a player with a source and start it in a constant loop.
            audioVideoFlowHelper.AttachAndStartPlayer(_audioVideoFlow, true);

            // Check the current sampling rate.
            Console.WriteLine("AudioVideoFlow using sampling rate: " + _audioVideoFlow.Audio.GetChannels()[ChannelLabel.AudioMono].SamplingRate);

            Thread.Sleep(10000);

            Console.WriteLine("Call ApplyChanges changing sampling rate from 8Khz or 16Khz to only 8Khz.");

            AudioVideoFlowTemplate template = new AudioVideoFlowTemplate(_audioVideoFlow);
            AudioChannelTemplate audioChannelTemplate = template.Audio.GetChannels()[ChannelLabel.AudioMono];

            audioChannelTemplate.SamplingRate = AudioSamplingRate.EightKhz;

            // Apply the sampling rate change.
            _audioVideoFlow.BeginApplyChanges(template, audioVideoFlow_ApplyChangesCompleted, _audioVideoFlow);
            _waitForApplyChangesCompleted.WaitOne();

            Console.WriteLine("AudioVideoFlow using sampling rate: " + _audioVideoFlow.Audio.GetChannels()[ChannelLabel.AudioMono].SamplingRate);

            Thread.Sleep(10000);

            // Shutdown the platform
            ShutdownPlatform();

            //Wait for shutdown to occur.
            _waitForShutdownEventCompleted.WaitOne();
        }
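
The ApplyChanges callback completes the asynchronous operation and releases the wait handle. A sketch, with the body assumed:

        private AutoResetEvent _waitForApplyChangesCompleted = new AutoResetEvent(false);

        private void audioVideoFlow_ApplyChangesCompleted(IAsyncResult result)
        {
            // The flow was passed as the async state in BeginApplyChanges.
            AudioVideoFlow avFlow = (AudioVideoFlow)result.AsyncState;
            avFlow.EndApplyChanges(result);
            _waitForApplyChangesCompleted.Set();
        }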
Example #7
        public void Run()
        {
            // Create AudioVideoFlow
            AudioVideoFlowHelper audioVideoFlowHelper = new AudioVideoFlowHelper();

            _audioVideoFlow = audioVideoFlowHelper.CreateAudioVideoFlow(
                audioVideoCall_FlowConfigurationRequested,
                audioVideoFlow_StateChanged);

            // This event is raised whenever media negotiation changes the flow's configuration.
            _audioVideoFlow.ConfigurationChanged += new EventHandler<AudioVideoFlowConfigurationChangedEventArgs>(audioVideoFlow_ConfigurationChanged);

            // Attach a player with a source and start it in a constant loop.
            audioVideoFlowHelper.AttachAndStartPlayer(_audioVideoFlow, true);

            // Check the audio channel direction.
            Console.WriteLine("AudioVideoFlow audio channel direction: " + _audioVideoFlow.Audio.GetChannels()[ChannelLabel.AudioMono].Direction);

            Thread.Sleep(10000);

            Console.WriteLine("Call ApplyChanges changing audio direcion to send and receive.");

            AudioVideoFlowTemplate template = new AudioVideoFlowTemplate(_audioVideoFlow);
            AudioChannelTemplate audioChannelTemplate = template.Audio.GetChannels()[ChannelLabel.AudioMono];

            audioChannelTemplate.AllowedDirection = MediaChannelDirection.SendReceive;

            // Apply the new allowed direction (SendReceive).
            _audioVideoFlow.BeginApplyChanges(template, audioVideoFlow_ApplyChangesCompleted, _audioVideoFlow);
            _waitForApplyChangesCompleted.WaitOne();

            Console.WriteLine("AudioVideoFlow audio channel direction: " + _audioVideoFlow.Audio.GetChannels()[ChannelLabel.AudioMono].Direction);

            Thread.Sleep(5000);

            // Shutdown the platform
            ShutdownPlatform();

            //Wait for shutdown to occur.
            _waitForShutdownEventCompleted.WaitOne();
        }
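
Unlike the other examples, this one passes a FlowConfigurationRequested callback to the helper, which is presumably what leaves the channel in a restricted direction before ApplyChanges widens it to SendReceive. One plausible implementation, assumed rather than taken from the sample:

        private void audioVideoCall_FlowConfigurationRequested(object sender, AudioVideoFlowConfigurationRequestedEventArgs e)
        {
            // Restrict the initial negotiation to send-only audio.
            AudioVideoFlowTemplate template = new AudioVideoFlowTemplate(e.Flow);
            template.Audio.GetChannels()[ChannelLabel.AudioMono].AllowedDirection = MediaChannelDirection.SendOnly;
            e.Flow.Initialize(template);
        }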
Example #8
        public void Run()
        {
            // Create AudioVideoFlow
            AudioVideoFlowHelper audioVideoFlowHelper = new AudioVideoFlowHelper();

            _audioVideoFlow = audioVideoFlowHelper.CreateAudioVideoFlow(
                null,
                audioVideoFlow_StateChanged);

            //Prompt for the destination WMA file
            Console.WriteLine();
            Console.Write("Please enter the destination wma file => ");
            string filename = Console.ReadLine();

            //Set up a recorder to capture the audio received from the remote side by attaching it to the AudioVideoFlow
            Recorder recorder = new Recorder();

            recorder.AttachFlow(_audioVideoFlow);

            //Subscribe to the recorder's state changed event
            recorder.StateChanged += new EventHandler<RecorderStateChangedEventArgs>(recorder_StateChanged);

            //Subscribe to voice activity changed event
            recorder.VoiceActivityChanged += new EventHandler<VoiceActivityChangedEventArgs>(recorder_VoiceActivityChanged);

            //Create the sink and give it to the recorder so the recorder knows where to record to
            WmaFileSink sink = new WmaFileSink(filename);

            recorder.SetSink(sink);

            //Start to record
            recorder.Start();
            Console.WriteLine("\r\nRecording for 10 seconds.");

            //Wait 10 seconds while the recorder runs
            Thread.Sleep(10000);

            //Pause the recorder
            recorder.Pause();
            Console.WriteLine("\r\nPausing for 2 seconds.");

            //Wait 2 seconds
            Thread.Sleep(2000);

            //Resume recording from where the recorder was paused
            recorder.Start();
            Console.WriteLine("\r\nResume recording for 5 seconds.");

            //Wait 5 seconds
            Thread.Sleep(5000);

            //Stop the recording
            recorder.Stop();
            Console.WriteLine("\r\nRecording stopped.");

            //Detach the recorder from the AudioVideoFlow
            recorder.DetachFlow();

            // Shutdown the platform
            ShutdownPlatform();

            //Wait for shutdown to occur.
            _waitForShutdownEventCompleted.WaitOne();
        }
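
The recorder's two event handlers are not shown. Minimal logging versions, with the bodies assumed:

        private void recorder_StateChanged(object sender, RecorderStateChangedEventArgs e)
        {
            Console.WriteLine("Recorder state changed from " + e.PreviousState + " to " + e.State);
        }

        private void recorder_VoiceActivityChanged(object sender, VoiceActivityChangedEventArgs e)
        {
            // IsVoice is true while the remote party is speaking.
            Console.WriteLine("Voice activity changed: " + (e.IsVoice ? "speech" : "silence"));
        }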
Example #9
        public void Run()
        {
            // Create AudioVideoFlow
            AudioVideoFlowHelper audioVideoFlowHelper = new AudioVideoFlowHelper();

            _audioVideoFlow = audioVideoFlowHelper.CreateAudioVideoFlow(
                null,
                audioVideoFlow_StateChanged);

            // Create a player and attach it to an AudioVideoFlow
            Player player = new Player();

            player.AttachFlow(_audioVideoFlow);

            //Subscribe to the player's state changed event, including the play completed event.
            player.StateChanged += new EventHandler<PlayerStateChangedEventArgs>(player_StateChanged);

            //Load the file into memory
            WmaFileSource source = new WmaFileSource("music.wma");

            source.BeginPrepareSource(MediaSourceOpenMode.Buffered, source_PrepareSourceCompleted, source);
            _waitForPrepareSourceCompleted.WaitOne();

            //In automatic mode, the player starts playing only when the flow is in the active state.
            //In manual mode, the player starts playing immediately.
            player.SetMode(PlayerMode.Automatic);

            player.SetSource(source);

            //Start playing the file
            player.Start();
            Console.WriteLine("Start playing for 10 seconds");

            //Allow the player to play for 10 seconds by waiting for 10 seconds
            Thread.Sleep(10000);

            //Pause the player
            player.Pause();
            Console.WriteLine("Pausing for 5 seconds");

            //Wait 5 seconds
            Thread.Sleep(5000);

            //Change playback speed to half of the regular speed
            player.PlaybackSpeed = PlaybackSpeed.Half;
            Console.WriteLine("Playback speed change to half of the regular speed");

            //Resume playing from where the player was paused
            player.Start();
            Console.WriteLine("Resume playing for 10 seconds");

            Thread.Sleep(10000);

            //Stop the player and reset position to the beginning
            player.Stop();
            Console.WriteLine("Stopping the player");

            // Source must be closed after it is no longer needed, otherwise memory will not be released even after garbage collection.
            source.Close();

            //The player must be detached from the flow; otherwise, if the player is rooted, it will keep the flow in memory.
            player.DetachFlow(_audioVideoFlow);

            // Shutdown the platform
            ShutdownPlatform();

            //Wait for shutdown to occur.
            _waitForShutdownEventCompleted.WaitOne();
        }
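
This example additionally relies on a prepare-source callback and a player state handler. Sketches under the same assumptions as above:

        private ManualResetEvent _waitForPrepareSourceCompleted = new ManualResetEvent(false);

        private void source_PrepareSourceCompleted(IAsyncResult result)
        {
            // The source was passed as the async state in BeginPrepareSource.
            WmaFileSource source = (WmaFileSource)result.AsyncState;
            source.EndPrepareSource(result);
            _waitForPrepareSourceCompleted.Set();
        }

        private void player_StateChanged(object sender, PlayerStateChangedEventArgs e)
        {
            Console.WriteLine("Player state changed from " + e.PreviousState + " to " + e.State);
        }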