Example #1
        public void Run()
        {
            // Create AudioVideoFlow
            AudioVideoFlowHelper audioVideoFlowHelper = new AudioVideoFlowHelper();

            _audioVideoFlow = audioVideoFlowHelper.CreateAudioVideoFlow(
                null,
                audioVideoFlow_StateChanged);

            // Create a speech synthesis connector and attach it to an AudioVideoFlow
            SpeechSynthesisConnector speechSynthesisConnector = new SpeechSynthesisConnector();

            speechSynthesisConnector.AttachFlow(_audioVideoFlow);

            // Create a speech synthesizer and set its output to the connector
            SpeechSynthesizer speechSynthesis = new SpeechSynthesizer();
            SpeechAudioFormatInfo audioformat = new SpeechAudioFormatInfo(16000, AudioBitsPerSample.Sixteen, Microsoft.Speech.AudioFormat.AudioChannel.Mono);

            speechSynthesis.SetOutputToAudioStream(speechSynthesisConnector, audioformat);

            // Load a text file to use as the source text
            Console.WriteLine();
            Console.Write("Please enter the source file => ");
            string filename = Console.ReadLine();

            string msg = "";

            try
            {
                using (StreamReader objReader = new StreamReader(filename))
                {
                    msg = objReader.ReadToEnd();
                }
            }
            catch (FileNotFoundException)
            {
                Console.WriteLine("\r\nFile doesn't exist.");
                // Bail out; without a source file there is nothing to stream.
                ShutdownPlatform();
                _waitForShutdownEventCompleted.WaitOne();
                return;
            }

            //Start connector
            speechSynthesisConnector.Start();
            Console.WriteLine("\r\nStreaming source file for 15 seconds.");

            //Start streaming from speech synthesis.
            speechSynthesis.SpeakAsync(new Prompt(msg));

            //Let the connector stream for 15 seconds
            Thread.Sleep(15000);

            //Stop the connector
            speechSynthesisConnector.Stop();
            Console.WriteLine("\r\nSpeech synthesis connector stopped.");

            //The speech synthesis connector must be detached from the flow; otherwise, a rooted connector will keep the flow in memory.
            speechSynthesisConnector.DetachFlow();

            // Shutdown the platform
            ShutdownPlatform();

            _waitForShutdownEventCompleted.WaitOne();
        }
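
The sample registers audioVideoFlow_StateChanged as the flow's state-changed callback but does not show it. A minimal sketch of what that handler might look like, following the detach guidance in the comments above (checking for MediaFlowState.Terminated is an assumption based on the UCMA flow life cycle, not part of the sample):

        // Hypothetical sketch of the state-changed handler registered above.
        private void audioVideoFlow_StateChanged(object sender, MediaFlowStateChangedEventArgs e)
        {
            // If the flow terminates, detach the connector so a rooted
            // connector cannot keep the flow in memory (see the comment
            // before DetachFlow in Run above).
            if (e.State == MediaFlowState.Terminated)
            {
                AudioVideoFlow avFlow = (AudioVideoFlow)sender;
                if (avFlow.SpeechSynthesisConnector != null)
                {
                    avFlow.SpeechSynthesisConnector.DetachFlow();
                }
            }
        }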
Example #2
        public void SpeakMessage(AudioVideoFlow flow, string message)
        {
            try
            {
                SpeechSynthesizer synth = new SpeechSynthesizer();
                SpeechAudioFormatInfo formatInfo = new SpeechAudioFormatInfo(16000, AudioBitsPerSample.Sixteen, Microsoft.Speech.AudioFormat.AudioChannel.Mono);
                SpeechSynthesisConnector connector = new SpeechSynthesisConnector();

                // SpeechSynthesisConnector derives from Stream, so it can be
                // passed directly as the synthesizer's output, as in Example #1.
                synth.SetOutputToAudioStream(connector, formatInfo);

                connector.AttachFlow(flow);
                connector.Start();

                synth.SpeakCompleted += (sender, args) =>
                {
                    connector.Stop();
                    // Detach so the connector does not keep the flow rooted in memory.
                    connector.DetachFlow();
                    synth.Dispose();
                };

                synth.SpeakAsync(message);
            }
            catch (Exception ex)
            {
                Console.WriteLine("Failed to play the message. {0}", ex);
            }

        }
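
A possible call site for SpeakMessage, shown only for illustration; the handler name and greeting text are assumptions, not part of the sample:

        // Hypothetical usage: speak a greeting once a flow is available.
        private void OnFlowActive(AudioVideoFlow flow)
        {
            SpeakMessage(flow, "Hello! Your call has been connected.");
        }

Note that the SpeakCompleted handler above stops the connector and disposes the synthesizer, so each call to SpeakMessage creates and tears down its own synthesizer.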
Example #3
 /// <summary>
 /// Starts the speech synthesis connector and speaks the prompt message asynchronously.
 /// </summary>
 private void StartSpeakAsync()
 {
     // Detach any speech synthesis connector already attached to the flow.
     if (AudioVideoCall.Flow.SpeechSynthesisConnector != null)
     {
         AudioVideoCall.Flow.SpeechSynthesisConnector.DetachFlow();
     }
     m_speechSynthesisConnector.AttachFlow(AudioVideoCall.Flow);
     m_speechSynthesizer.SetOutputToAudioStream(m_speechSynthesisConnector, m_audioformat);
     m_speechSynthesizer.SpeakCompleted += new EventHandler<SpeakCompletedEventArgs>(SpeechSynthesizer_SpeakCompleted);
     m_speechSynthesisConnector.Start();
     // If no prompt is set, speak an SSML break instead.
     if (m_pbMainPrompt == null)
     {
         m_pbMainPrompt = new PromptBuilder();
         m_pbMainPrompt.AppendBreak();
     }
     m_speechSynthesizer.SpeakAsync(m_pbMainPrompt);
 }
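
The SpeechSynthesizer_SpeakCompleted handler wired up above is not shown. A sketch of what it might contain; the unsubscribe and stop/detach steps are assumptions mirroring the cleanup order used in the other examples:

 // Hypothetical completion handler for the subscription made in StartSpeakAsync.
 private void SpeechSynthesizer_SpeakCompleted(object sender, SpeakCompletedEventArgs e)
 {
     // Unsubscribe so repeated StartSpeakAsync calls do not stack handlers.
     m_speechSynthesizer.SpeakCompleted -= SpeechSynthesizer_SpeakCompleted;
     m_speechSynthesisConnector.Stop();
     // Detach so the connector does not keep the flow rooted in memory.
     m_speechSynthesisConnector.DetachFlow();
 }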
Example #4
        public void StartWithWelcome(AudioVideoFlow flow, LyncServer server)
        {
            _flow   = flow;
            _server = server;
            // Attach the speech synthesis connector to the audio flow
            _speechSynthesisConnector.AttachFlow(_flow);

            _speechSynthesizer = new SpeechSynthesizer();
            _speechSynthesizer.SetOutputToAudioStream(_speechSynthesisConnector, audioformat);
            _speechSynthesizer.SelectVoice("Microsoft Hazel Desktop"); // Hazel is a British English voice

            var toneController = new ToneController();                 //this is for the DTMF tones

            toneController.AttachFlow(_flow);

            _speechSynthesisConnector.Start();

            _speechSynthesizer.Speak("Welcome to the UC Day IVR!");  // TEXT TO SPEECH!
            SpeakMenuOptions();
            toneController.ToneReceived += toneController_ToneReceived;
        }
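
Neither SpeakMenuOptions nor toneController_ToneReceived is shown. A sketch of the DTMF handler, with a menu mapping invented purely for illustration (this assumes the UCMA ToneControllerEventArgs, whose Tone property carries the pressed key's numeric value):

        // Hypothetical DTMF handler; the menu choices are illustrative only.
        private void toneController_ToneReceived(object sender, ToneControllerEventArgs e)
        {
            switch (e.Tone)
            {
                case 1:
                    _speechSynthesizer.SpeakAsync("You pressed one.");
                    break;
                default:
                    // Repeat the menu for any unrecognized key.
                    SpeakMenuOptions();
                    break;
            }
        }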