public void Run()
{
    // Create AudioVideoFlow
    AudioVideoFlowHelper audioVideoFlowHelper = new AudioVideoFlowHelper();
    _audioVideoFlow = audioVideoFlowHelper.CreateAudioVideoFlow(
        null,
        audioVideoFlow_StateChanged);

    // Create a speech synthesis connector and attach it to the AudioVideoFlow
    SpeechSynthesisConnector speechSynthesisConnector = new SpeechSynthesisConnector();
    speechSynthesisConnector.AttachFlow(_audioVideoFlow);

    // Create a speech synthesizer and set the connector as its audio output
    SpeechSynthesizer speechSynthesis = new SpeechSynthesizer();
    SpeechAudioFormatInfo audioformat = new SpeechAudioFormatInfo(
        16000,
        AudioBitsPerSample.Sixteen,
        Microsoft.Speech.AudioFormat.AudioChannel.Mono);
    speechSynthesis.SetOutputToAudioStream(speechSynthesisConnector, audioformat);

    // Load the source file whose text will be spoken
    Console.WriteLine();
    Console.Write("Please enter the source file => ");
    string filename = Console.ReadLine();

    string msg = "";
    try
    {
        StreamReader objReader = new StreamReader(filename);
        msg = objReader.ReadToEnd();
    }
    catch (FileNotFoundException)
    {
        Console.WriteLine("\r\nFile doesn't exist.");
        ShutdownPlatform();
        _waitForShutdownEventCompleted.WaitOne();
        return; // Nothing to speak; stop here rather than streaming an empty message.
    }

    // Start the connector
    speechSynthesisConnector.Start();
    Console.WriteLine("\r\nStreaming source file for 15 seconds.");

    // Start streaming from the speech synthesizer
    speechSynthesis.SpeakAsync(new Prompt(msg));

    // Allow the connector to stream for 15 seconds
    Thread.Sleep(15000);

    // Stop the connector
    speechSynthesisConnector.Stop();
    Console.WriteLine("\r\nSpeech synthesis connector stopped.");

    // The speech synthesis connector must be detached from the flow; otherwise,
    // if the connector is rooted, it will keep the flow in memory.
    speechSynthesisConnector.DetachFlow();

    // Shut down the platform
    ShutdownPlatform();

    _waitForShutdownEventCompleted.WaitOne();
}
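The fixed 15-second Thread.Sleep above is just a demo-length wait; the synthesizer may finish earlier or be cut off. If you want to stop the connector exactly when synthesis completes, one option is to block on the synthesizer's SpeakCompleted event instead. A minimal sketch, assuming the speechSynthesis, speechSynthesisConnector, and msg variables from the example above are in scope:

// Sketch: wait for synthesis to finish instead of sleeping a fixed 15 seconds.
using (ManualResetEvent speakDone = new ManualResetEvent(false))
{
    speechSynthesis.SpeakCompleted += (sender, args) => speakDone.Set();
    speechSynthesis.SpeakAsync(new Prompt(msg));

    // Block until the SpeakCompleted event fires.
    speakDone.WaitOne();

    speechSynthesisConnector.Stop();
    speechSynthesisConnector.DetachFlow();
}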
public void SpeakMessage(AudioVideoFlow flow, string message)
{
    try
    {
        SpeechSynthesizer synth = new SpeechSynthesizer();
        SpeechAudioFormatInfo formatInfo = new SpeechAudioFormatInfo(
            16000,
            AudioBitsPerSample.Sixteen,
            Microsoft.Speech.AudioFormat.AudioChannel.Mono);

        SpeechSynthesisConnector connector = new SpeechSynthesisConnector();
        synth.SetOutputToAudioStream(connector.Stream, formatInfo);

        connector.AttachFlow(flow);
        connector.Start();

        synth.SpeakCompleted += new EventHandler<SpeakCompletedEventArgs>(
            (sender, args) =>
            {
                connector.Stop();
                // Detach so a rooted connector does not keep the flow in memory.
                connector.DetachFlow();
                synth.Dispose();
            });

        synth.SpeakAsync(message);
    }
    catch (Exception ex)
    {
        Console.WriteLine("Failed to play the message. {0}", ex);
    }
}
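A possible call site for SpeakMessage: subscribe to the flow's StateChanged event and speak only once the flow is active. The audioVideoCall variable and the greeting text here are assumptions for illustration, not part of the original sample:

// Hypothetical usage: speak a greeting once the audio flow becomes active.
AudioVideoFlow flow = audioVideoCall.Flow;
flow.StateChanged += (sender, e) =>
{
    if (e.State == MediaFlowState.Active)
    {
        SpeakMessage(flow, "Hello, thank you for calling.");
    }
};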
/// <summary>
/// Default constructor.
/// </summary>
private SpeechStatementActivity()
{
    m_speechSynthesisConnector = new SpeechSynthesisConnector();
    m_speechSynthesizer = new SpeechSynthesizer();
    m_audioformat = new SpeechAudioFormatInfo(
        16000,
        AudioBitsPerSample.Sixteen,
        Microsoft.Speech.AudioFormat.AudioChannel.Mono);
}
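The constructor only initializes the three fields; presumably they get wired together when the activity actually runs. A minimal sketch of what that step might look like, following the same attach/start/stop/detach pattern as the examples above (the SpeakStatement name and flow parameter are illustrative, not from the original class):

// Hypothetical companion method; name and signature are assumptions.
private void SpeakStatement(AudioVideoFlow flow, string statement)
{
    m_speechSynthesisConnector.AttachFlow(flow);
    m_speechSynthesizer.SetOutputToAudioStream(m_speechSynthesisConnector, m_audioformat);

    m_speechSynthesisConnector.Start();
    m_speechSynthesizer.Speak(statement); // synchronous speak for simplicity
    m_speechSynthesisConnector.Stop();
    m_speechSynthesisConnector.DetachFlow();
}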
public static Task<int> ReadAsync(this SpeechSynthesisConnector connector, byte[] buffer, int offset, int count)
{
    return Task<int>.Factory.FromAsync(connector.BeginRead, connector.EndRead, buffer, offset, count, null);
}
public static Task WriteAsync(this SpeechSynthesisConnector connector, byte[] buffer, int offset, int count)
{
    return Task.Factory.FromAsync(connector.BeginWrite, connector.EndWrite, buffer, offset, count, null);
}
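These two extensions wrap the APM-style BeginRead/EndRead and BeginWrite/EndWrite pairs the connector inherits from Stream in Task-returning methods, so they can be consumed with async/await. A small usage sketch for the write side, assuming a connector that has already been attached to a flow and started, inside an async method:

// Sketch: push a buffer of raw PCM audio into a started connector using the
// WriteAsync extension above (the synthesizer normally does this writing when
// the connector is set as its output stream).
byte[] silence = new byte[3200]; // 100 ms of 16 kHz, 16-bit mono silence
await connector.WriteAsync(silence, 0, silence.Length);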