// Speaks the rich-text content with the voice chosen in comboBox1,
// then arms the button-highlight timer.
private void button1_Click(object sender, EventArgs e)
{
    if (richTextBox1.Text == "")
    {
        MessageBox.Show("Please Enter Some Text First !!!");
    }
    else
    {
        // Replace the synthesizer so any previous speech is cut off.
        reader.Dispose();
        reader = new SpeechSynthesizer();
        if (comboBox1.Text == "Male")
        {
            reader.SelectVoiceByHints(VoiceGender.Male);
        }
        if (comboBox1.Text == "Female")
        {
            reader.SelectVoiceByHints(VoiceGender.Female);
        }
        reader.SpeakAsync(richTextBox1.Text);
    }
    // Arm the highlight cycle over the four transport buttons.
    buttons = new Button[4] { button1, button2, button3, button4 };
    i = 0;
    timer1.Start();
}
// Speaks the rich-text content with the selected voice, rate, and volume.
// BUG FIX: the original tested `richTextBox1.Text != null`, which is always
// true for a WinForms text control, so the "Choose a lesson!" warning was
// unreachable; test for an empty string instead.
private void button1_Click(object sender, EventArgs e)
{
    if (!string.IsNullOrEmpty(richTextBox1.Text))
    {
        reader.Dispose();
        reader = new SpeechSynthesizer();
        reader.Rate = trackBar2.Value;    // speaking rate from slider
        reader.Volume = trackBar1.Value;  // volume from slider
        if (comboBox1.Text == "Male") { reader.SelectVoiceByHints(VoiceGender.Male); }
        if (comboBox1.Text == "Female") { reader.SelectVoiceByHints(VoiceGender.Female); }
        reader.SpeakAsync(richTextBox1.Text);
    }
    else
    {
        MessageBox.Show("Choose a lesson!");
    }
}
//SPEAK TEXT
// Speaks textBox1 asynchronously and logs installed voices for diagnostics.
// FIXES: SpeakCompleted is now subscribed BEFORE SpeakAsync so completion of
// a very short utterance cannot be missed, and the installed voices are
// printed name-by-name (Console.Write on the collection only printed its
// type name).
private void button1_Click(object sender, EventArgs e)
{
    reader.Dispose();
    if (textBox1.Text != "")
    {
        //get object of SpeechSynthesizer
        reader = new SpeechSynthesizer();
        //check for installed voices
        ReadOnlyCollection<InstalledVoice> installedVoices = reader.GetInstalledVoices();
        foreach (InstalledVoice v in installedVoices)
        {
            Console.WriteLine(v.VoiceInfo.Name);
        }
        //check for event completion of speaking — attached before speech starts
        reader.SpeakCompleted += new EventHandler<SpeakCompletedEventArgs>(reader_SpeakCompleted);
        //speak text in the box
        reader.SpeakAsync(textBox1.Text);
        label2.Text = "SPEAKING";
        button2.Enabled = true;
        button4.Enabled = true;
    }
    else
    {
        MessageBox.Show("Please enter some text in the textbox", "Message", MessageBoxButtons.OK);
    }
}
// Toggles between playing the message text and stopping playback;
// the button caption doubles as the state flag.
private void btnPlay_Click(object sender, EventArgs e)
{
    bool starting = btnPlay.Text == "Play";
    if (!starting)
    {
        // Stop: tear the synthesizer down and restore the idle UI state.
        if (reader != null)
        {
            btnPause.Enabled = false;
            reader.Dispose();
            btnPlay.Text = "Play";
        }
        return;
    }
    if (tbxMessage.Text == "")
    {
        MessageBox.Show("Please Enter Some Text !!");
        return;
    }
    reader.Dispose();
    reader = new SpeechSynthesizer();
    reader.SpeakAsync(tbxMessage.Text);
    btnPlay.Text = "Stop";
    btnPause.Enabled = true;
}
//SPEAK TEXT
// FIXES: the original constructed TWO SpeechSynthesizer instances (the first
// was never disposed — leak), and set Volume/Rate and subscribed
// SpeakCompleted only AFTER SpeakAsync had started, so they might not apply
// to the current utterance. One instance is now fully configured first.
private void button1_Click(object sender, EventArgs e)
{
    if (reader != null)
    {
        reader.Dispose();
    }
    reader = new SpeechSynthesizer();
    if (textBox1.Text != "")
    {
        reader.Volume = tbVolume.Value;  // from the volume trackbar
        reader.Rate = tbSpeed.Value;     // from the speed trackbar
        reader.SpeakCompleted += new EventHandler<SpeakCompletedEventArgs>(reader_SpeakCompleted);
        reader.SpeakAsync(textBox1.Text);
        label2.Text = "SPEAKING";
        button2.Enabled = true;
        button4.Enabled = true;
    }
    else
    {
        MessageBox.Show("Please enter some text in the textbox", "Message", MessageBoxButtons.OK);
    }
}
// Speaks the rich-text content with the voice picked in comboBox1
// ("men" -> Microsoft David Desktop, "girls" -> Microsoft Zira Desktop).
// BUG FIX: Volume is now applied BEFORE SpeakAsync — the original set it
// after starting speech, so the scrollbar value could miss the utterance.
// (Original comments were in Thai; translated to English.)
private void button1_Click(object sender, EventArgs e)
{
    synth.Dispose();  // cut off any speech still in progress
    // Require both some text and a voice selection before playing.
    if ((richTextBox1.Text != "") && (comboBox1.Text != ""))
    {
        synth = new SpeechSynthesizer();
        if (comboBox1.Text == "men")
        {
            synth.SelectVoiceByHints(VoiceGender.Male);      // prefer a male voice
            synth.SelectVoice("Microsoft David Desktop");    // use the male voice
        }
        if (comboBox1.Text == "girls")
        {
            synth.SelectVoiceByHints(VoiceGender.Female);    // prefer a female voice
            synth.SelectVoice("Microsoft Zira Desktop");     // use the female voice
        }
        synth.Volume = vAluE;  // applied before speaking so it takes effect
        Prompt textsound = new Prompt(richTextBox1.Text);
        synth.SpeakAsync(textsound);
        button1.Enabled = false;
        button2.Enabled = true;
        button3.Enabled = true;
        comboBox1.Enabled = false;
        vScrollBar1.Enabled = false;
    }
}
// Reads the current word aloud: first letter-by-letter ("c a t"), then the
// whole word after a long pause.
// BUG FIX: the original indexed wordAsArray[Length - 1], which throws on an
// empty label; an empty string passed the old guard. string.Join replaces
// the manual StringBuilder loop.
private void readWord()
{
    // Skip the placeholder caption and empty text.
    if (string.IsNullOrEmpty(lblWord.Text) || lblWord.Text.ToUpper() == "WORD")
    {
        return;
    }
    if (textReader != null)
    {
        textReader.Dispose();
    }
    textReader = new SpeechSynthesizer();
    textReader.SelectVoiceByHints(VoiceGender.Male, VoiceAge.Senior);
    textReader.Volume = 100;
    textReader.Rate = -6;  // slow, for spelling clarity
    string spelled = string.Join(" ", lblWord.Text.ToCharArray());
    textReader.SpeakAsync(spelled + "\t\t\t" + lblWord.Text);
}
// Logs the shutdown, detaches this plugin's entity-update handler so it no
// longer receives DeviceValue notifications, then releases the synthesizer.
public override async Task StopAsync()
{
    await Log.ReportInfoFormatAsync(CancellationToken, "{0} stopped", Name);

    NotifyEntityChangeContext.ChangeNotifications<DeviceValue>.OnEntityUpdated
        -= SpeechPlugin_OnEntityUpdated;

    _synth.Dispose();
}
// Announces "Confirmar" plus the labeled item when the button gains focus.
// FIX: removed a stray empty statement (";") from the original.
private void btncongon1_Enter(object sender, EventArgs e)
{
    ltr.Dispose();
    ltr = new SpeechSynthesizer();
    ltr.SpeakAsync("Confirmar" + label1.Text);
}
// Validates the email field when focus leaves it, speaking a warning for an
// invalid address.
// BUG FIX: when the field is empty it is refilled with the placeholder
// "Email", which contains no '@' or '.', so the original then spoke
// "invalid email" at a user who merely tabbed through the empty field.
// The placeholder is now treated as "no input" and skips validation.
private void textBox3_Leave(object sender, EventArgs e)
{
    // Restore the placeholder when the field is left empty.
    if (textBox3.Text == "")
    {
        label5.Visible = false;
        textBox3.Text = "Email";
    }
    if (textBox3.Text == "Email")
    {
        label13.Text = null;
        return;
    }
    String email = textBox3.Text;
    if (email.IndexOf('@') == -1 || email.IndexOf('.') == -1)
    {
        label13.Text = "Invalid Email";
        label13.ForeColor = Color.White;
        label13.BackColor = Color.Red;
        ss.Dispose();
        ss = new SpeechSynthesizer();
        ss.SpeakAsync("Please enter a valid email address.");
    }
    else
    {
        label13.Text = null;
    }
}
// Reads rtxText aloud using the voice selected in cmbLanguages.
// BUG FIX: the original selected the voice on the OLD synthesizer, then
// disposed it and created a fresh one — the chosen voice was never used.
// It also set Rate/Volume only after SpeakAsync had started. The new
// instance is now fully configured before speaking.
private void btnPlay_Click(object sender, EventArgs e)
{
    // Nothing to read.
    if (rtxText.Text == "")
    {
        return;
    }
    Reader.Dispose();
    Reader = new SpeechSynthesizer();
    //Die Voice wählen — on the instance that will actually speak
    try
    {
        Reader.SelectVoice(cmbLanguages.Text);
    }
    catch
    {
        MessageBox.Show("Wählen sie erst eine Voice/Sprache!");
        return;
    }
    Reader.Rate = trbSpeed.Value;     // reading speed
    Reader.Volume = trbVolume.Value;  // reading volume
    Reader.SpeakAsync(rtxText.Text);
}
// Speaks the given text with a male voice.
// BUG FIX: SpeakCompleted is subscribed BEFORE SpeakAsync so the completion
// of a very short utterance cannot fire before the handler is attached.
public void speachThis(string text)
{
    reader.Dispose();
    reader = new SpeechSynthesizer();
    reader.SelectVoiceByHints(VoiceGender.Male);
    reader.SpeakCompleted += new EventHandler<SpeakCompletedEventArgs>(reader_SpeakCompleted);
    reader.SpeakAsync(text);
}
// Speaks `s` with a female voice at the default rate, replacing any
// synthesizer that was previously in use.
public void speak(string s)
{
    read.Dispose();
    SpeechSynthesizer fresh = new SpeechSynthesizer();
    fresh.SelectVoiceByHints(VoiceGender.Female);
    fresh.Rate = 0;  // normal speed
    read = fresh;
    read.SpeakAsync(s);
}
/// <summary>
/// Stops any narration in progress by swapping in a fresh synthesizer,
/// leaving the object ready to start another one.
/// </summary>
public void RestartSpeak()
{
    if (speechSynthesizer == null)
    {
        return;
    }
    speechSynthesizer.Dispose();
    speechSynthesizer = new SpeechSynthesizer();
}
// Start button: optionally delays, then speaks the rich-text content with
// the selected voice and locks the UI into "playing" state.
// BUG FIX: SpeakCompleted is now subscribed BEFORE SpeakAsync so a very
// short utterance cannot finish before the handler is attached.
private void Btn_Start_Click(object sender, EventArgs e)
{
    //On Start Button Click
    try
    {
        // Optional start delay in seconds from the combo box.
        int delay = delay_Cbx.Text == "" ? 0 : Convert.ToInt32(delay_Cbx.Text) * 1000;
        // NOTE(review): Thread.Sleep blocks the UI thread for the whole
        // delay; a timer or async delay would keep the window responsive.
        System.Threading.Thread.Sleep(delay);
        speechSynthesizerObj.Dispose(); //clean cache
        if (richTextBox1.Text != "")
        {
            speechSynthesizerObj = new SpeechSynthesizer();
            //voice module male/female selection
            if (voice_Cbx.Text == "Male")
            {
                speechSynthesizerObj.SelectVoiceByHints(VoiceGender.Male);
            }
            else if (voice_Cbx.Text == "Female")
            {
                speechSynthesizerObj.SelectVoiceByHints(VoiceGender.Female);
            }
            else
            {
                //error incase of invalid voice selection
                throw new Exception("Invalid Voice Type");
            }
            //enabling pause,stop button. disabling start, delay, voice, save, output, clear, text box
            btn_Pause.Enabled = true;
            btn_Stop.Enabled = true;
            btn_Start.Enabled = false;
            delay_Cbx.Enabled = false;
            voice_Cbx.Enabled = false;
            btn_Save.Enabled = false;
            output_Cbx.Enabled = false;
            btn_Clear.Enabled = false;
            richTextBox1.Enabled = false;
            speechSynthesizerObj.SpeakCompleted += new EventHandler<SpeakCompletedEventArgs>(SpeechComplete);
            speechSynthesizerObj.SpeakAsync(richTextBox1.Text); //play text box audio
        }
        else
        {
            //if text box is empty. throw error
            throw new Exception("Container is Empty!");
        }
    }
    catch (Exception ex)
    {
        //catch unexpected exception
        MessageBox.Show(ex.Message);
    }
}
// Announces the called ticket: speaks its letters plus the service position.
// FIXES: the tipo==1 branch and the longitud>5 branch were byte-for-byte
// identical, so they are collapsed into one condition; Dispose() is moved to
// a finally block so the synthesizer is released even when the ticket label
// is empty or Speak throws (the original leaked in both cases).
private void ReproducirAudioLlamadoTicket()
{
    v_voz = new SpeechSynthesizer();
    lblTicket.Text = v_ticket;
    lblPosicion.Text = v_posicion;
    try
    {
        if (lblTicket.Text != "")
        {
            //v_voz.SelectVoice("Vocalizer Expressive Angelica Harpo 22kHz");
            v_voz.SelectVoice(Pro_Voz);
            v_voz.SetOutputToDefaultAudioDevice();
            if (v_tipo_ticket == 1 || v_longitud_ticket > 5)
            {
                // Long-form ticket: pairs of letters grouped together.
                v_voz.Speak(v_primera_letra + "," + v_segunda_letra + "," + v_tercera_letra + v_cuarta_letra + ", " + v_quinta_letra + v_sexta_letra + ", " + Pro_Texto_Descriptivo + lblPosicion.Text);
            }
            else
            {
                // Short ticket: letters spoken more individually.
                v_voz.Speak(v_primera_letra + "," + v_segunda_letra + ", " + v_tercera_letra + ", " + v_cuarta_letra + v_quinta_letra + ", " + Pro_Texto_Descriptivo + lblPosicion.Text);
            }
        }
    }
    finally
    {
        v_voz.Dispose();
        // Clear the per-call state fields.
        v_primera_letra = null;
        v_segunda_letra = null;
        v_tercera_letra = null;
        v_cuarta_letra = null;
        v_quinta_letra = null;
        v_sexta_letra = null;
        v_ticket = null;
        v_posicion = null;
    }
}
// Reads the editor contents aloud; does nothing when the editor is empty.
private void button2_Click(object sender, EventArgs e)
{
    string content = editor.Text;
    if (content == "")
    {
        return;
    }
    reader.Dispose();
    reader = new SpeechSynthesizer();
    reader.SpeakAsync(content);
}
// Navigates back to the Home screen, releasing the speech synthesizer first;
// closing the Home form also closes this (hidden) one.
private void btnPrevious_Click(object sender, EventArgs e)
{
    speaker.Dispose();
    var home = new Home();
    home.FormClosed += (s, args) => this.Close();
    this.Hide();
    home.ShowDialog();
}
// Reads the source-text box aloud unless it is empty.
private void btnVoice_Click(object sender, EventArgs e)
{
    if (tbFrom.Text == string.Empty)
    {
        return;
    }
    Synthesizer.Dispose();
    Synthesizer = new SpeechSynthesizer();
    Synthesizer.SpeakAsync(tbFrom.Text);
}
// Navigates back to Step 1, releasing the speech synthesizer first;
// closing the Step1 form also closes this (hidden) one.
private void btnPrevious_Click(object sender, EventArgs e)
{
    speaker.Dispose();
    var previous = new Step1();
    previous.FormClosed += (s, args) => this.Close();
    this.Hide();
    previous.ShowDialog();
}
// Ends the test: releases the synthesizer, converts the word map into a
// question list, and shows the results dialog.
private void DisplayTestResults()
{
    _speechSynthesizer.Dispose();
    var questions = _wordBank.WordMap.ToQuestionList();
    new Results(questions).ShowDialog();
}
// Advances to Step 3, recording the step-2 completion timestamp and
// releasing the speech synthesizer first.
private void btnNext_Click(object sender, EventArgs e)
{
    speaker.Dispose();
    Settings.step2c = DateTime.Now;
    var next = new Step3();
    next.FormClosed += (s, args) => this.Close();
    this.Hide();
    next.ShowDialog();
}
// Stop button: disposes the active synthesizer and returns the control
// panel to its idle state.
private void button4_Click(object sender, EventArgs e)
{
    if (computer == null)
    {
        return;
    }
    computer.Dispose();
    label3.Text = "IDLE";
    button2.Enabled = false;
    button3.Enabled = false;
    button4.Enabled = false;
}
// Speaks the rich-text content and enables the pause/stop controls;
// empty input is silently ignored.
private void btnspeak_Click(object sender, EventArgs e)
{
    speech.Dispose();
    if (richTextBox1.Text == "")
    {
        return;
    }
    speech = new SpeechSynthesizer();
    speech.SpeakAsync(richTextBox1.Text);
    btnpause.Enabled = true;
    btnstop.Enabled = true;
}
// Stops playback by disposing the reader and resets the transport buttons
// to their idle configuration.
private void buttonStop_Click(object sender, EventArgs e)
{
    if (reader == null)
    {
        return;
    }
    reader.Dispose();
    buttonStart.Enabled = true;
    buttonPause.Enabled = false;
    buttonStop.Enabled = false;
    buttonResume.Enabled = false;
}
// Shuts the application down: unhooks the global hotkeys and the clipboard
// viewer, releases the synthesizer, flags the exit, and quits.
private void ExitSpeakAloud()
{
    _speakAloudInfo.UnregisterHotKeys(this.Handle);
    UnregisterClipboardViewer();
    _speachSyn?.Dispose();
    _speachSyn = null;
    _applicationExitInitiated = true;
    Application.Exit();
}
/// <summary>
/// Voice test: plays a sample through the Aip SDK, then speaks locally.
/// BUG FIX: the original called SpeakAsyncCancelAll() on the freshly created
/// synthesizer — a no-op; pending speech is now cancelled on the OLD
/// instance before it is disposed. (Original summary "测试声音" = "test sound".)
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void button1_Click(object sender, EventArgs e)
{
    AipSdkHelp.Speech("aaaaaaaaaaaaa", 5, 7, 4);
    if (synth != null)
    {
        synth.SpeakAsyncCancelAll();
        synth.Dispose();
    }
    synth = new SpeechSynthesizer();
    synth.SpeakAsync("aaaaaaaaaaa");
}
// Concatenates every item in listBox1 and reads the result aloud.
// BUG FIX: the original walked the list by assigning SelectedIndex, which
// fires selection events and leaves the last row selected as a UI side
// effect; the items are now read directly from the Items collection.
private void button3_Click(object sender, EventArgs e)
{
    an = "";
    for (int j = 0; j < listbox1length; j++)
    {
        an += listBox1.Items[j].ToString();
    }
    reader2.Dispose();
    reader2 = new SpeechSynthesizer();
    reader2.SpeakAsync(an);
}
// Reads richTextBox1 aloud asynchronously and unlocks the pause/stop
// buttons; empty input is ignored.
private void BtnSpeak_Click(object sender, EventArgs e)
{
    // Drop the previous synthesizer before (possibly) creating a new one.
    speechSynthesizerObj.Dispose();
    if (richTextBox1.Text == "")
    {
        return;
    }
    speechSynthesizerObj = new SpeechSynthesizer();
    speechSynthesizerObj.SpeakAsync(richTextBox1.Text);
    BtnPause.Enabled = true;
    BtnStop.Enabled = true;
}
// Speaks the rich-text content, or prompts the user when it is empty.
private void button1_Click(object sender, EventArgs e)
{
    if (richTextBox1.Text == "")
    {
        MessageBox.Show("Please enter some text first!!");
        return;
    }
    reader.Dispose();
    reader = new SpeechSynthesizer();
    reader.SpeakAsync(richTextBox1.Text);
}
// Plays a TTS rendering of `message` into the given audio/video flow via a
// SpeechSynthesisConnector; the connector and synthesizer are torn down by
// the SpeakCompleted handler once speech finishes.
public void SpeakMessage(AudioVideoFlow flow, string message)
{
    try
    {
        var synth = new SpeechSynthesizer();
        var formatInfo = new SpeechAudioFormatInfo(16000, AudioBitsPerSample.Sixteen, Microsoft.Speech.AudioFormat.AudioChannel.Mono);
        var connector = new SpeechSynthesisConnector();

        synth.SetOutputToAudioStream(connector.Stream, formatInfo);
        connector.AttachFlow(flow);
        connector.Start();

        // Stop the connector before disposing the synthesizer, in that order.
        synth.SpeakCompleted += (sender, args) =>
        {
            connector.Stop();
            synth.Dispose();
        };
        synth.SpeakAsync(message);
    }
    catch (Exception ex)
    {
        Console.WriteLine("Failed to play the message. {0}", ex);
    }
}
// Renders `toBeVoiced` to a wav file with the requested voice and rate, then
// converts it to mp3. args: [0]=wav name, [1]=path, [2]=text, [3]=rate,
// [4]=voice name; '~' in the string arguments stands for a space.
// FIXES: removed a stray empty statement; `using` releases the wav handle
// even when SelectVoice/Speak throws; the swallowed exception is now logged.
static void Main(string[] args)
{
    string voiceFileName = args[0];
    // NOTE(review): the mp3 name is derived BEFORE the '~'->' ' substitution
    // below, so it can still contain '~' — confirm whether that is intended.
    string voiceFileNamemp3 = voiceFileName.Replace(".wav", ".mp3");
    string voiceFilePath = args[1];
    string toBeVoiced = args[2];
    int rate = int.Parse(args[3]);
    string voice = args[4];
    voiceFileName = voiceFileName.Replace("~", " ");
    voiceFilePath = voiceFilePath.Replace("~", " ");
    toBeVoiced = toBeVoiced.Replace("~", " ");
    voice = voice.Replace("~", " ");
    try
    {
        using (var reader = new SpeechSynthesizer())
        {
            reader.Rate = rate;
            reader.SelectVoice(voice);
            reader.SetOutputToWaveFile(voiceFilePath + voiceFileName, new SpeechAudioFormatInfo(16025, AudioBitsPerSample.Sixteen, AudioChannel.Mono));
            reader.Speak(toBeVoiced);
        }
        // The synthesizer must be disposed (flushing the wav) before converting.
        WaveToMP3(voiceFilePath + voiceFileName, voiceFilePath + voiceFileNamemp3);
    }
    catch (Exception er)
    {
        // Previously swallowed into an unused local; at least surface it.
        Console.Error.WriteLine(er.Message);
    }
}
// Reads the top stories of every category in `categoryIds` aloud, updating
// the category image and the title/summary text blocks as it goes.
// NOTE(review): `voices` is a deferred LINQ query, re-enumerated by Count()/
// ElementAt() for every item; NOTE(review): the synthesizer is created per
// item but NOT disposed when `continue` is taken or when the empty catch
// swallows an exception — possible leak; confirm and consider try/finally.
public async void ReadOutLoud()
{
    // Prefer an installed en-US female voice, if any.
    IEnumerable<VoiceInformation> voices = from voice in InstalledVoices.All
                                           where voice.Language == "en-US" && voice.Gender == VoiceGender.Female
                                           select voice;
    // Set the voice as identified by the query.
    //try
    //{
    //    if(voices.Count() > 0)
    //        synth.SetVoice(voices.ElementAt(0));
    //}
    //catch
    //{
    //}
    ICategoryRepository categoryRepository = new XmlCategoryRepository();
    foreach (string id in categoryIds)
    {
        CategoryData category = categoryRepository.GetCategoryById(Convert.ToInt32(id));
        //this.CategoryName.Text = category.Name;
        // Show the category's image while its stories are read.
        Uri imageUri = new Uri(category.Image, UriKind.Relative);
        var bitmap = new BitmapImage(imageUri);
        this.CategoryImage.Source = bitmap;
        // Parse the category's bundled RSS feed from app resources.
        RssParser rssParser = new RssParser();
        Uri feedUri = new Uri(category.Feed, UriKind.Relative);
        StreamResourceInfo sri = Application.GetResourceStream(feedUri);
        rssParser.initializeLocal(sri.Stream);
        List<NewsItem> topStories = rssParser.getTopStories();
        foreach (NewsItem newsItem in topStories)
        {
            try
            {
                synth = new SpeechSynthesizer();
                if (voices.Count() > 0)
                    synth.SetVoice(voices.ElementAt(0));
                // Skip stories with no body text.
                if (String.IsNullOrEmpty(newsItem.description))
                    continue;
                this.CategoryName.Text = newsItem.title;
                this.SummaryBlock.Text = newsItem.description;
                string textToRead = newsItem.title + ".\n" + newsItem.source + " reports, " + newsItem.description + "\n\n";
                await synth.SpeakTextAsync(textToRead);
                synth.Dispose();
            }
            catch
            {
                // NOTE(review): errors (including TTS failures) are silently
                // ignored and the loop moves to the next story.
            }
        }
    }
}
// Renders `toBeVoiced` to a wav file with the requested voice/rate, lists
// the installed voices for diagnostics, then converts the wav to mp3.
// args: [0]=wav name, [1]=path, [2]=text, [3]=rate, [4]=voice name;
// '~' in the string arguments stands for a space.
// FIXES: removed a stray empty statement; `using` releases the wav handle
// even when SelectVoice/Speak throws; the swallowed exception is now logged.
static void Main(string[] args)
{
    string voiceFileName = args[0];
    // NOTE(review): the mp3 name is derived BEFORE the '~'->' ' substitution
    // below, so it can still contain '~' — confirm whether that is intended.
    string voiceFileNamemp3 = voiceFileName.Replace(".wav", ".mp3");
    string voiceFilePath = args[1];
    string toBeVoiced = args[2];
    int rate = int.Parse(args[3]);
    string voice = args[4];
    voiceFileName = voiceFileName.Replace("~", " ");
    voiceFilePath = voiceFilePath.Replace("~", " ");
    toBeVoiced = toBeVoiced.Replace("~", " ");
    voice = voice.Replace("~", " ");
    // Output information about all of the installed voices.
    using (SpeechSynthesizer synth = new SpeechSynthesizer())
    {
        Console.WriteLine("Installed voices -");
        foreach (InstalledVoice voice2 in synth.GetInstalledVoices())
        {
            VoiceInfo info = voice2.VoiceInfo;
            Console.WriteLine(" Voice Name: " + info.Name);
        }
    }
    try
    {
        using (var reader = new SpeechSynthesizer())
        {
            reader.Rate = rate;
            reader.SelectVoice(voice);
            reader.SetOutputToWaveFile(voiceFilePath + voiceFileName, new SpeechAudioFormatInfo(16025, AudioBitsPerSample.Sixteen, AudioChannel.Mono));
            reader.Speak(toBeVoiced);
        }
        // The synthesizer must be disposed (flushing the wav) before converting.
        WaveToMP3(voiceFilePath + voiceFileName, voiceFilePath + voiceFileNamemp3);
    }
    catch (Exception er)
    {
        // Previously swallowed into an unused local; at least surface it.
        Console.Error.WriteLine(er.Message);
    }
}
// Synthesizes `text` to a stream and hands it to a MediaElement, logging all
// available voices first.
// NOTE(review): Play() is followed immediately by Stop(), which halts
// playback as soon as it begins, and the MediaElement is a local that is
// never added to the visual tree — confirm whether any audio is actually
// heard; this looks like a stub or debugging leftover.
private static async void _Speak(string text)
{
    MediaElement mediaElement = new MediaElement();
    SpeechSynthesizer synth = new SpeechSynthesizer();
    // Log every installed voice for diagnostics.
    foreach (VoiceInformation voice in SpeechSynthesizer.AllVoices)
    {
        Debug.WriteLine(voice.DisplayName + ", " + voice.Description);
    }
    // Synthesize the text into an in-memory audio stream.
    SpeechSynthesisStream stream = await synth.SynthesizeTextToStreamAsync(text);
    // Send the stream to the media object.
    mediaElement.SetSource(stream, stream.ContentType);
    mediaElement.Play();
    mediaElement.Stop();
    synth.Dispose();
}
// Command-line TTS: args[0]=language (ja|en|ko|zh), args[1]=UTF-8 input text
// file, args[2]=output wav path. Exits with code 1 on an unknown language.
// FIX: the synthesizer is now wrapped in `using` so it (and the wav handle)
// is released even when reading the file or speaking throws.
public static void Main(string[] args)
{
    using (SpeechSynthesizer s = new SpeechSynthesizer())
    {
        string lang = args[0].ToLower();
        if (lang.Equals("ja")) s.SelectVoice("Microsoft Server Speech Text to Speech Voice (ja-JP, Haruka)");
        else if (lang.Equals("en")) s.SelectVoice("Microsoft Server Speech Text to Speech Voice (en-US, ZiraPro)");
        else if (lang.Equals("ko")) s.SelectVoice("Microsoft Server Speech Text to Speech Voice (ko-KR, Heami)");
        else if (lang.Equals("zh")) s.SelectVoice("Microsoft Server Speech Text to Speech Voice (zh-CN, HuiHui)");
        else Environment.Exit(1);
        string textfile = args[1];
        string text = System.IO.File.ReadAllText(textfile, System.Text.Encoding.UTF8);
        string wavefile = args[2];
        s.Volume = 100;
        s.SetOutputToWaveFile(wavefile, new SpeechAudioFormatInfo(48000, AudioBitsPerSample.Sixteen, AudioChannel.Stereo));
        s.Speak(text);
    }
}
// Converts each line of textBox1 into its own .wav file under .\Output.
// FIXES: one synthesizer is now reused for every line instead of allocating
// a new one per iteration, `using` guarantees disposal even if Speak throws,
// and the format/voice/output-path setup is hoisted out of the loop (Speak
// blocks the UI thread, so the controls cannot change mid-loop).
private void button1_Click(object sender, EventArgs e)
{
    if (string.IsNullOrEmpty(textBox1.Text))
    {
        MessageBox.Show(this, "Write some text to start.", "Empty text", MessageBoxButtons.OK, MessageBoxIcon.Asterisk);
        return;
    }
    string[] texts = textBox1.Text.Split('\n');
    progressBar1.Value = 0;
    progressBar1.Maximum = texts.Length;
    var bits = radio8Bits.Checked ? AudioBitsPerSample.Eight : AudioBitsPerSample.Sixteen;
    var channel = radioChannelMono.Checked ? AudioChannel.Mono : AudioChannel.Stereo;
    var format = new SpeechAudioFormatInfo(int.Parse(cmbSamples.Text), bits, channel);
    string filePath = Directory.GetCurrentDirectory() + @"\Output\";
    if (!Directory.Exists(filePath))
    {
        Directory.CreateDirectory(filePath);
    }
    using (var reader = new SpeechSynthesizer())
    {
        //reader.SpeakCompleted += new EventHandler<SpeakCompletedEventArgs>(synth_SpeakCompleted);
        reader.Rate = trackRate.Value;
        reader.Volume = trackVolume.Value;
        reader.SelectVoice(((VoiceInfo)cmbVoice.SelectedItem).Name);
        for (int i = 0; i < texts.Length; i++)
        {
            reader.SetOutputToWaveFile(filePath + GetAudioFileName(texts[i]), format);
            reader.Speak(GetAudioText(texts[i]));
            progressBar1.Value++;
        }
    }
    MessageBox.Show(this, "All done. Check .wav files on 'Output' folder.", "Finish", MessageBoxButtons.OK, MessageBoxIcon.Information);
}
// Renders each line of p_aryLines to "<p_strPath>\<p_aryNames[i]>.wav" with
// progress feedback, then closes the form.
// FIXES: the SpeechAudioFormatInfo built from the rate/bits/channels
// parameters was constructed but NEVER passed to SetOutputToWaveFile, so
// those parameters had no effect — it is now used; the initial label mixed
// Step with p_aryNames.Count while the loop used p_aryLines.Count — made
// consistent; `using` disposes the synthesizer even if Speak throws.
public void Generate(List<string> p_aryNames, List<string> p_aryLines, string p_strPath, int p_nRate, AudioBitsPerSample p_samples, AudioChannel p_channels)
{
    SpeechAudioFormatInfo t_audioFormatInfo = new SpeechAudioFormatInfo(p_nRate, p_samples, p_channels);
    progressBar1.Maximum = p_aryLines.Count;
    progressBar1.Step = 1;
    label1.Text = "0/" + p_aryLines.Count;
    using (SpeechSynthesizer t_synth = new SpeechSynthesizer())
    {
        for (int t_i = 0; t_i < p_aryNames.Count; ++t_i)
        {
            t_synth.SetOutputToWaveFile(p_strPath + "\\" + p_aryNames[t_i] + ".wav", t_audioFormatInfo);
            t_synth.Speak(p_aryLines[t_i]);
            label1.Text = (t_i + 1) + "/" + p_aryLines.Count;
            progressBar1.PerformStep();
            progressBar1.Refresh();
        }
    }
    Close();
}
// Hides the window (this runs as a tray/background TTS service), wires up two
// polling timers — one draining the concurrent request queue, one rendering
// queued requests to audio — and starts the HTTP control server.
protected override void OnLoad(EventArgs e)
{
    Visible = false;
    ShowInTaskbar = false;
    base.OnLoad(e);
    /*
     * Get all installed voices
     * */
    var voices = speech.GetInstalledVoices();
    string voice = "";
    // NOTE(review): the assignment to `voice` is commented out, so `voice`
    // stays "" and this loop only logs the enabled voice names.
    foreach (InstalledVoice v in voices)
    {
        if (v.Enabled)
            //voice = v.VoiceInfo.Name;
            Console.WriteLine(v.VoiceInfo.Name);
    }
    // Every 250 ms: move requests from the thread-safe intake queue onto the
    // internal speech queue, honoring interrupt/reset flags, and push a
    // status event to the GUI.
    queuetimer = new System.Timers.Timer(250);
    queuetimer.Elapsed += (object sender, ElapsedEventArgs ev) =>
    {
        TTSRequest r;
        if (Queue.TryDequeue(out r))
        {
            Console.WriteLine("dequeing off of concurrent queue...");
            if (r.Interrupt)
            {
                // stop current TTS
                if (IsSpeaking)
                {
                    //speech.StopSpeaking();
                }
                if (IsSounding)
                {
                    //sound.Stop();
                    if (sound.PlaybackState == PlaybackState.Playing)
                    {
                        sound.Stop();
                    }
                }
                // clear queue
                SpeechQueue.Clear();
            }
            // A Reset request empties the queue without being spoken itself.
            if (!r.Reset)
            {
                SpeechQueue.Enqueue(r);
            }
            RequestCount++;
        }
        // Status snapshot for the GUI.
        var eventdata = new Hashtable();
        eventdata.Add("ProcessedRequests", RequestCount);
        eventdata.Add("QueuedRequests", SpeechQueue.Count);
        eventdata.Add("IsSpeaking", IsSounding);
        InstrumentationEvent blam = new InstrumentationEvent();
        blam.EventName = "status";
        blam.Data = eventdata;
        NotifyGui(blam.EventMessage());
    };
    // when this timer fires, it will pull off of the speech queue and speak it
    // the long delay also adds a little pause between tts requests.
    speechtimer = new System.Timers.Timer(250);
    speechtimer.Elapsed += (object sender, ElapsedEventArgs ev) =>
    {
        if (IsSpeaking.Equals(false))
        {
            if (SpeechQueue.Count > 0)
            {
                TTSRequest r = SpeechQueue.Dequeue();
                Console.WriteLine("dequeuing off of speech queue");
                IsSpeaking = true;
                speechtimer.Enabled = false;
                //speech.SpeakAsync(r.Text);
                //using (speech = new SpeechSynthesizer()) {
                // Synthesize to an in-memory A-law stream (8 kHz mono).
                speech = new SpeechSynthesizer();
                speech.SpeakCompleted += speech_SpeakCompleted;
                format = new SpeechAudioFormatInfo(EncodingFormat.ALaw, 8000, 8, 1, 1, 2, null);
                //format = new SpeechAudioFormatInfo(11025, AudioBitsPerSample.Sixteen, AudioChannel.Mono);
                // var si = speech.GetType().GetMethod("SetOutputStream", BindingFlags.Instance | BindingFlags.NonPublic);
                stream = new MemoryStream();
                //si.Invoke(speech, new object[] { stream, format, true, true });
                //speech.SetOutputToWaveStream(stream);
                speech.SetOutputToAudioStream(stream, format);
                speech.SelectVoice(config.getVoice(r.Language, r.Voice));
                // Map request speed 0..10 to synthesizer rate -10..10.
                int rate = (r.Speed * 2 - 10);
                Console.WriteLine(rate);
                try
                {
                    speech.Rate = rate;
                }
                catch (ArgumentOutOfRangeException ex)
                {
                    speech.Rate = 0;  // fall back to the default rate
                }
                speech.SpeakAsync(r.Text);
                //}
                // Block this timer thread until SpeakCompleted signals.
                synthesis.WaitOne();
                speech.SpeakCompleted -= speech_SpeakCompleted;
                speech.SetOutputToNull();
                speech.Dispose();
                //IsSpeaking = false;
                IsSounding = true;
                stream.Position = 0;
                //WaveFormat.CreateCustomFormat(WaveFormatEncoding.WmaVoice9, 11025, 1, 16000, 2, 16)
                // Decode the A-law stream to PCM and play it on the chosen device.
                using (RawSourceWaveStream reader = new RawSourceWaveStream(stream, WaveFormat.CreateALawFormat(8000, 1)))
                {
                    WaveStream ws = WaveFormatConversionStream.CreatePcmStream(reader);
                    //var waveProvider = new MultiplexingWaveProvider(new IWaveProvider[] { ws }, 4);
                    //waveProvider.ConnectInputToOutput(0, 3);
                    sound = new WaveOutEvent();
                    // set output device *before* init
                    Console.WriteLine("Output Device: " + OutputDeviceId);
                    sound.DeviceNumber = OutputDeviceId;
                    sound.Init(ws);
                    //sound.Init(waveProvider);
                    sound.PlaybackStopped += output_PlaybackStopped;
                    // Console.WriteLine("playing here " + ws.Length);
                    sound.Play();
                }
                // Block until PlaybackStopped signals, then re-arm the timer.
                playback.WaitOne();
                //IsSounding = false;
                speechtimer.Enabled = true;
            }
        }
    };
    queuetimer.Enabled = true;
    queuetimer.Start();
    speechtimer.Enabled = true;
    speechtimer.Start();
    InitHTTPServer();
}
// Speaks the current round's word aloud.
// FIX: `using` disposes the synthesizer even when SpeakTextAsync throws
// (the original leaked it on the exception path).
private async void SpeekCurrentRound()
{
    using (SpeechSynthesizer synth = new SpeechSynthesizer())
    {
        await synth.SpeakTextAsync(currentRound.Word);
    }
}
// Renders `toBeVoiced` to a timestamped wav file (intended to then become an
// mp3) and inserts two rows into the voicing table for the appointment: one
// "live" (L) and one "voice mail" (V) record, both referencing the mp3 name.
// NOTE(review): the wav->mp3 conversion call is commented out, yet the wav
// file is deleted and both DB rows reference the mp3 name — the referenced
// file may never exist; confirm before shipping.
private static void CreateVoiceFile(VoicingParameters voicingParameters, string toBeVoiced, string dateOfApplication, string timeOfApplication)
{
    string fileName = string.Empty;
    string mp3FileName = string.Empty;
    SpeechSynthesizer reader;
    string sequence = string.Empty;
    string liveOrVoice = string.Empty;
    fileName = Common.GetFileNameFromDateTimeString() + ".wav";
    //Create voice file
    //Save voice file
    //Insert live into voicing table
    //Insert voice mail into voicing table
    reader = new SpeechSynthesizer();
    // reader.
    reader.Volume = 100;
    reader.Rate = voicingParameters.Rate;
    reader.SelectVoice(voicingParameters.VoiceName);
    //reader.Rate = -2;
    //reader.SelectVoice("Microsoft Zira Desktop");
    try
    {
        //reader.SetOutputToWaveFile(voiceFilePath + fileName, new SpeechAudioFormatInfo(11025, AudioBitsPerSample.Sixteen, AudioChannel.Mono));
        //reader.SetOutputToWaveFile(voicingParameters.VoiceFilePath + fileName, new SpeechAudioFormatInfo(16025, AudioBitsPerSample.Sixteen, AudioChannel.Mono));
        reader.SetOutputToWaveFile(voicingParameters.VoiceFilePath + fileName, new SpeechAudioFormatInfo(48000, AudioBitsPerSample.Sixteen, AudioChannel.Stereo));
        //********************************************************************************
        //used for testing
        //reader.SetOutputToWaveFile("\\\\1lake-reminder2\\c\\Waldenltd\\voicetest\\" + department + ".wav", new SpeechAudioFormatInfo(11025, AudioBitsPerSample.Eight, AudioChannel.Mono));
        //****************************************************************************************
        reader.Speak(toBeVoiced);
        reader.Dispose();
        mp3FileName = fileName.Replace(".wav", ".mp3");
        //ConvertWavFileTOMP3.WaveToMP3(voicingParameters.VoiceFilePath + fileName
        //    , voicingParameters.VoiceFilePath + mp3FileName);
        File.Delete(voicingParameters.VoiceFilePath + fileName);
    }
    catch (Exception er)
    {
        // NOTE(review): the exception is captured into an unused local and
        // otherwise ignored — failures here are invisible.
        string s1 = er.Message;
    }
    //insert live
    sequence = "1";
    liveOrVoice = "L";
    HelpDeskTickets.Model.Voicing voicing = new Model.Voicing();
    PatientRecords patientRecords = new PatientRecords();
    try
    {
        voicing.NameID = int.Parse(voicingParameters.NameID);
        voicing.TelephoneID = int.Parse(voicingParameters.TelephoneID);
        voicing.CompanyID = int.Parse(voicingParameters.CompanyId);
        voicing.AppDate = dateOfApplication;
        voicing.AppTime = timeOfApplication;
        voicing.FileName = mp3FileName;
        voicing.Sequence = int.Parse(sequence);
        voicing.pPrevious = "Null";
        voicing.nNext = "Null";
        voicing.LiveorVoice = liveOrVoice;
        patientRecords.InsertVoiceRecord(voicing);
    }
    catch (Exception er)
    {
        //Log.WhichProgram = "Labcorp Interface";
        Log.LogMessage(er.ToString());
    }
    //insert voice
    sequence = "1";
    liveOrVoice = "V";
    try
    {
        voicing = new Model.Voicing();
        voicing.NameID = int.Parse(voicingParameters.NameID);
        voicing.TelephoneID = int.Parse(voicingParameters.TelephoneID);
        voicing.CompanyID = int.Parse(voicingParameters.CompanyId);
        voicing.AppDate = dateOfApplication;
        voicing.AppTime = timeOfApplication;
        voicing.FileName = mp3FileName;
        voicing.Sequence = int.Parse(sequence);
        voicing.pPrevious = "Null";
        voicing.nNext = "Null";
        voicing.LiveorVoice = liveOrVoice;
        patientRecords.InsertVoiceRecord(voicing);
    }
    catch (Exception er)
    {
        //Log.WhichProgram = "Labcorp Interface";
        Log.LogMessage(er.ToString());
    }
}
// Populates cmbVoice with the installed TTS voices; warns and closes the
// form when none are available.
// FIX: the probe synthesizer is now wrapped in `using` so it is released
// even when an exception occurs (the original only disposed on the normal
// exit path).
private void Form1_Load(object sender, EventArgs e)
{
    IList<VoiceInfo> voiceInfos = new List<VoiceInfo>();
    using (var reader = new SpeechSynthesizer())
    {
        var installedVoices = reader.GetInstalledVoices();
        if (installedVoices.Count == 0)
        {
            MessageBox.Show(this, "Your system don't have a 'Text to Speech' to make this work. Try install one for continue.", "Finish", MessageBoxButtons.OK, MessageBoxIcon.Information);
            this.Close();
        }
        else
        {
            foreach (InstalledVoice voice in installedVoices)
            {
                voiceInfos.Add(voice.VoiceInfo);
            }
            cmbVoice.DataSource = voiceInfos;
            cmbVoice.DisplayMember = "Name";
            cmbVoice.ValueMember = "Id";
        }
    }
}
// Console demo: lists the installed voices (with their metadata), then
// generates and speaks a voice password for each test name.
public static void Main(string[] args)
{
    Console.Title = "Voice Testing";
    SpeechSynthesizer synth = new SpeechSynthesizer();
    // NOTE(review): requests the 3rd matching male child voice; if no such
    // voice is installed the hint falls back per SelectVoiceByHints rules —
    // confirm which voice is actually used on the target machine.
    synth.SelectVoiceByHints(VoiceGender.Male, VoiceAge.Child, 3);
    // Dump every installed voice plus its AdditionalInfo key/value pairs.
    foreach (var voice in synth.GetInstalledVoices())
    {
        Console.WriteLine("Voice: ID[{0}] Name[{1}] Gender[{2}] Age[{3}]", voice.VoiceInfo.Id, voice.VoiceInfo.Name, voice.VoiceInfo.Gender.ToString(), Enum.GetName(voice.VoiceInfo.Age.GetType(), voice.VoiceInfo.Age));
        foreach (KeyValuePair<String, String> kvp in voice.VoiceInfo.AdditionalInfo)
        {
            Console.WriteLine("\tKey: {0} - Value: {1}", kvp.Key, kvp.Value);
        }
    }
    Console.Write("Press any key to continue...");
    Console.ReadKey(true);
    Console.Clear();
    // Test roster; the commented-out tail keeps extra names for larger runs.
    List<String> names = new List<String>()
    {
        "Janeway", "Gunnett", "Loveless", "Archer", "Picard", "Riker", "Anderson", "Troi", "Crusher", "O'Neil", "Data", "Worf", "Jackson", "Carter",
        /*"Parrett", "Bartling", "Milano", "Corley", "Mayr", "Steeves", "Durbin", "Buckholz", "Mercure", "Davidson", "Legree", "Hairston", "Hoge", "Hayman", "Wales", "Shirey", "Leist", "Lennon", "Coon", "Killgore", "Phalen", "Collinsworth", "Simard", "Plude", "Filson", "Vise", "Towner", "Gosier", "Ehrmann", "Barnum", "Searight", "Dumont", "Dicarlo", "Vallery", "Cullison", "Parlier", "Negron", "Blumer", "Karcher", "Posey", "Vantassell", "Storlie", "Rott", "Iman", "Tibbitts", "Dressel", "Bochenek", "Bath", "Pillot", "Brousseau",*/
    };
    /*String[] names = { "Janeway", "Gunnett", "Loveless", "Archer", "Picard", "Riker", "Anderson", "Troi", "Crusher", "O'Neil", "Data", "Worf", "Jackson", "Carter" };*/
    // Speak a generated password for each name (3 = password variant/length
    // parameter as defined by Security.GenerateVoicePassword).
    foreach (var item in names)
    {
        String passwd2 = Security.GenerateVoicePassword(item, 3);
        Console.WriteLine("Generated Voice Password: {0}", passwd2);
        synth.Speak(passwd2);
    }
    Console.Write("Press any key to continue...");
    Console.ReadKey(true);
    synth.Dispose();
}
// When the recognizer hears the word "speak", answers with a short test
// utterance on the default audio device.
// FIX: `using` replaces the manual Dispose()+null assignment and also
// releases the synthesizer when Speak throws.
void sre_SpeechRecognized(object sender, SpeechRecognizedEventArgs e)
{
    RaiseLogEvent("Recognized " + e.Result.Text);
    if (e.Result.Text == "speak")
    {
        using (SpeechSynthesizer synth = new SpeechSynthesizer())
        {
            synth.SetOutputToDefaultAudioDevice();
            synth.Speak("Lorem ipsum dolor sit am.");
        }
    }
}
// Speaks `sText` with Microsoft Anna (optionally capturing it to
// "SoundByte.wav" in the working directory) and returns a multi-line report
// describing the DEFAULT voice of a second, throwaway synthesizer.
// NOTE(review): when bToFile is true the speaker's output stays redirected
// to the wav file and is only released by the final Dispose; NOTE(review):
// the " Additional Info - " label is appended AFTER the key/value lines it
// introduces — confirm the intended report format.
public string Speak(string sText, bool bToFile)
{
    SpeechSynthesizer oSpeaker = new SpeechSynthesizer();
    oSpeaker.Rate = 1;
    oSpeaker.SelectVoice("Microsoft Anna");
    oSpeaker.Volume = 100;
    // Optionally capture the speech to a wav file instead of the speakers.
    if (bToFile)
    {
        oSpeaker.SetOutputToWaveFile("SoundByte.wav");
    }
    oSpeaker.Speak(sText);
    string msg;
    // Initialize a new instance of the SpeechSynthesizer to report on the
    // default voice (not the Anna voice used above).
    using (SpeechSynthesizer synth = new SpeechSynthesizer())
    {
        // Get information about supported audio formats.
        string AudioFormats = "";
        foreach (SpeechAudioFormatInfo fmt in synth.Voice.SupportedAudioFormats)
        {
            AudioFormats += String.Format("{0}\n", fmt.EncodingFormat.ToString());
        }
        // Write information about the voice to the report.
        msg = "Name: " + synth.Voice.Name + "\n";
        msg += "Culture: " + synth.Voice.Culture + "\n";
        msg += " Age: " + synth.Voice.Age + "\n";
        msg += " Gender: " + synth.Voice.Gender + "\n";
        msg += " Description: " + synth.Voice.Description + "\n";
        msg += " ID: " + synth.Voice.Id + "\n";
        if (synth.Voice.SupportedAudioFormats.Count != 0)
        {
            msg += " Audio formats: " + AudioFormats + "\n";
        }
        else
        {
            msg += " No supported audio formats found" + "\n";
        }
        // Get additional information about the voice.
        foreach (string key in synth.Voice.AdditionalInfo.Keys)
        {
            msg += String.Format(" {0}: {1}\n", key, synth.Voice.AdditionalInfo[key]);
        }
        msg += " Additional Info - ";
    }
    oSpeaker.Dispose();
    oSpeaker = null;
    return msg;
}
/// <summary>
/// Starts live video streaming, Pauses it to detect faces, Resumes it
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void btnStart_Click(object sender, EventArgs e)
{
    // Greet the user before toggling the capture state.
    SpeechSynthesizer greeter = new SpeechSynthesizer();
    greeter.Speak("Welcome to the magic mirror");
    greeter.Dispose();
    //MessageBox.Show("Câmera selecionada");
    if (capture == null)
    {
        return;
    }
    if (btnStart.Text == "Extrair Rosto")
    {
        // Currently streaming: pause the feed and run face detection.
        btnStart.Text = "Resumir Imagem";
        Application.Idle -= ProcessFrame;
        DetectFaces();
    }
    else
    {
        // Currently paused: resume the live feed.
        btnStart.Text = "Extrair Rosto";
        Application.Idle += ProcessFrame;
    }
}
// Speaks `text`, showing an error dialog if TTS fails.
// FIX: the synthesizer is disposed exactly once in a finally block — the
// original duplicated the Dispose call in both the try and the catch path.
private async void Speech(string text)
{
    talk = new SpeechSynthesizer();
    try
    {
        await talk.SpeakTextAsync(text);
    }
    catch (Exception)
    {
        MessageBox.Show("Error when trying to use Text to speech", "Error", MessageBoxButton.OK);
    }
    finally
    {
        talk.Dispose();
    }
}
// Speaks `text` as zh-CN using the voice and rate chosen in the UI.
// FIX: `using` disposes the synthesizer even when voiceRate.Text is not a
// number or SelectVoice rejects the chosen name (the original leaked in
// both cases). Original Chinese comments translated to English.
private void Play(object text)
{
    using (SpeechSynthesizer synth = new SpeechSynthesizer())
    {
        PromptBuilder builder = new PromptBuilder(new System.Globalization.CultureInfo("zh-CN"));
        synth.SetOutputToDefaultAudioDevice();
        if (SelectVoice.Items.Count > 0)
        {
            synth.SelectVoice(SelectVoice.SelectedItem.ToString());  // voice choice
        }
        synth.Rate = Convert.ToInt32(voiceRate.Text);  // speaking rate
        builder.AppendText(text.ToString());
        synth.Speak(builder);
    }
}
/// <summary>
/// Renders <paramref name="textToConvert"/> to an in-memory WAV stream using
/// the named installed voice, reporting speak progress through
/// <paramref name="worker"/>.
/// </summary>
/// <param name="textToConvert">The text to synthesize.</param>
/// <param name="voiceName">Name of an installed voice to use.</param>
/// <param name="voiceRate">Speaking rate, -10 (slowest) to 10 (fastest).</param>
/// <param name="worker">BackgroundWorker that receives progress callbacks.</param>
/// <returns>A stream positioned at 0, ready to be read.</returns>
private static Stream textToAudioStream(string textToConvert, string voiceName, int voiceRate, BackgroundWorker worker)
{
    Stream audioStream = new MemoryStream();

    // using disposes the synthesizer even when SelectVoice/Speak throws
    // (e.g. an unknown voice name); the original leaked it on exception.
    using (SpeechSynthesizer synth = new SpeechSynthesizer())
    {
        synth.SpeakProgress += delegate(object sender, SpeakProgressEventArgs e)
        {
            synth_SpeakProgress(sender, e, worker, textToConvert.Length);
        };

        synth.SetOutputToWaveStream(audioStream); // render into the memory stream
        synth.SelectVoice(voiceName);
        synth.Rate = voiceRate;
        synth.Speak(textToConvert); // synchronous — the stream is complete afterwards
    }

    audioStream.Position = 0; // rewind so callers can read from the start
    return audioStream;
}
/// <summary>
/// Speaks <paramref name="str"/> synchronously at the given rate and full volume.
/// </summary>
/// <param name="str">The text to speak.</param>
/// <param name="rate">Speaking rate, -10 (slowest) to 10 (fastest).</param>
public static void Say(string str, int rate)
{
    // using guarantees disposal. The original only disposed when
    // State != Speaking — a pointless check after a synchronous Speak(),
    // and a leak in any case where the condition was false.
    using (SpeechSynthesizer synthesizer = new SpeechSynthesizer())
    {
        synthesizer.Volume = 100; // 0...100
        synthesizer.Rate = rate;  // -10...10
        synthesizer.Speak(str);
    }
}
/// <summary>
/// Initializes and shows the "PlayNow" window: sets team names/colors, wires
/// the GlobalManager callbacks, binds the player lists, then starts two
/// background tasks — a countdown/announcement + game-data upload loop, and
/// a prize-display loop.
/// </summary>
/// <param name="oldgm">The shared GlobalManager instance for this session.</param>
public void Show(GlobalManager oldgm)
{
    gm = oldgm;
    gm.ChangeTransportWindow("PlayNow");

    // Set team names and team-color shadows.
    uiTeamAName.Text = gm.ms.TeamAName;
    uiTeamBName.Text = gm.ms.TeamBName;

    // Find which team the local player belongs to.
    string myself = "";
    foreach (Player item in gm.gamePlayerList)
    {
        if (item.PlayerName == gm.gameSettings.playerName)
        {
            myself = item.PlayerGroupName;
            break;
        }
    }
    // The local player's team gets the blue shadow, the opponent the red one.
    if (myself == gm.ms.TeamAName)
    {
        uiTeamAShadow.Color = Color.FromArgb(255, 0, 0, 255);
        uiTeamBShadow.Color = Color.FromArgb(255, 255, 0, 0);
    }
    else
    {
        uiTeamAShadow.Color = Color.FromArgb(255, 255, 0, 0);
        uiTeamBShadow.Color = Color.FromArgb(255, 0, 0, 255);
    }

    // Keyboard hook already installed by the talk-mode window; inherited here.
    //gm.kh.SetHook(false);

    // Wire up the callbacks the GlobalManager raises for this window.
    gm.PlayNow_inputPlayerData = new Action<StringGroup>(inputPlayerData);
    gm.PlayNow_playerDied = new Action<string>(playerDied);
    gm.PlayNow_playerSuccess = new Action<string>(playerSuccess);
    gm.PlayNow_teamDied = new Action<string>(teamDied);
    gm.PlayNow_turnToNewWindow = new Action(turnToNewWindow);
    gm.PlayNow_newMessage = new Action<string>(newMessage);

    // Group the players by team and bind each group to its list control.
    var playerSplit = from item in gm.gamePlayerList
                      where item.PlayerGroupName != ""
                      group item by item.PlayerGroupName;
    foreach (var item in playerSplit)
    {
        if (item.Key == gm.ms.TeamAName)
        {
            uiTeamAList.ItemsSource = item.ToList<Player>();
        }
        else
        {
            uiTeamBList.ItemsSource = item.ToList<Player>();
        }
    }

    stopTurnIn = false;
    stopShowPrize = false;
    talkList = new List<TalkListItem>();
    prizeLine = new Queue<PrizeStructure>();
    this.Show();

    // Data-upload task: 20-second announced countdown, then a 1 Hz loop that
    // uploads mark/life/unit readings until stopTurnIn is set.
    Task.Run(async () =>
    {
        // using guarantees the synthesizer is disposed even if the loop throws;
        // the original only disposed it on the normal exit path.
        using (SpeechSynthesizer speakStart = new SpeechSynthesizer())
        {
            speakStart.SelectVoiceByHints(VoiceGender.Female, VoiceAge.Adult);
            uiTimer.Dispatcher.Invoke(() => { uiTimer.Text = "20"; });

            // Welcome message.
            uiNoticeText.Dispatcher.Invoke(() => { uiNoticeText.Text = "欢迎来到Ballance的世界"; });
            speakStart.SpeakAsync("欢迎来到Ballance的世界");
            for (int i = 0; i < 5; i++)
            {
                Thread.Sleep(1000);
                uiTimer.Dispatcher.Invoke(() => { uiTimer.Text = (int.Parse(uiTimer.Text) - 1).ToString(); });
            }

            // Timer = 15: show the sections this player is responsible for.
            foreach (Player item in gm.gamePlayerList)
            {
                if (item.PlayerName == gm.gameSettings.playerName)
                {
                    uiNoticeText.Dispatcher.Invoke(() => { uiNoticeText.Text = "你的任务:完成" + item.DutyUnitToString + "小节,请在这些小节好好表现"; });
                    speakStart.SpeakAsync("鼓足干劲,力争上游");
                }
            }
            for (int i = 0; i < 10; i++)
            {
                Thread.Sleep(1000);
                uiTimer.Dispatcher.Invoke(() => { uiTimer.Text = (int.Parse(uiTimer.Text) - 1).ToString(); });
            }

            // Timer = 5: get ready.
            uiNoticeText.Dispatcher.Invoke(() => { uiNoticeText.Text = "请就绪"; });
            speakStart.SpeakAsync("还有五秒开始游戏");
            for (int i = 0; i < 5; i++)
            {
                Thread.Sleep(1000);
                uiTimer.Dispatcher.Invoke(() => { uiTimer.Text = (int.Parse(uiTimer.Text) - 1).ToString(); });
            }

            // Hide the countdown UI.
            uiTimerContainer.Dispatcher.Invoke(() => { uiTimerContainer.Visibility = Visibility.Collapsed; });

            // Game start.
            uiNoticeText.Dispatcher.Invoke(() => { uiNoticeText.Text = ""; });
            uiNotice.Dispatcher.Invoke(() => { uiNotice.Visibility = Visibility.Hidden; });
            speakStart.SpeakAsync("全军出击");

            // Upload loop.
            long previousMark = 1000;
            int similarityCount = 20;      // countdown shown when the mark stalls
            bool overCount = false;        // true once the stall countdown has expired
            while (true)
            {
                if (stopTurnIn == true)
                {
                    break;
                }

                long mark = gm.markMonitor.Mode(await gm.markMonitor.ReadDataAsync());
                long life = gm.lifeMonitor.Mode(await gm.lifeMonitor.ReadDataAsync());
                long unit = gm.unitMonitor.Mode(await gm.unitMonitor.ReadDataAsync());
                gm.dataGiveIn.SendData(CombineAndSplitSign.Combine(BallanceOnline.Const.ClientAndServerSign.Client, BallanceOnline.Const.SocketSign.GameDataTurnIn, mark.ToString() + "," + life.ToString() + "," + unit.ToString()));

                // Warn when the mark has not changed for consecutive readings.
                if (overCount == false)
                {
                    if (previousMark == mark)
                    {
                        similarityCount--;
                        // Countdown expired: hide the warning and stop counting.
                        if (similarityCount < 0)
                        {
                            uiTimer.Dispatcher.Invoke(() => { uiTimer.Text = ""; });
                            uiTimerContainer.Dispatcher.Invoke(() => { uiTimerContainer.Visibility = Visibility.Collapsed; });
                            overCount = true;
                        }
                        else
                        {
                            uiTimer.Dispatcher.Invoke(() => { uiTimer.Text = similarityCount.ToString(); });
                            uiTimerContainer.Dispatcher.Invoke(() => { uiTimerContainer.Visibility = Visibility.Visible; });
                        }
                    }
                    else
                    {
                        // Mark changed again: reset the countdown and hide the warning.
                        if (similarityCount != 20)
                        {
                            similarityCount = 20;
                            uiTimer.Dispatcher.Invoke(() => { uiTimer.Text = ""; });
                            uiTimerContainer.Dispatcher.Invoke(() => { uiTimerContainer.Visibility = Visibility.Collapsed; });
                        }
                    }
                }
                previousMark = mark;
                Thread.Sleep(1000);
            }
        }
        // End of the upload task.
    });

    // Prize-display task: drains prizeLine, showing and speaking each prize,
    // until stopShowPrize is set.
    Task.Run(() =>
    {
        while (true)
        {
            if (stopShowPrize == true)
            {
                break;
            }
            if (prizeLine.Count == 0)
            {
                Thread.Sleep(500);
                continue;
            }

            // Show the next prize.
            var cache = prizeLine.Dequeue();
            string sayWord = "";
            string showWord = cache.PlayerName + " ";
            switch (cache.PrizeName)
            {
                case GamePrize.FirstBlood:
                    showWord += GamePrize.FirstBloodShow;
                    sayWord = GamePrize.FirstBloodSpeech;
                    break;
                case GamePrize.Reborn:
                    showWord += GamePrize.RebornShow;
                    sayWord = GamePrize.RebornSpeech;
                    break;
                case GamePrize.Silence:
                    showWord += GamePrize.SilenceShow;
                    sayWord = GamePrize.SilenceSpeech;
                    break;
                case GamePrize.Time:
                    showWord += GamePrize.TimeShow;
                    sayWord = GamePrize.TimeSpeech;
                    break;
                case GamePrize.Ace:
                    showWord += GamePrize.AceShow;
                    sayWord = GamePrize.AceSpeech;
                    break;
            }

            // show
            uiNoticeText.Dispatcher.Invoke(() => { uiNoticeText.Text = showWord; });
            uiNotice.Dispatcher.Invoke(() => { uiNotice.Visibility = Visibility.Visible; });

            // say (using disposes the synthesizer even if Speak throws)
            using (SpeechSynthesizer speak = new SpeechSynthesizer())
            {
                speak.SelectVoiceByHints(VoiceGender.Female, VoiceAge.Adult);
                speak.Speak(sayWord);
            }

            // hide — fix: the original set Visibility.Visible here, so the
            // notice was never actually hidden after the prize was announced.
            uiNotice.Dispatcher.Invoke(() => { uiNotice.Visibility = Visibility.Hidden; });
        }
    });
}
/// <summary>
/// Renders <paramref name="text"/> to the wav file at <c>ttsSave</c>
/// using a 44.1 kHz, 16-bit, stereo format.
/// </summary>
/// <param name="text">The text to synthesize.</param>
public void TextToSpeech(string text)
{
    this.Log.Debug("Creating wav file of: " + text);
    SpeechAudioFormatInfo synthFormat = new SpeechAudioFormatInfo(44100, AudioBitsPerSample.Sixteen, AudioChannel.Stereo);

    // using guarantees the synthesizer (and the wav file handle it holds) is
    // released even if Speak throws; the original leaked both on exception.
    using (SpeechSynthesizer speechEngine = new SpeechSynthesizer())
    {
        this.Log.Debug("setting output: " + ttsSave);
        speechEngine.SetOutputToWaveFile(ttsSave, synthFormat);
        this.Log.Debug("speaking");
        speechEngine.Speak(text);
    }
}
/// <summary>
/// Speaks <paramref name="msg"/> synchronously with a female voice at full
/// volume and slightly raised rate.
/// </summary>
/// <param name="msg">The text to speak.</param>
private static void Speak(string msg)
{
    // using guarantees disposal even when SelectVoiceByHints/Speak throws;
    // the original leaked the synthesizer on exception.
    using (SpeechSynthesizer synthesizer = new SpeechSynthesizer())
    {
        synthesizer.Volume = 100; // 0...100
        synthesizer.Rate = 1;     // -10...10
        synthesizer.SelectVoiceByHints(VoiceGender.Female);
        synthesizer.Speak(msg);
    }
}