// Handles a click on a voice-selection menu item. The item's Tag carries the
// voice name; the new voice is activated and the choice confirmed audibly.
private void tsi_Click(object sender, EventArgs e)
{
    var item = (ToolStripItem)sender;
    var voiceName = item.Tag.ToString();
    ss.SelectVoice(voiceName);
    speak("You have selected voice: " + voiceName, false);
}
/// <summary>
/// Selects the Japanese (ja-JP, Haruka) server voice and speaks a greeting.
/// </summary>
public static void setLanguage2()
{
    // SpeechSynthesizer is IDisposable (owns native audio resources); the
    // original leaked it — dispose deterministically with a using block.
    using (var syn = new System.Speech.Synthesis.SpeechSynthesizer())
    {
        syn.SelectVoice("Microsoft Server Speech Text to Speech Voice (ja-JP, Haruka)");
        syn.Speak("こんにちは"); // synchronous: blocks until playback finishes
    }
}
/// <summary>
/// Switches the native synthesizer to the named installed voice.
/// </summary>
/// <param name="voice">Name of the installed voice to activate.</param>
/// <returns>true when the voice was selected; false when selection failed.</returns>
public override bool SelectVoice(string voice)
{
    try
    {
        NativeSynthesizer.SelectVoice(voice);
        // Record the name the engine actually reports (may be normalized).
        Voice = NativeSynthesizer.Voice.Name;
    }
    catch (Exception ex)
    {
        // SelectVoice throws ArgumentException when the voice is missing or
        // disabled. Original message mixed Chinese into the English log line
        // ("失败,失败原因:"); rewritten as a single coherent message.
        this.Error("Select voice " + voice + " failed, reason: " + ex);
        return(false);
    }
    return(true);
}
/// <summary>
/// Wires up Azure speech recognition for German and a local synthesizer
/// configured with an installed de-DE voice.
/// </summary>
private Speech()
{
    SpeechConfig config = SpeechConfig.FromSubscription(API_KEY, REGION);
    config.SpeechRecognitionLanguage = "de-DE";
    _recognizer = new SpeechRecognizer(config);

    _synthesizer = new System.Speech.Synthesis.SpeechSynthesizer();

    // Pick the first *enabled* German voice. The original looped over every
    // match and called SelectVoice on each, so only the last voice ever took
    // effect — and a disabled voice in the list would have thrown.
    ICollection<InstalledVoice> voices = _synthesizer.GetInstalledVoices(System.Globalization.CultureInfo.CreateSpecificCulture("de-DE"));
    foreach (InstalledVoice voice in voices)
    {
        if (voice.Enabled)
        {
            _synthesizer.SelectVoice(voice.VoiceInfo.Name);
            break;
        }
    }
    //_synthesizer.SelectVoiceByHints(VoiceGender.Female, VoiceAge.Adult);
}
// Reads the current selection aloud; when nothing is selected, reads the whole
// document, optionally starting at the caret when "from cursor" is checked.
private void buttonRead_Click(object sender, EventArgs e)
{
    string text = richTextBox1.SelectedText;

    if (string.IsNullOrEmpty(text))
    {
        text = richTextBox1.Text;
        if (checkBoxFromCursor.Checked)
        {
            // Drop everything before the caret position.
            text = text.Substring(richTextBox1.SelectionStart, text.Length - richTextBox1.SelectionStart);
        }
    }

    if (string.IsNullOrEmpty(text))
    {
        MessageBox.Show("No text selected", "Info", MessageBoxButtons.OK, MessageBoxIcon.Information);
        return;
    }

    _synth.Rate = trackBar1.Value;
    // Combo entries look like "VoiceName#…"; the voice name precedes '#'.
    _synth.SelectVoice(comboBoxVoices.Text.Split('#')[0]);
    _prompt = _synth.SpeakAsync(text);
}
// Synthesizes the given HTML text to audio.wav in 'audir', tracking per-word
// progress (via spv_SpeakProgress) to accumulate EPUB3 SMIL sync entries,
// then converts the audio to MP3 and copies it to the user-chosen save file.
// Blocks — pumping the message loop — until synthesis completes.
//
//   inHtml    - text/markup to speak; its whitespace count sizes the progress bar.
//   audir     - working directory for the intermediate audio.wav / audio.mp3.
//   voicename - installed voice to use for synthesis.
//   startID   - initial span id consumed by the SpeakProgress handler.
public void read_aloud_html(string inHtml, string audir, string voicename, int startID)
{
    try
    {
        System.Speech.Synthesis.SpeechSynthesizer l_spv = new System.Speech.Synthesis.SpeechSynthesizer();
        // The handlers accumulate spanTxt/smilTxt and flip audioComplete when done.
        l_spv.SpeakCompleted += new EventHandler <System.Speech.Synthesis.SpeakCompletedEventArgs>(spv_SpeakCompleted);
        l_spv.SpeakProgress += new EventHandler <System.Speech.Synthesis.SpeakProgressEventArgs>(spv_SpeakProgress);
        l_spv.SelectVoice(voicename);
        l_spv.Rate = -2; // slower than default, presumably for clearer narration — confirm
        l_spv.SetOutputToWaveFile(audir + "\\audio.wav");

        // Reset the shared state the event handlers write into.
        audioComplete = false;
        spanTxt = "";
        smilTxt = "";
        last_time = "";
        curFullTxt = "";
        curFullTxt = inHtml;
        spanID = startID;
        spanPoint = 0;
        curPageHtml = "page.html";
        curaudioFile = "audio.mp3";

        // One progress-bar step per whitespace-delimited word.
        probar.Visible = true;
        probar.Maximum = Regex.Matches(inHtml, "\\s").Count;
        probar.Value = 0;

        l_spv.SpeakAsync(inHtml);
        // Busy-wait pump until SpeakCompleted fires; keeps the UI responsive
        // while synthesis runs.
        while (audioComplete == false)
        {
            Application.DoEvents();
        }

        // Wrap the accumulated entries in an EPUB3 media-overlay SMIL document.
        string wSmil = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n";
        wSmil += "<smil xmlns=\"http://www.w3.org/ns/SMIL\" version=\"3.0\" profile=\"http://www.idpf.org/epub/30/profile/content/\">\n";
        wSmil += "<body>\n";
        wSmil += smilTxt;
        wSmil += "\n</body></smil>\n";
        txt_smil.Text = wSmil;
        txt_out.Text = htLineTxt;
        // File.WriteAllText(htmlDir + "\\page" + pgNum + ".smil", wSmil);
        probar.Visible = false;
        // Dispose closes the wave-file handle before the MP3 conversion reads it.
        l_spv.Dispose();

        //write mp3 file
        convert_mp3(audir);
        if (File.Exists(txt_savefile.Text))
        {
            File.Delete(txt_savefile.Text);
        }
        File.Copy(audir + "\\audio.mp3", txt_savefile.Text, true);
        gCls.show_message("SMIL Tag generated successfully");
    }
    catch (Exception erd)
    {
        gCls.show_error(erd.Message.ToString());
        return;
    }
}
/// <summary>
/// Listens once for speech, runs LUIS intent recognition, and — for the
/// "Translate" intent — translates the recognized text into the requested
/// language and speaks the result with a matching installed voice.
/// </summary>
/// <param name="config">Speech service configuration (key/region/language).</param>
public static async Task RecognizeOnceSpeechAsync(SpeechTranslationConfig config)
{
    var allCultures = CultureInfo.GetCultures(CultureTypes.AllCultures);
    // Creates a speech recognizer.
    using (var recognizer = new IntentRecognizer(config))
    {
        Console.WriteLine("Say something...");
        var model = LanguageUnderstandingModel.FromAppId(ConfigurationManager.AppSettings.Get("LUISId"));
        recognizer.AddAllIntents(model);
        var result = await recognizer.RecognizeOnceAsync();

        // Checks result.
        if (result.Reason == ResultReason.RecognizedIntent)
        {
            Console.WriteLine($"RECOGNIZED: Text={result.Text}");
            Console.WriteLine($"    Intent Id: {result.IntentId}.");
            Console.WriteLine($"    Language Understanding JSON: {result.Properties.GetProperty(PropertyId.LanguageUnderstandingServiceResponse_JsonResult)}.");
            if (result.IntentId == "Translate")
            {
                var luisJson = JObject.Parse(result.Properties.GetProperty(PropertyId.LanguageUnderstandingServiceResponse_JsonResult));
                string targetLng = luisJson["entities"].First(x => x["type"].ToString() == "TargetLanguage")["entity"].ToString();
                string text = luisJson["entities"].First(x => x["type"].ToString() == "Text")["entity"].ToString();

                // Resolve the spoken language name to a culture; fall back to English.
                var lng = allCultures.FirstOrDefault(c => c.DisplayName.Equals(targetLng, StringComparison.OrdinalIgnoreCase))
                          ?? allCultures.FirstOrDefault(c => c.DisplayName.Equals("english", StringComparison.OrdinalIgnoreCase));

                // BUG FIX: the target culture was computed but "de-DE" was
                // hard-coded as the translation language, so every request was
                // translated to German. Translate into the requested culture.
                var translated = Translate.TranslateText(lng.Name, text);
                Console.WriteLine("Translation: " + translated);

                // Dispose the synthesizer (the original leaked it).
                using (var synth = new System.Speech.Synthesis.SpeechSynthesizer())
                {
                    // Configure the audio output.
                    synth.SetOutputToDefaultAudioDevice();
                    // Prefer a voice for the target language; First() would
                    // throw an unhelpful error when none is installed.
                    var voice = synth.GetInstalledVoices()
                        .FirstOrDefault(x => x.VoiceInfo.Culture.TwoLetterISOLanguageName == lng.TwoLetterISOLanguageName);
                    if (voice != null)
                    {
                        synth.SelectVoice(voice.VoiceInfo.Name);
                    }
                    // Speak a string (default voice when no match was found).
                    synth.Speak(translated);
                }
            }
        }
        else if (result.Reason == ResultReason.RecognizedSpeech)
        {
            Console.WriteLine($"RECOGNIZED: Text={result.Text}");
            Console.WriteLine($"    Intent not recognized.");
        }
        else if (result.Reason == ResultReason.NoMatch)
        {
            Console.WriteLine($"NOMATCH: Speech could not be recognized.");
        }
        else if (result.Reason == ResultReason.Canceled)
        {
            var cancellation = CancellationDetails.FromResult(result);
            Console.WriteLine($"CANCELED: Reason={cancellation.Reason}");
            if (cancellation.Reason == CancellationReason.Error)
            {
                Console.WriteLine($"CANCELED: ErrorCode={cancellation.ErrorCode}");
                Console.WriteLine($"CANCELED: ErrorDetails={cancellation.ErrorDetails}");
                Console.WriteLine($"CANCELED: Did you update the subscription info?");
            }
        }
    }
}