private void btnSayHello_Click(object sender, RoutedEventArgs e)
{
    // Build a prompt mixing plain text, a loud/extra-slow styled section,
    // a custom IPA pronunciation, and SayAs hints, then speak it synchronously.
    var prompt = new PromptBuilder();
    prompt.AppendText("Hello world");

    var loudAndSlow = new PromptStyle
    {
        Volume = PromptVolume.Loud,
        Rate = PromptRate.ExtraSlow,
    };
    prompt.StartStyle(loudAndSlow);
    prompt.AppendText("and hello to the universe too.");
    // IPA pronunciation hint for "Reading."
    prompt.AppendTextWithPronunciation("Reading.", "rɛdɪŋ");
    prompt.EndStyle();

    prompt.AppendText("On this day, ");
    prompt.AppendTextWithHint(DateTime.Now.ToShortDateString(), SayAs.Date);
    prompt.AppendText(", we're gathered here to learn");
    prompt.AppendText("all", PromptEmphasis.Strong);
    prompt.AppendText("about");
    // Spell the acronym letter by letter.
    prompt.AppendTextWithHint("WPF", SayAs.SpellOut);
    prompt.AppendText("Hola como estan?", PromptEmphasis.Strong);

    var synthesizer = new SpeechSynthesizer();
    synthesizer.Speak(prompt);
}
public static void PromptBuilder()
{
    // Prompt-engine SSML fragments used to wrap raw injected markup.
    string ssmlNs = "\"http://schemas.microsoft.com/Speech/2003/03/PromptEngine\"";
    string ssmlStartOutTag = "<peml:prompt_output xmlns:peml=" + ssmlNs + ">";
    string ssmlEndOutTag = "</peml:prompt_output>";

    PromptBuilder builder;

    // A freshly constructed builder must pick up the current UI culture.
    using (new ThreadCultureChange(null, CultureInfo.CreateSpecificCulture("ru-RU")))
    {
        builder = new PromptBuilder();
        Assert.Equal(CultureInfo.CurrentUICulture, builder.Culture);
    }

    using (new ThreadCultureChange(null, CultureInfo.CreateSpecificCulture("en-US")))
    {
        builder = new PromptBuilder();
        builder.AppendText("test");
        builder.AppendTextWithPronunciation("foo", "bar");
        builder.AppendSsmlMarkup(ssmlStartOutTag);
        builder.AppendSsmlMarkup("hello");
        builder.AppendSsmlMarkup(ssmlEndOutTag);

        Assert.Contains("hello", builder.ToXml());
        Assert.Equal(CultureInfo.CurrentUICulture, builder.Culture);
        Assert.False(builder.IsEmpty);

        // Round-trip: the serialized prompt must be accepted back as SSML.
        string ssml = builder.ToXml();
        builder.AppendSsml(XmlTextReader.Create(new StringReader(ssml)));
    }
}
public static string GetPronunciationFromText(string MyWord, string Pron = null)
{
    // Trick to figure out the phonemes used by the synthesis engine:
    // synthesize the word to an in-memory WAV, then run recognition over
    // that audio with a one-word grammar and read back the recognized
    // phonemes. Returns "" when the word is null/empty or nothing is
    // recognized.
    //
    // Fixes vs. original: short-circuit null/empty test (was the
    // non-short-circuit '|' operator), removed unreachable `== null`
    // checks after `new` (constructors never return null), and the
    // recognition engine is now disposed.
    if (string.IsNullOrEmpty(MyWord))
    {
        return "";
    }

    //txt to wav
    using (MemoryStream audioStream = new MemoryStream())
    {
        using (SpeechSynthesizer synth = new SpeechSynthesizer())
        {
            synth.SetOutputToWaveStream(audioStream);
            PromptBuilder pb = new PromptBuilder();
            if (Pron == null)
            {
                synth.Speak(MyWord);
            }
            else
            {
                // Speak the caller-supplied pronunciation; the literal
                // text "Not Used" is ignored by the engine.
                pb.AppendTextWithPronunciation("Not Used", Pron);
                synth.Speak(pb);
            }
            synth.SetOutputToNull();
            audioStream.Position = 0;

            //now wav to txt (for reco phonemes)
            recoPhonemes = String.Empty;
            GrammarBuilder gb = new GrammarBuilder(MyWord);
            Grammar g = new Grammar(gb); //TODO the hard letters to recognize are 'g' and 'e'
            //Grammar g = new DictationGrammar();
            using (SpeechRecognitionEngine reco = new SpeechRecognitionEngine())
            {
                // Handlers capture hypothesized/rejected phonemes into recoPhonemes.
                reco.SpeechHypothesized += new EventHandler<SpeechHypothesizedEventArgs>(reco_SpeechHypothesized);
                reco.SpeechRecognitionRejected += new EventHandler<SpeechRecognitionRejectedEventArgs>(reco_SpeechRecognitionRejected);
                reco.UnloadAllGrammars(); //only use the one word grammar
                reco.LoadGrammar(g);
                reco.SetInputToWaveStream(audioStream);
                RecognitionResult rr = reco.Recognize();
                reco.SetInputToNull();
                if (rr != null)
                {
                    recoPhonemes = StringFromWordArray(rr.Words, WordType.Pronunciation);
                }
                //txtRecoPho.Text = recoPhonemes;
                return recoPhonemes;
            }
        }
    }
}
private static void Testing_AppendTextWithHint()
{
    // Exercise SayAs hints, a bookmark, and a pronunciation append,
    // each separated by a break, then speak the assembled prompt on the
    // shared synthesizer.
    var prompt = new PromptBuilder();

    prompt.AppendTextWithHint("3rd", SayAs.NumberOrdinal);
    prompt.AppendBreak();
    prompt.AppendTextWithHint("3rd", SayAs.NumberCardinal);
    prompt.AppendBreak();
    prompt.AppendBookmark("First bookmark");
    prompt.AppendBreak();
    // IPA pronunciation hint for the surname.
    prompt.AppendTextWithPronunciation("DuBois", "duˈbwɑ");
    prompt.AppendBreak();

    synthesizer.Speak(prompt);
}
public override bool Apply(PromptBuilder builder)
{
    // Append the parsed word with its parsed pronunciation; a malformed
    // pronunciation surfaces as FormatException and is reported as a
    // failed application rather than propagated.
    var pronounciation = ParseTagArgument();
    var word = ParseWordToPronounce();

    try
    {
        builder.AppendTextWithPronunciation(word, pronounciation);
        return true;
    }
    catch (FormatException)
    {
        return false;
    }
}
public override bool Apply(PromptBuilder builder)
{
    // Emit the word/pronunciation pair parsed from the tag. Reports
    // failure (instead of throwing) when the pronunciation string is
    // not valid for the builder.
    String ipa = ParseTagArgument();
    String word = ParseWordToPronounce();

    bool applied = true;
    try
    {
        builder.AppendTextWithPronunciation(word, ipa);
    }
    catch (FormatException)
    {
        applied = false;
    }
    return applied;
}
public override void Fire()
{
    // Per-cycle module update: collect fired single-character phoneme
    // neurons into phraseToSpeak, wire any unconnected fired neuron into
    // the knowledge store, feed one queued phoneme per cycle, and speak
    // the accumulated phrase once firing pauses.
    Init();  //be sure to leave this here
    if (synth == null)
    {
        return;
    }

    bool paused = true;
    // Scan neurons starting at index 1 — index 0 is presumably reserved; confirm.
    for (int i = 1; i < na.NeuronCount; i++)
    {
        Neuron n = na.GetNeuronAt(i);
        if (n.Fired())
        {
            // Single-character labels are phonemes; accumulate them.
            if (n.Label.Length == 1)
            {
                phraseToSpeak += n.Label;
                paused = false;
            }
            if (n.Synapses.Count == 0)
            {
                //if a neuron fired and it has no connection, connect it to the knowledge store
                //connection to KB
                ModuleUKSN nmKB = (ModuleUKSN)FindModuleByType(typeof(ModuleUKSN));
                if (nmKB != null)
                {
                    // KB things for phonemes are labeled "pn" + phoneme character.
                    string label = "pn" + n.Label;
                    List<Thing> phonemes = nmKB.Labeled("Phoneme").Children;
                    Thing pn = nmKB.Labeled(label, phonemes);
                    if (pn == null) //this should always be null
                    {
                        pn = nmKB.AddThing(label, new Thing[] { nmKB.Labeled("Phoneme") }, pn);
                    }
                    Neuron n1 = nmKB.GetNeuron(pn);
                    if (n1 != null)
                    {
                        n.AddSynapse(n1.Id, 1);
                        n1.SetValue(1);
                    }
                }
            }
        }
    }

    // Fire the next queued phoneme (one character per cycle).
    if (phonemesToFire != "")
    {
        char c = phonemesToFire[0];
        bool fired = false;
        for (int i = 0; i < na.NeuronCount; i++)
        {
            Neuron n = na.GetNeuronAt(i);
            if (n.Label == c.ToString())
            {
                n.SetValue(1);
                fired = true;
                break;
            }
        }
        if (!fired)
        {
            // No neuron matches this phoneme; deliberate no-op (debug hook).
            Utils.Noop();
        }
        phonemesToFire = phonemesToFire.Substring(1);
    }

    // Firing has paused and a phrase accumulated: display and speak it.
    if (paused && phraseToSpeak != "")
    {
        if (dlg != null)
        {
            ((ModuleSpeakPhonemesDlg)dlg).SetLabel(phraseToSpeak);
        }

        if (na.GetNeuronAt("Enable").Fired())
        {
            ModuleSpeechIn msi = (ModuleSpeechIn)FindModuleByType(typeof(ModuleSpeechIn));
            if (msi != null)
            {
                msi.PauseRecognition(); //if there is a recognizer active
            }
            //synth.SpeakAsync(phraseToSpeak + ".");
            //phraseToSpeak = "";
            // Typed-in text is voiced by David at medium rate; phoneme-built
            // phrases by Zira, extra slow.
            PromptBuilder pb1 = new PromptBuilder();
            if (typedIn)
            {
                pb1.StartVoice("Microsoft David Desktop");
                pb1.StartStyle(new PromptStyle(PromptRate.Medium));
            }
            else
            {
                pb1.StartVoice("Microsoft Zira Desktop");
                pb1.StartStyle(new PromptStyle(PromptRate.ExtraSlow));
            }
            // The phrase is passed as the pronunciation; the literal text is ignored.
            pb1.AppendTextWithPronunciation("not used", phraseToSpeak);
            pb1.EndStyle();
            pb1.EndVoice();
            string x = pb1.ToXml();
            Debug.WriteLine(debugString(phraseToSpeak));
            //synth.Speak(pb1);
            synth.SpeakAsync(pb1);
        }
        //string heard = GetPronunciationFromText("", phraseToSpeak); //it would be nice to hear what was said but it doesn't work with this engine
        phraseToSpeak = "";
        typedIn = false;
    }
}
public static string GetPronunciationFromText(string MyWord, string Pron = null)
{
    // Trick to figure out the phonemes used by the synthesis engine:
    // synthesize the word to an in-memory WAV, then run recognition over
    // that audio with a one-word grammar and read back the recognized
    // phonemes. Returns "" when the word is null/blank or nothing is
    // recognized.
    //
    // Fixes vs. original: MyWord.Trim() was called BEFORE the null check,
    // so a null argument threw NullReferenceException; also removed
    // unreachable `== null` checks after `new`, and the recognition
    // engine is now disposed.
    if (MyWord == null)
    {
        return "";
    }
    MyWord = MyWord.Trim();
    if (MyWord == "")
    {
        return "";
    }

    // Hard-coded answers for words the engine gets wrong.
    if (MyWord.ToLower() == "a")
    {
        return "ə";
    }
    if (MyWord.ToLower() == "no")
    {
        return "no";
    }

    //txt to wav
    using (MemoryStream audioStream = new MemoryStream())
    {
        using (SpeechSynthesizer synth = new SpeechSynthesizer())
        {
            synth.SetOutputToWaveStream(audioStream);
            PromptBuilder pb = new PromptBuilder();
            if (Pron == null)
            {
                synth.Speak(MyWord);
            }
            else
            {
                // Speak the caller-supplied pronunciation; the literal
                // text "Not Used" is ignored by the engine.
                pb.AppendTextWithPronunciation("Not Used", Pron);
                synth.Speak(pb);
            }
            synth.SetOutputToNull();
            audioStream.Position = 0;

            //now wav to txt (for reco phonemes)
            recoPhonemes = String.Empty;
            GrammarBuilder gb = new GrammarBuilder(MyWord);
            Grammar g = new Grammar(gb); //TODO the hard letters to recognize are 'g' and 'e'
            // NOTE: an SrgsDocument-based grammar with explicit token
            // pronunciations was tried here and removed — custom
            // pronunciations didn't take (see patch below).
            //Grammar g = new DictationGrammar();
            using (SpeechRecognitionEngine reco = new SpeechRecognitionEngine())
            {
                // Handlers capture hypothesized/rejected phonemes into recoPhonemes.
                reco.SpeechHypothesized += new EventHandler<SpeechHypothesizedEventArgs>(reco_SpeechHypothesized);
                reco.SpeechRecognitionRejected += new EventHandler<SpeechRecognitionRejectedEventArgs>(reco_SpeechRecognitionRejected);
                reco.UnloadAllGrammars(); //only use the one word grammar
                reco.LoadGrammar(g);
                reco.SetInputToWaveStream(audioStream);
                RecognitionResult rr = reco.Recognize();
                reco.SetInputToNull();
                if (rr != null)
                {
                    recoPhonemes = StringFromWordArray(rr.Words, WordType.Pronunciation);
                }
                //custom pronunciations don't seem to work so here are patches
                recoPhonemes = recoPhonemes.Replace("e͡iɛm", "æm");
                return recoPhonemes;
            }
        }
    }
}
public override void Fire()
{
    // Per-cycle module update: handle Cancel/Validate control neurons,
    // collect fired single-character phoneme neurons into phraseToSpeak,
    // wire unknown phonemes into the "AudibleUKS" knowledge store, feed
    // one queued phoneme per cycle, and speak the accumulated phrase
    // once firing pauses.
    Init();  //be sure to leave this here
    if (synth == null)
    {
        return;
    }

    // "Cancel" aborts any queued speech.
    if (GetNeuronValue("Cancel") == 1)
    {
        synth.SpeakAsyncCancelAll();
    }

    // "Validate" toggles validation mode; counters reset on entry and the
    // hit/miss statistics are reported when the mode is exited.
    if (GetNeuronValue("Validate") == 1)
    {
        if (!validating)
        {
            hitWords.Clear();
            missWords.Clear();
            missPhrase.Clear();
            hit = 0;
            miss = 0;
        }
        validating = true;
    }
    else
    {
        if (validating)
        {
            if (hit + miss == 0)
            {
                Debug.WriteLine("No Validation Data");
            }
            else
            {
                Debug.WriteLine("Validation: " + hit + " / " + miss + " = " + 100 * hit / (hit + miss));
                Debug.WriteLine("Validation: " + hitWords.Count + " / " + missWords.Count + " = " + 100 * hitWords.Count / (hitWords.Count + missWords.Count));
            }
        }
        validating = false;
    }

    bool paused = true;
    // Scan neurons starting at index 3 — earlier indices are presumably
    // control neurons (Cancel/Validate/Enable); confirm.
    for (int i = 3; i < na.NeuronCount; i++)
    {
        Neuron n = na.GetNeuronAt(i);
        if (n.Fired())
        {
            // Single-character labels are phonemes; accumulate them.
            if (n.Label.Length == 1)
            {
                phraseToSpeak += n.Label;
                paused = false;
            }
            if (n.Synapses.Count == 0)
            {
                //connect it to the knowledge store
                //connection to KB
                //ModuleUKS2 nmKB = (ModuleUKS2)FindModuleByName("AudibleUKS");
                if (FindModuleByName("AudibleUKS") is ModuleUKS2 UKS)
                {
                    // KB things for phonemes are labeled "pn" + phoneme character.
                    string label = "pn" + n.Label;
                    List<Thing> phonemes = UKS.Labeled("Phoneme").Children;
                    Thing pn = UKS.Labeled(label, phonemes);
                    if (pn == null) //this should always be null
                    {
                        pn = UKS.AddThing(label, new Thing[] { UKS.Labeled("Phoneme") }, pn);
                    }
                    Neuron n1 = UKS.GetNeuron(pn);
                    Neuron n2 = UKS.GetNeuron(pn, false);
                    if (n1 != null)
                    {
                        // NOTE(review): n2 is not null-checked before use — if
                        // GetNeuron(pn, false) can return null this throws; confirm.
                        n.AddSynapse(n1.Id, 1);
                        n1.SetValue(1);
                        n2.AddSynapse(n.Id, 1);
                    }
                }
            }
        }
    }

    // Fire the next queued phoneme (one character per cycle); spaces are skipped.
    if (phonemesToFire != "")
    {
        char c = phonemesToFire[0];
        bool fired = false;
        if (c != ' ')
        {
            for (int i = 0; i < na.NeuronCount; i++)
            {
                Neuron n = na.GetNeuronAt(i);
                if (n.Label == c.ToString())
                {
                    n.SetValue(1);
                    fired = true;
                    break;
                }
            }
            if (!fired)
            {
                // Unknown phoneme: create a neuron for it and link it into the KB.
                Neuron n = AddLabel(c.ToString());
                //connect it to the knowledge store
                //connection to KB
                //ModuleUKS2 nmKB = (ModuleUKS2)FindModuleByName("AudibleUKS");
                if (FindModuleByName("AudibleUKS") is ModuleUKS2 UKS)
                {
                    string label = "pn" + n.Label;
                    List<Thing> phonemes = UKS.Labeled("Phoneme").Children;
                    Thing pn = UKS.Labeled(label, phonemes);
                    if (pn == null) //this should always be null
                    {
                        pn = UKS.AddThing(label, new Thing[] { UKS.Labeled("Phoneme") }, pn);
                    }
                    Neuron n1 = UKS.GetNeuron(pn);
                    Neuron n2 = UKS.GetNeuron(pn, false);
                    if (n1 != null)
                    {
                        // NOTE(review): n2 is not null-checked here either — confirm.
                        n.AddSynapse(n1.Id, 1);
                        n2.AddSynapse(n.Id, 1);
                        n.SetValue(1);
                    }
                }
            }
        }
        phonemesToFire = phonemesToFire.Substring(1);
    }

    // Firing has paused and a phrase accumulated: display and speak it.
    if (paused && phraseToSpeak != "")
    {
        if (dlg != null)
        {
            ((ModuleSpeakPhonemes2Dlg)dlg).SetLabel(phraseToSpeak);
        }

        if (na.GetNeuronAt("Enable").Fired())
        {
            ModuleSpeechIn msi = (ModuleSpeechIn)FindModuleByType(typeof(ModuleSpeechIn));
            if (msi != null)
            {
                msi.PauseRecognition(); //if there is a recognizer active
            }
            //synth.SpeakAsync(phraseToSpeak + ".");
            //phraseToSpeak = "";
            // Typed-in text is voiced by David at medium rate; phoneme-built
            // phrases by Zira, slowed down.
            PromptBuilder pb1 = new PromptBuilder();
            if (typedIn)
            {
                pb1.StartVoice("Microsoft David Desktop");
                pb1.StartStyle(new PromptStyle(PromptRate.Medium));
            }
            else
            {
                pb1.StartVoice("Microsoft Zira Desktop");
                pb1.StartStyle(new PromptStyle(PromptRate.Slow));
            }
            // The phrase is passed as the pronunciation; the literal text is ignored.
            pb1.AppendTextWithPronunciation("not used", phraseToSpeak.Trim());
            pb1.EndStyle();
            pb1.EndVoice();
            string x = pb1.ToXml();
            Debug.WriteLine(debugString(phraseToSpeak));
            //synth.Speak(pb1);
            synth.SpeakAsync(pb1);
        }
        //string heard = GetPronunciationFromText("", phraseToSpeak); //it would be nice to hear what was said but it doesn't work with this engine
        phraseToSpeak = "";
        typedIn = false;
    }
}
// Redraw the visible board from the internal chess board
public void RedrawBoard()
{
    // Repaint every square: restore its background tile, overlay the
    // piece image for occupied squares, and highlight the selected square.
    foreach (Squar ChessSquar in Squars)
    {
        if (ChessSquar.BackgroundImage == null) // if background image doesn't exist
        {
            ChessSquar.SetBackgroundSquar(ChessImages);
        }

        if (ChessGame.Board[ChessSquar.Name] != null) // Valid board square
        {
            ChessSquar.DrawPiece(ChessImages.GetImageForPiece(ChessGame.Board[ChessSquar.Name].piece)); // draw the chess piece image
        }

        if (ChessSquar.Name == SelectedSquar && ShowMoveHelp == true) // selected check box
        {
            // Replace the tile with a flat highlight color.
            ChessSquar.BackgroundImage = null;
            ChessSquar.BackColor = System.Drawing.Color.Thistle;
        }
    }

    // Announce the selected piece via speech synthesis: speak the owning
    // side, then the piece name, through the shared 'style' prompt builder.
    // NOTE(review): 'style' appears to be a PromptBuilder field — confirm.
    if (SelectedSquar != null && SelectedSquar != "" && ChessGame.Board[SelectedSquar].piece != null && !ChessGame.Board[SelectedSquar].piece.IsEmpty())
    {
        if (ChessGame.Board[SelectedSquar].piece.Side.isWhite())
        {
            sSynth.SpeakAsync("player");
        }
        if (ChessGame.Board[SelectedSquar].piece.Side.isBlack())
        {
            sSynth.SpeakAsync("computer");
        }

        if (ChessGame.Board[SelectedSquar].piece.IsPawn())
        {
            // Custom IPA pronunciation so "pawn" is spoken distinctly.
            style.AppendTextWithPronunciation("pawn", "ˈpːɒɳ");
        }
        else
        {
            style.AppendText(ChessGame.Board[SelectedSquar].piece.ToString(), PromptEmphasis.Strong); // identify piece at selected square
        }
        sSynth.SpeakAsync(style);
        style.ClearContent();
    }

    // Check if need to show the possible legal moves for the current selected piece
    if (SelectedSquar != null && SelectedSquar != "" && ShowMoveHelp == true && ChessGame.Board[SelectedSquar].piece != null && !ChessGame.Board[SelectedSquar].piece.IsEmpty() && ChessGame.Board[SelectedSquar].piece.Side.type == ChessGame.GameTurn)
    {
        ArrayList moves = ChessGame.GetLegalMoves(ChessGame.Board[SelectedSquar]); // Get all legal moves for the current selected item

        // highlight all the possible moves for the current player
        if (moves.Count != 0)
        {
            sSynth.SpeakAsync("Available moves are "); // speak coordinates of clicked square
        }
        foreach (Cell cell in moves)
        {
            Squar sqr = GetBoardSquar(cell.ToString()); // get the board by cell position
            // Visual highlighting of the target squares is currently disabled:
            // sqr.BackgroundImage = null; // Show a semi-transparent, saddle color
            // sqr.BackColor = System.Drawing.Color.FromArgb(200, System.Drawing.Color.SteelBlue);
        }
    }
    SelectedSquar = ""; // Reset the selected square position
}