static void engine_RecognizeCompleted(object sender, System.Speech.Recognition.RecognizeCompletedEventArgs e)
{
    // Build a DMDocument from the recognized sentences and save it next to the executable.
    DMDocument doc = new DMDocument();
    DMParagraph paragraph = new DMParagraph();

    DictationSyncEngine engine = sender as DictationSyncEngine;
    if (engine != null)
    {
        for (int i = 0; i < engine.Result.Count; i++)
        {
            var sentence = engine.Result[i];
            DMSentence dmSentence = BuildSentence(i, sentence);
            paragraph.Inlines.Add(dmSentence);
        }

        doc.Blocks.Add(paragraph);
    }

    doc.Save(Path.Combine(Environment.CurrentDirectory, "data.xml"));

    Console.WriteLine("Recognize Completed!");
    Console.WriteLine("Press any key to exit!");
}
static void Main(string[] args)
{
    string currentFilePath = "";

    // Hard-coded test input; in normal use the media path comes from the command line.
    args = new string[] { Path.Combine(Environment.CurrentDirectory, "SGTV.wav") };

    if (args != null && args.Length > 0)
    {
        var str = args[0];
        Console.WriteLine("Args : " + str);

        var waveFile = str + ".wav"; // str.ToLower().Replace(".mp3", ".wav");
        var lrcFile = str + ".lrc";
        currentFilePath = str + ".xml";

        if (File.Exists(currentFilePath))
        {
            // A recognition result already exists: convert it straight into an .lrc lyrics file.
            Console.WriteLine("XML file already exists.");
            DMDocument doc = DMDocument.Load(currentFilePath);

            Lyrics lrc = new Lyrics();
            foreach (var sentence in doc.Sentences)
            {
                LyricsPhrase phrase = new LyricsPhrase()
                {
                    BeginTime = sentence.BeginTime,
                    EndTime = sentence.EndTime,
                    Text = sentence.Text
                };
                lrc.Phrases.Add(phrase);
            }
            lrc.Save(lrcFile);
        }
        else
        {
            // No result yet: decode the audio and run the dictation engine over it.
            WaveDecoder wd = new WaveDecoder();
            wd.ProcessForRecognize(str, waveFile);

            DictationSyncEngine engine = new DictationSyncEngine("en-US");
            engine.SentenceRecognized += engine_SentenceRecognized;
            engine.RecognizeCompleted += engine_RecognizeCompleted;
            engine.Process(waveFile);
        }
    }
    else
    {
        Console.WriteLine("Args is null or empty!");
    }

    Console.ReadLine();
}
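The Lyrics and LyricsPhrase types used above belong to the sample project, so their serialization is not shown here. As a rough illustration of what the saved .lrc output amounts to, the sketch below formats phrases with standard [mm:ss.xx] timestamps; it assumes BeginTime is a TimeSpan and is not the project's actual Lyrics.Save implementation.

// Illustrative sketch only: writes phrases as standard LRC lines such as "[00:12.34]Hello world".
// Assumes LyricsPhrase.BeginTime is a TimeSpan; adjust if the project stores timings differently.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;

static class LrcSketch
{
    public static void SaveAsLrc(IEnumerable<LyricsPhrase> phrases, string path)
    {
        var lines = phrases.Select(p => string.Format(
            "[{0:00}:{1:00}.{2:00}]{3}",
            (int)p.BeginTime.TotalMinutes,   // total minutes since the start of the track
            p.BeginTime.Seconds,
            p.BeginTime.Milliseconds / 10,   // hundredths of a second
            p.Text));

        File.WriteAllLines(path, lines);
    }
}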
/// <summary>
/// Executes the dictation activity.
/// </summary>
/// <param name="context">The workflow execution context.</param>
/// <returns>The recognized dictation document.</returns>
protected override DMDocument Execute(CodeActivityContext context)
{
    this._notifyDictationProgress = context.GetExtension<INotifyDictationProgress>();

    // Obtain the runtime values of the input arguments.
    var audioFilePath = context.GetValue(this.AudioFilePath);
    var cultureName = context.GetValue(this.CultureName);
    var dictationDocumentPath = EpisodeFileTypes.DictationFile.ToFileName(
        audioFilePath.Substring(0, audioFilePath.Length - 4));

    if (File.Exists(dictationDocumentPath))
    {
        // A dictation document was produced earlier: load it instead of recognizing again.
        Console.WriteLine("Dictation Doc Exists!");
        using (Stream stream = new FileStream(dictationDocumentPath, FileMode.Open))
        {
            myDocument = XamlReader.Load(stream) as DMDocument;
        }

        Thread.Sleep(500);
        if (this._notifyDictationProgress != null)
        {
            this._notifyDictationProgress.Exists(dictationDocumentPath);
        }
    }
    else
    {
        Console.WriteLine();
        Console.WriteLine();
        Console.WriteLine("<Dictation Start>");

        if (SyncEngine == null)
        {
            SyncEngine = new DictationSyncEngine(cultureName);
            SyncEngine.SentenceRecognized += SyncEngine_SentenceRecognized;
            SyncEngine.RecognizeCompleted += SyncEngine_RecognizeCompleted;
        }
        else
        {
            // Wait for any previous recognition run to finish before reusing the engine.
            while (SyncEngine.IsBusy)
            {
                Thread.Sleep(500);
            }
        }

        // If a .wav path was supplied, normalize it to the decoded wave file name.
        if (audioFilePath.EndsWith(EpisodeFileTypes.WaveFile.ToExt()))
        {
            audioFilePath = EpisodeFileTypes.WaveFile.ToFileName(
                audioFilePath.Substring(0, audioFilePath.Length - 4));
        }
        SyncEngine.Process(audioFilePath);

        // Block until recognition completes (the RecognizeCompleted handler is expected to
        // set myResetEvent), then serialize the document as XAML.
        myResetEvent.WaitOne();
        myDocument.Dispatcher.Invoke(new Action(delegate()
        {
            var xamlString = XamlWriter.Save(myDocument);
            File.WriteAllText(dictationDocumentPath, xamlString);
        }));

        Console.WriteLine("</Dictation Completed>");
    }

    return this.myDocument;
}
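Because this Execute override belongs to a CodeActivity&lt;DMDocument&gt;, the activity can be driven synchronously with WorkflowInvoker. A minimal usage sketch follows; the class name DictationActivity and the input argument names AudioFilePath / CultureName are assumptions read off the code above, so adjust them to match the real activity.

// Minimal usage sketch (assumption: the activity class is called DictationActivity and
// exposes InArgument<string> AudioFilePath and CultureName, matching the Execute body above).
using System;
using System.Activities;
using System.Collections.Generic;

class InvokeSketch
{
    static void Main()
    {
        var activity = new DictationActivity();   // hypothetical class name for the CodeActivity<DMDocument>

        var inputs = new Dictionary<string, object>
        {
            { "AudioFilePath", @"C:\media\episode1.wav" },  // sample path for illustration
            { "CultureName", "en-US" }
        };

        // WorkflowInvoker runs the activity synchronously and returns its Result (the DMDocument).
        DMDocument document = WorkflowInvoker.Invoke<DMDocument>(activity, inputs);
        Console.WriteLine(document != null ? "Dictation document returned." : "No document produced.");
    }
}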