/// <summary>
/// Compiles a Pamela (.pam) lip sync file into a SpeechLipSyncLine, reading
/// timing entries from the [Speech] section and reporting problems to 'errors'.
/// </summary>
/// <param name="fileName">Path of the PAM file to read.</param>
/// <param name="errors">Receives a CompileError for each unparseable line.</param>
/// <returns>Parsed lip sync data with per-phenome end times populated.</returns>
private SpeechLipSyncLine CompilePAMFile(string fileName, CompileMessages errors)
{
    SpeechLipSyncLine syncDataForThisFile = new SpeechLipSyncLine();
    syncDataForThisFile.FileName = Path.GetFileNameWithoutExtension(fileName);
    string thisLine;
    bool inMainSection = false;
    int lineNumber = 0;
    // 'using' guarantees the reader is disposed even if parsing throws.
    using (StreamReader sr = new StreamReader(fileName))
    {
        while ((thisLine = sr.ReadLine()) != null)
        {
            lineNumber++;
            // Ordinal, case-insensitive match avoids culture-sensitive ToLower() pitfalls.
            if (thisLine.StartsWith("[speech]", StringComparison.OrdinalIgnoreCase))
            {
                inMainSection = true;
                continue;
            }
            if (inMainSection)
            {
                if (thisLine.TrimStart().StartsWith("["))
                {
                    // moved onto another section
                    break;
                }
                if (thisLine.IndexOf(':') > 0)
                {
                    string[] parts = thisLine.Split(':');
                    int xpos;
                    // Report a compile error rather than crashing with an unhandled
                    // FormatException when the offset is not numeric.
                    if (!Int32.TryParse(parts[0], out xpos))
                    {
                        errors.Add(new CompileError("Non-numeric phenome offset '" + parts[0] + "'", Path.GetFileName(fileName), lineNumber));
                        continue;
                    }
                    // Convert from Pamela XPOS into milliseconds
                    int milliSeconds = ((xpos / 15) * 1000) / 24;
                    string phenomeCode = parts[1].Trim().ToUpper();
                    int frameID = FindFrameNumberForPhenome(phenomeCode);
                    if (frameID < 0)
                    {
                        string friendlyFileName = Path.GetFileName(fileName);
                        errors.Add(new CompileError("No frame found to match phenome code '" + phenomeCode + "'", friendlyFileName, lineNumber));
                    }
                    else
                    {
                        syncDataForThisFile.Phenomes.Add(new SpeechLipSyncPhenome(milliSeconds, (short)frameID));
                    }
                }
            }
        }
    }
    syncDataForThisFile.Phenomes.Sort();
    // The PAM file contains start times: Convert to end times by shifting each
    // entry's offset to the following entry's offset; the final entry is padded
    // out by one second past its predecessor.
    for (int i = 0; i < syncDataForThisFile.Phenomes.Count - 1; i++)
    {
        syncDataForThisFile.Phenomes[i].EndTimeOffset = syncDataForThisFile.Phenomes[i + 1].EndTimeOffset;
    }
    if (syncDataForThisFile.Phenomes.Count > 1)
    {
        syncDataForThisFile.Phenomes[syncDataForThisFile.Phenomes.Count - 1].EndTimeOffset = syncDataForThisFile.Phenomes[syncDataForThisFile.Phenomes.Count - 2].EndTimeOffset + 1000;
    }
    return syncDataForThisFile;
}
/// <summary>
/// Compiles a Pamela (.pam) lip sync file into a SpeechLipSyncLine, reading
/// timing entries from the [Speech] section and reporting problems to 'errors'.
/// </summary>
/// <param name="fileName">Path of the Pamela file to read.</param>
/// <param name="errors">Receives a CompileError for each unparseable line.</param>
/// <returns>Parsed lip sync data with phoneme end times aligned by AlignPhonemeOffsets.</returns>
private SpeechLipSyncLine CompilePamelaFile(string fileName, CompileMessages errors)
{
    SpeechLipSyncLine syncDataForThisFile = new SpeechLipSyncLine();
    syncDataForThisFile.FileName = Path.GetFileNameWithoutExtension(fileName);
    string thisLine;
    bool inMainSection = false;
    int lineNumber = 0;
    // 'using' guarantees the reader is disposed even if parsing throws
    // (the bare sr.Close() was skipped on the exception path).
    using (StreamReader sr = new StreamReader(fileName))
    {
        while ((thisLine = sr.ReadLine()) != null)
        {
            lineNumber++;
            // Ordinal, case-insensitive match avoids culture-sensitive ToLower() pitfalls.
            if (thisLine.StartsWith("[speech]", StringComparison.OrdinalIgnoreCase))
            {
                inMainSection = true;
                continue;
            }
            if (inMainSection)
            {
                if (thisLine.TrimStart().StartsWith("["))
                {
                    // moved onto another section
                    break;
                }
                if (thisLine.IndexOf(':') > 0)
                {
                    string[] parts = thisLine.Split(':');
                    int part0;
                    // Convert from Pamela XPOS into milliseconds
                    if (!Int32.TryParse(parts[0], out part0))
                    {
                        string friendlyFileName = Path.GetFileName(fileName);
                        errors.Add(new CompileError("Non-numeric phoneme offset '" + parts[0] + "'", friendlyFileName, lineNumber));
                        continue;
                    }
                    int milliSeconds = ((part0 / 15) * 1000) / 24;
                    string phonemeCode = parts[1].Trim().ToUpper();
                    int frameID = FindFrameNumberForPhoneme(phonemeCode);
                    if (frameID < 0)
                    {
                        string friendlyFileName = Path.GetFileName(fileName);
                        errors.Add(new CompileError("No frame found to match phoneme code '" + phonemeCode + "'", friendlyFileName, lineNumber));
                    }
                    else
                    {
                        syncDataForThisFile.Phonemes.Add(new SpeechLipSyncPhoneme(milliSeconds, (short)frameID));
                    }
                }
            }
        }
    }
    AlignPhonemeOffsets(syncDataForThisFile);
    return (syncDataForThisFile);
}
/// <summary>
/// Compiles a Papagayo (.dat) lip sync file into a SpeechLipSyncLine. The first
/// line of the file is a heading and is skipped; each following line is expected
/// to be "&lt;frame&gt; &lt;phoneme&gt;". Problems are reported to 'errors'.
/// NOTE(review): lineNumber starts counting at the first data line, so reported
/// line numbers are one less than the physical file line — confirm intended.
/// </summary>
/// <param name="fileName">Path of the Papagayo file to read.</param>
/// <param name="errors">Receives a CompileError for each unparseable line.</param>
/// <returns>Parsed lip sync data with phoneme end times aligned by AlignPhonemeOffsets.</returns>
private SpeechLipSyncLine CompilePapagayoFile(string fileName, CompileMessages errors)
{
    SpeechLipSyncLine syncDataForThisFile = new SpeechLipSyncLine();
    syncDataForThisFile.FileName = Path.GetFileNameWithoutExtension(fileName);
    string thisLine;
    int lineNumber = 0;
    // 'using' guarantees the reader is disposed even if parsing throws.
    using (StreamReader sr = new StreamReader(fileName))
    {
        if ((thisLine = sr.ReadLine()) != null) // Skip over the first line (always a heading)
        {
            while ((thisLine = sr.ReadLine()) != null)
            {
                lineNumber++;
                if (thisLine.IndexOf(' ') > 0)
                {
                    string[] parts = thisLine.Split(' ');
                    int part0;
                    if (!Int32.TryParse(parts[0], out part0))
                    {
                        string friendlyFileName = Path.GetFileName(fileName);
                        errors.Add(new CompileError("Non-numeric phoneme offset '" + parts[0] + "'", friendlyFileName, lineNumber));
                        continue;
                    }
                    int xpos = part0;
                    if (xpos < 0) // Clamp negative XPOS to 0
                    {
                        xpos = 0;
                    }
                    // BUG FIX: use the clamped xpos (previously the unclamped part0
                    // was used, making the clamp dead code and allowing negative times).
                    int milliSeconds = (xpos * 1000) / 24;
                    string phonemeCode = parts[1].Trim().ToUpper();
                    int frameID = FindFrameNumberForPhoneme(phonemeCode);
                    if (frameID < 0)
                    {
                        string friendlyFileName = Path.GetFileName(fileName);
                        errors.Add(new CompileError("No frame found to match phoneme code '" + phonemeCode + "'", friendlyFileName, lineNumber));
                    }
                    else
                    {
                        syncDataForThisFile.Phonemes.Add(new SpeechLipSyncPhoneme(milliSeconds, (short)frameID));
                    }
                }
            }
        }
    }
    AlignPhonemeOffsets(syncDataForThisFile);
    return (syncDataForThisFile);
}
/// <summary>
/// Sorts the phonemes of a lip sync line and converts their stored start-time
/// offsets into end-time offsets: each phoneme takes the following phoneme's
/// offset, and the last phoneme is padded to one second past its predecessor.
/// No-op in practice for zero- or one-phoneme lines (a single phoneme keeps
/// its original offset unchanged, matching the previous behavior).
/// </summary>
/// <param name="syncDataForThisFile">Lip sync line whose phoneme list is adjusted in place.</param>
private void AlignPhonemeOffsets(SpeechLipSyncLine syncDataForThisFile)
{
    var phonemes = syncDataForThisFile.Phonemes;
    phonemes.Sort();
    int count = phonemes.Count;
    // The PAM/DAT files contain start times: Convert to end times by shifting
    // each entry's offset down from its successor.
    for (int index = 1; index < count; index++)
    {
        phonemes[index - 1].EndTimeOffset = phonemes[index].EndTimeOffset;
    }
    if (count > 1)
    {
        // After the shift, phonemes[count - 2] already holds the final start
        // time, so the last phoneme ends one second after it.
        phonemes[count - 1].EndTimeOffset = phonemes[count - 2].EndTimeOffset + 1000;
    }
}
/// <summary>
/// Compiles a Papagayo (.dat) lip sync file into a SpeechLipSyncLine. The first
/// line of the file is a heading and is skipped; each following line is expected
/// to be "&lt;frame&gt; &lt;phoneme&gt;". Problems are reported to 'errors'.
/// NOTE(review): lineNumber starts counting at the first data line, so reported
/// line numbers are one less than the physical file line — confirm intended.
/// </summary>
/// <param name="fileName">Path of the Papagayo file to read.</param>
/// <param name="errors">Receives a CompileError for each unparseable line.</param>
/// <returns>Parsed lip sync data with phoneme end times aligned by AlignPhonemeOffsets.</returns>
private SpeechLipSyncLine CompilePapagayoFile(string fileName, CompileMessages errors)
{
    SpeechLipSyncLine syncDataForThisFile = new SpeechLipSyncLine();
    syncDataForThisFile.FileName = Path.GetFileNameWithoutExtension(fileName);
    string thisLine;
    int lineNumber = 0;
    // 'using' guarantees the reader is disposed even if parsing throws.
    using (StreamReader sr = new StreamReader(fileName))
    {
        if ((thisLine = sr.ReadLine()) != null) // Skip over the first line (always a heading)
        {
            while ((thisLine = sr.ReadLine()) != null)
            {
                lineNumber++;
                if (thisLine.IndexOf(' ') > 0)
                {
                    string[] parts = thisLine.Split(' ');
                    int part0;
                    if (!Int32.TryParse(parts[0], out part0))
                    {
                        string friendlyFileName = Path.GetFileName(fileName);
                        errors.Add(new CompileError("Non-numeric phoneme offset '" + parts[0] + "'", friendlyFileName, lineNumber));
                        continue;
                    }
                    int xpos = part0;
                    if (xpos < 0) // Clamp negative XPOS to 0
                    {
                        xpos = 0;
                    }
                    // BUG FIX: use the clamped xpos (previously the unclamped part0
                    // was used, making the clamp dead code and allowing negative times).
                    int milliSeconds = (xpos * 1000) / 24;
                    string phonemeCode = parts[1].Trim().ToUpper();
                    int frameID = FindFrameNumberForPhoneme(phonemeCode);
                    if (frameID < 0)
                    {
                        string friendlyFileName = Path.GetFileName(fileName);
                        errors.Add(new CompileError("No frame found to match phoneme code '" + phonemeCode + "'", friendlyFileName, lineNumber));
                    }
                    else
                    {
                        syncDataForThisFile.Phonemes.Add(new SpeechLipSyncPhoneme(milliSeconds, (short)frameID));
                    }
                }
            }
        }
    }
    AlignPhonemeOffsets(syncDataForThisFile);
    return syncDataForThisFile;
}
/// <summary>
/// Compiles a Pamela (.pam) lip sync file into a SpeechLipSyncLine, reading
/// timing entries from the [Speech] section and reporting problems to 'errors'.
/// </summary>
/// <param name="fileName">Path of the Pamela file to read.</param>
/// <param name="errors">Receives a CompileError for each unparseable line.</param>
/// <returns>Parsed lip sync data with phoneme end times aligned by AlignPhonemeOffsets.</returns>
private SpeechLipSyncLine CompilePamelaFile(string fileName, CompileMessages errors)
{
    SpeechLipSyncLine syncDataForThisFile = new SpeechLipSyncLine();
    syncDataForThisFile.FileName = Path.GetFileNameWithoutExtension(fileName);
    string thisLine;
    bool inMainSection = false;
    int lineNumber = 0;
    // 'using' guarantees the reader is disposed even if parsing throws
    // (the bare sr.Close() was skipped on the exception path).
    using (StreamReader sr = new StreamReader(fileName))
    {
        while ((thisLine = sr.ReadLine()) != null)
        {
            lineNumber++;
            // Ordinal, case-insensitive match avoids culture-sensitive ToLower() pitfalls.
            if (thisLine.StartsWith("[speech]", StringComparison.OrdinalIgnoreCase))
            {
                inMainSection = true;
                continue;
            }
            if (inMainSection)
            {
                if (thisLine.TrimStart().StartsWith("["))
                {
                    // moved onto another section
                    break;
                }
                if (thisLine.IndexOf(':') > 0)
                {
                    string[] parts = thisLine.Split(':');
                    int part0;
                    // Convert from Pamela XPOS into milliseconds
                    if (!Int32.TryParse(parts[0], out part0))
                    {
                        string friendlyFileName = Path.GetFileName(fileName);
                        errors.Add(new CompileError("Non-numeric phoneme offset '" + parts[0] + "'", friendlyFileName, lineNumber));
                        continue;
                    }
                    int milliSeconds = ((part0 / 15) * 1000) / 24;
                    string phonemeCode = parts[1].Trim().ToUpper();
                    int frameID = FindFrameNumberForPhoneme(phonemeCode);
                    if (frameID < 0)
                    {
                        string friendlyFileName = Path.GetFileName(fileName);
                        errors.Add(new CompileError("No frame found to match phoneme code '" + phonemeCode + "'", friendlyFileName, lineNumber));
                    }
                    else
                    {
                        syncDataForThisFile.Phonemes.Add(new SpeechLipSyncPhoneme(milliSeconds, (short)frameID));
                    }
                }
            }
        }
    }
    AlignPhonemeOffsets(syncDataForThisFile);
    return syncDataForThisFile;
}
/// <summary>
/// Compiles a Pamela (.pam) lip sync file into a SpeechLipSyncLine, reading
/// timing entries from the [Speech] section and reporting problems to 'errors'.
/// </summary>
/// <param name="fileName">Path of the PAM file to read.</param>
/// <param name="errors">Receives a CompileError for each unparseable line.</param>
/// <returns>Parsed lip sync data with per-phenome end times populated.</returns>
private SpeechLipSyncLine CompilePAMFile(string fileName, CompileMessages errors)
{
    SpeechLipSyncLine syncDataForThisFile = new SpeechLipSyncLine();
    syncDataForThisFile.FileName = Path.GetFileNameWithoutExtension(fileName);
    string thisLine;
    bool inMainSection = false;
    int lineNumber = 0;
    // 'using' guarantees the reader is disposed even if parsing throws.
    using (StreamReader sr = new StreamReader(fileName))
    {
        while ((thisLine = sr.ReadLine()) != null)
        {
            lineNumber++;
            // Ordinal, case-insensitive match avoids culture-sensitive ToLower() pitfalls.
            if (thisLine.StartsWith("[speech]", StringComparison.OrdinalIgnoreCase))
            {
                inMainSection = true;
                continue;
            }
            if (inMainSection)
            {
                if (thisLine.TrimStart().StartsWith("["))
                {
                    // moved onto another section
                    break;
                }
                if (thisLine.IndexOf(':') > 0)
                {
                    string[] parts = thisLine.Split(':');
                    int xpos;
                    // Report a compile error rather than crashing with an unhandled
                    // FormatException when the offset is not numeric.
                    if (!Int32.TryParse(parts[0], out xpos))
                    {
                        errors.Add(new CompileError("Non-numeric phenome offset '" + parts[0] + "'", Path.GetFileName(fileName), lineNumber));
                        continue;
                    }
                    // Convert from Pamela XPOS into milliseconds
                    int milliSeconds = ((xpos / 15) * 1000) / 24;
                    string phenomeCode = parts[1].Trim().ToUpper();
                    int frameID = FindFrameNumberForPhenome(phenomeCode);
                    if (frameID < 0)
                    {
                        string friendlyFileName = Path.GetFileName(fileName);
                        errors.Add(new CompileError("No frame found to match phenome code '" + phenomeCode + "'", friendlyFileName, lineNumber));
                    }
                    else
                    {
                        syncDataForThisFile.Phenomes.Add(new SpeechLipSyncPhenome(milliSeconds, (short)frameID));
                    }
                }
            }
        }
    }
    syncDataForThisFile.Phenomes.Sort();
    // The PAM file contains start times: Convert to end times by shifting each
    // entry's offset to the following entry's offset; the final entry is padded
    // out by one second past its predecessor.
    for (int i = 0; i < syncDataForThisFile.Phenomes.Count - 1; i++)
    {
        syncDataForThisFile.Phenomes[i].EndTimeOffset = syncDataForThisFile.Phenomes[i + 1].EndTimeOffset;
    }
    if (syncDataForThisFile.Phenomes.Count > 1)
    {
        syncDataForThisFile.Phenomes[syncDataForThisFile.Phenomes.Count - 1].EndTimeOffset = syncDataForThisFile.Phenomes[syncDataForThisFile.Phenomes.Count - 2].EndTimeOffset + 1000;
    }
    return (syncDataForThisFile);
}