/// <summary>
/// Finds and loads the AutoSyncLanguageModel asset whose language field matches
/// <paramref name="languageName"/>. If the model recommends a phoneme set other
/// than the one configured in the project settings, the user is asked to confirm.
/// </summary>
/// <param name="languageName">Language name to match against each model's language field.</param>
/// <returns>
/// The matching model, or null when no model matches, the project settings or
/// phoneme set are missing, or the user declines the phoneme-set mismatch dialog.
/// </returns>
public static AutoSyncLanguageModel Load(string languageName)
{
    string[] assets = AssetDatabase.FindAssets("t:AutoSyncLanguageModel");

    // Get Settings File
    string[] guids = AssetDatabase.FindAssets("ProjectSettings t:LipSyncProject");
    string path = "";
    if (guids.Length > 0)
    {
        path = AssetDatabase.GUIDToAssetPath(guids[0]);
        if (guids.Length > 1)
        {
            Debug.LogWarning("LipSync: Multiple LipSyncProject files found. Only one will be used.");
        }
    }

    LipSyncProject settings = (LipSyncProject)AssetDatabase.LoadAssetAtPath(path, typeof(LipSyncProject));
    if (settings == null)
    {
        return null;
    }
    if (settings.phonemeSet == null)
    {
        return null;
    }

    foreach (string guid in assets)
    {
        AutoSyncLanguageModel model = AssetDatabase.LoadAssetAtPath<AutoSyncLanguageModel>(AssetDatabase.GUIDToAssetPath(guid));

        // BUGFIX: LoadAssetAtPath can return null (stale GUID / wrong type at path);
        // the original dereferenced model.language unconditionally and could throw.
        if (model == null)
        {
            continue;
        }

        if (model.language == languageName)
        {
            // Warn when the model was authored for a different phoneme set; an empty
            // recommendation means "any set is fine".
            if (model.recommendedPhonemeSet != settings.phonemeSet.scriptingName && !string.IsNullOrEmpty(model.recommendedPhonemeSet))
            {
                if (!EditorUtility.DisplayDialog("Wrong Phoneme Set", "Warning: You are using the '" + settings.phonemeSet.scriptingName + "' Phoneme Set, and this language model is designed for use with '" + model.recommendedPhonemeSet + "'. This may not provide usable results, are you sure you want to continue?", "Yes", "No"))
                {
                    return null;
                }
            }

            return model;
        }
    }

    return null;
}
/// <summary>
/// Returns the project-wide LipSyncProject settings asset. When no settings asset
/// exists yet, a new one is created with the default emotion names and colours,
/// saved under "Assets/Rogo Digital/LipSync Pro/", and returned.
/// </summary>
/// <returns>The existing (or freshly created) LipSyncProject asset.</returns>
public static LipSyncProject GetProjectFile()
{
    //Get Settings File
    string[] matches = AssetDatabase.FindAssets("ProjectSettings t:LipSyncProject");

    if (matches.Length == 0)
    {
        // Nothing found - build a settings asset with the default emotion palette.
        LipSyncProject created = ScriptableObject.CreateInstance<LipSyncProject>();
        created.emotions = new string[] { "Happy", "Serious", "Eyebrows Up", "Sad" };
        created.emotionColors = new Color[] {
            new Color(1f, 0.682352941f, 0f),
            new Color(0.317647059f, 0.317647059f, 0.317647059f),
            new Color(0.741176471f, 1, 0.396078431f),
            new Color(0f, 0.545098039f, 1f),
        };

        AssetDatabase.CreateAsset(created, "Assets/Rogo Digital/LipSync Pro/ProjectSettings.asset");
        AssetDatabase.Refresh();
        return created;
    }

    if (matches.Length > 1)
    {
        Debug.LogWarning("LipSync: Multiple LipSyncProject files found. Please ensure there is only one in your project to avoid ambiguity.");
    }

    string assetPath = AssetDatabase.GUIDToAssetPath(matches[0]);
    return (LipSyncProject)AssetDatabase.LoadAssetAtPath(assetPath, typeof(LipSyncProject));
}
/// <summary>
/// Converts recognizer output lines of the form "LABEL startTime ..." into
/// PhonemeMarker entries, translating recognizer labels to project phoneme names
/// via the language model's phoneme mapper (or a built-in CMU-style default when
/// the model provides none). Parsing stops at the first empty line.
/// </summary>
/// <param name="lines">Raw recognizer output lines.</param>
/// <param name="lm">Language model supplying the label-to-phoneme mapping.</param>
/// <param name="clip">Audio clip whose length normalises marker times to the 0-1 range.</param>
/// <returns>Markers for every label that mapped to a phoneme present in the current set.</returns>
private static List<PhonemeMarker> ParseOutput(string[] lines, AutoSyncLanguageModel lm, AudioClip clip)
{
    List<PhonemeMarker> results = new List<PhonemeMarker>();
    Dictionary<string, string> phonemeMapper = new Dictionary<string, string>();

    // Get Settings File
    string[] guids = AssetDatabase.FindAssets("ProjectSettings t:LipSyncProject");
    string path = "";
    if (guids.Length > 0)
    {
        path = AssetDatabase.GUIDToAssetPath(guids[0]);
        if (guids.Length > 1)
        {
            Debug.LogWarning("LipSync: Multiple LipSyncProject files found. Only one will be used.");
        }
    }

    LipSyncProject settings = (LipSyncProject)AssetDatabase.LoadAssetAtPath(path, typeof(LipSyncProject));
    if (settings == null)
    {
        LipSyncProject newSettings = ScriptableObject.CreateInstance<LipSyncProject>();
        newSettings.emotions = new string[] { "default" };
        newSettings.emotionColors = new Color[] { new Color(1f, 0.7f, 0.1f) };

        // BUGFIX: was CreateAsset(settings, ...) - that passed the known-null
        // reference instead of the instance just created, so the fallback asset
        // was never written correctly.
        AssetDatabase.CreateAsset(newSettings, "Assets/Rogo Digital/LipSync Pro/ProjectSettings.asset");
        AssetDatabase.Refresh();
        settings = newSettings;
        // NOTE(review): a freshly created settings asset has no phonemeSet assigned,
        // so the phoneme lookup loop below would throw - confirm upstream guarantees
        // a phoneme set before this path is reachable.
    }

    if (lm.phonemeMapper.Length == 0)
    {
        // Default Phoneme Mapper (CMU-style recognizer labels -> LipSync phoneme names)
        phonemeMapper = new Dictionary<string, string>() {
            // Vowels
            { "IY", "E" }, { "IH", "AI" }, { "EH", "E" }, { "AE", "AI" }, { "AH", "U" },
            { "UW", "O" }, { "UH", "U" }, { "AA", "AI" }, { "AO", "AI" }, { "EY", "AI" },
            { "AY", "AI" }, { "OY", "O" }, { "AW", "AI" }, { "OW", "O" }, { "ER", "U" },
            // Consonants
            { "JH", "CDGKNRSThYZ" }, { "L", "L" }, { "R", "CDGKNRSThYZ" }, { "Y", "CDGKNRSThYZ" },
            { "W", "WQ" }, { "M", "MBP" }, { "N", "CDGKNRSThYZ" }, { "NG", "CDGKNRSThYZ" },
            { "CH", "CDGKNRSThYZ" }, { "J", "CDGKNRSThYZ" }, { "DH", "CDGKNRSThYZ" }, { "B", "MBP" },
            { "D", "CDGKNRSThYZ" }, { "G", "CDGKNRSThYZ" }, { "P", "MBP" }, { "T", "CDGKNRSThYZ" },
            { "K", "CDGKNRSThYZ" }, { "Z", "CDGKNRSThYZ" }, { "ZH", "CDGKNRSThYZ" }, { "V", "FV" },
            { "F", "FV" }, { "TH", "CDGKNRSThYZ" }, { "S", "CDGKNRSThYZ" }, { "SH", "CDGKNRSThYZ" },
            { "HH", "CDGKNRSThYZ" },
        };
    }
    else
    {
        // LM Phoneme Mapper
        foreach (AutoSyncLanguageModel.PhonemeMapping mapping in lm.phonemeMapper)
        {
            phonemeMapper.Add(mapping.label, mapping.phonemeName);
        }
    }

    foreach (string line in lines)
    {
        // Recognizer output ends at the first blank line.
        if (string.IsNullOrEmpty(line))
        {
            break;
        }

        string[] tokens = line.Split(' ');
        try
        {
            if (tokens[0] != "SIL")
            {
                string phonemeName = phonemeMapper[tokens[0]];

                // BUGFIX: parse with the invariant culture - recognizer timestamps
                // always use '.' as decimal separator, and float.Parse would fail
                // (or misparse) under locales that use ','.
                float startTime = float.Parse(tokens[1], System.Globalization.CultureInfo.InvariantCulture) / clip.length;

                bool found = false;
                int phoneme;
                for (phoneme = 0; phoneme < settings.phonemeSet.phonemes.Length; phoneme++)
                {
                    if (settings.phonemeSet.phonemes[phoneme].name == phonemeName)
                    {
                        found = true;
                        break;
                    }
                }

                if (found)
                {
                    results.Add(new PhonemeMarker(phoneme, startTime));
                }
                else
                {
                    Debug.LogWarning("Phoneme mapper returned '" + phonemeName + "' but this phoneme does not exist in the current set. Skipping this entry.");
                }
            }
        }
        catch (IndexOutOfRangeException)
        {
            // BUGFIX: array indexing (tokens[1]) throws IndexOutOfRangeException,
            // not ArgumentOutOfRangeException - the original catch never matched,
            // so a line with a missing timestamp crashed the whole import.
            Debug.LogWarning("Phoneme Label missing from return data. Skipping this entry.");
        }
        catch (KeyNotFoundException)
        {
            Debug.LogWarning("Phoneme Label '" + tokens[0] + "' not found in phoneme mapper. Skipping this entry.");
        }
    }

    EditorUtility.ClearProgressBar();
    return results;
}