/// <summary>
/// Loads timing data from a JSON file on disk and wraps it in a view model.
/// </summary>
/// <param name="JsonFile">Reference to the JSON file to read.</param>
/// <returns>A view model built from the deserialized timing data.</returns>
public static TimingDataViewModel FromJsonFile(FileReference JsonFile)
{
    // Read, deserialize, and convert in a single pipeline.
    return FromTimingData(Json.Deserialize<TimingData>(File.ReadAllText(JsonFile.FullName)));
}
/// <summary>
/// Stores the shared data references for this plugin and starts the
/// underlying wrapper.
/// </summary>
/// <param name="telemetryData">Telemetry data instance to use.</param>
/// <param name="timingData">Timing data instance to use.</param>
/// <param name="analysisData">Analysis manager instance to use.</param>
/// <returns>Always true.</returns>
public override bool Start(TelemetryData telemetryData, TimingData timingData, AnalysisManager analysisData)
{
    _telemetryData = telemetryData;
    _timingData = timingData;
    _analysisData = analysisData;
    // Kick off data collection via the wrapper.
    _wrapper.Start();
    return(true);
}
/// <summary>
/// Stores the shared data references, then begins reading game data using the
/// plugin's cancellation token.
/// </summary>
/// <param name="telemetryData">Telemetry data instance to use.</param>
/// <param name="timingData">Timing data instance to use.</param>
/// <param name="analysisData">Analysis manager instance to use.</param>
/// <returns>Always true.</returns>
public override bool Start(TelemetryData telemetryData, TimingData timingData, AnalysisManager analysisData)
{
    _telemetryData = telemetryData;
    _timingData = timingData;
    _analysisData = analysisData;
    // _cancel.Token lets a later stop request end the read loop.
    // NOTE(review): presumably ReadData runs asynchronously or returns promptly — confirm.
    ReadData(_cancel.Token);
    return(true);
}
/// <summary>
/// Stores the HTTP server configuration. No listening starts here; the
/// server is started separately.
/// </summary>
/// <param name="path">Root directory the server serves files from.</param>
/// <param name="port">TCP port to listen on.</param>
/// <param name="gameState">Telemetry data instance held by the server.</param>
/// <param name="timingData">Timing data instance held by the server.</param>
/// <param name="ipAddress">Local address to bind.</param>
/// <param name="plugins">Loaded game plugins, keyed by plugin name.</param>
public HTTPServer(string path, int port, TelemetryData gameState, TimingData timingData, IPAddress ipAddress, Dictionary<string, IGame> plugins)
{
    _telemetryData = gameState;
    _timingData = timingData;
    _rootDirectory = path;
    _port = port;
    _ipAddress = ipAddress;
    _plugins = plugins;
}
/// <summary>
/// Thread entry point: creates and starts the embedded HTTP server, then
/// parks the thread so the server stays alive for the process lifetime.
/// </summary>
/// <param name="telemetryData">Telemetry data instance passed to the server.</param>
/// <param name="timingData">Timing data instance passed to the server.</param>
/// <param name="ipAddress">Local address the server binds to.</param>
/// <param name="port">TCP port the server listens on.</param>
/// <param name="httpServerPath">Root directory the server serves from.</param>
/// <param name="plugins">Loaded game plugins, keyed by name.</param>
private void HTTPServerThread(TelemetryData telemetryData, TimingData timingData, IPAddress ipAddress, int port, string httpServerPath, Dictionary<string, IGame> plugins)
{
    _httpServer = new HTTPServer(httpServerPath, port, telemetryData, timingData, ipAddress, plugins);
    _httpServer.Start();

    // Keep this thread alive without the ~1000 wake-ups per second the old
    // Thread.Sleep(1) loop caused; an infinite sleep parks the thread, and
    // the loop guards against a spurious return.
    while (true)
    {
        Thread.Sleep(Timeout.Infinite);
    }
}
/// <summary>
/// Handles a "connected" request by returning a result that carries the
/// current timing data.
/// </summary>
/// <param name="timingData">Timing data to echo back to the caller.</param>
/// <param name="postData">Posted form values; not used by this handler.</param>
/// <returns>A handler result whose Data is the supplied timing data.</returns>
public static TimingDataHandlerResult ProcessConnectedRequest(TimingData timingData, NameValueCollection postData)
{
    return new TimingDataHandlerResult { Data = timingData };
}
/// <summary>
/// Looks up the start time recorded for the given event key.
/// </summary>
/// <param name="eventKey">Concatenation of a timing item's Event and Detail values.</param>
/// <returns>The matching entry's StartTime, or null when no entry matches.</returns>
public DateTime? GetStartTime(string eventKey)
{
    // Build the key lookup, then use TryGetValue instead of the original
    // ContainsKey + indexer pair (which performed the hash lookup twice).
    var timingDataDictionary = TimingData.ToDictionary(item => item.Event + item.Detail, item => item);
    if (timingDataDictionary.TryGetValue(eventKey, out var entry))
    {
        return entry.StartTime;
    }
    return null;
}
/// <summary>
/// Looks up the elapsed seconds recorded for the given event key.
/// </summary>
/// <param name="eventKey">Concatenation of a timing item's Event and Detail values.</param>
/// <returns>The matching entry's ElapsedSeconds, or null when no entry matches.</returns>
public double? GetElapsedTime(string eventKey)
{
    // Build the key lookup, then use TryGetValue instead of the original
    // ContainsKey + indexer pair (which performed the hash lookup twice).
    var timingDataDictionary = TimingData.ToDictionary(item => item.Event + item.Detail, item => item);
    if (timingDataDictionary.TryGetValue(eventKey, out var entry))
    {
        return entry.ElapsedSeconds;
    }
    return null;
}
/// <summary>
/// Initializes the application: creates fresh data stores, loads game plugins
/// from the "Plugins" folder into the list view, wires process monitoring,
/// and starts the web server. Shows an error and closes when no plugins load.
/// </summary>
public void Startup()
{
    // Fresh shared data stores for this session.
    _telemetryData = new TelemetryData();
    _timingData = new TimingData();
    _analysisData = new AnalysisManager();

    lsvPlugins.Items.Clear();
    _plugins = new Dictionary<string, IGame>();
    ICollection<IGame> plugins = PluginLoader<IGame>.LoadPlugins("Plugins");
    if (plugins.Count > 0)
    {
        foreach (var item in plugins)
        {
            _plugins.Add(item.Name, item);

            // Build the list view row: icon slot, display name, version.
            // NOTE(review): lvText[3] is allocated but never assigned (stays null).
            string[] lvText = new string[4];
            lvText[0] = "";
            lvText[1] = item.DisplayName;
            lvText[2] = item.Version;
            ListViewItem lvItem = new ListViewItem(lvText);

            // Use the plugin's own icon when available, else the placeholder key.
            Bitmap pluginIcon = item.Icon;
            if (pluginIcon != null)
            {
                imageListPlugins.Images.Add(item.Name, pluginIcon);
                lvItem.ImageKey = item.Name;
            }
            else
            {
                lvItem.ImageKey = "missing";
            }
            lsvPlugins.Items.Add(lvItem);
        }

        // Watch for monitored game processes starting and stopping.
        _processMonitor = new ProcessMonitor(_plugins);
        _processMonitor.GameLoadedEvent += new ProcessMonitor.GameLoaded(GameLoaded);
        _processMonitor.GameClosedEvent += new ProcessMonitor.GameClosed(GameClosed);
        _processMonitor.StartProcessMonitor();

        _ipAddress = NetHelpers.GetLocalIpAddress();
        StartWebServer();
    }
    else
    {
        // Without plugins there is nothing to monitor; tell the user and close.
        MessageBox.Show("No Plugins Were Found, Application will now exit", "Plugin Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
        this.Close();
    }
}
/// <summary>
/// Computes (or recomputes) the critical path. This is called automatically if necessary when the Critical Path is requested.
/// </summary>
public void ComputeCriticalPath()
{
    // Start from clean collections so a recompute never sees stale nodes.
    m_criticalPath = new List<T>();
    m_timingData = new Dictionary<T, TimingData>();

    // Forward PERT pass from the start node sets early start/finish times.
    PropagateForward(TimingDataNodeFor(m_startNode));

    // Pin the finish node at its computed early start before the backward pass.
    TimingData tdFinish = TimingDataNodeFor(m_finishNode);
    tdFinish.Fix(tdFinish.EarlyStart, tdFinish.NominalDuration, true);

    // Backward pass sets late start/finish, then criticality is derived.
    PropagateBackward(TimingDataNodeFor(m_finishNode));
    AnalyzeCriticality();
}
// Scale factor applied when converting the offset to offsetMultiplier.
// Uncertain if this is the exact value, but it matches a common music sampling rate.
public float offsetConst = 44100f;

/// <summary>
/// Converts a parsed .sm file into the game's TimingData representation.
/// </summary>
/// <param name="smFile">The parsed StepMania file.</param>
/// <param name="songOffset">Extra per-song offset added to the file's own offset.</param>
/// <returns>The populated timing data.</returns>
public TimingData Convert(SmFile smFile, float songOffset)
{
    // NOTE(review): the sign flip here is unconfirmed — might need negating.
    var combinedOffset = -(smFile.offset + songOffset);

    var result = new TimingData();
    result.unknown1 = 0; // fake value; real meaning unknown
    result.offsetMultiplier = (long)(combinedOffset * offsetConst);
    result.entries = ConvertBpms(smFile, combinedOffset);
    result.lastEntry = ConvertLastBpm(smFile);
    return result;
}
/// <summary>
/// Initializes the subsystem: debug helper, render targets, clock, and frame
/// timing stats. Re-initialization terminates the previous state first.
/// </summary>
public static void Initialize()
{
    if (IsInitialized)
    {
        Terminate();
    }

    Debug = new CluwneDebug();
    _currentTarget = new RenderTarget[5];
    _timer = new Clock();
    FrameStats = new TimingData(_timer);

    // Set the flag only after all state exists, so a failure part-way through
    // construction cannot leave IsInitialized true with missing state.
    // (This also matches the ordering used by the other Initialize overload.)
    IsInitialized = true;
}
// TODO: Performance improvement if TDNode had its TDNode successors & predecessors retrievable directly.
/// <summary>
/// Performs a depth-first propagation along a path for which all predecessors' computations are complete,
/// adjusting early start & finish according to a PERT methodology.
/// </summary>
/// <param name="tdNode">The TimingData node.</param>
private void PropagateForward(TimingData tdNode)
{
    tdNode.EarlyFinish = tdNode.EarlyStart + tdNode.NominalDuration;
    foreach (TimingData successor in m_successors(tdNode.Subject).Select(n => TimingDataNodeFor(n)))
    {
        // Fixed nodes keep their early start; others are pushed no earlier
        // than this node's early finish.
        if (!successor.IsFixed)
        {
            successor.EarlyStart = DateTimeOperations.Max(successor.EarlyStart, tdNode.EarlyFinish);
        }
        // Only recurse once every predecessor of the successor has weighed in,
        // so each node is forward-propagated exactly once, with final inputs.
        successor.RegisterPredecessor();
        if (successor.AllPredecessorsHaveWeighedIn)
        {
            PropagateForward(successor);
        }
    }
}
/// <summary>
/// Gets the cached timing data node for the given client-domain node, lazily
/// creating (and caching) one on first request.
/// </summary>
/// <param name="node">The client-domain node.</param>
/// <returns>The cached or newly created timing data node.</returns>
private TimingData TimingDataNodeFor(T node)
{
    TimingData cached;
    if (m_timingData.TryGetValue(node, out cached))
    {
        return cached;
    }

    // Not seen before: build a node seeded from the client-supplied delegates
    // and remember it for subsequent lookups.
    TimingData created = new TimingData(
        node,
        m_isFixed(node),
        m_startTime(node),
        m_duration(node),
        (short)m_predecessors(node).Count(),
        (short)m_successors(node).Count());
    m_timingData.Add(node, created);
    return created;
}
/// <summary>
/// Performs a depth-first propagation backwards along a path for which all successors' computations
/// are complete, adjusting late start & finish according to a PERT methodology.
/// </summary>
/// <param name="tdNode">The TimingData node.</param>
private void PropagateBackward(TimingData tdNode)
{
    tdNode.LateStart = tdNode.LateFinish - tdNode.NominalDuration;
    foreach (TimingData predecessor in m_predecessors(tdNode.Subject).Select(n => TimingDataNodeFor(n)))
    {
        // Fixed nodes keep their late finish; others are pulled no later
        // than this node's late start.
        if (!predecessor.IsFixed)
        {
            predecessor.LateFinish = DateTimeOperations.Min(predecessor.LateFinish, tdNode.LateStart);
        }
        // Only recurse once every successor of the predecessor has weighed in,
        // so each node is backward-propagated exactly once, with final inputs.
        predecessor.RegisterSuccessor();
        if (predecessor.AllSuccessorsHaveWeighedIn)
        {
            PropagateBackward(predecessor);
        }
    }
}
/// <summary>
/// Recursively builds a view model tree mirroring a TimingData tree.
/// </summary>
/// <param name="TimingData">Root of the timing data to convert.</param>
/// <returns>The view model for the root, with children converted recursively.</returns>
public static TimingDataViewModel FromTimingData(TimingData TimingData)
{
    // Copy the scalar fields, then recurse into each child entry.
    TimingDataViewModel Result = new TimingDataViewModel()
    {
        Name = TimingData.Name,
        Type = TimingData.Type,
        Count = TimingData.Count,
        ExclusiveDuration = TimingData.ExclusiveDuration,
        HasChildren = TimingData.Children.Any(),
    };

    foreach (KeyValuePair<string, TimingData> Pair in TimingData.Children)
    {
        Result.Children.Add(FromTimingData(Pair.Value));
    }

    return Result;
}
/// <summary>
/// Stores the shared data references, resets plugin state, and begins reading
/// game data over the connection type selected in user settings (shared
/// memory or UDP).
/// </summary>
/// <param name="telemetryData">Telemetry data instance to use.</param>
/// <param name="timingData">Timing data instance to use.</param>
/// <param name="analysisData">Analysis manager instance to use.</param>
/// <returns>Always true.</returns>
public override bool Start(TelemetryData telemetryData, TimingData timingData, AnalysisManager analysisData)
{
    _telemetryData = telemetryData;
    _timingData = timingData;
    _analysisData = analysisData;
    Reset();

    // User-configurable transport: anything other than SharedMemory reads UDP.
    ConnectionType connectionType = (ConnectionType)Properties.Settings.Default.connectionType;
    if (connectionType == ProjectCars.ConnectionType.SharedMemory)
    {
        ReadData(_cancel.Token);
    }
    else
    {
        ReadUDPData(_cancel.Token);
    }
    return(true);
}
/// <summary>
/// Stores the send-timing parameters and derives the cached delay values
/// (per-send delay from the rate/ratio, plus the fixed base delay), then
/// captures an initial snapshot.
/// </summary>
/// <param name="delayMillis">Fixed base delay in milliseconds.</param>
/// <param name="sendRateRatio">Ratio combined with the send rate to derive the rate-based delay.</param>
/// <param name="sendRate">Send rate combined with the ratio to derive the rate-based delay.</param>
public static void BindTiming(ulong delayMillis, double sendRateRatio, float sendRate)
{
    _sendRate = sendRate;
    _ratio = sendRateRatio;

    // A zero rate or ratio, or a sign mismatch between the two, would make the
    // millisecond conversion below meaningless (or negative when cast to
    // ulong), so those cases yield no rate-derived delay.
    if (((sendRate == 0f) || (sendRateRatio == 0.0)) || ((sendRate < 0f) != (sendRateRatio < 0.0)))
    {
        _delayFromSendRateMillis = 0L;
    }
    else
    {
        // Rate-derived delay in ms = 1000 * ratio / rate, rounded up.
        _delayFromSendRateMillis = (ulong)Math.Ceiling((double)((1000.0 * sendRateRatio) / ((double)sendRate)));
    }

    // Cache the component and combined delays in both ms and seconds.
    _delayMillis = delayMillis;
    _totalDelayMillis = _delayFromSendRateMillis + _delayMillis;
    _delaySeconds = _delayMillis * 0.001;
    _delayFromSendRateSeconds = _delayFromSendRateMillis * 0.001;
    _totalDelaySeconds = _totalDelayMillis * 0.001;

    // Delta starts at the negated total delay.
    // NOTE(review): presumably so the first send happens after the full delay — confirm against the update loop.
    _deltaSeconds = -_totalDelaySeconds;
    @struct = Capture();
}
/// <summary>
/// Deserializes a TimingDataViewModel from a binary reader. For aggregate
/// timing data, also reads the binary-blob lookup table and the raw blob
/// bytes, and flattens the aggregate child lists to top level.
/// </summary>
/// <param name="Reader">Reader positioned at serialized timing data.</param>
/// <returns>The deserialized view model.</returns>
private static TimingDataViewModel FromBinaryReader(BinaryReader Reader)
{
    // Read in the timing data.
    TimingData DeserializedTimingData = new TimingData(Reader);
    TimingDataViewModel ViewModel = FromTimingData(DeserializedTimingData);

    // If this is an aggregate, read in the look up table and the binary blobs, and also de-parent the
    // include, class, and function aggregate lists.
    if (ViewModel.Type == TimingDataType.Aggregate)
    {
        // Lookup table: blob name -> (offset, compressed size, decompressed size).
        ViewModel.BinaryBlobLookupTable = new Dictionary<string, BinaryBlob>();
        int BinaryBlobCount = Reader.ReadInt32();
        for (int i = 0; i < BinaryBlobCount; ++i)
        {
            string BlobName = Reader.ReadString();
            int BlobOffset = Reader.ReadInt32();
            int BlobCompressedSize = Reader.ReadInt32();
            int BlobDecompressedSize = Reader.ReadInt32();
            ViewModel.BinaryBlobLookupTable.Add(BlobName, new BinaryBlob() { Offset = BlobOffset, CompressedSize = BlobCompressedSize, DecompressedSize = BlobDecompressedSize });
        }

        // Allocate the memory for the binary blobs then copy them into it.
        // Everything remaining in the stream belongs to the blobs.
        int BinaryBlobLength = (int)(Reader.BaseStream.Length - Reader.BaseStream.Position);
        ViewModel.BinaryBlobBytes = Reader.ReadBytes(BinaryBlobLength);

        // De-parent: children of each summary child after the first drop to level 0.
        foreach (TreeGridElement SummaryChild in ViewModel.Children.Skip(1))
        {
            foreach (TreeGridElement Child in SummaryChild.Children)
            {
                Child.Level = 0;
            }
        }
    }
    return(ViewModel);
}
/// <summary>
/// Initializes the rendering subsystem: creates the main window, clock,
/// frame timing stats, render targets, and clipping viewport, then hooks
/// OpenTK into SFML's OpenGL context. Re-initialization terminates the
/// previous state first.
/// </summary>
public static void Initialize()
{
    if (IsInitialized)
    {
        Terminate();
    }

    Screen = new CluwneWindow(CluwneLib.Video.getVideoMode(), "Developer Station 14", CluwneLib.Video.getWindowStyle());
    _timer = new Clock();
    FrameStats = new TimingData(_timer);
    renderTargetArray = new RenderTarget[5];
    CurrentClippingViewport = new Viewport(0, 0, Screen.Size.X, Screen.Size.Y);
    IsInitialized = true;

    //Hook OpenTK into SFMLs Opengl
    OpenTK.Toolkit.Init(new OpenTK.ToolkitOptions
    {
        // Non-Native backend doesn't have a default GetAddress method
        Backend = OpenTK.PlatformBackend.PreferNative
    });
    new GraphicsContext(OpenTK.ContextHandle.Zero, null);
}
/// <summary>
/// Creates the service around the supplied timing data store.
/// </summary>
/// <param name="timingData">Timing data instance exposed via the Timing property.</param>
public BaseTimingService(TimingData timingData)
{
    Timing = timingData;
}
/// <summary>
/// Default start hook. The base implementation ignores its arguments, does
/// nothing, and reports success; derived types override it to begin work.
/// </summary>
/// <param name="telemetryData">Telemetry data instance (unused here).</param>
/// <param name="timingData">Timing data instance (unused here).</param>
/// <param name="analysisData">Analysis manager instance (unused here).</param>
/// <returns>Always true.</returns>
public virtual bool Start(TelemetryData telemetryData, TimingData timingData, AnalysisManager analysisData) => true;
/// <summary>
/// Creates a timing service operating on the supplied timing data store.
/// </summary>
/// <param name="timingData">Timing data instance forwarded to the base service.</param>
public TimingService(TimingData timingData) : base(timingData)
{
}
/// <summary>
/// Exports a set of incidents to two CSV files: one row per incident
/// (incidentFile) and one row per response (responseFile), with benchmark
/// timings flattened into named columns. Errors are logged, not rethrown.
/// </summary>
/// <param name="incidents">Incidents to export.</param>
/// <param name="incidentFile">Path of the incident-level CSV to write.</param>
/// <param name="responseFile">Path of the response-level CSV to write.</param>
public static void CreateCsvs(DataSet<IncidentData> incidents, string incidentFile, string responseFile)
{
    const string fn = "IncidentDataTools.CreateCsvs()";
    Type dtype = typeof(IncidentDataTools); // NOTE(review): unused local — candidate for removal
    try
    {
        // First pass: collect the union of field names and benchmark names so
        // both CSVs get a stable, complete column set.
        HashSet<string> incidentDataFields = new HashSet<string>();
        HashSet<string> responseDataFields = new HashSet<string>();
        HashSet<string> benchmarkNames = new HashSet<string>();
        foreach (IncidentData incident in incidents)
        {
            foreach (string key in incident.Data.Keys)
            {
                if (!s_ignoredIncidentDataFields.Contains(key))
                {
                    incidentDataFields.Add(key);
                }
            }
            foreach (ResponseData response in incident.Responses)
            {
                foreach (string key in response.Data.Keys)
                {
                    if (!s_ignoredResponseDataFields.Contains(key))
                    {
                        responseDataFields.Add(key);
                    }
                }
                foreach (TimingData benchmark in response.TimingData)
                {
                    benchmarkNames.Add(benchmark.Name);
                }
            }
        }

        // Second pass: build one dynamic record per incident and per response.
        List<dynamic> incidentRecords = new List<dynamic>();
        List<dynamic> responseRecords = new List<dynamic>();
        foreach (IncidentData incident in incidents)
        {
            dynamic incidentRecord = new ExpandoObject();
            foreach (string field in incidentDataFields)
            {
                IDictionary<string, object> inc_dict = incidentRecord as IDictionary<string, object>;
                if (inc_dict.ContainsKey(field))
                {
                    LogHelper.LogErrOnce(fn, "Fieldname '" + field + "' is apparently duplicated in the incident data map");
                }
                else if (incident.Data.ContainsKey(field))
                {
                    inc_dict.Add(field, incident.Data[field]);
                }
                else
                {
                    // Missing fields become empty cells so every row has the same columns.
                    inc_dict.Add(field, string.Empty);
                }
            }
            foreach (ResponseData response in incident.Responses)
            {
                dynamic responseRecord = new ExpandoObject();
                responseRecord.Id = incident.Id; // links the response row back to its incident
                IDictionary<string, object> rsp_dict = responseRecord as IDictionary<string, object>;
                foreach (string field in responseDataFields)
                {
                    if (rsp_dict.ContainsKey(field))
                    {
                        LogHelper.LogErrOnce(fn, "Fieldname '" + field + "' is apparently duplicated in the response data map");
                    }
                    else if (response.Data.ContainsKey(field))
                    {
                        rsp_dict.Add(field, response.Data[field]);
                    }
                    else
                    {
                        rsp_dict.Add(field, string.Empty);
                    }
                }
                // One column per benchmark name; a response lacking the
                // benchmark gets an empty cell.
                foreach (string benchmarkName in benchmarkNames)
                {
                    TimingData benchmark = (from bmk in response.TimingData where bmk.Name == benchmarkName select bmk).FirstOrDefault();
                    if (rsp_dict.ContainsKey(benchmarkName))
                    {
                        LogHelper.LogErrOnce(fn, "Benchmark '" + benchmarkName + "' is apparently duplicated in the benchmark data map");
                    }
                    else if (benchmark != null)
                    {
                        // Prefer an explicit DateTime entry, then the numeric value.
                        object value;
                        if (benchmark.Data.ContainsKey("DateTime"))
                        {
                            value = benchmark.Data["DateTime"];
                        }
                        else if (!double.IsNaN(benchmark.Value))
                        {
                            value = benchmark.Value;
                        }
                        else
                        {
                            value = "";
                        }
                        rsp_dict.Add(benchmarkName, value);
                    }
                    else
                    {
                        rsp_dict.Add(benchmarkName, string.Empty);
                    }
                }
                responseRecords.Add(responseRecord);
            }
            incidentRecords.Add(incidentRecord);
        }

        // Serialize each record list through CsvHelper, then write to disk.
        using (StringWriter writer = new StringWriter())
        {
            using (CsvHelper.CsvWriter csv = new CsvHelper.CsvWriter(writer, CultureInfo.CurrentCulture))
            {
                csv.WriteRecords(incidentRecords);
            }
            File.WriteAllText(incidentFile, writer.ToString());
            LogHelper.DebugMessage(incidentRecords.Count + " incident records written to " + incidentFile);
        }
        using (StringWriter writer = new StringWriter())
        {
            using (CsvHelper.CsvWriter csv = new CsvHelper.CsvWriter(writer, CultureInfo.CurrentCulture))
            {
                csv.WriteRecords(responseRecords);
            }
            File.WriteAllText(responseFile, writer.ToString());
            LogHelper.DebugMessage(responseRecords.Count + " response records written to " + responseFile);
        }
    }
    catch (Exception ex)
    {
        // Deliberate catch-all: export is best-effort; failures are logged.
        LogHelper.LogException(ex, "Error converting Incident Data to CSVs", true);
    }
}
/// <summary>
/// Command-line entry point. Parses a FaceFX XML export, converts its phoneme
/// timings to SmartBody visemes using the selected mapping ("sbm" or "sbm2"),
/// and writes one SmartBody .bml.txt file per processed animation.
/// </summary>
static void Main(string[] args)
{
    string inputFile;
    string animToProcess = "";
    string outputFile;
    bool remapAnalysisActors = false;
    string mapping = "sbm2"; // default phoneme-to-viseme mapping

    // No arguments: print usage and quit.
    if (args.Length < 1)
    {
        Console.WriteLine();
        Console.WriteLine("usage: VisemeSchedulerFacefx [-mapping sbm|sbm2] [-remap] <facefx .xml file> [<animation to process>] [<smartbody .bml.txt file>]");
        Console.WriteLine(" -remap will map 'Analysis Actor' names to action units (where possible)");
        Console.WriteLine(" -mapping will use the sbm mapping or the sbm2 mapping");
        Console.WriteLine(" for <lips> section only. Doesn't affect the <curves> section. Can be:");
        Console.WriteLine(" -mapping sbm");
        Console.WriteLine(" -mapping sbm2");
        Console.WriteLine(" defaults is sbm2");
        return;

        //inputFile = @"example.xml";
        //animToProcess = @"line2";
    }

    // Option parsing; options are expected in the order -mapping then -remap.
    int argIndex = 0;
    if (args[argIndex] == "-mapping")
    {
        if (args.Length >= argIndex + 1 + 1)
        {
            mapping = args[argIndex + 1];
            argIndex += 2;
        }
    }
    if (args[argIndex] == "-remap")
    {
        remapAnalysisActors = true;
        argIndex++;
    }

    // Positional arguments: input file, optional animation name, optional output file.
    if (args.Length <= argIndex + 1)
    {
        inputFile = args[argIndex];
    }
    else
    {
        inputFile = args[argIndex];
        animToProcess = args[argIndex + 1];
    }
    if (args.Length >= argIndex + 3)
    {
        outputFile = args[argIndex + 2];
    }
    else
    {
        outputFile = animToProcess + ".bml.txt";
    }

    // Scan the FaceFX export for animation elements, reading timing data from
    // each matching one (or all of them when no name was given).
    List<TimingData> timingData = new List<TimingData>();
    XmlReader xmlReader = XmlReader.Create(inputFile);
    while (xmlReader.Read())
    {
        // animation line looks like:
        // <animation name="line2" language="USEnglish" analysis_actor="Default" audio_path="...\Audio\line2.wav" audio_path_full="...\Audio\line2.wav">
        if (xmlReader.NodeType == XmlNodeType.Element && xmlReader.Name == "animation")
        {
            string name = xmlReader.GetAttribute("name");
            // if animToProcess hasn't been specified, process all animations in the file
            if (animToProcess == "")
            {
                TimingData data = new TimingData();
                data.name = name;
                data.outputFile = name + ".bml.txt"; // derive output name per animation
                ReadXMLFromAnimationAttr(xmlReader, data, remapAnalysisActors);
                timingData.Add(data);
            }
            else if (animToProcess == name)
            {
                TimingData data = new TimingData();
                data.name = name;
                data.outputFile = outputFile;
                ReadXMLFromAnimationAttr(xmlReader, data, remapAnalysisActors);
                timingData.Add(data);
                break; // only one animation was requested
            }
        }
    }
    xmlReader.Close();

    foreach (TimingData d in timingData)
    {
        // Historical Impersonator reference tables, kept for comparison:
        //                               0     5     10     15     20     25     30     35     40
        // string [] phonemeIndex =   { "Iy", "Ih", "Eh", "Ey", "Ae", "Aa", "Aw", "Ay", "Ah", "Ao", "Oy", "Ow", "Uh", "Uw", "Er", "Ax", "S", "Sh", "Z", "Zh", "F", "Th", "V", "Dh", "M", "N", "Ng", "L", "R", "W", "Y", "Hh", "B", "D", "Jh", "G", "P", "T", "K", "Ch", "Sil", "ShortSil", "Flap" };
        // string [] phonemeToViseme = { "EE", "Ih", "Ih", "Ih", "Ih", "Ao", "Ih", "Ih", "Ih", "Ao", "oh", "oh", "oh", "oh", "Er", "Ih", "Z", "j", "Z", "j", "F", "Th", "F", "Th", "BMP", "NG", "NG", "D", "R", "OO", "OO", "Ih", "BMP", "D", "j", "KG", "BMP", "D", "KG", "j", "_", "_", "_" };

        // NOTE(review): this map is rebuilt for every animation; it could be
        // constructed once before the loop.
        Dictionary<string, string> phonemeToVisemeMap = new Dictionary<string, string>();
        // taken from the SBM column in facefx-phoneme-to-viseme-map.xls
        if (mapping == "sbm")
        {
            phonemeToVisemeMap.Add("P", "BMP");
            phonemeToVisemeMap.Add("B", "BMP");
            phonemeToVisemeMap.Add("T", "D");
            phonemeToVisemeMap.Add("D", "D");
            phonemeToVisemeMap.Add("K", "KG");
            phonemeToVisemeMap.Add("G", "KG");
            phonemeToVisemeMap.Add("M", "BMP");
            phonemeToVisemeMap.Add("N", "NG");
            phonemeToVisemeMap.Add("NG", "NG");
            phonemeToVisemeMap.Add("RA", "Er");
            phonemeToVisemeMap.Add("RU", "Er");
            phonemeToVisemeMap.Add("FLAP", "D");
            phonemeToVisemeMap.Add("PH", "F");
            phonemeToVisemeMap.Add("F", "F");
            phonemeToVisemeMap.Add("V", "F");
            phonemeToVisemeMap.Add("TH", "Th");
            phonemeToVisemeMap.Add("DH", "Th");
            phonemeToVisemeMap.Add("S", "Z");
            phonemeToVisemeMap.Add("Z", "Z");
            phonemeToVisemeMap.Add("SH", "j");
            phonemeToVisemeMap.Add("ZH", "j");
            phonemeToVisemeMap.Add("CX", "Ih");
            phonemeToVisemeMap.Add("X", "Ih");
            phonemeToVisemeMap.Add("GH", "KG");
            phonemeToVisemeMap.Add("HH", "Ih");
            phonemeToVisemeMap.Add("R", "R");
            phonemeToVisemeMap.Add("Y", "OO");
            phonemeToVisemeMap.Add("L", "Th");
            phonemeToVisemeMap.Add("W", "Ao");
            phonemeToVisemeMap.Add("H", "oh");
            phonemeToVisemeMap.Add("TS", "D");
            phonemeToVisemeMap.Add("CH", "KG");
            phonemeToVisemeMap.Add("JH", "KG");
            phonemeToVisemeMap.Add("IY", "EE");
            phonemeToVisemeMap.Add("E", "Ih");
            phonemeToVisemeMap.Add("EN", "Ih");
            phonemeToVisemeMap.Add("EH", "Ih");
            phonemeToVisemeMap.Add("A", "Ao");
            phonemeToVisemeMap.Add("AA", "Ao");
            phonemeToVisemeMap.Add("AAN", "Ao");
            phonemeToVisemeMap.Add("AO", "Ao");
            phonemeToVisemeMap.Add("AON", "Ao");
            phonemeToVisemeMap.Add("O", "Ao");
            phonemeToVisemeMap.Add("ON", "Ih");
            phonemeToVisemeMap.Add("UW", "oh");
            phonemeToVisemeMap.Add("UY", "OO");
            phonemeToVisemeMap.Add("EU", "OO");
            phonemeToVisemeMap.Add("OE", "oh");
            phonemeToVisemeMap.Add("OEN", "oh");
            phonemeToVisemeMap.Add("AH", "Ih");
            phonemeToVisemeMap.Add("IH", "Ih");
            phonemeToVisemeMap.Add("UU", "oh");
            phonemeToVisemeMap.Add("UH", "oh");
            phonemeToVisemeMap.Add("AX", "Ih");
            phonemeToVisemeMap.Add("UX", "Ih");
            phonemeToVisemeMap.Add("AE", "Ih");
            phonemeToVisemeMap.Add("ER", "Er");
            phonemeToVisemeMap.Add("AXR", "Er");
            phonemeToVisemeMap.Add("EXR", "Er");
            phonemeToVisemeMap.Add("EY", "Ih");
            phonemeToVisemeMap.Add("AW", "Ih");
            phonemeToVisemeMap.Add("AY", "Ih");
            phonemeToVisemeMap.Add("OY", "oh");
            phonemeToVisemeMap.Add("OW", "oh");
            phonemeToVisemeMap.Add("SIL", "_");
        }
        else if (mapping == "sbm2")
        {
            // taken from an FaceFX File->Export XML Actor from example_sbm2_mapping.facefx
            phonemeToVisemeMap.Add("P", "BMP");
            phonemeToVisemeMap.Add("B", "BMP");
            phonemeToVisemeMap.Add("T", "D");
            phonemeToVisemeMap.Add("D", "D");
            phonemeToVisemeMap.Add("M", "BMP");
            phonemeToVisemeMap.Add("RA", "L");
            phonemeToVisemeMap.Add("RU", "Er");
            phonemeToVisemeMap.Add("FLAP", "D");
            phonemeToVisemeMap.Add("PH", "F");
            phonemeToVisemeMap.Add("F", "F");
            phonemeToVisemeMap.Add("V", "F");
            phonemeToVisemeMap.Add("TH", "Th");
            phonemeToVisemeMap.Add("DH", "Th");
            phonemeToVisemeMap.Add("S", "Z");
            phonemeToVisemeMap.Add("Z", "Z");
            phonemeToVisemeMap.Add("R", "R");
            phonemeToVisemeMap.Add("L", "L");
            phonemeToVisemeMap.Add("E", "Eh");
            phonemeToVisemeMap.Add("EN", "Eh");
            phonemeToVisemeMap.Add("EH", "Eh");
            phonemeToVisemeMap.Add("A", "Aa");
            phonemeToVisemeMap.Add("IH", "Ih");
            phonemeToVisemeMap.Add("ER", "Er");
            phonemeToVisemeMap.Add("AXR", "Er");
            phonemeToVisemeMap.Add("EXR", "Er");
            phonemeToVisemeMap.Add("AY", "Ay");
            phonemeToVisemeMap.Add("ON", "Ow");
            phonemeToVisemeMap.Add("AX", "Ah");
            phonemeToVisemeMap.Add("UX", "Ah");
            phonemeToVisemeMap.Add("AE", "Ah");
            phonemeToVisemeMap.Add("AA", "Aa");
            phonemeToVisemeMap.Add("AAN", "Aa");
            phonemeToVisemeMap.Add("AO", "Aa");
            phonemeToVisemeMap.Add("AON", "Aa");
            phonemeToVisemeMap.Add("O", "Ow");
            phonemeToVisemeMap.Add("EY", "Eh");
            phonemeToVisemeMap.Add("UW", "W");
            phonemeToVisemeMap.Add("OW", "Ow");
            phonemeToVisemeMap.Add("OY", "Oy");
            phonemeToVisemeMap.Add("H", "H");
            phonemeToVisemeMap.Add("SH", "Sh");
            phonemeToVisemeMap.Add("ZH", "Sh");
            phonemeToVisemeMap.Add("N", "D");
            phonemeToVisemeMap.Add("NG", "D");
            phonemeToVisemeMap.Add("Y", "Sh");
            phonemeToVisemeMap.Add("UY", "W");
            phonemeToVisemeMap.Add("EU", "W");
            phonemeToVisemeMap.Add("IY", "Ih");
            phonemeToVisemeMap.Add("K", "Kg");
            phonemeToVisemeMap.Add("G", "Kg");
            phonemeToVisemeMap.Add("GH", "Kg");
            phonemeToVisemeMap.Add("JH", "Sh");
            phonemeToVisemeMap.Add("CH", "Sh");
            phonemeToVisemeMap.Add("CX", "H");
            phonemeToVisemeMap.Add("X", "H");
            phonemeToVisemeMap.Add("HH", "H");
            phonemeToVisemeMap.Add("W", "W");
            phonemeToVisemeMap.Add("TS", "Z");
            phonemeToVisemeMap.Add("OE", "W");
            phonemeToVisemeMap.Add("OEN", "W");
            phonemeToVisemeMap.Add("UU", "W");
            phonemeToVisemeMap.Add("AH", "Ah");
            phonemeToVisemeMap.Add("UH", "W");
            phonemeToVisemeMap.Add("AW", "Aw");
            phonemeToVisemeMap.Add("SIL", "_");
        }

        // Resolve each phoneme to its viseme.
        // NOTE(review): an unmapped phoneme — or an unrecognized -mapping
        // value, which leaves the map empty — throws KeyNotFoundException here.
        for (int i = 0; i < d.phonemeData.Count; i++)
        {
            d.phonemeData[i].visemeMatch = phonemeToVisemeMap[d.phonemeData[i].phoneme];
        }

        // Emit the SmartBody BML file: <speech> with word sync points, then
        // <lips> viseme entries, then the raw FaceFX <curves>.
        XmlWriterSettings xmlWriterSettings = new XmlWriterSettings();
        xmlWriterSettings.Indent = true;
        xmlWriterSettings.IndentChars = (" ");
        XmlWriter xmlWriter = XmlWriter.Create(d.outputFile, xmlWriterSettings);
        xmlWriter.WriteStartElement("bml");
        xmlWriter.WriteStartElement("speech");
        xmlWriter.WriteAttributeString("id", "sp1");
        xmlWriter.WriteAttributeString("start", "0.0");
        xmlWriter.WriteAttributeString("ready", "0.1");
        xmlWriter.WriteAttributeString("stroke", "0.1");
        xmlWriter.WriteAttributeString("relax", "0.2");
        xmlWriter.WriteAttributeString("end", "0.2");
        xmlWriter.WriteStartElement("text");
        int timeMarker = 0;
        for (int i = 0; i < d.wordBreakData.Count; i++)
        {
            // Word-start sync point.
            xmlWriter.WriteStartElement("sync");
            xmlWriter.WriteAttributeString("id", "T" + timeMarker.ToString());
            xmlWriter.WriteAttributeString("time", d.wordBreakData[i].start.ToString());
            xmlWriter.WriteEndElement();
            timeMarker++;
            xmlWriter.WriteString(d.wordBreakData[i].word);
            // Manual whitespace keeps the sync markers readably indented.
            xmlWriter.WriteWhitespace(xmlWriterSettings.NewLineChars);
            xmlWriter.WriteWhitespace(xmlWriterSettings.IndentChars);
            xmlWriter.WriteWhitespace(xmlWriterSettings.IndentChars);
            xmlWriter.WriteWhitespace(xmlWriterSettings.IndentChars);
            // Word-end sync point.
            xmlWriter.WriteStartElement("sync");
            xmlWriter.WriteAttributeString("id", "T" + timeMarker.ToString());
            xmlWriter.WriteAttributeString("time", d.wordBreakData[i].end.ToString());
            xmlWriter.WriteEndElement();
            timeMarker++;
            xmlWriter.WriteWhitespace(xmlWriterSettings.NewLineChars);
            xmlWriter.WriteWhitespace(xmlWriterSettings.IndentChars);
            xmlWriter.WriteWhitespace(xmlWriterSettings.IndentChars);
            if (i != d.wordBreakData.Count - 1)
            {
                xmlWriter.WriteWhitespace(xmlWriterSettings.IndentChars);
            }
        }
        xmlWriter.WriteEndElement(); // text
        xmlWriter.WriteStartElement("description");
        xmlWriter.WriteAttributeString("level", "1");
        xmlWriter.WriteAttributeString("type", "audio/x-wav");
        xmlWriter.WriteStartElement("file");
        xmlWriter.WriteAttributeString("ref", d.name);
        xmlWriter.WriteEndElement();
        xmlWriter.WriteEndElement(); // description
        xmlWriter.WriteEndElement(); // speech
        // One <lips> entry per phoneme, spanning its start..end window.
        for (int i = 0; i < d.phonemeData.Count; i++)
        {
            xmlWriter.WriteStartElement("lips");
            xmlWriter.WriteAttributeString("viseme", d.phonemeData[i].visemeMatch);
            xmlWriter.WriteAttributeString("articulation", "1.0");
            xmlWriter.WriteAttributeString("start", d.phonemeData[i].start.ToString());
            xmlWriter.WriteAttributeString("ready", d.phonemeData[i].start.ToString());
            xmlWriter.WriteAttributeString("relax", d.phonemeData[i].end.ToString());
            xmlWriter.WriteAttributeString("end", d.phonemeData[i].end.ToString());
            xmlWriter.WriteEndElement();
        }
        // The FaceFX curves are passed through unmodified.
        xmlWriter.WriteStartElement("curves");
        for (int i = 0; i < d.facefxCurveDataXML.Count; i++)
        {
            xmlWriter.WriteStartElement("curve");
            xmlWriter.WriteAttributeString("name", d.facefxCurveDataXML[i].name);
            xmlWriter.WriteAttributeString("num_keys", d.facefxCurveDataXML[i].numKeys.ToString());
            xmlWriter.WriteAttributeString("owner", d.facefxCurveDataXML[i].owner);
            xmlWriter.WriteString(d.facefxCurveDataXML[i].curveData);
            xmlWriter.WriteEndElement();
        }
        xmlWriter.WriteEndElement();
        xmlWriter.WriteEndElement(); // bml
        xmlWriter.WriteWhitespace(xmlWriterSettings.NewLineChars);
        // NOTE(review): xmlWriter is not disposed on an exception; a using
        // block would be safer.
        xmlWriter.Close();
    }
}
/// <summary>
/// Reads one FaceFX animation element's subtree, filling the TimingData with
/// its phoneme timings, word-break timings, and animation curves.
/// </summary>
/// <param name="xmlReader">Reader positioned on the animation element.</param>
/// <param name="timingData">Destination record; its lists are (re)created here.</param>
/// <param name="remapAnalysisActors">When true, remaps known Analysis Actor curve names to action units.</param>
static public void ReadXMLFromAnimationAttr(XmlReader xmlReader, TimingData timingData, bool remapAnalysisActors)
{
    timingData.phonemeData = new List<PhonemeData>();
    timingData.wordBreakData = new List<WordBreakData>();
    timingData.curveData = new List<CurveData>();
    timingData.facefxCurveDataXML = new List<FaceFXCurveDataXML>();

    XmlReader animation = xmlReader.ReadSubtree();
    while (animation.Read())
    {
        if (animation.NodeType == XmlNodeType.Element && animation.Name == "phonemes")
        {
            XmlReader phonemes = animation.ReadSubtree();
            while (phonemes.Read())
            {
                // phoneme line looks like:
                // <phoneme phoneme="SIL" start="0.000000" end="1.200000" />
                if (phonemes.NodeType == XmlNodeType.Element && phonemes.Name == "phoneme")
                {
                    string phoneme = phonemes.GetAttribute("phoneme");
                    double start = XmlConvert.ToDouble(phonemes.GetAttribute("start"));
                    double end = XmlConvert.ToDouble(phonemes.GetAttribute("end"));
                    timingData.phonemeData.Add(new PhonemeData(phoneme, start, end));
                }
            }
            phonemes.Close();
        }
        else if (animation.NodeType == XmlNodeType.Element && animation.Name == "words")
        {
            XmlReader words = animation.ReadSubtree();
            while (words.Read())
            {
                // word line:
                // <word start="1.200000" end="1.380000">If</word>
                if (words.NodeType == XmlNodeType.Element && words.Name == "word")
                {
                    double start = XmlConvert.ToDouble(words.GetAttribute("start"));
                    double end = XmlConvert.ToDouble(words.GetAttribute("end"));
                    string word = words.ReadElementString();
                    timingData.wordBreakData.Add(new WordBreakData(word, start, end));
                }
            }
            words.Close();
        }
        else if (animation.NodeType == XmlNodeType.Element && animation.Name == "curves")
        {
            // curve line:
            // <curve name="Head Yaw" num_keys="3" owner="analysis">2.823998 0.000000 0.000000 0.000000 3.174005 -1.599560 0.000000 0.000000 3.959330 0.000000 0.000000 0.000000 </curve>
            // description - http://www.facefx.com/documentation/2010/W99
            XmlReader curves = animation.ReadSubtree();
            while (curves.Read())
            {
                if (curves.NodeType == XmlNodeType.Element && curves.Name == "curve")
                {
                    string name = curves.GetAttribute("name");
                    int numKeys = XmlConvert.ToInt32(curves.GetAttribute("num_keys"));
                    string owner = curves.GetAttribute("owner");
                    string curveString = curves.ReadElementString();
                    // Each key is 4 whitespace-separated values; warn when the
                    // element body does not match num_keys.
                    string[] curveStringSplit = curveString.Trim().Split();
                    if (numKeys > 0 && curveStringSplit.Length != (numKeys * 4))
                    {
                        Console.WriteLine("Reading curve data, '{0}' expected num_keys({1}) elements, but received {2}", name, numKeys * 4, curveStringSplit.Length);
                    }

                    // HACK - TODO - The FaceFX maya exporter doesn't allow viseme poses to be named a single character.
                    // So, the poses had to be named with 2 characters.
                    // This is fixed up here. Use this hack until the FaceFX exporter is fixed.
                    if (name == "DD") { name = "D"; }
                    else if (name == "FF") { name = "F"; }
                    else if (name == "HH") { name = "H"; }
                    else if (name == "JJ") { name = "j"; }
                    else if (name == "LL") { name = "L"; }
                    else if (name == "RR") { name = "R"; }
                    else if (name == "WW") { name = "W"; }
                    else if (name == "ZZ") { name = "Z"; }

                    // HACK - TODO - Remap curve names until we figure out how Analysis Actors work in FaceFX
                    // only do this if -remap is specified
                    if (remapAnalysisActors)
                    {
                        // these Analysis Actors are in the default set
                        // (the Eye/Head entries intentionally map to themselves)
                        if (name == "Blink") { name = "au_45"; }
                        if (name == "Eye Pitch") { name = "Eye Pitch"; }
                        if (name == "Eye Yaw") { name = "Eye Yaw"; }
                        if (name == "Eyebrow Raise") { name = "au_1"; }
                        if (name == "Head Pitch") { name = "Head Pitch"; }
                        if (name == "Head Roll") { name = "Head Roll"; }
                        if (name == "Head Yaw") { name = "Head Yaw"; }
                        if (name == "Squint") { name = "au_7"; }
                    }

                    timingData.facefxCurveDataXML.Add(new FaceFXCurveDataXML(name, numKeys, owner, curveString));

                    // read the split string into the data struct. Each curve key is 4 values:
                    //   Time in seconds
                    //   Value
                    //   Slope In
                    //   Slope Out
                    for (int i = 0; i < curveStringSplit.Length; i += 4)
                    {
                        if (i + 3 >= curveStringSplit.Length)
                        {
                            continue; // ignore a trailing partial key
                        }
                        double time = XmlConvert.ToDouble(curveStringSplit[i]);
                        double value = XmlConvert.ToDouble(curveStringSplit[i + 1]);
                        double slopeIn = XmlConvert.ToDouble(curveStringSplit[i + 2]);
                        double slopeOut = XmlConvert.ToDouble(curveStringSplit[i + 3]);

                        // find the previous curve of the same name and set the end time to the current.
                        int index = timingData.curveData.FindLastIndex(delegate(CurveData c) { if (c.name == name) { return(!c.endTimeSet); } else { return(false); } });
                        if (index != -1)
                        {
                            timingData.curveData[index].endTime = time;
                            timingData.curveData[index].endTimeSet = true;
                        }
                        timingData.curveData.Add(new CurveData(name, time, value, slopeIn, slopeOut));
                    }
                }
            }
            curves.Close();
        }
    }

    // make sure all endTime's have been set
    foreach (CurveData c in timingData.curveData)
    {
        if (!c.endTimeSet)
        {
            c.endTime = c.startTime;
            c.endTimeSet = true;
        }
    }

    // sort the curve data because by default, it's sorted by viseme
    timingData.curveData.Sort();

    /*
     * foreach ( CurveData c in timingData.curveData )
     * {
     *     Console.WriteLine( "{0} - {1} {2} {3}", c.name, c.startTime, c.endTime, c.value );
     * }
     */
}
/// <summary>
/// Appends a timing entry to this object's TimingData queue.
/// </summary>
/// <param name="timingData">The timing entry to enqueue.</param>
public void AddTimingData(TimingData timingData)
{
    // Note: the TimingData property shares its name with the TimingData type.
    TimingData.Enqueue(timingData);
}