/// <summary>
/// Serializes an MDB packet: the 4-character ASCII signature, the 32-bit payload
/// size, then the signature-specific payload. Logs an error if the bytes actually
/// written for the payload disagree with the declared <c>packet.Size</c>.
/// </summary>
/// <param name="writer">Destination writer; must wrap a seekable stream (position is read for the size check).</param>
/// <param name="packet">Packet to serialize; its concrete type must match its Signature.</param>
public static void Write(this BinaryWriter writer, MdbPacket packet)
{
    writer.WriteAscii(packet.Signature, 4);
    writer.Write(packet.Size);

    // Remember where the payload begins so we can verify its length afterwards.
    long payloadStart = writer.BaseStream.Position;

    switch (packet.Signature)
    {
        case "RIGD":
            writer.WriteMdbPacket((MdbPacket_RIGD)packet);
            break;
        case "SKIN":
            writer.WriteMdbPacket((MdbPacket_SKIN)packet);
            break;
        case "COL2":
        case "COL3":
            // COL2 and COL3 share one payload layout.
            writer.WriteMdbPacket((MdbPacket_COLN)packet);
            break;
        case "COLS":
            writer.WriteMdbPacket((MdbPacket_COLS)packet);
            break;
        case "WALK":
            writer.WriteMdbPacket((MdbPacket_WALK)packet);
            break;
        case "HOOK":
            writer.WriteMdbPacket((MdbPacket_HOOK)packet);
            break;
        case "TRRN":
            writer.WriteMdbPacket((MdbPacket_TRRN)packet);
            break;
        case "HELM":
            writer.WriteMdbPacket((MdbPacket_HELM)packet);
            break;
        case "HAIR":
            writer.WriteMdbPacket((MdbPacket_HAIR)packet);
            break;
        default:
            CTDebug.Error("Unrecognized object format: {0}", packet.Signature);
            break;
    }

    long payloadEnd = writer.BaseStream.Position;
    if (payloadEnd - payloadStart != packet.Size)
    {
        CTDebug.Error("Incorrectly sized packet!");
    }
}
/// <summary>
/// Parses a colon-separated column description of the form
/// <c>name:type[:ref]</c> into a <see cref="ColumnInfo"/>.
/// </summary>
/// <param name="data">Raw description, e.g. "Label:label" or "Feat:tableref:feat".</param>
/// <returns>The parsed column info; unknown type names are logged and fall back to String.</returns>
/// <exception cref="FormatException">Thrown when fewer than two fields are present.</exception>
public static ColumnInfo Parse(string data)
{
    string[] parts = data.Split(':');
    if (parts.Length < 2)
    {
        throw new FormatException("Not enough values to parse column info");
    }

    string name = parts[0];
    ColumnType ctype;

    // Type names are matched case-insensitively.
    switch (parts[1].ToLowerInvariant())
    {
        case "index":       ctype = ColumnType.Index;       break;
        case "label":       ctype = ColumnType.Label;       break;
        case "string":      ctype = ColumnType.String;      break;
        case "scriptconst": ctype = ColumnType.ScriptConst; break;
        case "integer":     ctype = ColumnType.Integer;     break;
        case "flags":       ctype = ColumnType.Flags;       break;
        case "float":       ctype = ColumnType.Float;       break;
        case "tableref":    ctype = ColumnType.TableRef;    break;
        case "columnref":   ctype = ColumnType.ColumnRef;   break;
        case "scriptref":   ctype = ColumnType.ScriptRef;   break;
        case "tlkref":      ctype = ColumnType.TLKRef;      break;
        case "enum":        ctype = ColumnType.Enum;        break;
        default:
            CTDebug.Error("Could not parse column type: {0}", parts[1]);
            ctype = ColumnType.String; // same fallback as the pre-switch default
            break;
    }

    // An optional third field names what the column refers to.
    string refTo = parts.Length == 3 ? parts[2] : "";
    return new ColumnInfo(name, ctype, refTo);
}
/// <summary>
/// Resolves every cell's reference against its column definition in the owning
/// table. A cell that fails to resolve is logged, replaced with the "****"
/// placeholder string value, and the table is marked as unsaveable.
/// </summary>
public void ResolveReferences()
{
    // Only the cell values are mutated; the dictionary's key set is untouched,
    // so enumerating Keys while doing so is safe.
    foreach (string columnName in m_data.Keys)
    {
        try
        {
            m_data[columnName].ResolveRef(InTable.GetColumn(columnName));
        }
        catch (Exception)
        {
            CTDebug.Error(
                "Could not resolve reference in table {0}, row {1} column {2}",
                InTable.Name, Index, columnName);

            // Fall back to a visible placeholder and block saving so the bad
            // reference cannot be silently written out.
            m_data[columnName].FromDataString("****", new ColumnInfo("string", ColumnType.String));
            InTable.CanSave = false;
        }
    }

    HasUnresolvedReferences = false;
}
/// <summary>
/// Loads column definitions from a text file. Lines starting with '!' set the
/// Group, lines starting with '@' are directives (currently only "@minify"),
/// and every other non-empty line is parsed as a column description that
/// overrides the matching existing column. Missing files are silently ignored.
/// </summary>
/// <param name="path">Path of the definition file to read.</param>
public void Load(string path)
{
    if (!File.Exists(path))
    {
        return;
    }

    string[] input = File.ReadAllLines(path);
    for (int i = 0; i < input.Length; ++i)
    {
        string line = input[i];

        // BUGFIX: a blank line previously threw IndexOutOfRangeException when
        // indexing line[0]; skip empty lines instead.
        if (string.IsNullOrEmpty(line))
        {
            continue;
        }

        if (line[0] == '!')
        {
            Group = line.Substring(1);
        }
        else if (line[0] == '@')
        {
            switch (line.Substring(1).ToLowerInvariant())
            {
                case "minify":
                    Minify = true;
                    break;
            }
        }
        else
        {
            try
            {
                ColumnInfo readInfo = ColumnInfo.Parse(line);
                int colIndex = this.FindIndex((ColumnInfo checkColumn) => checkColumn.Name == readInfo.Name);
                if (colIndex >= 0 && this[colIndex].Name == readInfo.Name)
                {
                    // Replace the existing definition with the one read from file.
                    this[colIndex] = readInfo;
                }
                else
                {
                    CTDebug.Warn("Could not find column {0}", readInfo.Name);
                }
            }
            catch (FormatException e)
            {
                CTDebug.Error(e.ToString());
            }
        }
    }
}
/// <summary>
/// Concatenates the contents of every TLK document on the open project's TLK
/// stack, in stack order, and writes the combined result to <c>Path</c>.
/// A save failure (e.g. file in use) is logged rather than thrown.
/// </summary>
public override void Compile()
{
    List<TlkContents> combined = new List<TlkContents>();
    List<TlkDocument> documents = CTCore.GetOpenProject().GetTlkStack().GetDocuments();

    foreach (TlkDocument doc in documents)
    {
        CTDebug.Info("Writing from {0}.tlk ({1} lines)", doc.Name, doc.Contents.Length);
        combined.AddRange(doc.Contents);
    }

    try
    {
        TLKWriter.SaveFileContents(Path, combined.ToArray());
    }
    catch (Exception)
    {
        CTDebug.Error("Could not save {0}. Make sure the file is not in use.", Path);
    }
}
/// <summary>
/// Reads one MDB packet: the 4-character ASCII signature, the 32-bit payload
/// size, then the signature-specific payload. Warns if the payload consumed
/// disagrees with the declared size.
/// </summary>
/// <param name="reader">Source reader; must wrap a seekable stream.</param>
/// <returns>
/// The parsed packet, or null for an unrecognized signature (the unknown
/// payload is skipped so the stream stays aligned for the next packet).
/// </returns>
public static MdbPacket ReadMdbPacket(this BinaryReader reader)
{
    string signature = reader.ReadAscii(4);
    UInt32 size = reader.ReadUInt32();
    MdbPacket ret = null;

    // Remember where the payload begins so we can verify its length afterwards.
    long payloadStart = reader.BaseStream.Position;

    switch (signature)
    {
        case "RIGD":
            ret = reader.ReadMdbPacket_RIGD(size);
            break;
        case "SKIN":
            ret = reader.ReadMdbPacket_SKIN(size);
            break;
        case "COL2":
        case "COL3":
            // COL2 and COL3 share one payload layout.
            ret = reader.ReadMdbPacket_COLN(size);
            break;
        case "COLS":
            ret = reader.ReadMdbPacket_COLS(size);
            break;
        case "WALK":
            ret = reader.ReadMdbPacket_WALK(size);
            break;
        case "HOOK":
            ret = reader.ReadMdbPacket_HOOK(size);
            break;
        case "TRRN":
            ret = reader.ReadMdbPacket_TRRN(size);
            break;
        case "HELM":
            ret = reader.ReadMdbPacket_HELM(size);
            break;
        case "HAIR":
            ret = reader.ReadMdbPacket_HAIR(size);
            break;
        default:
            CTDebug.Error("Unrecognized object format: {0}", signature);
            break;
    }

    // BUGFIX: an unrecognized signature previously fell through to
    // `ret.Size = ...` and threw a NullReferenceException (and also logged a
    // spurious size warning). Skip the unknown payload to keep the stream
    // aligned and let the caller handle the null.
    if (ret == null)
    {
        reader.BaseStream.Seek(size, SeekOrigin.Current);
        return null;
    }

    long consumed = reader.BaseStream.Position - payloadStart;
    if (consumed != size)
    {
        CTDebug.Warn("Invalid size of MDB Packet.");
    }

    ret.Size = size;
    ret.Signature = signature;
    return ret;
}
/// <summary>
/// Compiles every artifact in <c>m_afx</c> whose source file exists into a
/// "HAK V1.1" archive at <c>Path</c>: header, key list, resource entry list,
/// then the raw resource bytes. Artifacts with missing files are skipped.
/// A save failure (e.g. file in use) is logged rather than thrown.
/// </summary>
public override void Compile()
{
    HakHeader header = new HakHeader();
    List<HakKeyEntry> keys = new List<HakKeyEntry>();
    List<HakResEntry> resEntries = new List<HakResEntry>();
    List<byte[]> resources = new List<byte[]>();

    // Gather keys, entries and raw bytes for every artifact that still exists on disk.
    foreach (KeyValuePair<string, CTArtifact> pair in m_afx)
    {
        if (!File.Exists(pair.Value.Path))
        {
            continue;
        }

        HakKeyEntry key = new HakKeyEntry();
        key.ResID = keys.Count;
        key.Resref = pair.Key;
        key.ResType = pair.Value.ErfID;
        key.Reserved = 0;
        keys.Add(key);

        byte[] resVal = File.ReadAllBytes(pair.Value.Path);
        resources.Add(resVal);

        HakResEntry res = new HakResEntry();
        res.ResourceSize = resVal.Length;
        resEntries.Add(res);
    }

    try
    {
        if (File.Exists(Path))
        {
            File.Delete(Path);
        }

        // BUGFIX: the writer was never disposed, leaking the file handle and
        // potentially leaving the output locked (the very failure mode the
        // catch below reports). 'using' guarantees flush + close.
        using (BinaryWriter writer = new BinaryWriter(File.OpenWrite(Path)))
        {
            // No string table is emitted, so the string/key offsets coincide
            // immediately after the fixed-size header.
            header.Header = "HAK V1.1";
            header.StringTableSize = 0;
            header.StringCount = 0;
            header.EntryCount = keys.Count;
            header.OffsetToString = HakHeader.Size;
            header.OffsetToKeyList = HakHeader.Size;
            header.OffsetToResources = HakHeader.Size + (keys.Count * HakKeyEntry.Size);
            header.BuildYear = DateTime.Now.Year - 1900;
            // BUGFIX: the ERF format stores the build day as days since
            // January 1 (day of year), not the day of the month.
            header.BuildDay = DateTime.Now.DayOfYear;
            header.DescriptionIndex = 0;
            header.Write(writer);

            foreach (var key in keys)
            {
                key.Write(writer);
            }

            // Resource data follows the entry list; assign each entry its
            // absolute offset as we write the entries out.
            int resourceOffset = header.OffsetToResources + (resEntries.Count * HakResEntry.Size);
            for (int i = 0; i < resEntries.Count; ++i)
            {
                resEntries[i].OffsetToResource = resourceOffset;
                resEntries[i].Write(writer);
                resourceOffset += resEntries[i].ResourceSize;
            }

            for (int i = 0; i < resources.Count; ++i)
            {
                writer.Write(resources[i]);
            }

            writer.Flush();
        }
    }
    catch (Exception)
    {
        CTDebug.Error("Could not save {0}. Make sure the file is not in use.", Path);
    }
}