/*
 * Reads a db file from the stream, using the version information
 * contained in the header read from it.
 * Returns null when no known type definition can parse the data.
 */
public DBFile Decode(Stream stream) {
    BinaryReader reader = new BinaryReader(stream);
    reader.BaseStream.Position = 0;
    DBFileHeader header = readHeader(reader);

    // Candidate definitions: exact version matches first, falling back to
    // every known definition for this type name when none match.
    List<TypeInfo> candidates = DBTypeMap.Instance.GetVersionedInfos(typeName, header.Version);
    if (candidates.Count == 0) {
        candidates.AddRange(DBTypeMap.Instance.GetAllInfos(typeName));
    }

    // Try each candidate in turn; the first one that parses cleanly wins.
    foreach (TypeInfo candidate in candidates) {
        try {
            return ReadFile(reader, header, candidate);
        } catch (Exception) {
            // This definition did not fit the data; try the next one.
        }
    }
    // No applicable type definition found.
    return null;
}
/*
 * Reads all entries from the reader, starting right behind the header,
 * using the given type definition. Afterwards verifies that the number of
 * entries and the bytes consumed match what the header promised.
 * Throws DBFileNotSupportedException on any mismatch or read failure.
 */
public DBFile ReadFile(BinaryReader reader, DBFileHeader header, TypeInfo info) {
    reader.BaseStream.Position = header.Length;
    DBFile file = new DBFile(header, info);
    int entryIndex = 0;
    while (reader.BaseStream.Position < reader.BaseStream.Length) {
        try {
            file.Entries.Add(ReadFields(reader, info));
            entryIndex++;
        } catch (Exception x) {
            // Add context about where in the file decoding broke down.
            string message = string.Format("{2} at entry {0}, db version {1}",
                                           entryIndex, file.Header.Version, x.Message);
            throw new DBFileNotSupportedException(message, x);
        }
    }
    if (file.Entries.Count != header.EntryCount) {
        throw new DBFileNotSupportedException(
            string.Format("Expected {0} entries, got {1}", header.EntryCount, file.Entries.Count));
    }
    if (reader.BaseStream.Position != reader.BaseStream.Length) {
        throw new DBFileNotSupportedException(
            string.Format("Expected {0} bytes, read {1}", header.Length, reader.BaseStream.Position));
    }
    return file;
}
/*
 * Query if given packed file can be decoded.
 * Is not entirely reliable because it only reads the header and checks if a
 * type definition is available for the given GUID and/or type name and version.
 * The actual decode tries out all available type infos for that type name
 * but that is less efficient because it has to read the whole file at least once
 * if successful.
 *
 * "display" receives a human-readable status string in every case.
 */
public static bool CanDecode(PackedFile packedFile, out string display) {
    bool result = true;
    string key = DBFile.Typename(packedFile.FullPath);
    if (DBTypeMap.Instance.IsSupported(key)) {
        try {
            DBFileHeader header = PackedFileDbCodec.readHeader(packedFile);
            // hoisted: was queried twice (once for the check, once for the message)
            int maxVersion = DBTypeMap.Instance.MaxVersion(key);
            if (maxVersion != 0 && header.Version > maxVersion) {
                // file is newer than any definition we have
                display = string.Format("{0}: needs {1}, has {2}", key, header.Version, maxVersion);
                result = false;
            } else {
                display = string.Format("Version: {0}", header.Version);
            }
        } catch (Exception x) {
            // BUGFIX: previously the method still reported "decodable" when
            // reading the header threw; a file whose header cannot even be
            // read certainly cannot be decoded.
            display = string.Format("{0}: {1}", key, x.Message);
            result = false;
        }
    } else {
        display = string.Format("{0}: no definition available", key);
        result = false;
    }
    return result;
}
/*
 * Two headers are considered equal when GUID, version and entry count
 * all match. Any non-DBFileHeader argument compares unequal.
 */
public override bool Equals(object other) {
    if (other is DBFileHeader) {
        DBFileHeader that = (DBFileHeader)other;
        return GUID.Equals(that.GUID)
            && Version.Equals(that.Version)
            && EntryCount.Equals(that.EntryCount);
    }
    return false;
}
/*
 * Writes the given header to the given writer.
 * GUID and version are each preceded by their marker and only emitted
 * when present (non-empty GUID / non-zero version); the trailing byte 1
 * and the entry count are always written.
 */
public static void WriteHeader(BinaryWriter writer, DBFileHeader header) {
    bool hasGuid = header.GUID != "";
    bool hasVersion = header.Version != 0;
    if (hasGuid) {
        writer.Write(GUID_MARKER);
        IOFunctions.WriteCAString(writer, header.GUID, Encoding.Unicode);
    }
    if (hasVersion) {
        writer.Write(VERSION_MARKER);
        writer.Write(header.Version);
    }
    writer.Write((byte)1);
    writer.Write(header.EntryCount);
}
/*
 * Records the db version of the given packed file, keeping track of the
 * highest version seen per type name in the maxVersion map.
 * Empty files are ignored.
 */
private void AddFromPacked(PackedFile packed) {
    if (packed.Size == 0) {
        return;
    }
    string type = DBFile.Typename(packed.FullPath);
    DBFileHeader header = PackedFileDbCodec.readHeader(packed);
    int known;
    maxVersion[type] = maxVersion.TryGetValue(type, out known)
        ? Math.Max(header.Version, known)
        : header.Version;
}
/*
 * Reads a db file header from the given reader.
 * Layout: a first byte of 1 means "no markers"; otherwise 0xFC/0xFD bytes
 * introduce 4-byte markers — GUID_MARKER is followed by a unicode CA
 * string, VERSION_MARKER by an int32 version, each then followed by the
 * next index byte. A uint32 entry count closes the header.
 */
public static DBFileHeader readHeader(BinaryReader reader) {
    byte index = reader.ReadByte();
    int version = 0;
    string guid = "";
    bool hasMarker = false;
    uint entryCount = 0;
    try {
        if (index != 1) {
            // I don't think those can actually occur more than once per file
            while (index == 0xFC || index == 0xFD) {
                // re-assemble the 4-byte marker from the index byte plus the
                // next three bytes of the stream
                var bytes = new List <byte>(4);
                bytes.Add(index);
                bytes.AddRange(reader.ReadBytes(3));
                UInt32 marker = BitConverter.ToUInt32(bytes.ToArray(), 0);
                if (marker == GUID_MARKER) {
                    guid = IOFunctions.ReadCAString(reader, Encoding.Unicode);
                    index = reader.ReadByte();
                } else if (marker == VERSION_MARKER) {
                    hasMarker = true;
                    version = reader.ReadInt32();
                    index = reader.ReadByte();
                    // break;
                } else {
                    throw new DBFileNotSupportedException(string.Format("could not interpret {0}", marker));
                }
            }
        }
        entryCount = reader.ReadUInt32();
    } catch {
        // NOTE(review): intentionally(?) swallows any read error and falls
        // through with the defaults gathered so far (version 0, entry count
        // 0), giving callers a best-effort header rather than an exception
        // — confirm callers rely on this before tightening.
    }
    DBFileHeader header = new DBFileHeader(guid, version, entryCount, hasMarker);
    return(header);
}
// this could do with an update; since the transition to schema.xml,
// we also know obsolete fields and can remove them,
// and we can add fields in the middle instead of assuming they got appended.
/*
 * Upgrades the given packed db file in place to the highest known
 * structure version for its type: decodes it, inserts default-valued
 * instances for fields the newer definition adds, bumps the header
 * version and re-encodes the data back into the packed file.
 * Does nothing when the type is unsupported or already up to date.
 */
public void UpdatePackedFile(PackedFile packedFile) {
    string key = DBFile.Typename(packedFile.FullPath);
    if (DBTypeMap.Instance.IsSupported(key)) {
        PackedFileDbCodec codec = PackedFileDbCodec.FromFilename(packedFile.FullPath);
        int maxVersion = DBTypeMap.Instance.MaxVersion(key);
        DBFileHeader header = PackedFileDbCodec.readHeader(packedFile);
        if (header.Version < maxVersion) {
            // found a more recent db definition; read data from db file
            DBFile updatedFile = PackedFileDbCodec.Decode(packedFile);
            TypeInfo dbFileInfo = updatedFile.CurrentType;
            TypeInfo targetInfo = GetTargetTypeInfo(key, maxVersion);
            if (targetInfo == null) {
                throw new Exception(string.Format("Can't decide new structure for {0} version {1}.", key, maxVersion));
            }
            // identify FieldInstances missing in db file
            for (int i = 0; i < targetInfo.Fields.Count; i++) {
                // look up the target field by name in the old structure
                FieldInfo oldField = dbFileInfo[targetInfo.Fields[i].Name];
                if (oldField == null) {
                    // field is new: insert a default-valued instance at the
                    // same position in every entry
                    foreach (List <FieldInstance> entry in updatedFile.Entries) {
                        entry.Insert(i, targetInfo.Fields[i].CreateInstance());
                    }
                }
            }
            //updatedFile.Header.GUID = guid;
            updatedFile.Header.Version = maxVersion;
            packedFile.Data = codec.Encode(updatedFile);
        }
    }
}
/*
 * Add data contained in the given db file to this one.
 * Verifies type name and per-field type compatibility, then adopts the
 * imported file's header (copied) and type definition, appends the
 * imported rows and updates the entry count.
 * Throws DBFileNotSupportedException when the structures don't match.
 */
public void Import(DBFile file) {
    if (CurrentType.Name != file.CurrentType.Name) {
        throw new DBFileNotSupportedException
            ("File type of imported DB doesn't match that of the currently opened one", this);
    }
    // BUGFIX: compare the field counts before indexing; an imported type
    // with more fields than the current one used to raise an
    // uninformative IndexOutOfRangeException inside the loop below.
    if (file.CurrentType.Fields.Count != CurrentType.Fields.Count) {
        throw new DBFileNotSupportedException
            ("Data structure of imported DB doesn't match that of currently opened one", this);
    }
    // check field type compatibility
    for (int i = 0; i < file.CurrentType.Fields.Count; i++) {
        if (file.CurrentType.Fields [i].TypeCode != CurrentType.Fields [i].TypeCode) {
            throw new DBFileNotSupportedException
                ("Data structure of imported DB doesn't match that of currently opened one at field " + i, this);
        }
    }
    // take over a copy of the imported header and its type definition
    DBFileHeader h = file.Header;
    Header = new DBFileHeader(h.GUID, h.Version, h.EntryCount, h.HasVersionMarker);
    CurrentType = file.CurrentType;
    entries.AddRange(file.entries);
    Header.EntryCount = (uint)entries.Count;
}
/*
 * Create copy of given header.
 * GUID, version and version-marker flag are taken from the original;
 * the entry count is reset to 0 rather than copied.
 */
public DBFileHeader(DBFileHeader toCopy) : this(toCopy.GUID, toCopy.Version, 0, toCopy.HasVersionMarker) { }
/*
 * Create copy of the given db file.
 * The header is cloned field by field and every row is re-created so
 * that edits to the copy do not write through to the original's data.
 */
public DBFile(DBFile toCopy) : this(toCopy.Header, toCopy.CurrentType) {
    // replace the header reference stored by the delegated constructor
    // with an independent copy
    Header = new DBFileHeader(toCopy.Header.GUID, toCopy.Header.Version, toCopy.Header.EntryCount, toCopy.Header.HasVersionMarker);
    // we need to create a new instance for every field so we don't write through to the old data
    toCopy.entries.ForEach(entry => entries.Add(new DBRow(toCopy.CurrentType, entry)));
}
/*
 * Create db file with the given header and the given type.
 * Note: the header reference is stored as-is (not copied); callers that
 * need an independent header must clone it themselves.
 */
public DBFile(DBFileHeader h, TypeInfo info) {
    Header = h;
    CurrentType = info;
}
/*
 * Decode the given stream (XML) to return its data as a DBFile.
 * Walks the document's data roots; an "edit_uuid" node supplies the GUID,
 * every other child node is a record whose name selects the type info
 * (cached in allInfos, loaded via LoadTypeInfos on first use). The first
 * record creates the result file; each record's child nodes provide
 * field values, with "requires_translation" attributes honored.
 * Returns null when the document contains no records.
 */
public DBFile Decode(Stream stream) {
    DBFile result = null;
    using (TextReader reader = new StreamReader(stream)) {
        XmlDocument doc = new XmlDocument();
        doc.Load(reader);
        foreach (XmlNode dataroot in doc.ChildNodes) {
            string guid = "";
            foreach (XmlNode entry in dataroot.ChildNodes) {
                // ignore uuid
                if ("edit_uuid".Equals(entry.Name)) {
                    guid = entry.InnerText;
                    continue;
                }
                // use cached type info or read from TWaD if none has been cached yet
                string recordName = entry.Name;
                TypeInfo typeinfo;
                if (!allInfos.TryGetValue(recordName, out typeinfo)) {
                    typeinfo = LoadTypeInfos(recordName);
                    allInfos[recordName] = typeinfo;
                }
                // create a new header upon the first data item
                if (result == null) {
                    DBFileHeader header = new DBFileHeader(guid, 0, 0, false);
                    result = new DBFile(header, typeinfo);
                }
                // get a field-to-value map and remember the fields requiring translation
                Dictionary <string, string> fieldValues = new Dictionary <string, string>();
                List <string> requireTranslation = new List <string>();
                foreach (XmlNode row in entry.ChildNodes) {
                    fieldValues[row.Name] = row.InnerText;
                    XmlAttribute at = row.Attributes["requires_translation"];
                    if (at != null && "true".Equals(at.Value)) {
                        requireTranslation.Add(row.Name);
                    }
                }
                // create entry from type info and fill with values.
                // BUGFIX: removed the debug try/catch that printed a junk
                // message and rethrew with "throw e", which destroyed the
                // stack trace; exceptions now propagate unmodified.
                List <FieldInstance> fields = result.GetNewEntry();
                foreach (FieldInstance field in fields) {
                    string val;
                    if (fieldValues.TryGetValue(field.Name, out val)) {
                        // the xml stores booleans as 0/1
                        if (field.Info.TypeName.Equals("boolean")) {
                            field.Value = "1".Equals(val) ? "true" : "false";
                        } else {
                            field.Value = val;
                        }
                        field.RequiresTranslation = requireTranslation.Contains(field.Name);
                    }
                }
                result.Entries.Add(new DBRow(typeinfo, fields));
            }
        }
    }
    return result;
}
/*
 * Reads a db file from the given tab-separated stream.
 * Line 1: type name (possibly with a GUID suffix split off), line 2:
 * version ("1.0"/"1.2" legacy labels or a plain number), line 3: column
 * headers (skipped); every further line is one entry. All definitions
 * known for the type/version are tried in turn; the first one whose
 * field count matches every data line wins. Returns null if none fits.
 */
public DBFile Decode(StreamReader reader) {
    // another tool might have saved tabs and quotes around this
    // (at least open office does)
    string typeInfoName = reader.ReadLine().Replace("\t", "").Trim(QUOTES);
    string[] split = typeInfoName.Split(GUID_SEPARATOR, StringSplitOptions.RemoveEmptyEntries);
    if (split.Length == 2) {
        // name carried a GUID after the separator; keep only the type name
        typeInfoName = split[0];
    }
    string versionStr = reader.ReadLine().Replace("\t", "").Trim(QUOTES);
    int version;
    switch (versionStr) {
    case "1.0":
        version = 0;
        break;
    case "1.2":
        version = 1;
        break;
    default:
        version = int.Parse(versionStr);
        break;
    }
    DBFile file = null;
    // skip table header
    reader.ReadLine();
    // slurp the remaining lines once so each candidate definition can be
    // tried against the same data
    List <String> read = new List <String>();
    while (!reader.EndOfStream) {
        read.Add(reader.ReadLine());
    }
    List <TypeInfo> infos = DBTypeMap.Instance.GetVersionedInfos(typeInfoName, version);
    foreach (TypeInfo info in infos) {
        bool parseSuccessful = true;
        List <DBRow> entries = new List <DBRow> ();
        foreach (String line in read) {
            try {
                String[] strArray = line.Split(TABS, StringSplitOptions.None);
                if (strArray.Length != info.Fields.Count) {
                    // column count mismatch: this definition does not fit
                    parseSuccessful = false;
                    break;
                }
                List <FieldInstance> item = new List <FieldInstance> ();
                for (int i = 0; i < strArray.Length; i++) {
                    FieldInstance field = info.Fields [i].CreateInstance();
                    string fieldValue = CsvUtil.Unformat(strArray [i]);
                    field.Value = fieldValue;
                    item.Add(field);
                }
                entries.Add(new DBRow(info, item));
#if DEBUG
            } catch (Exception x) {
                Console.WriteLine(x);
#else
            } catch {
#endif
                parseSuccessful = false;
                break;
            }
        }
        if (parseSuccessful) {
            String guid = "";
            // NOTE(review): hasVersionMarker is assumed for any non-zero
            // version — confirm this matches the binary format's rules
            DBFileHeader header = new DBFileHeader(guid, version, (uint)entries.Count, version != 0);
            file = new DBFile(header, info);
            file.Entries.AddRange(entries);
            break;
        }
    }
    return(file);
}