/// <summary>
/// Reads the leading 4-byte signature and instantiates the matching header
/// implementation. Returns null for a blank or unrecognised signature.
/// </summary>
private DBHeader ExtractHeader(BinaryReader dbReader)
{
    string signature = dbReader.ReadString(4);
    if (string.IsNullOrWhiteSpace(signature))
        return null;

    DBHeader header;
    switch (signature)
    {
        case "WDB5":
            header = new WDB5();
            break;
        case "WDB6":
            header = new WDB6();
            break;
        case "WDC1":
            header = new WDC1();
            break;
        default:
            return null;
    }

    header?.ReadHeader(ref dbReader, signature);
    return header;
}
/// <summary>
/// Loads the cache rows matching the counterpart WDB6 file (same table hash,
/// same locale or wildcard locale 0) into the supplied DBEntry.
/// </summary>
/// <param name="counterpart">The WDB6 header this cache belongs to; must actually be a WDB6.</param>
/// <param name="dbentry">Target entry whose table structure is loaded and populated.</param>
/// <returns>False when the counterpart is not a WDB6 or no rows match; true otherwise.</returns>
public bool Read(DBHeader counterpart, DBEntry dbentry)
{
    WDB6CounterPart = counterpart as WDB6;
    if (WDB6CounterPart == null)
        return false;

    // Materialise the query once: it was previously a deferred IEnumerable
    // re-enumerated by Any(), Select(), Count() and the foreach below.
    var entries = Entries
        .Where(x => (x.Locale == counterpart.Locale || x.Locale == 0) && x.TableHash == counterpart.TableHash)
        .ToList();

    if (entries.Count == 0)
        return false;

    OffsetLengths = entries.Select(x => (int)x.Size + 4).ToArray();
    TableStructure = WDB6CounterPart.TableStructure;
    Flags = WDB6CounterPart.Flags;
    FieldStructure = WDB6CounterPart.FieldStructure;
    RecordCount = (uint)entries.Count;

    dbentry.LoadTableStructure();

    // Build the combined "rowId + record bytes" blob in a single pass instead
    // of a chained Enumerable.Concat, which is O(n^2) over total byte count.
    using (MemoryStream ms = new MemoryStream())
    {
        foreach (var e in entries)
        {
            byte[] rowId = BitConverter.GetBytes(e.RowId);
            ms.Write(rowId, 0, rowId.Length);

            var payload = e.Data.ToArray();
            ms.Write(payload, 0, payload.Length);
        }

        ms.Position = 0;
        using (BinaryReader br = new BinaryReader(ms))
            new DBReader().ReadIntoTable(ref dbentry, br, new Dictionary<int, string>());
    }

    return true;
}
//private IEnumerable<int> unqiueRowIndices;

/// <summary>
/// Binds an entry to a file on disk and resolves its table definition from the
/// loaded database definitions by file name (case-insensitive) and build number.
/// </summary>
public DBEntry(DBHeader header, string filepath)
{
    Header = header;
    FilePath = filepath;
    SavePath = filepath;

    string tableName = Path.GetFileNameWithoutExtension(filepath);
    Header.TableStructure = Database.Definitions.Tables.FirstOrDefault(
        x => x.Name.Equals(tableName, Ignorecase) && x.Build == Database.BuildNumber);

    LoadDefinition();
}
/// <summary>
/// Derives the element count of a field's array from the byte gap to the next
/// field's offset — or to the end of the record for the last field.
/// Common-data columns are never arrays, so they always report 1.
/// </summary>
private int GetArraySize(ref DBHeader header, int index)
{
    var field = header.FieldStructure[index];
    if (field.CommonDataColumn)
        return 1;

    bool isLast = index == header.FieldStructure.Count - 1;
    int span = isLast
        ? (int)header.RecordSize - field.Offset
        : header.FieldStructure[index + 1].Offset - field.Offset;

    // At least one element even if the computed span is degenerate.
    return Math.Max(1, span / field.ByteCount);
}
// Opens this table's data file, deserialises its header and field catalogue,
// and returns a blank DBStatus cursor for it.
// Returns null when the file cannot be opened or the header is invalid.
internal DBStatus Open()
{
    // Already open: bump the refcount and hand out a fresh blank cursor
    // without touching the file again.
    if (OpenCount > 0)
    {
        OpenCount++;
        DBStatus stat = new DBStatus();
        stat.Values = new string[FieldCount];
        stat.BlankRow();
        return (stat);
    }

    try
    {
        DBStream = new FileStream(DataFileName(Name + DBFEXENSION), FileMode.Open);
    }
    catch (Exception e)
    {
        LogError(e);
        return (null);
    }

    // NOTE(review): BinaryFormatter is a known deserialization RCE vector and is
    // removed in .NET 9 (SYSLIB0011). Migrating the on-disk format is advised.
    Formatter = new BinaryFormatter();
    Header = (DBHeader)Formatter.Deserialize(DBStream);
    if (!Header.IsValid())
    {
        DBStream.Close();
        // Spanish log: could not open the table because its header is invalid.
        ServerLog.Register("No se puedo abrir \"" + Name + "\" debido a que el encabezado es inválido.");
        return (null);
    }

    // The field descriptors follow the header sequentially in the stream.
    FieldCount = Header.Fields;
    Fields = new DBField[FieldCount];
    for (int i = 0; i < FieldCount; i++)
    {
        DBField field = (DBField)Formatter.Deserialize(DBStream);
        // Names are stored as raw ASCII bytes; trim the padding.
        field.Name = Encoding.ASCII.GetString(field.BName).Trim();
        Fields[i] = field;
    }

    DBStatus status = new DBStatus();
    status.Table = this;
    status.Values = new string[FieldCount];
    status.BlankRow();
    return (status);
}
/// <summary>
/// Builds an entry whose table structure is reflected from the public instance
/// properties of <typeparamref name="T"/>. Exactly one property must carry
/// <c>DBKeyAttribute</c> and be of type <see cref="int"/>.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when no primary key property exists or it is not an int.
/// (Previously a bare <see cref="Exception"/>; InvalidOperationException derives
/// from it, so existing catch blocks still match.)
/// </exception>
public DBEntry(DBHeader header, string filepath, uint build)
{
    this.Header = header;
    this.FilePath = filepath;

    Header.TableStructure = typeof(T).GetProperties(BindingFlags.Public | BindingFlags.Instance);

    this.PrimaryKey = this.TableStructure.FirstOrDefault(x => x.GetAttribute<DBKeyAttribute>() != null);
    if (PrimaryKey == null)
    {
        throw new InvalidOperationException("Primary Key must not be null.");
    }
    if (PrimaryKey.PropertyType != typeof(int))
    {
        throw new InvalidOperationException("Primary Key must be an int.");
    }

    this.Rows = new RowCollection<T>(this.PrimaryKey);
    // Per-column padding, defaulting to 0 for properties without DBFieldAttribute.
    this.Padding = this.TableStructure.Select(x => x.GetAttribute<DBFieldAttribute>()?.Padding ?? 0).ToArray();
    this.Build = build;
}
/// <summary>
/// True when at least one valid cache entry matches the counterpart's table hash
/// and locale (locale 0 acts as a wildcard).
/// </summary>
public bool HasEntry(DBHeader counterpart)
{
    return Entries.Any(x => x.TableHash == counterpart.TableHash
                         && (x.Locale == counterpart.Locale || x.Locale == 0)
                         && x.IsValid == 1);
}
// Validates the field catalogue, serialises header + fields into a new data
// file on disk, and returns a blank row value array for the freshly created
// table. Returns null on any validation or I/O failure.
internal string[] SaveStruct()
{
    int rsize = 0;
    int fieldCt = Fields.Length;
    MemoryStream stream = new MemoryStream();
    // NOTE(review): BinaryFormatter is insecure and removed in .NET 9 (SYSLIB0011).
    Formatter = new BinaryFormatter();
    Header = new DBHeader();
    FieldCount = 0;
    // Serialise a placeholder header first; it is rewritten at the end once
    // the field offsets and record size are known.
    Formatter.Serialize(stream, Header);

    int ix;
    for (int i = 0; i < fieldCt; i++)
    {
        DBField field = Fields[i];
        // Only string and numeric field types are supported.
        switch (field.Type)
        {
            case DB.FieldTypeString:
                break;
            case DB.FieldTypeNumeric:
                break;
            default:
                // Spanish log: tried to create table with an invalid data type.
                ServerLog.Register("Se intentó crear \"" + Name + "\" con al menos un tipo de dato inválido.");
                return (null);
        }

        // Reject duplicate field names.
        if (FetchField(field.Name, out ix) != null)
        {
            // Spanish log: tried to create table with a duplicated field.
            ServerLog.Register("Se intentó crear \"" + Name + "\" con al menos un campo duplicado.");
            return (null);
        }

        FieldCount++;
        // Fields are laid out sequentially; each offset is the running record size.
        field.Offset = rsize;
        rsize += field.Size;
        Formatter.Serialize(stream, field);
    }

    if (rsize == 0)
    {
        // Spanish log: tried to create table but the record length is zero.
        ServerLog.Register("Se intentó crear \"" + Name + "\" pero la longitud del registro es cero.");
        return (null);
    }

    // Backfill the real header and rewrite it at the start of the stream.
    // HeaderSize/VirtualEOF both point at the end of the serialised catalogue.
    Header.Fields = fieldCt;
    Header.RowSize = rsize;
    Header.HeaderSize = stream.Position;
    Header.VirtualEOF = stream.Position;
    stream.Position = 0;
    Formatter.Serialize(stream, Header);

    try
    {
        // CreateNew fails if the file already exists — creation is not an overwrite.
        DBStream = new FileStream(DataFileName(Name + DBFEXENSION), FileMode.CreateNew);
        byte[] bytes = stream.ToArray();
        DBStream.Write(bytes, 0, bytes.Length);
        stream.Close();
        DBStream.Flush();
        OpenCount++;
    }
    catch (Exception e)
    {
        LogError(e);
        return (null);
    }

    string[] values = new string[FieldCount];
    BlankRow(values);
    return (values);
}
// Infers a table definition (field names, types, array sizes) from a raw DB
// file by inspecting its records. NOTE: the method now delegates everything to
// ParseWDC1 and returns immediately — the remainder is deliberately-kept legacy
// WDB5/WDB6 heuristics, silenced with a CS0162 pragma.
private void Parse(MemoryStream stream, string file)
{
    ParseWDC1(stream, file);
    return;

#pragma warning disable CS0162 // Unreachable code detected
    stream.Position = 0;
    using (var dbReader = new BinaryReader(stream, Encoding.UTF8))
    {
        DBHeader header = ExtractHeader(dbReader);
        if (header == null)
        {
            return;
        }

        long pos = dbReader.BaseStream.Position;
        int CopyTableSize = header.CopyTableSize; //Only WDB5 has a copy table
        uint CommonDataTableSize = header.CommonDataTableSize;

        //StringTable - only if applicable
        // Trailing blocks are located by walking backwards from EOF:
        // [records][string table][wch7][index table][copy table][common data]
        long copyTablePos = dbReader.BaseStream.Length - CommonDataTableSize - CopyTableSize;
        long indexTablePos = copyTablePos - (header.HasIndexTable ? header.RecordCount * 4 : 0);
        long wch7TablePos = indexTablePos - (header.UnknownWCH7 * 4);
        long stringTableStart = wch7TablePos - header.StringBlockSize;
        Dictionary<int, string> StringTable = new Dictionary<int, string>();

        if (!header.HasOffsetTable)
        {
            dbReader.Scrub(stringTableStart);
            StringTable = new StringTable().Read(dbReader, stringTableStart, stringTableStart + header.StringBlockSize);
            dbReader.Scrub(pos);
        }

        // Byte count -> provisional field type; 4-byte fields are ambiguous.
        Dictionary<int, FieldType> FieldTypes = new Dictionary<int, FieldType>()
        {
            { 4, FieldType.UNKNOWN },
            { 3, FieldType.INT },
            { 2, FieldType.USHORT },
            { 1, FieldType.BYTE },
        };

        //Read data
        List<byte[]> copytabledata = new List<byte[]>();
        if (header.IsTypeOf<WDB6>())
        {
            copytabledata = (header as WDB6).ReadOffsetData(dbReader, pos).Values.ToList();
        }
        else
        {
            copytabledata = (header as WDB5).ReadOffsetData(dbReader, pos).Values.ToList();
        }

        //String table
        bool stringtableused = StringTable.Values.Any(x => !string.IsNullOrWhiteSpace(x)) && !header.HasOffsetTable;

        //Calculate known field types
        List<FieldInfo> fields = new List<FieldInfo>();
        for (int i = 0; i < header.FieldStructure.Count; i++)
        {
            int bytecount = header.FieldStructure[i].ByteCount;
            FieldInfo fi = new FieldInfo();
            fi.ArraySize = GetArraySize(ref header, i);
            if (i == header.IdIndex)
            {
                fi.Type = FieldType.INT; // Id column is always an int
            }
            else
            {
                fi.Type = FieldTypes[bytecount];
            }

            //WDB6 Common Data check
            if (header.FieldStructure[i].CommonDataColumn)
            {
                switch (header.FieldStructure[i].CommonDataType)
                {
                    case 0: fi.Type = FieldType.STRING; break;
                    case 1: fi.Type = FieldType.USHORT; break;
                    case 2: fi.Type = FieldType.BYTE; break;
                    case 3: fi.Type = FieldType.FLOAT; break;
                    case 4: fi.Type = FieldType.INT; break;
                }
            }

            fields.Add(fi);
        }

        //Attempt to figure out unknown types
        // Each ambiguous 4-byte column is tested against every record, and
        // candidate interpretations (INT/UINT/FLOAT/STRING) are eliminated as
        // evidence against them accumulates.
        for (int i = 0; i < fields.Count; i++)
        {
            if (fields[i].Type != FieldType.UNKNOWN)
            {
                continue;
            }

            List<FieldType> options = new List<FieldType>() { FieldType.INT, FieldType.UINT, FieldType.FLOAT, FieldType.STRING };
            if (!stringtableused)
            {
                options.Remove(FieldType.STRING); //Stringtable not used
            }

            List<int> intvals = new List<int>();
            List<string> stringvals = new List<string>();
            List<float> floatvals = new List<float>();

            for (int d = 0; d < copytabledata.Count; d++)
            {
                byte[] cdata = copytabledata[d];
                int start = header.FieldStructure[i].Offset;
                if (header.HasOffsetTable)
                {
                    // Offset-table rows have variable layout: recompute the field's
                    // start by walking over all preceding fields (inline strings
                    // are null-terminated, everything else is fixed width).
                    start = 0;
                    for (int x = 0; x < i; x++)
                    {
                        if (fields[x].Type != FieldType.STRING)
                        {
                            int bytecount = header.FieldStructure[x].ByteCount;
                            start += bytecount * fields[x].ArraySize;
                        }
                        else
                        {
                            start += cdata.Skip(start).TakeWhile(b => b != 0).Count() + 1;
                        }
                    }
                }

                byte[] data = cdata.Skip(start).Take(4).ToArray();
                if (!header.HasOffsetTable && data.All(x => x == 0))
                {
                    continue; //Ignore 0 byte columns as they could be anything
                }

                //Get int value
                int intval = BitConverter.ToInt32(data, 0);
                intvals.Add(intval);

                //String check
                if (options.Contains(FieldType.STRING))
                {
                    if (header.HasOffsetTable)
                    {
                        //Check for control and nonunicode chars
                        string stringval = Encoding.UTF8.GetString(cdata.Skip(start).TakeWhile(x => x != 0).ToArray());
                        if (stringval.Length >= 1 && stringval.Any(x => char.IsControl(x) || x == 0xFFFD))
                        {
                            options.Remove(FieldType.STRING);
                        }
                        else
                        {
                            stringvals.Add(stringval);
                        }
                    }
                    else
                    {
                        //Check it is in the stringtable and more than -1
                        if (intval < 0 || !StringTable.ContainsKey(intval))
                        {
                            options.Remove(FieldType.STRING);
                        }
                    }
                }

                //Float check
                if (options.Contains(FieldType.FLOAT))
                {
                    //Basic float checks
                    float single = BitConverter.ToSingle(data, 0);
                    if (!float.IsInfinity(single) && !float.IsNaN(single) && (single >= 9.99999997475243E-07 && single <= 100000.0))
                    {
                        floatvals.Add(single);
                    }
                    else if (single != 0) //Ignore 0s
                    {
                        options.Remove(FieldType.FLOAT);
                    }
                }

                //UInt check
                if (options.Contains(FieldType.UINT) && intval < 0) //If less than 0 must be signed
                {
                    options.Remove(FieldType.UINT);
                }
            }

            var uniquestr = new HashSet<string>(stringvals);
            var uniqueint = new HashSet<int>(intvals);
            var uniquefloat = new HashSet<float>(floatvals);

            // Resolve the surviving candidates, most-specific evidence first.
            if (uniqueint.Count == 1 && uniqueint.First() == 0) //All 0s
            {
                fields[i].Type = FieldType.INT;
            }
            else if (!header.HasOffsetTable && options.Contains(FieldType.STRING)) //Int if only 1 Int else String
            {
                fields[i].Type = (uniqueint.Count == 1 ? FieldType.INT : FieldType.STRING);
            }
            else if (header.HasOffsetTable && options.Contains(FieldType.STRING) && uniquestr.Count > 1) //More than 1 string
            {
                fields[i].Type = FieldType.STRING;
            }
            else if (header.HasOffsetTable && options.Contains(FieldType.STRING) && uniquefloat.Count <= 1) //1 or less float and string
            {
                fields[i].Type = FieldType.STRING;
            }
            else if (options.Contains(FieldType.FLOAT) && floatvals.Count > 0) //Floats count more than 1
            {
                fields[i].Type = FieldType.FLOAT;
            }
            else if (options.Contains(FieldType.UINT)) //Uint over Int
            {
                fields[i].Type = FieldType.UINT;
            }
            else
            {
                fields[i].Type = FieldType.INT;
            }
        }

        // Emit the reconstructed table definition; non-id columns are named by
        // their zero-padded hex offset (field04, field0C, ...).
        Table table = new Table();
        table.Name = Path.GetFileNameWithoutExtension(file);
        table.Fields = new List<Field>();
        string format = $"X{header.FieldStructure.Max(x => x.Offset).ToString().Length}"; //X2, X3 etc
        for (int i = 0; i < fields.Count; i++)
        {
            Field field = new Field();
            field.Name = (i == header.IdIndex ? "ID" : $"field{header.FieldStructure[i].Offset.ToString(format)}");
            field.IsIndex = (i == header.IdIndex);
            field.ArraySize = (field.IsIndex ? 1 : fields[i].ArraySize);
            field.Type = fields[i].Type.ToString().ToLower();
            table.Fields.Add(field);
            Console.WriteLine($"Name: {field.Name} | Array: {field.ArraySize} | Type: {field.Type}");
        }

        tables.Add(table);
        Database.ForceGC();
    }
#pragma warning restore CS0162 // Unreachable code detected
}
// Parses a .tbl file into a DataTable: a fixed 96-byte header, a column
// description block, then fixed-size records with out-of-line strings.
// Errors are logged and an empty/partial table is returned.
public static DataTable Read(string path)
{
    byte[] inputData = DataManager.GetFileBytes(path);
    List<Column> list = new List<Column>();
    DataTable table = new DataTable();
    try
    {
        using (BinaryReader binaryReader = new BinaryReader(new MemoryStream(inputData)))
        {
            // Header fields are read in strict file order — do not reorder.
            DBHeader dBHeader = new DBHeader();
            dBHeader.Signature = binaryReader.ReadString(4, false);
            dBHeader.Version = binaryReader.ReadUInt32();
            dBHeader.TableNameLength = binaryReader.ReadUInt64();
            dBHeader.Unknown = binaryReader.ReadUInt64();
            dBHeader.RecordSize = binaryReader.ReadUInt64();
            dBHeader.FieldCount = binaryReader.ReadUInt64();
            dBHeader.DescriptionOffset = binaryReader.ReadUInt64();
            dBHeader.RecordCount = binaryReader.ReadUInt64();
            dBHeader.FullRecordSize = binaryReader.ReadUInt64();
            dBHeader.EntryOffset = binaryReader.ReadUInt64();
            dBHeader.MaxId = binaryReader.ReadUInt64();
            dBHeader.IDLookupOffset = binaryReader.ReadUInt64();
            dBHeader.Unknown2 = binaryReader.ReadUInt64();
            if (dBHeader.IsValidTblFile)
            {
                string str = binaryReader.ReadString((int)dBHeader.TableNameLength, true);
                //table.BeginLoadData();
                // Column descriptors: 24 bytes each, starting 96 bytes past DescriptionOffset.
                for (uint num = 0u; num < dBHeader.FieldCount; num++)
                {
                    binaryReader.BaseStream.Position = (long)(dBHeader.DescriptionOffset + 96 + 24 * num);
                    Column column = new Column();
                    column.NameLength = binaryReader.ReadUInt32();
                    column.Unknown = binaryReader.ReadUInt32();
                    column.NameOffset = binaryReader.ReadUInt64();
                    column.DataType = binaryReader.ReadUInt16();
                    column.Unknown2 = binaryReader.ReadUInt16();
                    column.Unknown3 = binaryReader.ReadUInt32();
                    Column column2 = column;
                    // Name block follows the descriptor block; an odd field count
                    // shifts every name by 8 bytes (alignment padding).
                    long num2 = (long)(96 + dBHeader.FieldCount * 24 + dBHeader.DescriptionOffset + column2.NameOffset);
                    binaryReader.BaseStream.Position = ((dBHeader.FieldCount % 2uL == 0) ? num2 : (num2 + 8));
                    column2.Name = binaryReader.ReadString((int)(column2.NameLength - 1), true);
                    table.Columns.Add(column2.Name, null);
                    list.Add(column2);
                    // Map on-disk type codes to CLR column types.
                    switch (column2.DataType)
                    {
                        case 3:
                            table.ColumnTypes.Add(typeof(uint));
                            break;
                        case 4:
                            table.ColumnTypes.Add(typeof(float));
                            break;
                        case 11:
                            table.ColumnTypes.Add(typeof(string));
                            break;
                        case 20:
                            table.ColumnTypes.Add(typeof(ulong));
                            break;
                        case 130:
                            table.ColumnTypes.Add(typeof(string));
                            break;
                        default:
                            Console.Log("Not supported data type '" + column2.DataType + "'", Console.LogType.Error);
                            break;
                    }
                }

                // Records start 96 bytes past EntryOffset, RecordSize bytes apart.
                ulong num3 = dBHeader.EntryOffset + 96;
                for (uint num4 = 0u; num4 < dBHeader.RecordCount; num4++)
                {
                    List<object> dataRow = new List<object>();
                    int num5 = 0;     // previous column's data type
                    bool flag = false; // set when a type-130 string had a zero primary offset
                    binaryReader.BaseStream.Position = (long)(num3 + dBHeader.RecordSize * num4);
                    for (int i = 0; i < (int)dBHeader.FieldCount; i++)
                    {
                        Column column3 = list[i];
                        // Skip 4 alignment bytes after a zero-offset string column.
                        if (flag && column3.DataType != 130 && num5 == 130)
                        {
                            binaryReader.BaseStream.Position += 4L;
                        }
                        switch (column3.DataType)
                        {
                            case 3:
                                dataRow.Add(binaryReader.ReadUInt32());
                                break;
                            case 4:
                                dataRow.Add(binaryReader.ReadSingle());
                                break;
                            case 11:
                                dataRow.Add(Convert.ToBoolean(binaryReader.ReadUInt32()).ToString());
                                break;
                            case 20:
                                dataRow.Add(binaryReader.ReadUInt64());
                                break;
                            case 130:
                            {
                                // Two candidate offsets; the first non-zero one is
                                // used, relative to the record block start.
                                uint num6 = binaryReader.ReadUInt32();
                                uint num7 = binaryReader.ReadUInt32();
                                if (num6 == 0)
                                {
                                    flag = true;
                                }
                                long position = binaryReader.BaseStream.Position;
                                binaryReader.BaseStream.Position = (long)(((num6 != 0) ? num6 : num7) + num3);
                                dataRow.Add(binaryReader.ReadWString());
                                binaryReader.BaseStream.Position = position;
                                break;
                            }
                        }
                        num5 = column3.DataType;
                    }
                    table.dataRows.Add(dataRow);
                }
                //table.EndLoadData();
            }
        }
    }
    catch (Exception ex)
    {
        Console.Log($"Error while loading : {ex.Message}", Console.LogType.Error);
    }
    return (table);
}
/// <summary>
/// Creates a WCH5 cache header (fixed 0x30-byte header) bound to its
/// originating DB counterpart.
/// </summary>
public WCH5(DBHeader counterpart)
{
    this.CounterPart = counterpart;
    HeaderSize = 0x30;
}
/// <summary>
/// Creates a WCH8 cache header (fixed 0x34-byte header, string table offset
/// field at 0x14) bound to its originating DB counterpart.
/// </summary>
public WCH8(DBHeader counterpart)
{
    this.CounterPart = counterpart;
    HeaderSize = 0x34;
    StringTableOffset = 0x14;
}
// Reads a WDB5-style file from memory into a DataTable.
// info: Item1 = record type whose fields define the columns,
//       Item2 = (type name, field name) -> array template for array columns,
//       Item3 = string metadata (unused here), Item4 = file flags.
// Handles offset-table (sparse) files, packed sub-32-bit fields, an optional
// index block spliced in front of each record, and a trailing reference block
// that clones rows under new ids. Errors are logged and a partial table returned.
public static DataTable Read(MemoryStream dbStream, Tuple<Type, Dictionary<Tuple<string, string>, Array>, Dictionary<Tuple<string, string>, string>, FileFlags> info)
{
    var table = new DataTable();
    try
    {
        var dbReader = new BinaryReader(dbStream);
        var header = new DBHeader
        {
            Signature = dbReader.ReadString(4),
            RecordCount = dbReader.Read<uint>(),
            FieldCount = dbReader.Read<uint>(),
            RecordSize = dbReader.Read<uint>(),
            BlockValue = dbReader.Read<uint>()
        };

        var hasDataOffsetBlock = false;
        var hasIndex = false;
        var recordSizeList = new List<ushort>();

        if (header.IsValidDb5File)
        {
            header.TableHash = dbReader.Read<uint>();
            header.LayouteHash = dbReader.Read<uint>();
            header.MinId = dbReader.Read<int>();
            header.MaxId = dbReader.Read<int>();
            header.Locale = dbReader.Read<int>();
            header.ReferenceDataSize = dbReader.Read<int>();
            header.FileFlags = (FileFlags)dbReader.Read<ushort>();
            header.IdIndex = dbReader.Read<ushort>();

            // ushort FieldType, ushort FieldOffset
            // Bits is stored as (32 - value); Count temporarily holds the offset.
            for (var i = 0; i < header.FieldCount; i++)
            {
                var bits = 32 - dbReader.Read<ushort>();
                var offset = dbReader.Read<ushort>();
                header.FieldInfo.Add(new FieldInfo { Bits = bits, Count = offset });
            }

            // Convert offsets to element counts from the gap to the next field.
            for (var i = 0; i < header.FieldCount - 1; i++)
            {
                var bits = header.FieldInfo[i].Bits;
                var offset = header.FieldInfo[i].Count;
                var nextOffset = header.FieldInfo[i + 1].Count;
                header.FieldInfo[i].Count = ((nextOffset - offset) * 8) / bits;
            }

            // Last field has no successor: take its count from the array template, else 1.
            if (info.Item2.Keys.Any(k => k.Item2 == info.Item1.GetFields()[(int)header.FieldCount - 1].Name))
                header.FieldInfo[(int)header.FieldCount - 1].Count = info.Item2.Single(k => k.Key.Item2 == info.Item1.GetFields()[(int)header.FieldCount - 1].Name).Value.Length;
            else
                header.FieldInfo[(int)header.FieldCount - 1].Count = 1;

            var dataSize = header.RecordCount * header.RecordSize;
            var indexDataSize = header.RecordCount * 4;
            var indexDataStart = 0;

            hasDataOffsetBlock = header.FileFlags.HasFlag(FileFlags.DataOffset);
            if (hasDataOffsetBlock)
            {
                // Sparse layout: an (offset, size) pair table locates each record;
                // coalesce the records into a contiguous blob.
                dbReader.BaseStream.Position = header.BlockValue;
                while (header.DataBlockOffsets.Count < header.RecordCount)
                {
                    var offset = dbReader.ReadUInt32();
                    var size = dbReader.ReadUInt16();
                    if (offset > 0 && !header.DataBlockOffsets.ContainsKey(offset))
                        header.DataBlockOffsets.Add(offset, size);
                }

                indexDataStart = (int)dbReader.BaseStream.Position;

                var dataBlockWriter = new BinaryWriter(new MemoryStream());
                foreach (var dataBlockOffset in header.DataBlockOffsets)
                {
                    dbReader.BaseStream.Position = dataBlockOffset.Key;
                    dataBlockWriter.Write(dbReader.ReadBytes(dataBlockOffset.Value));
                    recordSizeList.Add(dataBlockOffset.Value);
                }

                header.Data = (dataBlockWriter.BaseStream as MemoryStream).ToArray();
                dbReader.BaseStream.Position = indexDataStart;
            }
            else
            {
                // Dense layout: records then the string block (BlockValue = its size).
                header.Data = dbReader.ReadBytes((int)dataSize);
                header.StringData = dbReader.ReadBytes((int)header.BlockValue);
            }

            hasIndex = header.FileFlags.HasFlag(FileFlags.Index);

            // Some index data stuff?!
            if (header.FileFlags.HasFlag(FileFlags.Unknown))
                dbReader.ReadBytes((int)indexDataSize);

            if (hasIndex)
                header.IndexData = dbReader.ReadBytes((int)indexDataSize);

            if (header.ReferenceDataSize > 0)
                header.ReferenceDataBlock = dbReader.ReadBytes(header.ReferenceDataSize);

            // Rebuild a normalised stream: each record optionally prefixed by its
            // 4-byte id from the index block, followed by the string data.
            var data = new BinaryWriter(new MemoryStream());
            var dataReader = new BinaryReader(new MemoryStream(header.Data));
            var indexDataReader = new BinaryReader(new MemoryStream(header.IndexData));

            if (!hasIndex)
            {
                for (var i = 0; i < header.RecordCount; i++)
                    data.Write(dataReader.ReadBytes((int)header.RecordSize));
            }
            else
            {
                if (hasDataOffsetBlock)
                {
                    for (var i = 0; i < header.RecordCount; i++)
                    {
                        data.Write(indexDataReader.ReadBytes(4));
                        data.Write(dataReader.ReadBytes(recordSizeList[i]));
                    }
                }
                else
                {
                    for (var i = 0; i < header.RecordCount; i++)
                    {
                        data.Write(indexDataReader.ReadBytes(4));
                        data.Write(dataReader.ReadBytes((int)header.RecordSize));
                    }
                }
            }

            data.Write(header.StringData);
            dataReader.Dispose();
            indexDataReader.Dispose();
            // All further reads go against the rebuilt stream.
            dbReader = new BinaryReader(data.BaseStream);
            dbReader.BaseStream.Position = 0;
        }

        var fields = info.Item1.GetFields();

        // Hack...
        // Indexed files gain a synthetic leading id column (AutoId).
        if (hasIndex && info.Item1.Name != "WmoMinimapTexture")
        {
            fields = typeof(AutoId).GetFields().Concat(fields).ToArray();
            header.FieldInfo.Insert(0, new FieldInfo { Bits = 32, Count = 1 });
        }

        var lastStringOffset = 0;
        var lastString = "";

        table.BeginLoadData();

        // Build the DataTable columns; Unused* marker types are skipped entirely.
        foreach (var f in fields)
        {
            if (f.FieldType == typeof(Unused) || f.FieldType == typeof(UnusedByte) || f.FieldType == typeof(UnusedShort) ||
                f.FieldType == typeof(UnusedLong) || f.FieldType == typeof(Unused[]) || f.FieldType == typeof(UnusedByte[]) ||
                f.FieldType == typeof(UnusedShort[]) || f.FieldType == typeof(UnusedLong[]))
                continue;

            if (f.FieldType.IsArray)
            {
                // Arrays expand to one column per element: Name0, Name1, ...
                var arr = info.Item2.Single(af => af.Key.Item2 == f.Name).Value;
                for (var i = 0; i < arr.Length; i++)
                {
                    if (arr.GetType().GetElementType() == typeof(int))
                        table.Columns.Add(f.Name + i);
                    else
                        table.Columns.Add(f.Name + i, arr.GetType().GetElementType());
                }
            }
            else if (f.FieldType == typeof(int))
                table.Columns.Add(f.Name);
            else
                table.Columns.Add(f.Name, f.FieldType);
        }

        // Hack...
        if (hasIndex && info.Item1.Name != "WmoMinimapTexture")
            table.PrimaryKey = new[] { table.Columns[0] };
        else if (table.Columns.Contains("Id"))
            table.PrimaryKey = new[] { table.Columns["Id"] };

        if (table.PrimaryKey.Length == 1)
            table.PrimaryKey[0].DataType = typeof(uint);

        // Sparse records larger than RecordSize force sequential reads with
        // a running position instead of fixed-stride seeks.
        var hasPadding = recordSizeList.Any(v => v > header.RecordSize);
        var recordSize = 0;

        for (var i = 0; i < header.RecordCount; i++)
        {
            var row = table.NewRow();
            if (!hasPadding)
            {
                dbReader.BaseStream.Position = i * header.RecordSize;
                if (hasIndex)
                    dbReader.BaseStream.Position += i * 4;
            }

            var lastFieldType = "";
            for (var l = 0; l < fields.Length; l++)
            {
                var f = fields[l];
                lastFieldType = f.FieldType.Name;
                // Dispatch on the CLR type name of the record field. Packed
                // Int32/UInt32 fields (Bits < 32) are zero-extended to 4 bytes.
                switch (lastFieldType)
                {
                    case "SByte": row[f.Name] = dbReader.ReadSByte(); break;
                    case "Byte": row[f.Name] = dbReader.ReadByte(); break;
                    case "Int16": row[f.Name] = dbReader.ReadInt16(); break;
                    case "UInt16": row[f.Name] = dbReader.ReadUInt16(); break;
                    case "Int32":
                        if (l >= header.FieldInfo.Count || header.FieldInfo[l].Bits == 32)
                            row[f.Name] = dbReader.ReadInt32();
                        else
                        {
                            var valBytes = dbReader.ReadBytes(header.FieldInfo[l].Bits / 8);
                            valBytes = valBytes.Concat(new byte[4 - valBytes.Length]).ToArray();
                            row[f.Name] = BitConverter.ToInt32(valBytes, 0);
                        }
                        break;
                    case "UInt32":
                        if (l >= header.FieldInfo.Count || header.FieldInfo[l].Bits == 32)
                            row[f.Name] = dbReader.ReadUInt32();
                        else
                        {
                            var valBytes = dbReader.ReadBytes(header.FieldInfo[l].Bits / 8);
                            valBytes = valBytes.Concat(new byte[4 - valBytes.Length]).ToArray();
                            row[f.Name] = BitConverter.ToUInt32(valBytes, 0);
                        }
                        break;
                    case "Int64": row[f.Name] = dbReader.ReadInt64(); break;
                    case "UInt64": row[f.Name] = dbReader.ReadUInt64(); break;
                    case "Single": row[f.Name] = dbReader.ReadSingle(); break;
                    case "Boolean": row[f.Name] = dbReader.ReadBoolean(); break;
                    case "SByte[]":
                        var length = info.Item2.Single(af => af.Key.Item2 == f.Name).Value.Length;
                        for (var j = 0; j < length; j++)
                            row[f.Name + j] = dbReader.ReadSByte();
                        break;
                    case "Byte[]":
                        length = info.Item2.Single(af => af.Key.Item2 == f.Name).Value.Length;
                        for (var j = 0; j < length; j++)
                            row[f.Name + j] = dbReader.ReadByte();
                        break;
                    case "Int16[]":
                        length = info.Item2.Single(af => af.Key.Item2 == f.Name).Value.Length;
                        for (var j = 0; j < length; j++)
                            row[f.Name + j] = dbReader.ReadInt16();
                        break;
                    case "UInt16[]":
                        length = info.Item2.Single(af => af.Key.Item2 == f.Name).Value.Length;
                        for (var j = 0; j < length; j++)
                            row[f.Name + j] = dbReader.ReadUInt16();
                        break;
                    case "Int32[]":
                        length = info.Item2.Single(af => af.Key.Item2 == f.Name).Value.Length;
                        for (var j = 0; j < length; j++)
                        {
                            if (l >= header.FieldInfo.Count || (header.FieldInfo[l].Bits == 32))
                                row[f.Name + j] = dbReader.Read<int>();
                            else
                            {
                                var valBytes = dbReader.ReadBytes(header.FieldInfo[l].Bits / 8);
                                valBytes = valBytes.Concat(new byte[4 - valBytes.Length]).ToArray();
                                row[f.Name + j] = BitConverter.ToInt32(valBytes, 0);
                            }
                        }
                        break;
                    case "UInt32[]":
                        length = info.Item2.Single(af => af.Key.Item2 == f.Name).Value.Length;
                        for (var j = 0; j < length; j++)
                        {
                            if (l >= header.FieldInfo.Count || header.FieldInfo[l].Bits == 32)
                                row[f.Name + j] = dbReader.ReadUInt32();
                            else
                            {
                                var valBytes = dbReader.ReadBytes(header.FieldInfo[l].Bits / 8);
                                valBytes = valBytes.Concat(new byte[4 - valBytes.Length]).ToArray();
                                row[f.Name + j] = BitConverter.ToUInt32(valBytes, 0);
                            }
                        }
                        break;
                    case "Single[]":
                        length = info.Item2.Single(af => af.Key.Item2 == f.Name).Value.Length;
                        for (var j = 0; j < length; j++)
                            row[f.Name + j] = dbReader.ReadSingle();
                        break;
                    case "Int64[]":
                        length = info.Item2.Single(af => af.Key.Item2 == f.Name).Value.Length;
                        for (var j = 0; j < length; j++)
                            row[f.Name + j] = dbReader.ReadInt64();
                        break;
                    case "UInt64[]":
                        length = info.Item2.Single(af => af.Key.Item2 == f.Name).Value.Length;
                        for (var j = 0; j < length; j++)
                            row[f.Name + j] = dbReader.ReadUInt64();
                        break;
                    case "String[]":
                    {
                        // Strings are inline (sparse files) or offsets into the
                        // string block placed after records (+ index prefix).
                        length = info.Item2.Single(af => af.Key.Item2 == f.Name).Value.Length;
                        for (var j = 0; j < length; j++)
                        {
                            if (hasDataOffsetBlock)
                            {
                                row[f.Name + j] = dbReader.ReadCString();
                            }
                            else
                            {
                                var stringOffset = dbReader.ReadUInt32();
                                if (stringOffset == 0)
                                    break;
                                if (stringOffset != lastStringOffset)
                                {
                                    var currentPos = dbReader.BaseStream.Position;
                                    var stringStart = (header.RecordCount * header.RecordSize) + stringOffset;
                                    if (hasIndex)
                                        stringStart += (header.RecordCount * 4);
                                    dbReader.BaseStream.Seek(stringStart, 0);
                                    row[f.Name + j] = lastString = dbReader.ReadCString();
                                    dbReader.BaseStream.Seek(currentPos, 0);
                                }
                                else
                                    row[f.Name + j] = lastString;
                            }
                        }
                        break;
                    }
                    case "String":
                    {
                        if (hasDataOffsetBlock)
                        {
                            row[f.Name] = dbReader.ReadCString();
                        }
                        else
                        {
                            var stringOffset = dbReader.ReadUInt32();
                            if (stringOffset == 0)
                                break;
                            if (stringOffset != lastStringOffset)
                            {
                                var currentPos = dbReader.BaseStream.Position;
                                var stringStart = (header.RecordCount * header.RecordSize) + stringOffset;
                                if (hasIndex)
                                    stringStart += (header.RecordCount * 4);
                                dbReader.BaseStream.Seek(stringStart, 0);
                                row[f.Name] = lastString = dbReader.ReadCString();
                                dbReader.BaseStream.Seek(currentPos, 0);
                            }
                            else
                                row[f.Name] = lastString;
                        }
                        break;
                    }
                    default:
                        // Unknown field type: skip 4 bytes to stay aligned.
                        dbReader.BaseStream.Position += 4;
                        break;
                }
            }

            if (hasPadding)
            {
                // Advance the running position past this variable-size record.
                recordSize += recordSizeList[i];
                if (hasIndex)
                    recordSize += 4;
                dbReader.BaseStream.Position = recordSize;
            }

            table.Rows.Add(row);
        }

        table.EndLoadData();

        // Reference block: (newId, existingId) pairs cloning existing rows.
        var refReader = new BinaryReader(new MemoryStream(header.ReferenceDataBlock));
        if (header.ReferenceDataBlock.Length > 0)
        {
            while (refReader.BaseStream.Position != refReader.BaseStream.Length)
            {
                var id = refReader.ReadUInt32();
                var referenceId = refReader.ReadUInt32();
                var referenceRow = table.Rows.Find(referenceId);
                if (referenceRow != null)
                {
                    var row = table.NewRow();
                    row.ItemArray = referenceRow.ItemArray;
                    row[0] = id;
                    table.Rows.Add(row);
                }
            }
        }

        if (table.Columns.Contains("Id"))
            table.DefaultView.Sort = $"Id";
    }
    catch
    {
        Console.WriteLine($"Error while loading {info.Item1.Name}");
    }

    return table;
}