/// <summary>
/// Decodes one record from <paramref name="r"/> into <paramref name="entry"/>, writing each
/// value through the pre-compiled setters in <paramref name="fields"/> (one cache entry per
/// declared field, in record layout order).
/// </summary>
/// <param name="fields">Per-field accessor cache for <typeparamref name="T"/>.</param>
/// <param name="entry">Row instance that receives the decoded values.</param>
/// <param name="r">Bit-level reader already positioned at the start of the record.</param>
/// <param name="recordOffset">Base offset added when resolving non-sparse string-table keys.</param>
/// <param name="stringsTable">String table keyed by offset; used only for non-sparse string fields.</param>
/// <param name="fieldMeta">Per-column bit/size metadata.</param>
/// <param name="columnMeta">Per-column compression metadata.</param>
/// <param name="palletData">Per-column pallet values (pallet-compressed columns only).</param>
/// <param name="commonData">Per-column common-value maps (common-compressed columns only).</param>
/// <param name="id">External record id written to the index field, or -1 if the id is inline.</param>
/// <param name="refId">Parent-lookup id written to fields past fieldMeta, or -1 when absent.</param>
/// <param name="isSparse">True when strings are inlined in the record instead of the string table.</param>
public void Read<T>(FieldCache[] fields, T entry, BitReader r, int recordOffset, Dictionary<long, string> stringsTable, FieldMetaData[] fieldMeta, ColumnMetaData[] columnMeta, Value32[][] palletData, Dictionary<int, Value32>[] commonData, int id, int refId, bool isSparse = false) where T : ClientDBRow
{
    int fieldIndex = 0;
    foreach (var f in fields)
    {
        // The index field takes the externally-supplied id. fieldIndex is deliberately NOT
        // advanced here, so the index field consumes no fieldMeta column.
        if (f.IsIndex && id != -1)
        {
            ((FieldCache<T, int>)f).Setter(entry, id);
            continue;
        }
        // Declared fields beyond the column metadata are parent-reference fields:
        // they are filled from refId, not from record data.
        if (fieldIndex >= fieldMeta.Length)
        {
            if (refId != -1)
            {
                ((FieldCache<T, int>)f).Setter(entry, refId);
            }
            continue;
        }
        if (f.IsArray)
        {
            // Dispatch on the cached field's element type; each case decodes an array using
            // this column's compression settings and advances the bit reader.
            switch (f)
            {
                case FieldCache<T, int[]> c1:
                    c1.Setter(entry, FieldReader.GetFieldValueArray<int>(r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex], c1.ArraySize));
                    break;
                case FieldCache<T, uint[]> c1:
                    c1.Setter(entry, FieldReader.GetFieldValueArray<uint>(r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex], c1.ArraySize));
                    break;
                case FieldCache<T, byte[]> c1:
                    c1.Setter(entry, FieldReader.GetFieldValueArray<byte>(r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex], c1.ArraySize));
                    break;
                case FieldCache<T, sbyte[]> c1:
                    c1.Setter(entry, FieldReader.GetFieldValueArray<sbyte>(r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex], c1.ArraySize));
                    break;
                case FieldCache<T, short[]> c1:
                    c1.Setter(entry, FieldReader.GetFieldValueArray<short>(r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex], c1.ArraySize));
                    break;
                case FieldCache<T, ushort[]> c1:
                    c1.Setter(entry, FieldReader.GetFieldValueArray<ushort>(r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex], c1.ArraySize));
                    break;
                case FieldCache<T, float[]> c1:
                    c1.Setter(entry, FieldReader.GetFieldValueArray<float>(r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex], c1.ArraySize));
                    break;
                case FieldCache<T, long[]> c1:
                    c1.Setter(entry, FieldReader.GetFieldValueArray<long>(r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex], c1.ArraySize));
                    break;
                case FieldCache<T, ulong[]> c1:
                    c1.Setter(entry, FieldReader.GetFieldValueArray<ulong>(r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex], c1.ArraySize));
                    break;
                case FieldCache<T, string[]> c1:
                    // String arrays need the strings table / sparse flag / record offset to
                    // resolve each element.
                    c1.Setter(entry, FieldReader.GetFieldValueStringsArray(r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex], stringsTable, isSparse, recordOffset, c1.ArraySize));
                    break;
                default:
                    throw new Exception($"Unhandled DbcTable type: {f.Field.FieldType.FullName} in {f.Field.DeclaringType.FullName}.{f.Field.Name}");
            }
        }
        else
        {
            switch (f)
            {
                case FieldCache<T, int> c1:
                    c1.Setter(entry, FieldReader.GetFieldValue<int>(GetId(), r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex]));
                    break;
                case FieldCache<T, uint> c1:
                    c1.Setter(entry, FieldReader.GetFieldValue<uint>(GetId(), r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex]));
                    break;
                case FieldCache<T, byte> c1:
                    c1.Setter(entry, FieldReader.GetFieldValue<byte>(GetId(), r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex]));
                    break;
                case FieldCache<T, sbyte> c1:
                    c1.Setter(entry, FieldReader.GetFieldValue<sbyte>(GetId(), r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex]));
                    break;
                case FieldCache<T, short> c1:
                    c1.Setter(entry, FieldReader.GetFieldValue<short>(GetId(), r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex]));
                    break;
                case FieldCache<T, ushort> c1:
                    c1.Setter(entry, FieldReader.GetFieldValue<ushort>(GetId(), r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex]));
                    break;
                case FieldCache<T, float> c1:
                    c1.Setter(entry, FieldReader.GetFieldValue<float>(GetId(), r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex]));
                    break;
                case FieldCache<T, long> c1:
                    c1.Setter(entry, FieldReader.GetFieldValue<long>(GetId(), r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex]));
                    break;
                case FieldCache<T, ulong> c1:
                    c1.Setter(entry, FieldReader.GetFieldValue<ulong>(GetId(), r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex]));
                    break;
                case FieldCache<T, string> c1:
                    // Non-sparse strings: r.Position is sampled BEFORE GetFieldValue advances
                    // the reader (C# evaluates operands left-to-right); the decoded int is a
                    // relative offset into stringsTable.
                    // NOTE(review): unlike the reflection-based Read, this key omits r.Offset —
                    // confirm both paths agree when records are read at non-zero offsets.
                    c1.Setter(entry, isSparse ? r.ReadCString() : stringsTable[(recordOffset + (r.Position >> 3)) + FieldReader.GetFieldValue<int>(GetId(), r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex])]);
                    break;
                default:
                    throw new Exception($"Unhandled DbcTable type: {f.Field.FieldType.FullName} in {f.Field.DeclaringType.FullName}.{f.Field.Name}");
            }
        }
        fieldIndex++;
    }
}
/// <summary>
/// Parses a WDC3-format DB2 file from <paramref name="stream"/>: header, field/column
/// metadata, pallet and common data, then every section's records into _Records.
/// </summary>
/// <param name="stream">Stream positioned at the start of the WDC3 file.</param>
/// <param name="hasTactKey">Predicate telling whether a TACT encryption key is available;
/// encrypted sections whose key is missing are skipped. Null means "no keys".</param>
/// <exception cref="InvalidDataException">Stream is too short or the magic is not WDC3.</exception>
public WDC3Reader(Stream stream, Func<ulong, bool> hasTactKey = null)
{
    if (hasTactKey == null)
    {
        hasTactKeyFunc = (key) => false;
    }
    else
    {
        hasTactKeyFunc = hasTactKey;
    }
    using (var reader = new BinaryReader(stream, Encoding.UTF8))
    {
        if (reader.BaseStream.Length < HeaderSize)
        {
            throw new InvalidDataException(String.Format("WDC3 file is corrupted!"));
        }
        uint magic = reader.ReadUInt32();
        if (magic != WDC3FmtSig)
        {
            throw new InvalidDataException(String.Format("WDC3 file is corrupted!"));
        }
        // ---- fixed-size header ----
        RecordsCount = reader.ReadInt32();
        FieldsCount = reader.ReadInt32();
        RecordSize = reader.ReadInt32();
        StringTableSize = reader.ReadInt32();
        TableHash = reader.ReadUInt32();
        LayoutHash = reader.ReadUInt32();
        MinIndex = reader.ReadInt32();
        MaxIndex = reader.ReadInt32();
        int locale = reader.ReadInt32();
        int flags = reader.ReadUInt16();
        IdFieldIndex = reader.ReadUInt16();
        int totalFieldsCount = reader.ReadInt32();
        int packedDataOffset = reader.ReadInt32(); // Offset within the field where packed data starts
        int lookupColumnCount = reader.ReadInt32(); // count of lookup columns
        int columnMetaDataSize = reader.ReadInt32(); // 24 * NumFields bytes, describes column bit packing, {ushort recordOffset, ushort size, uint additionalDataSize, uint compressionType, uint packedDataOffset or commonvalue, uint cellSize, uint cardinality}[NumFields], sizeof(DBC2CommonValue) == 8
        int commonDataSize = reader.ReadInt32();
        int palletDataSize = reader.ReadInt32(); // in bytes, sizeof(DBC2PalletValue) == 4
        int sectionsCount = reader.ReadInt32();
        //if (sectionsCount > 1)
        //    throw new Exception("sectionsCount > 1");
        SectionHeader_WDC3[] sections = reader.ReadArray<SectionHeader_WDC3>(sectionsCount);
        // field meta data
        m_meta = reader.ReadArray<FieldMetaData>(FieldsCount);
        // Empty table: nothing further to parse.
        if (sectionsCount == 0 || RecordsCount == 0)
        {
            return;
        }
        // column meta data
        m_columnMeta = reader.ReadArray<ColumnMetaData>(FieldsCount);
        // pallet data (only pallet-compressed columns have entries; 4 bytes per Value32)
        m_palletData = new Value32[m_columnMeta.Length][];
        for (int i = 0; i < m_columnMeta.Length; i++)
        {
            if (m_columnMeta[i].CompressionType == CompressionType.Pallet || m_columnMeta[i].CompressionType == CompressionType.PalletArray)
            {
                m_palletData[i] = reader.ReadArray<Value32>((int)m_columnMeta[i].AdditionalDataSize / 4);
            }
        }
        // common data (8 bytes per entry: int record id -> Value32)
        m_commonData = new Dictionary<int, Value32>[m_columnMeta.Length];
        for (int i = 0; i < m_columnMeta.Length; i++)
        {
            if (m_columnMeta[i].CompressionType == CompressionType.Common)
            {
                Dictionary<int, Value32> commonValues = new Dictionary<int, Value32>();
                m_commonData[i] = commonValues;
                for (int j = 0; j < m_columnMeta[i].AdditionalDataSize / 8; j++)
                {
                    commonValues[reader.ReadInt32()] = reader.Read<Value32>();
                }
            }
        }
        bool isSparse = (flags & 0x1) != 0;  // strings inlined in records
        bool hasIndex = (flags & 0x4) != 0;  // ids stored in a separate index array
        for (int sectionIndex = 0; sectionIndex < sectionsCount; sectionIndex++)
        {
            // Skip encrypted sections whose TACT key is unavailable.
            if (sections[sectionIndex].TactKeyLookup != 0 && !hasTactKeyFunc(sections[sectionIndex].TactKeyLookup))
            {
                //Console.WriteLine("Detected db2 with encrypted section! HasKey {0}", CASC.HasKey(Sections[sectionIndex].TactKeyLookup));
                continue;
            }
            reader.BaseStream.Position = sections[sectionIndex].FileOffset;
            byte[] recordsData;
            Dictionary<long, string> stringsTable = null;
            SparseEntry[] sparseEntries = null;
            if (isSparse)
            {
                // sparse data with inlined strings
                recordsData = reader.ReadBytes(sections[sectionIndex].SparseDataEndOffset - sections[sectionIndex].FileOffset);
                if (reader.BaseStream.Position != sections[sectionIndex].SparseDataEndOffset)
                {
                    throw new Exception("reader.BaseStream.Position != sections[sectionIndex].SparseDataEndOffset");
                }
            }
            else
            {
                // records data
                recordsData = reader.ReadBytes(sections[sectionIndex].NumRecords * RecordSize);
                // string data; keys are biased so in-record string offsets resolve here.
                // NOTE(review): this bias uses (RecordsCount - NumRecords) * RecordSize for every
                // section, whereas WDC2ReaderGeneric accumulates previousStringTableSize for
                // sections after the first — confirm multi-section WDC3 string lookup is correct.
                stringsTable = new Dictionary<long, string>();
                long stringDataOffset = (RecordsCount - sections[sectionIndex].NumRecords) * RecordSize;
                for (int i = 0; i < sections[sectionIndex].StringTableSize;)
                {
                    long oldPos = reader.BaseStream.Position;
                    stringsTable[oldPos + stringDataOffset] = reader.ReadCString();
                    i += (int)(reader.BaseStream.Position - oldPos);
                }
            }
            Array.Resize(ref recordsData, recordsData.Length + 8); // pad with extra zeros so we don't crash when reading
            // index data
            int[] indexData = reader.ReadArray<int>(sections[sectionIndex].IndexDataSize / 4);
            // All-zero index data means ids are implicit (sequential).
            bool isIndexEmpty = hasIndex && indexData.Count(i => i == 0) == sections[sectionIndex].NumRecords;
            // duplicate rows data (copy destination id -> source id)
            Dictionary<int, int> copyData = new Dictionary<int, int>();
            for (int i = 0; i < sections[sectionIndex].NumCopyRecords; i++)
            {
                copyData[reader.ReadInt32()] = reader.ReadInt32();
            }
            if (sections[sectionIndex].NumSparseRecords > 0)
            {
                sparseEntries = reader.ReadArray<SparseEntry>(sections[sectionIndex].NumSparseRecords);
            }
            // reference data (parent lookup: record index -> parent id)
            ReferenceData refData = null;
            if (sections[sectionIndex].ParentLookupDataSize > 0)
            {
                refData = new ReferenceData
                {
                    NumRecords = reader.ReadInt32(),
                    MinId = reader.ReadInt32(),
                    MaxId = reader.ReadInt32()
                };
                ReferenceEntry[] entries = reader.ReadArray<ReferenceEntry>(refData.NumRecords);
                refData.Entries = new Dictionary<int, int>();
                for (int i = 0; i < entries.Length; i++)
                {
                    refData.Entries[entries[i].Index] = entries[i].Id;
                }
            }
            else
            {
                refData = new ReferenceData { Entries = new Dictionary<int, int>() };
            }
            if (sections[sectionIndex].NumSparseRecords > 0)
            {
                // TODO: use this shit
                // Sparse sections carry their own id array, which replaces indexData.
                int[] sparseIndexData = reader.ReadArray<int>(sections[sectionIndex].NumSparseRecords);
                if (hasIndex && indexData.Length != sparseIndexData.Length)
                {
                    throw new Exception("indexData.Length != sparseIndexData.Length");
                }
                indexData = sparseIndexData;
            }
            BitReader bitReader = new BitReader(recordsData);
            if (sections[sectionIndex].NumSparseRecords > 0 && sections[sectionIndex].NumRecords != sections[sectionIndex].NumSparseRecords)
            {
                throw new Exception("sections[sectionIndex].NumSparseRecords > 0 && sections[sectionIndex].NumRecords != sections[sectionIndex].NumSparseRecords");
            }
            for (int i = 0; i < sections[sectionIndex].NumRecords; ++i)
            {
                bitReader.Position = 0;
                // Sparse records live at per-record offsets; dense records are fixed-size.
                if (isSparse)
                {
                    bitReader.Offset = sparseEntries[i].Offset - sections[sectionIndex].FileOffset;
                }
                else
                {
                    bitReader.Offset = i * RecordSize;
                }
                bool hasRef = refData.Entries.TryGetValue(i, out int refId);
                IDB2Row rec = new WDC3Row(this, bitReader, sections[sectionIndex].FileOffset, hasIndex ? (isIndexEmpty ? i : indexData[i]) : -1, hasRef ? refId : -1, isSparse, stringsTable);
                _Records.Add(rec.Id, rec);
                if (i % 1000 == 0)
                {
                    Console.Write("\r{0} records read", i);
                }
            }
            // Materialize duplicate rows by cloning the source row under the new id.
            foreach (var copyRow in copyData)
            {
                IDB2Row rec = _Records[copyRow.Value].Clone();
                rec.Id = copyRow.Key;
                _Records.Add(copyRow.Key, rec);
            }
        }
    }
}
/// <summary>
/// Parses a WDC1-format DB2 file from <paramref name="stream"/>: header, field/column
/// metadata, pallet/common data, then all records into _Records.
/// Sparse (offset-map) files are not yet implemented and always throw.
/// </summary>
/// <param name="stream">Stream positioned at the start of the WDC1 file.</param>
/// <exception cref="InvalidDataException">Stream is too short or the magic is not WDC1.</exception>
public WDC1Reader(Stream stream)
{
    using (var reader = new BinaryReader(stream, Encoding.UTF8))
    {
        if (reader.BaseStream.Length < HeaderSize)
        {
            throw new InvalidDataException(String.Format("WDC1 file is corrupted!"));
        }
        uint magic = reader.ReadUInt32();
        if (magic != WDC1FmtSig)
        {
            throw new InvalidDataException(String.Format("WDC1 file is corrupted!"));
        }
        // ---- fixed-size header ----
        RecordsCount = reader.ReadInt32();
        FieldsCount = reader.ReadInt32();
        RecordSize = reader.ReadInt32();
        StringTableSize = reader.ReadInt32();
        uint tableHash = reader.ReadUInt32();
        uint layoutHash = reader.ReadUInt32();
        MinIndex = reader.ReadInt32();
        MaxIndex = reader.ReadInt32();
        int locale = reader.ReadInt32();
        int copyTableSize = reader.ReadInt32();
        int flags = reader.ReadUInt16();
        IdFieldIndex = reader.ReadUInt16();
        int totalFieldsCount = reader.ReadInt32();
        int packedDataOffset = reader.ReadInt32(); // Offset within the field where packed data starts
        int lookupColumnCount = reader.ReadInt32(); // count of lookup columns
        int sparseTableOffset = reader.ReadInt32(); // absolute value, {uint offset, ushort size}[MaxId - MinId + 1]
        int indexDataSize = reader.ReadInt32(); // int indexData[IndexDataSize / 4]
        int columnMetaDataSize = reader.ReadInt32(); // 24 * NumFields bytes, describes column bit packing, {ushort recordOffset, ushort size, uint additionalDataSize, uint compressionType, uint packedDataOffset or commonvalue, uint cellSize, uint cardinality}[NumFields], sizeof(DBC2CommonValue) == 8
        int commonDataSize = reader.ReadInt32();
        int palletDataSize = reader.ReadInt32(); // in bytes, sizeof(DBC2PalletValue) == 4
        int referenceDataSize = reader.ReadInt32(); // uint NumRecords, uint minId, uint maxId, {uint id, uint index}[NumRecords], questionable usefulness...
        // field meta data
        m_meta = reader.ReadArray<FieldMetaData>(FieldsCount);
        if ((flags & 0x1) == 0)
        {
            // records data
            recordsData = reader.ReadBytes(RecordsCount * RecordSize);
            Array.Resize(ref recordsData, recordsData.Length + 8); // pad with extra zeros so we don't crash when reading
            // string data; WDC1 keys the table by offset within the string block.
            m_stringsTable = new Dictionary<long, string>();
            for (int i = 0; i < StringTableSize;)
            {
                long oldPos = reader.BaseStream.Position;
                m_stringsTable[i] = reader.ReadCString();
                i += (int)(reader.BaseStream.Position - oldPos);
            }
        }
        else
        {
            // sparse data with inlined strings — deliberately unreachable past this block:
            // both branches of the final check throw (sparse WDC1 is not implemented).
            sparseData = reader.ReadBytes(sparseTableOffset - HeaderSize - Marshal.SizeOf<FieldMetaData>() * FieldsCount);
            if (reader.BaseStream.Position != sparseTableOffset)
            {
                throw new Exception("r.BaseStream.Position != sparseTableOffset");
            }
            sparseEntries = reader.ReadArray<SparseEntry>(MaxIndex - MinIndex + 1);
            if (sparseTableOffset != 0)
            {
                throw new Exception("Sparse Table NYI!");
            }
            else
            {
                throw new Exception("Sparse Table with zero offset?");
            }
        }
        // index data
        m_indexData = reader.ReadArray<int>(indexDataSize / 4);
        // duplicate rows data (copy destination id -> source id; 8 bytes per pair)
        Dictionary<int, int> copyData = new Dictionary<int, int>();
        for (int i = 0; i < copyTableSize / 8; i++)
        {
            copyData[reader.ReadInt32()] = reader.ReadInt32();
        }
        // column meta data
        m_columnMeta = reader.ReadArray<ColumnMetaData>(FieldsCount);
        // pallet data (only pallet-compressed columns have entries; 4 bytes per Value32)
        m_palletData = new Value32[m_columnMeta.Length][];
        for (int i = 0; i < m_columnMeta.Length; i++)
        {
            if (m_columnMeta[i].CompressionType == CompressionType.Pallet || m_columnMeta[i].CompressionType == CompressionType.PalletArray)
            {
                m_palletData[i] = reader.ReadArray<Value32>((int)m_columnMeta[i].AdditionalDataSize / 4);
            }
        }
        // common data (8 bytes per entry: int record id -> Value32)
        m_commonData = new Dictionary<int, Value32>[m_columnMeta.Length];
        for (int i = 0; i < m_columnMeta.Length; i++)
        {
            if (m_columnMeta[i].CompressionType == CompressionType.Common)
            {
                Dictionary<int, Value32> commonValues = new Dictionary<int, Value32>();
                m_commonData[i] = commonValues;
                for (int j = 0; j < m_columnMeta[i].AdditionalDataSize / 8; j++)
                {
                    commonValues[reader.ReadInt32()] = reader.Read<Value32>();
                }
            }
        }
        // reference data (parent lookup)
        ReferenceData refData = null;
        if (referenceDataSize > 0)
        {
            refData = new ReferenceData
            {
                NumRecords = reader.ReadInt32(),
                MinId = reader.ReadInt32(),
                MaxId = reader.ReadInt32()
            };
            refData.Entries = reader.ReadArray<ReferenceEntry>(refData.NumRecords);
        }
        BitReader bitReader = new BitReader(recordsData);
        for (int i = 0; i < RecordsCount; ++i)
        {
            bitReader.Position = 0;
            bitReader.Offset = i * RecordSize;
            // NOTE(review): refData?.Entries[i] indexes the raw entry array by record index —
            // verify entries are ordered by record index for all WDC1 files.
            IDB2Row rec = new WDC1Row(this, bitReader, indexDataSize != 0 ? m_indexData[i] : -1, refData?.Entries[i]);
            if (indexDataSize != 0)
            {
                _Records.Add(m_indexData[i], rec);
            }
            else
            {
                _Records.Add(rec.Id, rec);
            }
            if (i % 1000 == 0)
            {
                Console.Write("\r{0} records read", i);
            }
        }
        // Materialize duplicate rows by cloning the source row under the new id.
        foreach (var copyRow in copyData)
        {
            IDB2Row rec = _Records[copyRow.Value].Clone();
            rec.Id = copyRow.Key;
            _Records.Add(copyRow.Key, rec);
        }
    }
}
/// <summary>
/// Parses a WDC2-format (or 1SLC) DB2 file from <paramref name="stream"/> into strongly
/// typed rows of <c>T</c>, using the compiled accessor cache <c>FieldsCache&lt;T&gt;.Cache</c>.
/// </summary>
/// <param name="stream">Stream positioned at the start of the WDC2 file.</param>
/// <param name="hasTactKey">Predicate telling whether a TACT encryption key is available;
/// encrypted sections whose key is missing are skipped. Null means "no keys".</param>
/// <exception cref="InvalidDataException">Stream is too short or the magic is neither WDC2 nor 1SLC.</exception>
public WDC2ReaderGeneric(Stream stream, Func<ulong, bool> hasTactKey = null)
{
    if (hasTactKey == null)
    {
        hasTactKeyFunc = (key) => false;
    }
    else
    {
        hasTactKeyFunc = hasTactKey;
    }
    using (var reader = new BinaryReader(stream, Encoding.UTF8))
    {
        if (reader.BaseStream.Length < HeaderSize)
        {
            throw new InvalidDataException(String.Format("WDC2 file is corrupted!"));
        }
        uint magic = reader.ReadUInt32();
        if (magic != MagicWDC2 && magic != Magic1SLC)
        {
            throw new InvalidDataException(String.Format("WDC2 file is corrupted!"));
        }
        // ---- fixed-size header ----
        RecordsCount = reader.ReadInt32();
        FieldsCount = reader.ReadInt32();
        RecordSize = reader.ReadInt32();
        StringTableSize = reader.ReadInt32();
        TableHash = reader.ReadUInt32();
        LayoutHash = reader.ReadUInt32();
        MinIndex = reader.ReadInt32();
        MaxIndex = reader.ReadInt32();
        int locale = reader.ReadInt32();
        int flags = reader.ReadUInt16();
        IdFieldIndex = reader.ReadUInt16();
        int totalFieldsCount = reader.ReadInt32();
        int packedDataOffset = reader.ReadInt32(); // Offset within the field where packed data starts
        int lookupColumnCount = reader.ReadInt32(); // count of lookup columns
        int columnMetaDataSize = reader.ReadInt32(); // 24 * NumFields bytes, describes column bit packing, {ushort recordOffset, ushort size, uint additionalDataSize, uint compressionType, uint packedDataOffset or commonvalue, uint cellSize, uint cardinality}[NumFields], sizeof(DBC2CommonValue) == 8
        int commonDataSize = reader.ReadInt32();
        int palletDataSize = reader.ReadInt32(); // in bytes, sizeof(DBC2PalletValue) == 4
        int sectionsCount = reader.ReadInt32();
        //if (sectionsCount > 1)
        //    throw new Exception("sectionsCount > 1");
        SectionHeader_WDC2[] sections = reader.ReadArray<SectionHeader_WDC2>(sectionsCount);
        // field meta data
        m_meta = reader.ReadArray<FieldMetaData>(FieldsCount);
        // Empty table: nothing further to parse.
        if (sectionsCount == 0 || RecordsCount == 0)
        {
            return;
        }
        // column meta data
        m_columnMeta = reader.ReadArray<ColumnMetaData>(FieldsCount);
        // pallet data (only pallet-compressed columns have entries; 4 bytes per Value32)
        m_palletData = new Value32[m_columnMeta.Length][];
        for (int i = 0; i < m_columnMeta.Length; i++)
        {
            if (m_columnMeta[i].CompressionType == CompressionType.Pallet || m_columnMeta[i].CompressionType == CompressionType.PalletArray)
            {
                m_palletData[i] = reader.ReadArray<Value32>((int)m_columnMeta[i].AdditionalDataSize / 4);
            }
        }
        // common data (8 bytes per entry: int record id -> Value32)
        m_commonData = new Dictionary<int, Value32>[m_columnMeta.Length];
        for (int i = 0; i < m_columnMeta.Length; i++)
        {
            if (m_columnMeta[i].CompressionType == CompressionType.Common)
            {
                Dictionary<int, Value32> commonValues = new Dictionary<int, Value32>();
                m_commonData[i] = commonValues;
                for (int j = 0; j < m_columnMeta[i].AdditionalDataSize / 8; j++)
                {
                    commonValues[reader.ReadInt32()] = reader.Read<Value32>();
                }
            }
        }
        bool isSparse = (flags & 0x1) != 0;  // strings inlined in records
        bool hasIndex = (flags & 0x4) != 0;  // ids stored in a separate index array
        // Running total of earlier sections' string-table bytes, used to bias
        // string keys for sections after the first.
        long previousStringTableSize = 0;
        for (int sectionIndex = 0; sectionIndex < sectionsCount; sectionIndex++)
        {
            // Skip encrypted sections whose TACT key is unavailable, but still account
            // for their string-table bytes so later sections' keys stay aligned.
            if (sections[sectionIndex].TactKeyLookup != 0 && !hasTactKeyFunc(sections[sectionIndex].TactKeyLookup))
            {
                //Console.WriteLine("Detected db2 with encrypted section! HasKey {0}", CASC.HasKey(Sections[sectionIndex].TactKeyLookup));
                previousStringTableSize += sections[sectionIndex].StringTableSize;
                continue;
            }
            reader.BaseStream.Position = sections[sectionIndex].FileOffset;
            byte[] recordsData;
            Dictionary<long, string> stringsTable = null;
            SparseEntry[] sparseEntries = null;
            if (isSparse)
            {
                // sparse data with inlined strings
                recordsData = reader.ReadBytes(sections[sectionIndex].SparseTableOffset - sections[sectionIndex].FileOffset);
                if (reader.BaseStream.Position != sections[sectionIndex].SparseTableOffset)
                {
                    throw new Exception("reader.BaseStream.Position != sections[sectionIndex].SparseTableOffset");
                }
                sparseEntries = reader.ReadArray<SparseEntry>(MaxIndex - MinIndex + 1);
            }
            else
            {
                // records data
                recordsData = reader.ReadBytes(sections[sectionIndex].NumRecords * RecordSize);
                // string data; keys are biased so in-record string offsets resolve here.
                stringsTable = new Dictionary<long, string>();
                long stringDataOffset = 0;
                if (sectionIndex == 0)
                {
                    stringDataOffset = (RecordsCount - sections[sectionIndex].NumRecords) * RecordSize;
                }
                else
                {
                    stringDataOffset = previousStringTableSize;
                }
                for (int i = 0; i < sections[sectionIndex].StringTableSize;)
                {
                    long oldPos = reader.BaseStream.Position;
                    stringsTable[oldPos + stringDataOffset] = reader.ReadCString();
                    i += (int)(reader.BaseStream.Position - oldPos);
                }
            }
            Array.Resize(ref recordsData, recordsData.Length + 8); // pad with extra zeros so we don't crash when reading
            // index data
            int[] indexData = reader.ReadArray<int>(sections[sectionIndex].IndexDataSize / 4);
            // All-zero index data means ids are implicit (sequential).
            bool isIndexEmpty = hasIndex && indexData.Count(i => i == 0) == sections[sectionIndex].NumRecords;
            // duplicate rows data (copy destination id -> source id; 8 bytes per pair)
            Dictionary<int, int> copyData = new Dictionary<int, int>();
            for (int i = 0; i < sections[sectionIndex].CopyTableSize / 8; i++)
            {
                copyData[reader.ReadInt32()] = reader.ReadInt32();
            }
            // reference data (parent lookup: record index -> parent id)
            ReferenceData refData = null;
            if (sections[sectionIndex].ParentLookupDataSize > 0)
            {
                refData = new ReferenceData
                {
                    NumRecords = reader.ReadInt32(),
                    MinId = reader.ReadInt32(),
                    MaxId = reader.ReadInt32()
                };
                ReferenceEntry[] entries = reader.ReadArray<ReferenceEntry>(refData.NumRecords);
                refData.Entries = new Dictionary<int, int>();
                for (int i = 0; i < entries.Length; i++)
                {
                    refData.Entries[entries[i].Index] = entries[i].Id;
                }
            }
            else
            {
                refData = new ReferenceData { Entries = new Dictionary<int, int>() };
            }
            BitReader bitReader = new BitReader(recordsData);
            // Mark which cached field carries the row id: the first declared field when an
            // external index is present, otherwise the header-designated column.
            FieldCache[] fieldCache = FieldsCache<T>.Cache;
            if (hasIndex)
            {
                fieldCache[0].IsIndex = true;
            }
            else
            {
                fieldCache[IdFieldIndex].IsIndex = true;
            }
            if (isSparse)
            {
                // Sparse: walk ids from MinIndex; zero offset+size entries are gaps.
                int currentId = MinIndex, uniqueIndex = 0;
                HashSet<int> temp = new HashSet<int>();
                for (int i = 0; i < sparseEntries.Length; i++)
                {
                    if (sparseEntries[i].Offset == 0 && sparseEntries[i].Size == 0)
                    {
                        currentId++;
                        continue;
                    }
                    T rec = new T();
                    bitReader.Position = 0;
                    bitReader.Offset = sparseEntries[i].Offset - sections[sectionIndex].FileOffset;
                    bool hasRef = refData.Entries.TryGetValue(i, out int refId);
                    rec.Read(fieldCache, rec, bitReader, sections[sectionIndex].FileOffset, stringsTable, m_meta, m_columnMeta, m_palletData, m_commonData, currentId, hasRef ? refId : -1, isSparse);
                    _Records.Add(rec.GetId(), rec);
                    // Sanity-check the walked id against the index array for each
                    // first-seen (non-duplicated) sparse offset.
                    if (indexData != null && !temp.Contains(sparseEntries[i].Offset))
                    {
                        temp.Add(sparseEntries[i].Offset);
                        if (currentId != indexData[uniqueIndex])
                        {
                            throw new Exception("currentId != indexData[uniqueIndex]");
                        }
                        uniqueIndex++;
                    }
                    currentId++;
                    if (i % 1000 == 0)
                    {
                        Console.Write("\r{0} records read", i);
                    }
                }
            }
            else
            {
                for (int i = 0; i < sections[sectionIndex].NumRecords; i++)
                {
                    T rec = new T();
                    bitReader.Position = 0;
                    bitReader.Offset = i * RecordSize;
                    bool hasRef = refData.Entries.TryGetValue(i, out int refId);
                    rec.Read(fieldCache, rec, bitReader, sections[sectionIndex].FileOffset, stringsTable, m_meta, m_columnMeta, m_palletData, m_commonData, hasIndex ? (isIndexEmpty ? i : indexData[i]) : -1, hasRef ? refId : -1, isSparse);
                    _Records.Add(rec.GetId(), rec);
                    if (i % 1000 == 0)
                    {
                        Console.Write("\r{0} records read", i);
                    }
                }
            }
            // Materialize duplicate rows: clone the source row and rewrite its id field.
            FieldCache<T, int> idField = (FieldCache<T, int>)(hasIndex ? fieldCache[0] : fieldCache[IdFieldIndex]);
            foreach (var copyRow in copyData)
            {
                T rec = (T)_Records[copyRow.Value].Clone();
                idField.Setter(rec, copyRow.Key);
                _Records.Add(copyRow.Key, rec);
            }
            previousStringTableSize += sections[sectionIndex].StringTableSize;
        }
    }
}
/// <summary>
/// Decodes an array-valued field from the bit stream according to the column's
/// compression type, returning <paramref name="arraySize"/> elements of <typeparamref name="T"/>.
/// </summary>
/// <param name="r">Bit-level reader positioned at the field's packed data.</param>
/// <param name="fieldMeta">Field metadata; <c>Bits</c> participates in the uncompressed width.</param>
/// <param name="columnMeta">Column compression settings (bit widths, pallet cardinality).</param>
/// <param name="palletData">Pallet values for this column (PalletArray only).</param>
/// <param name="commonData">Common-value map for this column (unused by the handled cases).</param>
/// <param name="arraySize">Declared element count of the target field.</param>
/// <exception cref="Exception">Pallet cardinality mismatch or an unhandled compression type.</exception>
private static T[] GetFieldValueArray<T>(BitReader r, FieldMetaData fieldMeta, ColumnMetaData columnMeta, Value32[] palletData, Dictionary<int, Value32> commonData, int arraySize) where T : unmanaged
{
    switch (columnMeta.CompressionType)
    {
        case CompressionType.None:
        {
            // assumes fieldMeta.Bits stores (32 - element bit width) — TODO confirm against format docs
            int plainBits = 32 - fieldMeta.Bits;
            var plain = new T[arraySize];
            for (int idx = 0; idx < plain.Length; idx++)
            {
                // Fall back to the immediate bit width when the derived width is not positive.
                plain[idx] = plainBits > 0 ? r.Read<T>(plainBits) : r.Read<T>(columnMeta.Immediate.BitWidth);
            }
            return plain;
        }
        case CompressionType.Immediate:
        {
            var packed = new T[arraySize];
            for (int idx = 0; idx < packed.Length; idx++)
            {
                packed[idx] = r.Read<T>(columnMeta.Immediate.BitWidth);
            }
            return packed;
        }
        case CompressionType.SignedImmediate:
        {
            var signedPacked = new T[arraySize];
            for (int idx = 0; idx < signedPacked.Length; idx++)
            {
                signedPacked[idx] = r.ReadSigned<T>(columnMeta.Immediate.BitWidth);
            }
            return signedPacked;
        }
        case CompressionType.PalletArray:
        {
            // The whole array is stored once in the pallet; the record only holds its index.
            int cardinality = columnMeta.Pallet.Cardinality;
            if (arraySize != cardinality)
            {
                throw new Exception("Struct missmatch for pallet array field?");
            }
            uint palletArrayIndex = r.ReadUInt32(columnMeta.Pallet.BitWidth);
            var fromPallet = new T[cardinality];
            for (int idx = 0; idx < fromPallet.Length; idx++)
            {
                fromPallet[idx] = palletData[idx + cardinality * (int)palletArrayIndex].As<T>();
            }
            return fromPallet;
        }
        default:
            throw new Exception(string.Format("Unexpected compression type {0}", columnMeta.CompressionType));
    }
}
/// <summary>
/// Reflection-based record decoder: fills this row's fields (enumerated via <c>Fields</c>)
/// from one record in <paramref name="r"/>, dispatching on each field's declared type.
/// </summary>
/// <param name="r">Bit-level reader positioned at the start of the record.</param>
/// <param name="recordsOffset">Base offset added when resolving non-sparse string-table keys.</param>
/// <param name="stringsTable">String table keyed by offset; used only for non-sparse strings.</param>
/// <param name="fieldMeta">Per-column bit/size metadata.</param>
/// <param name="columnMeta">Per-column compression metadata.</param>
/// <param name="palletData">Per-column pallet values.</param>
/// <param name="commonData">Per-column common-value maps.</param>
/// <param name="refId">Parent-lookup id written to fields past fieldMeta, or -1 when absent.</param>
/// <param name="id">External record id written to the "Id" field, or -1 if the id is inline.</param>
/// <param name="isSparse">True when strings are inlined in the record.</param>
public void Read(BitReader r, long recordsOffset, Dictionary<long, string> stringsTable, FieldMetaData[] fieldMeta, ColumnMetaData[] columnMeta, Value32[][] palletData, Dictionary<int, Value32>[] commonData, int refId = -1, int id = -1, bool isSparse = false)
{
    int fieldIndex = 0;
    foreach (var f in Fields)
    {
        Type t = f.FieldType;
        // The "Id" field takes the externally-supplied id; fieldIndex is NOT advanced,
        // so it consumes no fieldMeta column.
        if (f.Name == "Id" && id != -1)
        {
            f.SetValue(this, id);
            continue;
        }
        // Fields beyond the column metadata are parent-reference fields.
        if (fieldIndex >= fieldMeta.Length)
        {
            if (refId != -1)
            {
                f.SetValue(this, refId);
            }
            continue;
        }
        if (t == typeof(float))
        {
            f.SetValue(this, FieldReader.GetFieldValue<float>(GetId(), r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex]));
        }
        else if (t == typeof(long))
        {
            f.SetValue(this, FieldReader.GetFieldValue<long>(GetId(), r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex]));
        }
        else if (t == typeof(ulong))
        {
            f.SetValue(this, FieldReader.GetFieldValue<ulong>(GetId(), r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex]));
        }
        else if (t == typeof(int))
        {
            f.SetValue(this, FieldReader.GetFieldValue<int>(GetId(), r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex]));
        }
        else if (t == typeof(uint))
        {
            f.SetValue(this, FieldReader.GetFieldValue<uint>(GetId(), r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex]));
        }
        else if (t == typeof(short))
        {
            f.SetValue(this, FieldReader.GetFieldValue<short>(GetId(), r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex]));
        }
        else if (t == typeof(ushort))
        {
            f.SetValue(this, FieldReader.GetFieldValue<ushort>(GetId(), r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex]));
        }
        else if (t == typeof(byte))
        {
            f.SetValue(this, FieldReader.GetFieldValue<byte>(GetId(), r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex]));
        }
        else if (t == typeof(sbyte))
        {
            f.SetValue(this, FieldReader.GetFieldValue<sbyte>(GetId(), r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex]));
        }
        else if (t == typeof(string))
        {
            if (isSparse)
            {
                f.SetValue(this, r.ReadCString());
            }
            else
            {
                // Key = record base + record offset + current byte position, plus the
                // decoded relative offset; pos is captured BEFORE GetFieldValue advances r.
                var pos = recordsOffset + r.Offset + (r.Position >> 3);
                int ofs = FieldReader.GetFieldValue<int>(GetId(), r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex]);
                f.SetValue(this, stringsTable[pos + ofs]);
            }
        }
        else if (t.IsArray)
        {
            Type arrayElementType = f.FieldType.GetElementType();
            // Element count must be declared on the field via ArraySizeAttribute.
            ArraySizeAttribute atr = (ArraySizeAttribute)f.GetCustomAttribute(typeof(ArraySizeAttribute));
            if (atr == null)
            {
                throw new Exception(GetType().Name + "." + f.Name + " missing ArraySizeAttribute");
            }
            if (arrayElementType == typeof(int))
            {
                f.SetValue(this, FieldReader.GetFieldValueArray<int>(r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex], atr.Size));
            }
            else if (arrayElementType == typeof(uint))
            {
                f.SetValue(this, FieldReader.GetFieldValueArray<uint>(r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex], atr.Size));
            }
            //else if (arrayElementType == typeof(ulong))
            //{
            //    f.SetValue(this, FieldReader.GetFieldValueArray<ulong>(r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex], atr.Size));
            //}
            else if (arrayElementType == typeof(float))
            {
                f.SetValue(this, FieldReader.GetFieldValueArray<float>(r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex], atr.Size));
            }
            else if (arrayElementType == typeof(ushort))
            {
                f.SetValue(this, FieldReader.GetFieldValueArray<ushort>(r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex], atr.Size));
            }
            else if (arrayElementType == typeof(byte))
            {
                f.SetValue(this, FieldReader.GetFieldValueArray<byte>(r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex], atr.Size));
            }
            else if (arrayElementType == typeof(string))
            {
                string[] array = new string[atr.Size];
                if (isSparse)
                {
                    // Sparse: each element is a null-terminated string inlined in the record.
                    for (int i = 0; i < array.Length; i++)
                    {
                        array[i] = r.ReadCString();
                    }
                }
                else
                {
                    // Non-sparse: the record holds an int offset per element (4 bytes each),
                    // relative to that element's own position (hence the i * 4 term).
                    var pos = recordsOffset + r.Offset + (r.Position >> 3);
                    int[] strIdx = FieldReader.GetFieldValueArray<int>(r, fieldMeta[fieldIndex], columnMeta[fieldIndex], palletData[fieldIndex], commonData[fieldIndex], atr.Size);
                    for (int i = 0; i < array.Length; i++)
                    {
                        array[i] = stringsTable[pos + i * 4 + strIdx[i]];
                    }
                }
                f.SetValue(this, array);
            }
            else
            {
                throw new Exception("Unhandled array type: " + arrayElementType.Name);
            }
        }
        else
        {
            throw new Exception("Unhandled DbcTable type: " + t.Name);
        }
        fieldIndex++;
    }
}
/// <summary>
/// Parses a WDC2-format DB2 file from <paramref name="stream"/> into _Records.
/// Only single-section files are supported (more than one section throws), and the
/// sparse (inlined-strings) layout is not implemented — that branch always throws.
/// </summary>
/// <param name="stream">Stream positioned at the start of the WDC2 file.</param>
/// <exception cref="InvalidDataException">Stream is too short or the magic is not WDC2.</exception>
public WDC2Reader(Stream stream)
{
    using (var reader = new BinaryReader(stream, Encoding.UTF8))
    {
        if (reader.BaseStream.Length < HeaderSize)
        {
            throw new InvalidDataException(String.Format("WDC2 file is corrupted!"));
        }
        uint magic = reader.ReadUInt32();
        if (magic != WDC2FmtSig)
        {
            throw new InvalidDataException(String.Format("WDC2 file is corrupted!"));
        }
        // ---- fixed-size header ----
        RecordsCount = reader.ReadInt32();
        FieldsCount = reader.ReadInt32();
        RecordSize = reader.ReadInt32();
        StringTableSize = reader.ReadInt32();
        TableHash = reader.ReadUInt32();
        LayoutHash = reader.ReadUInt32();
        MinIndex = reader.ReadInt32();
        MaxIndex = reader.ReadInt32();
        int locale = reader.ReadInt32();
        int flags = reader.ReadUInt16();
        IdFieldIndex = reader.ReadUInt16();
        int totalFieldsCount = reader.ReadInt32();
        int packedDataOffset = reader.ReadInt32(); // Offset within the field where packed data starts
        int lookupColumnCount = reader.ReadInt32(); // count of lookup columns
        int columnMetaDataSize = reader.ReadInt32(); // 24 * NumFields bytes, describes column bit packing, {ushort recordOffset, ushort size, uint additionalDataSize, uint compressionType, uint packedDataOffset or commonvalue, uint cellSize, uint cardinality}[NumFields], sizeof(DBC2CommonValue) == 8
        int commonDataSize = reader.ReadInt32();
        int palletDataSize = reader.ReadInt32(); // in bytes, sizeof(DBC2PalletValue) == 4
        int sectionsCount = reader.ReadInt32();
        if (sectionsCount > 1)
        {
            throw new Exception("sectionsCount > 1");
        }
        SectionHeader_WDC2[] sections = reader.ReadArray<SectionHeader_WDC2>(sectionsCount);
        // field meta data
        m_meta = reader.ReadArray<FieldMetaData>(FieldsCount);
        // column meta data
        m_columnMeta = reader.ReadArray<ColumnMetaData>(FieldsCount);
        // pallet data (only pallet-compressed columns have entries; 4 bytes per Value32)
        m_palletData = new Value32[m_columnMeta.Length][];
        for (int i = 0; i < m_columnMeta.Length; i++)
        {
            if (m_columnMeta[i].CompressionType == CompressionType.Pallet || m_columnMeta[i].CompressionType == CompressionType.PalletArray)
            {
                m_palletData[i] = reader.ReadArray<Value32>((int)m_columnMeta[i].AdditionalDataSize / 4);
            }
        }
        // common data (8 bytes per entry: int record id -> Value32)
        m_commonData = new Dictionary<int, Value32>[m_columnMeta.Length];
        for (int i = 0; i < m_columnMeta.Length; i++)
        {
            if (m_columnMeta[i].CompressionType == CompressionType.Common)
            {
                Dictionary<int, Value32> commonValues = new Dictionary<int, Value32>();
                m_commonData[i] = commonValues;
                for (int j = 0; j < m_columnMeta[i].AdditionalDataSize / 8; j++)
                {
                    commonValues[reader.ReadInt32()] = reader.Read<Value32>();
                }
            }
        }
        for (int sectionIndex = 0; sectionIndex < sectionsCount; sectionIndex++)
        {
            reader.BaseStream.Position = sections[sectionIndex].FileOffset;
            byte[] recordsData;
            Dictionary<long, string> stringsTable = null;
            SparseEntry[] sparseEntries = null;
            if ((flags & 0x1) == 0)
            {
                // records data
                recordsData = reader.ReadBytes(sections[sectionIndex].NumRecords * RecordSize);
                Array.Resize(ref recordsData, recordsData.Length + 8); // pad with extra zeros so we don't crash when reading
                // string data, keyed by absolute stream position of each string.
                stringsTable = new Dictionary<long, string>();
                for (int i = 0; i < sections[sectionIndex].StringTableSize;)
                {
                    long oldPos = reader.BaseStream.Position;
                    stringsTable[oldPos] = reader.ReadCString();
                    i += (int)(reader.BaseStream.Position - oldPos);
                }
            }
            else
            {
                // sparse data with inlined strings — deliberately unreachable past this block:
                // both branches of the final check throw (sparse WDC2 is not implemented here).
                recordsData = reader.ReadBytes(sections[sectionIndex].SparseTableOffset - sections[sectionIndex].FileOffset);
                if (reader.BaseStream.Position != sections[sectionIndex].SparseTableOffset)
                {
                    throw new Exception("reader.BaseStream.Position != sections[sectionIndex].SparseTableOffset");
                }
                sparseEntries = reader.ReadArray<SparseEntry>(MaxIndex - MinIndex + 1);
                if (sections[sectionIndex].SparseTableOffset != 0)
                {
                    throw new Exception("Sparse Table NYI!");
                }
                else
                {
                    throw new Exception("Sparse Table with zero offset?");
                }
            }
            // index data
            int[] indexData = reader.ReadArray<int>(sections[sectionIndex].IndexDataSize / 4);
            // duplicate rows data (copy destination id -> source id; 8 bytes per pair)
            Dictionary<int, int> copyData = new Dictionary<int, int>();
            for (int i = 0; i < sections[sectionIndex].CopyTableSize / 8; i++)
            {
                copyData[reader.ReadInt32()] = reader.ReadInt32();
            }
            // reference data (parent lookup: record index -> parent id)
            ReferenceData refData = null;
            if (sections[sectionIndex].ParentLookupDataSize > 0)
            {
                refData = new ReferenceData
                {
                    NumRecords = reader.ReadInt32(),
                    MinId = reader.ReadInt32(),
                    MaxId = reader.ReadInt32()
                };
                ReferenceEntry[] entries = reader.ReadArray<ReferenceEntry>(refData.NumRecords);
                refData.Entries = entries.ToDictionary(e => e.Index, e => e.Id);
            }
            else
            {
                refData = new ReferenceData { Entries = new Dictionary<int, int>() };
            }
            BitReader bitReader = new BitReader(recordsData);
            // Single section only (enforced above), so RecordsCount covers this section.
            for (int i = 0; i < RecordsCount; ++i)
            {
                bitReader.Position = 0;
                bitReader.Offset = i * RecordSize;
                bool hasRef = refData.Entries.TryGetValue(i, out int refId);
                IDB2Row rec = new WDC2Row(this, bitReader, sections[sectionIndex].FileOffset, sections[sectionIndex].IndexDataSize != 0 ? indexData[i] : -1, hasRef ? refId : -1, stringsTable);
                if (sections[sectionIndex].IndexDataSize != 0)
                {
                    _Records.Add(indexData[i], rec);
                }
                else
                {
                    _Records.Add(rec.Id, rec);
                }
                if (i % 1000 == 0)
                {
                    Console.Write("\r{0} records read", i);
                }
            }
            // Materialize duplicate rows by cloning the source row under the new id.
            foreach (var copyRow in copyData)
            {
                IDB2Row rec = _Records[copyRow.Value].Clone();
                rec.Id = copyRow.Key;
                _Records.Add(copyRow.Key, rec);
            }
        }
    }
}