Example #1
        public unsafe WDB6Reader(Stream stream)
        {
            using (var reader = new BinaryReader(stream, Encoding.UTF8))
            {
                if (reader.BaseStream.Length < HeaderSize)
                {
                    throw new InvalidDataException("WDB6 file is corrupted!");
                }

                uint magic = reader.ReadUInt32();

                if (magic != WDB6FmtSig)
                {
                    throw new InvalidDataException("WDB6 file is corrupted!");
                }

                RecordsCount    = reader.ReadInt32();
                FieldsCount     = reader.ReadInt32();
                RecordSize      = reader.ReadInt32();
                StringTableSize = reader.ReadInt32();
                TableHash       = reader.ReadUInt32();
                LayoutHash      = reader.ReadUInt32();
                MinIndex        = reader.ReadInt32();
                MaxIndex        = reader.ReadInt32();
                int locale        = reader.ReadInt32();
                int copyTableSize = reader.ReadInt32();
                Flags        = (DB2Flags)reader.ReadUInt16();
                IdFieldIndex = reader.ReadUInt16();
                int totalFieldCount = reader.ReadInt32();
                int commonDataSize  = reader.ReadInt32();

                if (RecordsCount == 0)
                {
                    return;
                }

                // field meta data
                m_meta = reader.ReadArray<FieldMetaData>(FieldsCount);

                if (!Flags.HasFlagExt(DB2Flags.Sparse))
                {
                    // records data
                    recordsData = reader.ReadBytes(RecordsCount * RecordSize);
                    Array.Resize(ref recordsData, recordsData.Length + 8); // pad with extra zeros so we don't crash when reading

                    // string table
                    m_stringsTable = new Dictionary<long, string>(StringTableSize / 0x20);
                    for (int i = 0; i < StringTableSize;)
                    {
                        long oldPos = reader.BaseStream.Position;
                        m_stringsTable[i] = reader.ReadCString();
                        i += (int)(reader.BaseStream.Position - oldPos);
                    }
                }
                else
                {
                    // sparse data with inlined strings
                    recordsData = reader.ReadBytes(StringTableSize - (int)reader.BaseStream.Position);

                    int sparseCount = MaxIndex - MinIndex + 1;

                    m_sparseEntries = new List<SparseEntry>(sparseCount);
                    m_copyData      = new Dictionary<int, int>(sparseCount);
                    var sparseIdLookup = new Dictionary<uint, int>(sparseCount);

                    for (int i = 0; i < sparseCount; i++)
                    {
                        SparseEntry sparse = reader.Read<SparseEntry>();
                        if (sparse.Offset == 0 || sparse.Size == 0)
                        {
                            continue;
                        }

                        if (sparseIdLookup.TryGetValue(sparse.Offset, out int copyId))
                        {
                            m_copyData[MinIndex + i] = copyId;
                        }
                        else
                        {
                            m_sparseEntries.Add(sparse);
                            sparseIdLookup.Add(sparse.Offset, MinIndex + i);
                        }
                    }
                }

                // secondary key
                if (Flags.HasFlagExt(DB2Flags.SecondaryKey))
                {
                    m_foreignKeyData = reader.ReadArray<int>(MaxIndex - MinIndex + 1);
                }

                // index table
                if (Flags.HasFlagExt(DB2Flags.Index))
                {
                    m_indexData = reader.ReadArray<int>(RecordsCount);
                }

                // duplicate rows data
                if (m_copyData == null)
                {
                    m_copyData = new Dictionary<int, int>(copyTableSize / 8);
                }

                for (int i = 0; i < copyTableSize / 8; i++)
                {
                    m_copyData[reader.ReadInt32()] = reader.ReadInt32();
                }

                if (commonDataSize > 0)
                {
                    Array.Resize(ref m_meta, totalFieldCount);

                    int fieldCount = reader.ReadInt32();
                    m_commonData = new Dictionary<int, Value32>[fieldCount];

                    // HACK as of 24473 values are 4 byte aligned
                    // try to calculate this by seeing if all <id, value> tuples are 8 bytes
                    bool aligned = (commonDataSize - 4 - (fieldCount * 5)) % 8 == 0;

                    for (int i = 0; i < fieldCount; i++)
                    {
                        int  count = reader.ReadInt32();
                        byte type  = reader.ReadByte();
                        int  size  = aligned ? 4 : (32 - CommonTypeBits[type]) >> 3;

                        // add the new meta entry
                        if (i > FieldsCount)
                        {
                            m_meta[i] = new FieldMetaData()
                            {
                                Bits   = CommonTypeBits[type],
                                Offset = (short)(m_meta[i - 1].Offset + ((32 - m_meta[i - 1].Bits) >> 3))
                            };
                        }

                        var commonValues = new Dictionary<int, Value32>(count);
                        for (int j = 0; j < count; j++)
                        {
                            int     id     = reader.ReadInt32();
                            byte[]  buffer = reader.ReadBytes(size);
                            Value32 value  = Unsafe.ReadUnaligned<Value32>(ref buffer[0]);

                            commonValues.Add(id, value);
                        }

                        m_commonData[i] = commonValues;
                    }
                }

                int position = 0;
                for (int i = 0; i < RecordsCount; i++)
                {
                    BitReader bitReader = new BitReader(recordsData)
                    {
                        Position = 0
                    };

                    if (Flags.HasFlagExt(DB2Flags.Sparse))
                    {
                        bitReader.Position = position;
                        position          += m_sparseEntries[i].Size * 8;
                    }
                    else
                    {
                        bitReader.Offset = i * RecordSize;
                    }

                    IDBRow rec = new WDB6Row(this, bitReader, Flags.HasFlagExt(DB2Flags.Index) ? m_indexData[i] : -1, i);
                    _Records.Add(i, rec);
                }
            }
        }
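Note: every example on this page calls a HasFlagExt extension on DB2Flags that is not part of the snippets. A minimal sketch of what such a helper usually looks like, assuming DB2Flags is a [Flags] enum (the class name is made up for illustration; the library's own implementation may differ):

public static class DB2FlagsExtensions
{
    // Same semantics as Enum.HasFlag, but written against DB2Flags directly to avoid boxing.
    public static bool HasFlagExt(this DB2Flags flags, DB2Flags flag)
    {
        return (flags & flag) == flag;
    }
}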
Example #2
        public WDB4Reader(Stream stream)
        {
            using (var reader = new BinaryReader(stream, Encoding.UTF8))
            {
                if (reader.BaseStream.Length < HeaderSize)
                {
                    throw new InvalidDataException("WDB4 file is corrupted!");
                }

                uint magic = reader.ReadUInt32();

                if (magic != WDB4FmtSig)
                {
                    throw new InvalidDataException("WDB4 file is corrupted!");
                }

                RecordsCount    = reader.ReadInt32();
                FieldsCount     = reader.ReadInt32();
                RecordSize      = reader.ReadInt32();
                StringTableSize = reader.ReadInt32();
                TableHash       = reader.ReadUInt32();
                uint build     = reader.ReadUInt32();
                uint timestamp = reader.ReadUInt32();
                MinIndex = reader.ReadInt32();
                MaxIndex = reader.ReadInt32();
                int locale        = reader.ReadInt32();
                int copyTableSize = reader.ReadInt32();
                Flags = (DB2Flags)reader.ReadUInt32();

                if (RecordsCount == 0)
                {
                    return;
                }

                if (!Flags.HasFlagExt(DB2Flags.Sparse))
                {
                    // records data
                    recordsData = reader.ReadBytes(RecordsCount * RecordSize);
                    Array.Resize(ref recordsData, recordsData.Length + 8); // pad with extra zeros so we don't crash when reading

                    // string table
                    m_stringsTable = new Dictionary<long, string>(StringTableSize / 0x20);
                    for (int i = 0; i < StringTableSize;)
                    {
                        long oldPos = reader.BaseStream.Position;
                        m_stringsTable[i] = reader.ReadCString();
                        i += (int)(reader.BaseStream.Position - oldPos);
                    }
                }
                else
                {
                    // sparse data with inlined strings
                    recordsData = reader.ReadBytes(StringTableSize - HeaderSize);

                    int sparseCount = MaxIndex - MinIndex + 1;

                    m_sparseEntries = new List<SparseEntry>(sparseCount);
                    m_copyData      = new Dictionary<int, int>(sparseCount);
                    var sparseIdLookup = new Dictionary<uint, int>(sparseCount);

                    for (int i = 0; i < sparseCount; i++)
                    {
                        SparseEntry sparse = reader.Read<SparseEntry>();
                        if (sparse.Offset == 0 || sparse.Size == 0)
                        {
                            continue;
                        }

                        if (sparseIdLookup.TryGetValue(sparse.Offset, out int copyId))
                        {
                            m_copyData[MinIndex + i] = copyId;
                        }
                        else
                        {
                            m_sparseEntries.Add(sparse);
                            sparseIdLookup.Add(sparse.Offset, MinIndex + i);
                        }
                    }
                }

                // secondary key
                if (Flags.HasFlagExt(DB2Flags.SecondaryKey))
                {
                    m_foreignKeyData = reader.ReadArray<int>(MaxIndex - MinIndex + 1);
                }

                // index table
                if (Flags.HasFlagExt(DB2Flags.Index))
                {
                    m_indexData = reader.ReadArray<int>(RecordsCount);
                }

                // duplicate rows data
                if (m_copyData == null)
                {
                    m_copyData = new Dictionary<int, int>(copyTableSize / 8);
                }

                for (int i = 0; i < copyTableSize / 8; i++)
                {
                    m_copyData[reader.ReadInt32()] = reader.ReadInt32();
                }

                int position = 0;
                for (int i = 0; i < RecordsCount; i++)
                {
                    BitReader bitReader = new BitReader(recordsData)
                    {
                        Position = 0
                    };

                    if (Flags.HasFlagExt(DB2Flags.Sparse))
                    {
                        bitReader.Position = position;
                        position          += m_sparseEntries[i].Size * 8;
                    }
                    else
                    {
                        bitReader.Offset = i * RecordSize;
                    }

                    IDBRow rec = new WDB4Row(this, bitReader, Flags.HasFlagExt(DB2Flags.Index) ? m_indexData[i] : -1, i);
                    _Records.Add(i, rec);
                }
            }
        }
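Note: the string-table loops rely on a ReadCString extension for BinaryReader that the snippets do not show. A hedged sketch, assuming the string block stores null-terminated UTF-8 strings (helper and class names are illustrative only):

using System.Collections.Generic;
using System.IO;
using System.Text;

public static class BinaryReaderStringExtensions
{
    // Read bytes up to (but not including) the null terminator and decode them as UTF-8.
    public static string ReadCString(this BinaryReader reader)
    {
        var bytes = new List<byte>();
        byte b;
        while ((b = reader.ReadByte()) != 0)
        {
            bytes.Add(b);
        }
        return Encoding.UTF8.GetString(bytes.ToArray());
    }
}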
Example #3
        public WDC1Reader(Stream stream)
        {
            using (var reader = new BinaryReader(stream, Encoding.UTF8))
            {
                if (reader.BaseStream.Length < HeaderSize)
                {
                    throw new InvalidDataException("WDC1 file is corrupted!");
                }

                uint magic = reader.ReadUInt32();

                if (magic != WDC1FmtSig)
                {
                    throw new InvalidDataException("WDC1 file is corrupted!");
                }

                RecordsCount    = reader.ReadInt32();
                FieldsCount     = reader.ReadInt32();
                RecordSize      = reader.ReadInt32();
                StringTableSize = reader.ReadInt32();

                TableHash  = reader.ReadUInt32();
                LayoutHash = reader.ReadUInt32();
                MinIndex   = reader.ReadInt32();
                MaxIndex   = reader.ReadInt32();
                int locale        = reader.ReadInt32();
                int copyTableSize = reader.ReadInt32();
                Flags        = (DB2Flags)reader.ReadUInt16();
                IdFieldIndex = reader.ReadUInt16();

                int totalFieldsCount   = reader.ReadInt32();
                int packedDataOffset   = reader.ReadInt32(); // Offset within the field where packed data starts
                int lookupColumnCount  = reader.ReadInt32(); // count of lookup columns
                int sparseTableOffset  = reader.ReadInt32(); // absolute value, {uint offset, ushort size}[MaxId - MinId + 1]
                int indexDataSize      = reader.ReadInt32(); // int indexData[IndexDataSize / 4]
                int columnMetaDataSize = reader.ReadInt32(); // 24 * NumFields bytes, describes column bit packing, {ushort recordOffset, ushort size, uint additionalDataSize, uint compressionType, uint packedDataOffset or commonvalue, uint cellSize, uint cardinality}[NumFields], sizeof(DBC2CommonValue) == 8
                int commonDataSize     = reader.ReadInt32();
                int palletDataSize     = reader.ReadInt32(); // in bytes, sizeof(DBC2PalletValue) == 4
                int referenceDataSize  = reader.ReadInt32(); // uint NumRecords, uint minId, uint maxId, {uint id, uint index}[NumRecords], questionable usefulness...

                if (RecordsCount == 0)
                {
                    return;
                }

                // field meta data
                m_meta = reader.ReadArray<FieldMetaData>(FieldsCount);

                if (!Flags.HasFlagExt(DB2Flags.Sparse))
                {
                    // records data
                    recordsData = reader.ReadBytes(RecordsCount * RecordSize);

                    Array.Resize(ref recordsData, recordsData.Length + 8); // pad with extra zeros so we don't crash when reading

                    // string data
                    m_stringsTable = new Dictionary<long, string>(StringTableSize / 0x20);
                    for (int i = 0; i < StringTableSize;)
                    {
                        long oldPos = reader.BaseStream.Position;
                        m_stringsTable[i] = reader.ReadCString();
                        i += (int)(reader.BaseStream.Position - oldPos);
                    }
                }
                else
                {
                    // sparse data with inlined strings
                    recordsData = reader.ReadBytes(sparseTableOffset - HeaderSize - Marshal.SizeOf<FieldMetaData>() * FieldsCount);

                    if (reader.BaseStream.Position != sparseTableOffset)
                    {
                        throw new Exception("r.BaseStream.Position != sparseTableOffset");
                    }

                    int sparseCount = MaxIndex - MinIndex + 1;

                    m_sparseEntries = new List<SparseEntry>(sparseCount);
                    m_copyData      = new Dictionary<int, int>(sparseCount);
                    var sparseIdLookup = new Dictionary<uint, int>(sparseCount);

                    for (int i = 0; i < sparseCount; i++)
                    {
                        SparseEntry sparse = reader.Read<SparseEntry>();
                        if (sparse.Offset == 0 || sparse.Size == 0)
                        {
                            continue;
                        }

                        if (sparseIdLookup.TryGetValue(sparse.Offset, out int copyId))
                        {
                            m_copyData[MinIndex + i] = copyId;
                        }
                        else
                        {
                            m_sparseEntries.Add(sparse);
                            sparseIdLookup.Add(sparse.Offset, MinIndex + i);
                        }
                    }
                }

                // index data
                m_indexData = reader.ReadArray<int>(indexDataSize / 4);

                // duplicate rows data
                if (m_copyData == null)
                {
                    m_copyData = new Dictionary<int, int>(copyTableSize / 8);
                }

                for (int i = 0; i < copyTableSize / 8; i++)
                {
                    m_copyData[reader.ReadInt32()] = reader.ReadInt32();
                }

                // column meta data
                m_columnMeta = reader.ReadArray<ColumnMetaData>(FieldsCount);

                // pallet data
                m_palletData = new Value32[m_columnMeta.Length][];
                for (int i = 0; i < m_columnMeta.Length; i++)
                {
                    if (m_columnMeta[i].CompressionType == CompressionType.Pallet || m_columnMeta[i].CompressionType == CompressionType.PalletArray)
                    {
                        m_palletData[i] = reader.ReadArray<Value32>((int)m_columnMeta[i].AdditionalDataSize / 4);
                    }
                }

                // common data
                m_commonData = new Dictionary<int, Value32>[m_columnMeta.Length];
                for (int i = 0; i < m_columnMeta.Length; i++)
                {
                    if (m_columnMeta[i].CompressionType == CompressionType.Common)
                    {
                        var commonValues = new Dictionary<int, Value32>((int)m_columnMeta[i].AdditionalDataSize / 8);
                        m_commonData[i] = commonValues;

                        for (int j = 0; j < m_columnMeta[i].AdditionalDataSize / 8; j++)
                        {
                            commonValues[reader.ReadInt32()] = reader.Read<Value32>();
                        }
                    }
                }

                // reference data
                ReferenceData refData = new ReferenceData();
                if (referenceDataSize > 0)
                {
                    refData.NumRecords = reader.ReadInt32();
                    refData.MinId      = reader.ReadInt32();
                    refData.MaxId      = reader.ReadInt32();

                    var entries = reader.ReadArray<ReferenceEntry>(refData.NumRecords);
                    for (int i = 0; i < entries.Length; i++)
                    {
                        refData.Entries[entries[i].Index] = entries[i].Id;
                    }
                }

                int position = 0;
                for (int i = 0; i < RecordsCount; i++)
                {
                    BitReader bitReader = new BitReader(recordsData)
                    {
                        Position = 0
                    };

                    if (Flags.HasFlagExt(DB2Flags.Sparse))
                    {
                        bitReader.Position = position;
                        position          += m_sparseEntries[i].Size * 8;
                    }
                    else
                    {
                        bitReader.Offset = i * RecordSize;
                    }

                    refData.Entries.TryGetValue(i, out int refId);

                    IDBRow rec = new WDC1Row(this, bitReader, indexDataSize != 0 ? m_indexData[i] : -1, refId, i);
                    _Records.Add(i, rec);
                }
            }
        }
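Note: Read<T> and ReadArray<T> are further extensions the snippets take for granted. A minimal sketch of how such helpers can be written for blittable structs like FieldMetaData or SparseEntry (names, constraints and the lack of error handling are assumptions, not the library's actual code):

using System.IO;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

public static class BinaryReaderStructExtensions
{
    // Reinterpret the next sizeof(T) bytes of the stream as a single struct.
    public static T Read<T>(this BinaryReader reader) where T : unmanaged
    {
        byte[] bytes = reader.ReadBytes(Unsafe.SizeOf<T>());
        return Unsafe.ReadUnaligned<T>(ref bytes[0]);
    }

    // Reinterpret count * sizeof(T) bytes as an array of structs.
    public static T[] ReadArray<T>(this BinaryReader reader, int count) where T : unmanaged
    {
        byte[] bytes = reader.ReadBytes(count * Unsafe.SizeOf<T>());
        return MemoryMarshal.Cast<byte, T>(bytes).ToArray();
    }
}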
Example #4
        public WDC2Reader(Stream stream)
        {
            using (var reader = new BinaryReader(stream, Encoding.UTF8))
            {
                if (reader.BaseStream.Length < HeaderSize)
                {
                    throw new InvalidDataException("WDC2 file is corrupted!");
                }

                uint magic = reader.ReadUInt32();

                if (magic != WDC2FmtSig && magic != CLS1FmtSig)
                {
                    throw new InvalidDataException("WDC2 file is corrupted!");
                }

                Signature       = magic;
                RecordsCount    = reader.ReadInt32();
                FieldsCount     = reader.ReadInt32();
                RecordSize      = reader.ReadInt32();
                StringTableSize = reader.ReadInt32();
                TableHash       = reader.ReadUInt32();
                LayoutHash      = reader.ReadUInt32();
                MinIndex        = reader.ReadInt32();
                MaxIndex        = reader.ReadInt32();
                int locale = reader.ReadInt32();
                Flags        = (DB2Flags)reader.ReadUInt16();
                IdFieldIndex = reader.ReadUInt16();
                int totalFieldsCount = reader.ReadInt32();
                PackedDataOffset = reader.ReadInt32();        // Offset within the field where packed data starts
                int lookupColumnCount  = reader.ReadInt32();  // count of lookup columns
                int columnMetaDataSize = reader.ReadInt32();  // 24 * NumFields bytes, describes column bit packing, {ushort recordOffset, ushort size, uint additionalDataSize, uint compressionType, uint packedDataOffset or commonvalue, uint cellSize, uint cardinality}[NumFields], sizeof(DBC2CommonValue) == 8
                int commonDataSize     = reader.ReadInt32();
                int palletDataSize     = reader.ReadInt32();  // in bytes, sizeof(DBC2PalletValue) == 4
                int sectionsCount      = reader.ReadInt32();

                if (sectionsCount > 1)
                {
                    throw new Exception("WDC2 only supports 1 section");
                }

                if (sectionsCount == 0 || RecordsCount == 0)
                {
                    return;
                }

                var sections = reader.ReadArray<SectionHeader>(sectionsCount).ToList();
                m_sections = sections.OfType<IEncryptableDatabaseSection>().ToList();

                // field meta data
                Meta = reader.ReadArray<FieldMetaData>(FieldsCount);

                // column meta data
                ColumnMeta = reader.ReadArray<ColumnMetaData>(FieldsCount);

                // pallet data
                PalletData = new Value32[ColumnMeta.Length][];
                for (int i = 0; i < ColumnMeta.Length; i++)
                {
                    if (ColumnMeta[i].CompressionType == CompressionType.Pallet || ColumnMeta[i].CompressionType == CompressionType.PalletArray)
                    {
                        PalletData[i] = reader.ReadArray<Value32>((int)ColumnMeta[i].AdditionalDataSize / 4);
                    }
                }

                // common data
                CommonData = new Dictionary<int, Value32>[ColumnMeta.Length];
                for (int i = 0; i < ColumnMeta.Length; i++)
                {
                    if (ColumnMeta[i].CompressionType == CompressionType.Common)
                    {
                        var commonValues = new Dictionary<int, Value32>((int)ColumnMeta[i].AdditionalDataSize / 8);
                        CommonData[i] = commonValues;

                        for (int j = 0; j < ColumnMeta[i].AdditionalDataSize / 8; j++)
                        {
                            commonValues[reader.ReadInt32()] = reader.Read<Value32>();
                        }
                    }
                }

                for (int sectionIndex = 0; sectionIndex < sectionsCount; sectionIndex++)
                {
                    reader.BaseStream.Position = sections[sectionIndex].FileOffset;

                    if (!Flags.HasFlagExt(DB2Flags.Sparse))
                    {
                        // records data
                        RecordsData = reader.ReadBytes(sections[sectionIndex].NumRecords * RecordSize);

                        Array.Resize(ref RecordsData, RecordsData.Length + 8); // pad with extra zeros so we don't crash when reading

                        // string data
                        StringTable = new Dictionary<long, string>(sections[sectionIndex].StringTableSize / 0x20);
                        for (int i = 0; i < sections[sectionIndex].StringTableSize;)
                        {
                            long oldPos = reader.BaseStream.Position;
                            StringTable[oldPos] = reader.ReadCString();
                            i += (int)(reader.BaseStream.Position - oldPos);
                        }
                    }
                    else
                    {
                        // sparse data with inlined strings
                        RecordsData = reader.ReadBytes(sections[sectionIndex].SparseTableOffset - sections[sectionIndex].FileOffset);

                        if (reader.BaseStream.Position != sections[sectionIndex].SparseTableOffset)
                        {
                            throw new Exception("reader.BaseStream.Position != sections[sectionIndex].SparseTableOffset");
                        }

                        int sparseCount = MaxIndex - MinIndex + 1;

                        SparseEntries = new List<SparseEntry>(sparseCount);
                        CopyData      = new Dictionary<int, int>(sparseCount);
                        var sparseIdLookup = new Dictionary<uint, int>(sparseCount);

                        for (int i = 0; i < sparseCount; i++)
                        {
                            SparseEntry sparse = reader.Read<SparseEntry>();
                            if (sparse.Offset == 0 || sparse.Size == 0)
                            {
                                continue;
                            }

                            if (sparseIdLookup.TryGetValue(sparse.Offset, out int copyId))
                            {
                                CopyData[MinIndex + i] = copyId;
                            }
                            else
                            {
                                SparseEntries.Add(sparse);
                                sparseIdLookup.Add(sparse.Offset, MinIndex + i);
                            }
                        }
                    }

                    // index data
                    IndexData = reader.ReadArray<int>(sections[sectionIndex].IndexDataSize / 4);

                    // fix zero-filled index data
                    if (IndexData.Length > 0 && IndexData.All(x => x == 0))
                    {
                        IndexData = Enumerable.Range(MinIndex, MaxIndex - MinIndex + 1).ToArray();
                    }

                    // duplicate rows data
                    if (CopyData == null)
                    {
                        CopyData = new Dictionary<int, int>(sections[sectionIndex].CopyTableSize / 8);
                    }

                    for (int i = 0; i < sections[sectionIndex].CopyTableSize / 8; i++)
                    {
                        CopyData[reader.ReadInt32()] = reader.ReadInt32();
                    }

                    // reference data
                    ReferenceData refData = new ReferenceData();
                    if (sections[sectionIndex].ParentLookupDataSize > 0)
                    {
                        refData.NumRecords = reader.ReadInt32();
                        refData.MinId      = reader.ReadInt32();
                        refData.MaxId      = reader.ReadInt32();

                        var entries = reader.ReadArray<ReferenceEntry>(refData.NumRecords);
                        for (int i = 0; i < entries.Length; i++)
                        {
                            refData.Entries[entries[i].Index] = entries[i].Id;
                        }
                    }

                    int position = 0;
                    for (int i = 0; i < RecordsCount; i++)
                    {
                        BitReader bitReader = new BitReader(RecordsData)
                        {
                            Position = 0
                        };

                        if (Flags.HasFlagExt(DB2Flags.Sparse))
                        {
                            bitReader.Position = position;
                            position          += SparseEntries[i].Size * 8;
                        }
                        else
                        {
                            bitReader.Offset = i * RecordSize;
                        }

                        refData.Entries.TryGetValue(i, out int refId);

                        IDBRow rec = new WDC2Row(this, bitReader, sections[sectionIndex].FileOffset, sections[sectionIndex].IndexDataSize != 0 ? IndexData[i] : -1, refId, i);
                        _Records.Add(i, rec);
                    }
                }
            }
        }
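A minimal usage sketch for any of the readers above, assuming a DB2 file on disk (the file name is only an example; the readers' public record collections are not shown in these snippets):

using System.IO;

using (var stream = File.OpenRead("Achievement.db2"))
{
    // Each constructor reads the whole file up front inside its own BinaryReader,
    // so nothing needs to stay open once it returns.
    var reader = new WDC2Reader(stream);
}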