Example #1
        protected void RemoveBitLimits()
        {
            if (HasOffsetTable)
            {
                return;
            }

            int c  = HasIndexTable ? 1 : 0;
            int cm = ColumnMeta.Count - (RelationShipData != null ? 1 : 0);

            var skipType = new HashSet <CompressionType>(new[] { CompressionType.None, CompressionType.Sparse });

            for (int i = c; i < cm; i++)
            {
                ColumnStructureEntry col  = ColumnMeta[i];
                CompressionType      type = col.CompressionType;
                int oldsize = col.BitWidth;
                var newsize = (ushort)(columnSizes[c] * 8);

                c += col.ArraySize;

                if (skipType.Contains(col.CompressionType) || newsize == oldsize)
                {
                    continue;
                }

                col.BitWidth = col.Size = newsize;

                for (int x = i + 1; x < cm; x++)
                {
                    if (skipType.Contains(ColumnMeta[x].CompressionType))
                    {
                        continue;
                    }

                    ColumnMeta[x].RecordOffset += (ushort)(newsize - oldsize);
                    ColumnMeta[x].BitOffset     = ColumnMeta[x].RecordOffset - PackedDataOffset * 8;
                }
            }

            RecordSize = (uint)((ColumnMeta.Sum(x => x.Size) + 7) / 8);
        }
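
A note on Example #1: RemoveBitLimits widens every bit-packed column to a whole-byte width, shifts the RecordOffset and BitOffset of every later column by the size difference, and finally rounds the summed column sizes up to whole bytes. The sketch below restates that arithmetic in isolation; it is only an illustration, and the helper names (RoundBitsToBytes, WidenColumn) are hypothetical rather than part of the original classes.

        using System;
        using System.Collections.Generic;
        using System.Linq;

        static class BitLimitSketch
        {
            // Rounds a bit total up to whole bytes, mirroring (Sum(x => x.Size) + 7) / 8 above.
            public static uint RoundBitsToBytes(IEnumerable<int> columnBitSizes)
            {
                return (uint)((columnBitSizes.Sum() + 7) / 8);
            }

            // Widening one column by (newSize - oldSize) bits pushes every later
            // column's offset forward by the same amount, like the inner x-loop above.
            public static void WidenColumn(int[] offsets, int[] sizes, int index, int newSize)
            {
                int delta = newSize - sizes[index];
                sizes[index] = newSize;

                for (int x = index + 1; x < offsets.Length; x++)
                {
                    offsets[x] += delta;
                }
            }
        }
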
Example #2
        static Dictionary <int, byte[]> ReadData(BinaryReader reader)
        {
            Dictionary <int, byte[]>          CopyTable        = new Dictionary <int, byte[]>();
            List <Tuple <int, short> >        offsetmap        = new List <Tuple <int, short> >();
            Dictionary <int, OffsetDuplicate> firstindex       = new Dictionary <int, OffsetDuplicate>();
            Dictionary <int, int>             OffsetDuplicates = new Dictionary <int, int>();
            Dictionary <int, List <int> >     Copies           = new Dictionary <int, List <int> >();

            byte[] recordData;
            if (Header.HasOffsetTable())
            {
                recordData = reader.ReadBytes((int)(Header.OffsetTableOffset - 84 - 4 * Header.FieldCount));
            }
            else
            {
                recordData = reader.ReadBytes((int)(Header.RecordCount * Header.RecordSize));
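                // presumably padded with a few spare bytes so later record/bit reads cannot run past the end of the buffer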
                Array.Resize(ref recordData, recordData.Length + 8);
            }

            if (Header.StringTableSize != 0)
            {
                // string data
                StringTable = new Dictionary <int, string>();

                for (int i = 0; i < Header.StringTableSize;)
                {
                    long oldPos = reader.BaseStream.Position;

                    StringTable[i] = reader.ReadCString();

                    i += (int)(reader.BaseStream.Position - oldPos);
                }
            }

            int[] m_indexes = null;

            // OffsetTable
            if (Header.HasOffsetTable() && Header.OffsetTableOffset > 0)
            {
                reader.BaseStream.Position = Header.OffsetTableOffset;
                for (int i = 0; i < (Header.MaxId - Header.MinId + 1); i++)
                {
                    int   offset = reader.ReadInt32();
                    short length = reader.ReadInt16();

                    if (offset == 0 || length == 0)
                    {
                        continue;
                    }

                    // special case, may contain duplicates in the offset map that we don't want
                    if (Header.CopyTableSize == 0)
                    {
                        if (!firstindex.ContainsKey(offset))
                        {
                            firstindex.Add(offset, new OffsetDuplicate(offsetmap.Count, firstindex.Count));
                        }
                        else
                        {
                            OffsetDuplicates.Add(Header.MinId + i, firstindex[offset].VisibleIndex);
                        }
                    }

                    offsetmap.Add(new Tuple <int, short>(offset, length));
                }
            }

            // IndexTable
            if (Header.HasIndexTable())
            {
                m_indexes = reader.ReadArray <int>(Header.RecordCount);
            }

            // Copytable
            if (Header.CopyTableSize > 0)
            {
                long end = reader.BaseStream.Position + Header.CopyTableSize;
                while (reader.BaseStream.Position < end)
                {
                    int id     = reader.ReadInt32();
                    int idcopy = reader.ReadInt32();

                    if (!Copies.ContainsKey(idcopy))
                    {
                        Copies.Add(idcopy, new List <int>());
                    }

                    Copies[idcopy].Add(id);
                }
            }

            // ColumnMeta
            ColumnMeta = new List <ColumnStructureEntry>();
            if (Header.ColumnMetaSize != 0)
            {
                for (int i = 0; i < Header.FieldCount; i++)
                {
                    var column = new ColumnStructureEntry()
                    {
                        RecordOffset       = reader.ReadUInt16(),
                        Size               = reader.ReadUInt16(),
                        AdditionalDataSize = reader.ReadUInt32(), // size of pallet / sparse values
                        CompressionType    = (DB2ColumnCompression)reader.ReadUInt32(),
                        BitOffset          = reader.ReadInt32(),
                        BitWidth           = reader.ReadInt32(),
                        Cardinality        = reader.ReadInt32()
                    };

                    // preload arraysizes
                    if (column.CompressionType == DB2ColumnCompression.None)
                    {
                        column.ArraySize = Math.Max(column.Size / FieldStructure[i].BitCount, 1);
                    }
                    else if (column.CompressionType == DB2ColumnCompression.PalletArray)
                    {
                        column.ArraySize = Math.Max(column.Cardinality, 1);
                    }

                    ColumnMeta.Add(column);
                }
            }

            // Pallet values
            for (int i = 0; i < ColumnMeta.Count; i++)
            {
                if (ColumnMeta[i].CompressionType == DB2ColumnCompression.Pallet || ColumnMeta[i].CompressionType == DB2ColumnCompression.PalletArray)
                {
                    int elements    = (int)ColumnMeta[i].AdditionalDataSize / 4;
                    int cardinality = Math.Max(ColumnMeta[i].Cardinality, 1);

                    ColumnMeta[i].PalletValues = new List <byte[]>();
                    for (int j = 0; j < elements / cardinality; j++)
                    {
                        ColumnMeta[i].PalletValues.Add(reader.ReadBytes(cardinality * 4));
                    }
                }
            }

            // Sparse values
            for (int i = 0; i < ColumnMeta.Count; i++)
            {
                if (ColumnMeta[i].CompressionType == DB2ColumnCompression.CommonData)
                {
                    ColumnMeta[i].SparseValues = new Dictionary <int, byte[]>();
                    for (int j = 0; j < ColumnMeta[i].AdditionalDataSize / 8; j++)
                    {
                        ColumnMeta[i].SparseValues[reader.ReadInt32()] = reader.ReadBytes(4);
                    }
                }
            }

            // Relationships
            if (Header.RelationshipDataSize > 0)
            {
                RelationShipData = new RelationShipData()
                {
                    Records = reader.ReadUInt32(),
                    MinId   = reader.ReadUInt32(),
                    MaxId   = reader.ReadUInt32(),
                    Entries = new Dictionary <uint, byte[]>()
                };

                for (int i = 0; i < RelationShipData.Records; i++)
                {
                    byte[] foreignKey = reader.ReadBytes(4);
                    uint   index      = reader.ReadUInt32();
                    // has duplicates just like the copy table does... why?
                    if (!RelationShipData.Entries.ContainsKey(index))
                    {
                        RelationShipData.Entries.Add(index, foreignKey);
                    }
                }
            }

            // Record Data
            for (int i = 0; i < Header.RecordCount; i++)
            {
                int id = 0;

                if (Header.HasOffsetTable() && Header.HasIndexTable())
                {
                    id = m_indexes[CopyTable.Count];
                    var map = offsetmap[i];

                    if (Header.CopyTableSize == 0 && firstindex[map.Item1].HiddenIndex != i) // ignore duplicates
                    {
                        continue;
                    }

                    reader.BaseStream.Position = map.Item1;

                    byte[] data = reader.ReadBytes(map.Item2);

                    IEnumerable <byte> recordbytes = data;

                    // append relationship id
                    if (RelationShipData != null)
                    {
                        // seen cases of missing indices
                        if (RelationShipData.Entries.TryGetValue((uint)i, out byte[] foreignData))
Example #3
        public override void ReadHeader(ref BinaryReader dbReader, string signature)
        {
            ReadBaseHeader(ref dbReader, signature);

            TableHash        = dbReader.ReadUInt32();
            LayoutHash       = dbReader.ReadInt32();
            MinId            = dbReader.ReadInt32();
            MaxId            = dbReader.ReadInt32();
            Locale           = dbReader.ReadInt32();
            Flags            = (HeaderFlags)dbReader.ReadUInt16();
            IdIndex          = dbReader.ReadUInt16();
            TotalFieldSize   = dbReader.ReadUInt32();
            PackedDataOffset = dbReader.ReadInt32();

            RelationshipCount  = dbReader.ReadUInt32();
            ColumnMetadataSize = dbReader.ReadInt32();
            SparseDataSize     = dbReader.ReadInt32();
            PalletDataSize     = dbReader.ReadInt32();

            SectionCount = dbReader.ReadInt32();

            // TODO convert to array when the time comes
            Unknown1             = dbReader.ReadInt32();
            Unknown2             = dbReader.ReadInt32();
            RecordDataOffset     = dbReader.ReadInt32();
            RecordDataRowCount   = dbReader.ReadInt32();
            RecordDataStringSize = dbReader.ReadInt32();
            CopyTableSize        = dbReader.ReadInt32();
            OffsetTableOffset    = dbReader.ReadInt32();
            IndexSize            = dbReader.ReadInt32();
            RelationshipDataSize = dbReader.ReadInt32();

            if (RecordCount == 0 || FieldCount == 0)
            {
                return;
            }

            // Gather field structures
            FieldStructure = new List <FieldStructureEntry>();
            for (int i = 0; i < FieldCount; i++)
            {
                var field = new FieldStructureEntry(dbReader.ReadInt16(), dbReader.ReadUInt16());
                FieldStructure.Add(field);
            }

            // ColumnMeta
            ColumnMeta = new List <ColumnStructureEntry>();
            for (int i = 0; i < FieldCount; i++)
            {
                var column = new ColumnStructureEntry()
                {
                    RecordOffset       = dbReader.ReadUInt16(),
                    Size               = dbReader.ReadUInt16(),
                    AdditionalDataSize = dbReader.ReadUInt32(),                     // size of pallet / sparse values
                    CompressionType    = (CompressionType)dbReader.ReadUInt32(),
                    BitOffset          = dbReader.ReadInt32(),
                    BitWidth           = dbReader.ReadInt32(),
                    Cardinality        = dbReader.ReadInt32()
                };

                // preload arraysizes
                if (column.CompressionType == CompressionType.None)
                {
                    column.ArraySize = Math.Max(column.Size / FieldStructure[i].BitCount, 1);
                }
                else if (column.CompressionType == CompressionType.PalletArray)
                {
                    column.ArraySize = Math.Max(column.Cardinality, 1);
                }

                ColumnMeta.Add(column);
            }

            // Pallet values
            for (int i = 0; i < ColumnMeta.Count; i++)
            {
                if (ColumnMeta[i].CompressionType == CompressionType.Pallet || ColumnMeta[i].CompressionType == CompressionType.PalletArray)
                {
                    int elements    = (int)ColumnMeta[i].AdditionalDataSize / 4;
                    int cardinality = Math.Max(ColumnMeta[i].Cardinality, 1);

                    ColumnMeta[i].PalletValues = new List <byte[]>();
                    for (int j = 0; j < elements / cardinality; j++)
                    {
                        ColumnMeta[i].PalletValues.Add(dbReader.ReadBytes(cardinality * 4));
                    }
                }
            }

            // Sparse values
            for (int i = 0; i < ColumnMeta.Count; i++)
            {
                if (ColumnMeta[i].CompressionType == CompressionType.Sparse)
                {
                    ColumnMeta[i].SparseValues = new Dictionary <int, byte[]>();
                    for (int j = 0; j < ColumnMeta[i].AdditionalDataSize / 8; j++)
                    {
                        ColumnMeta[i].SparseValues[dbReader.ReadInt32()] = dbReader.ReadBytes(4);
                    }
                }
            }

            // RecordData
            recordData = dbReader.ReadBytes((int)(RecordCount * RecordSize));
            Array.Resize(ref recordData, recordData.Length + 8);

            Flags &= ~HeaderFlags.SecondIndex;             // appears to be obsolete now
        }
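
A note on the pallet and sparse loops shared by Examples #2 through #5: AdditionalDataSize is a byte count. A pallet column with AdditionalDataSize = 32 and Cardinality = 2 therefore holds 8 four-byte values grouped into 4 pallet entries of 8 bytes each, while a sparse (common-data) column holds AdditionalDataSize / 8 entries, each a 4-byte record id followed by a 4-byte value. The standalone sketch below mirrors those loops; the class and method names are hypothetical and only illustrate the layout.

        using System;
        using System.Collections.Generic;
        using System.IO;

        static class ColumnDataSketch
        {
            // Pallet data: AdditionalDataSize bytes of 4-byte values, grouped into
            // entries of `cardinality` values each (cardinality is at least 1).
            public static List<byte[]> ReadPallet(BinaryReader reader, uint additionalDataSize, int cardinality)
            {
                cardinality = Math.Max(cardinality, 1);
                int elements = (int)additionalDataSize / 4;

                var values = new List<byte[]>();
                for (int j = 0; j < elements / cardinality; j++)
                {
                    values.Add(reader.ReadBytes(cardinality * 4));
                }
                return values;
            }

            // Sparse (common) data: 8 bytes per entry, a 4-byte record id followed by a 4-byte value.
            public static Dictionary<int, byte[]> ReadSparse(BinaryReader reader, uint additionalDataSize)
            {
                var values = new Dictionary<int, byte[]>();
                for (uint j = 0; j < additionalDataSize / 8; j++)
                {
                    values[reader.ReadInt32()] = reader.ReadBytes(4);
                }
                return values;
            }
        }
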
Example #4
        public new Dictionary <int, byte[]> ReadOffsetData(BinaryReader dbReader, long pos)
        {
            var CopyTable        = new Dictionary <int, byte[]>();
            var offsetmap        = new List <Tuple <int, short> >();
            var firstindex       = new Dictionary <int, OffsetDuplicate>();
            var OffsetDuplicates = new Dictionary <int, int>();
            var Copies           = new Dictionary <int, List <int> >();

            int[] m_indexes = null;

            // OffsetTable
            if (HasOffsetTable && OffsetTableOffset > 0)
            {
                dbReader.BaseStream.Position = OffsetTableOffset;
                for (var i = 0; i < MaxId - MinId + 1; i++)
                {
                    int   offset = dbReader.ReadInt32();
                    short length = dbReader.ReadInt16();

                    if (offset == 0 || length == 0)
                    {
                        continue;
                    }

                    // special case, may contain duplicates in the offset map that we don't want
                    if (CopyTableSize == 0)
                    {
                        if (!firstindex.ContainsKey(offset))
                        {
                            firstindex.Add(offset, new OffsetDuplicate(offsetmap.Count, firstindex.Count));
                        }
                        else
                        {
                            OffsetDuplicates.Add(MinId + i, firstindex[offset].VisibleIndex);
                            continue;
                        }
                    }

                    offsetmap.Add(new Tuple <int, short>(offset, length));
                }
            }

            // IndexTable
            if (HasIndexTable)
            {
                m_indexes = new int[RecordCount];
                for (var i = 0; i < RecordCount; i++)
                {
                    m_indexes[i] = dbReader.ReadInt32();
                }
            }

            // Copytable
            if (CopyTableSize > 0)
            {
                long end = dbReader.BaseStream.Position + CopyTableSize;
                while (dbReader.BaseStream.Position < end)
                {
                    int id     = dbReader.ReadInt32();
                    int idcopy = dbReader.ReadInt32();

                    if (!Copies.ContainsKey(idcopy))
                    {
                        Copies.Add(idcopy, new List <int>());
                    }

                    Copies[idcopy].Add(id);
                }
            }

            // ColumnMeta
            ColumnMeta = new List <ColumnStructureEntry>();
            for (var i = 0; i < FieldCount; i++)
            {
                ColumnStructureEntry column = new ColumnStructureEntry
                {
                    RecordOffset       = dbReader.ReadUInt16(),
                    Size               = dbReader.ReadUInt16(),
                    AdditionalDataSize = dbReader.ReadUInt32(), // size of pallet / sparse values
                    CompressionType    = (CompressionType)dbReader.ReadUInt32(),
                    BitOffset          = dbReader.ReadInt32(),
                    BitWidth           = dbReader.ReadInt32(),
                    Cardinality        = dbReader.ReadInt32()
                };

                // preload arraysizes
                if (column.CompressionType == CompressionType.None)
                {
                    column.ArraySize = Math.Max(column.Size / FieldStructure[i].BitCount, 1);
                }
                else if (column.CompressionType == CompressionType.PalletArray)
                {
                    column.ArraySize = Math.Max(column.Cardinality, 1);
                }

                ColumnMeta.Add(column);
            }

            // Pallet values
            for (var i = 0; i < ColumnMeta.Count; i++)
            {
                if (ColumnMeta[i].CompressionType == CompressionType.Pallet ||
                    ColumnMeta[i].CompressionType == CompressionType.PalletArray)
                {
                    int elements    = (int)ColumnMeta[i].AdditionalDataSize / 4;
                    int cardinality = Math.Max(ColumnMeta[i].Cardinality, 1);

                    ColumnMeta[i].PalletValues = new List <byte[]>();
                    for (var j = 0; j < elements / cardinality; j++)
                    {
                        ColumnMeta[i].PalletValues.Add(dbReader.ReadBytes(cardinality * 4));
                    }
                }
            }

            // Sparse values
            for (var i = 0; i < ColumnMeta.Count; i++)
            {
                if (ColumnMeta[i].CompressionType == CompressionType.Sparse)
                {
                    ColumnMeta[i].SparseValues = new Dictionary <int, byte[]>();
                    for (var j = 0; j < ColumnMeta[i].AdditionalDataSize / 8; j++)
                    {
                        ColumnMeta[i].SparseValues[dbReader.ReadInt32()] = dbReader.ReadBytes(4);
                    }
                }
            }

            // Relationships
            if (RelationshipDataSize > 0)
            {
                RelationShipData = new RelationShipData
                {
                    Records = dbReader.ReadUInt32(),
                    MinId   = dbReader.ReadUInt32(),
                    MaxId   = dbReader.ReadUInt32(),
                    Entries = new Dictionary <uint, byte[]>()
                };

                for (var i = 0; i < RelationShipData.Records; i++)
                {
                    byte[] foreignKey = dbReader.ReadBytes(4);
                    uint   index      = dbReader.ReadUInt32();
                    // has duplicates just like the copy table does... why?
                    if (!RelationShipData.Entries.ContainsKey(index))
                    {
                        RelationShipData.Entries.Add(index, foreignKey);
                    }
                }

                FieldStructure.Add(new FieldStructureEntry(0, 0));
                ColumnMeta.Add(new ColumnStructureEntry());
            }

            // Record Data
            BitStream bitStream = new(recordData);

            for (var i = 0; i < RecordCount; i++)
            {
                var id = 0;

                if (HasOffsetTable && HasIndexTable)
                {
                    id = m_indexes[CopyTable.Count];
                    var map = offsetmap[i];

                    if (CopyTableSize == 0 && firstindex[map.Item1].HiddenIndex != i) // ignore duplicates
                    {
                        continue;
                    }

                    dbReader.BaseStream.Position = map.Item1;

                    byte[] data = dbReader.ReadBytes(map.Item2);

                    var recordbytes = BitConverter.GetBytes(id).Concat(data);

                    // append relationship id
                    if (RelationShipData != null)
                    {
                        // seen cases of missing indices
                        if (RelationShipData.Entries.TryGetValue((uint)i, out byte[] foreignData))
Example #5
        public new Dictionary <int, byte[]> ReadOffsetData(BinaryReader dbReader, long pos)
        {
            Dictionary <int, byte[]>          CopyTable        = new Dictionary <int, byte[]>();
            List <Tuple <int, short> >        offsetmap        = new List <Tuple <int, short> >();
            Dictionary <int, OffsetDuplicate> firstindex       = new Dictionary <int, OffsetDuplicate>();
            Dictionary <int, int>             OffsetDuplicates = new Dictionary <int, int>();
            Dictionary <int, List <int> >     Copies           = new Dictionary <int, List <int> >();

            int[] m_indexes = null;

            // OffsetTable
            if (HasOffsetTable && OffsetTableOffset > 0)
            {
                dbReader.BaseStream.Position = OffsetTableOffset;
                for (int i = 0; i < (MaxId - MinId + 1); i++)
                {
                    int   offset = dbReader.ReadInt32();
                    short length = dbReader.ReadInt16();

                    if (offset == 0 || length == 0)
                    {
                        continue;
                    }

                    // special case, may contain duplicates in the offset map that we don't want
                    if (CopyTableSize == 0)
                    {
                        if (!firstindex.ContainsKey(offset))
                        {
                            firstindex.Add(offset, new OffsetDuplicate(offsetmap.Count, firstindex.Count));
                        }
                        else
                        {
                            OffsetDuplicates.Add(MinId + i, firstindex[offset].VisibleIndex);
                        }
                    }

                    offsetmap.Add(new Tuple <int, short>(offset, length));
                }
            }

            // IndexTable
            if (HasIndexTable)
            {
                m_indexes = new int[RecordCount];
                for (int i = 0; i < RecordCount; i++)
                {
                    m_indexes[i] = dbReader.ReadInt32();
                }
            }

            // Copytable
            if (CopyTableSize > 0)
            {
                long end = dbReader.BaseStream.Position + CopyTableSize;
                while (dbReader.BaseStream.Position < end)
                {
                    int id     = dbReader.ReadInt32();
                    int idcopy = dbReader.ReadInt32();

                    if (!Copies.ContainsKey(idcopy))
                    {
                        Copies.Add(idcopy, new List <int>());
                    }

                    Copies[idcopy].Add(id);
                }
            }

            // ColumnMeta
            ColumnMeta = new List <ColumnStructureEntry>();
            for (int i = 0; i < FieldCount; i++)
            {
                var column = new ColumnStructureEntry()
                {
                    RecordOffset       = dbReader.ReadUInt16(),
                    Size               = dbReader.ReadUInt16(),
                    AdditionalDataSize = dbReader.ReadUInt32(),                     // size of pallet / sparse values
                    CompressionType    = (CompressionType)dbReader.ReadUInt32(),
                    BitOffset          = dbReader.ReadInt32(),
                    BitWidth           = dbReader.ReadInt32(),
                    Cardinality        = dbReader.ReadInt32()
                };

                // preload arraysizes
                if (column.CompressionType == CompressionType.None)
                {
                    column.ArraySize = Math.Max(column.Size / FieldStructure[i].BitCount, 1);
                }
                else if (column.CompressionType == CompressionType.PalletArray)
                {
                    column.ArraySize = Math.Max(column.Cardinality, 1);
                }

                ColumnMeta.Add(column);
            }

            // Pallet values
            for (int i = 0; i < ColumnMeta.Count; i++)
            {
                if (ColumnMeta[i].CompressionType == CompressionType.Pallet || ColumnMeta[i].CompressionType == CompressionType.PalletArray)
                {
                    int elements    = (int)ColumnMeta[i].AdditionalDataSize / 4;
                    int cardinality = Math.Max(ColumnMeta[i].Cardinality, 1);

                    ColumnMeta[i].PalletValues = new List <byte[]>();
                    for (int j = 0; j < elements / cardinality; j++)
                    {
                        ColumnMeta[i].PalletValues.Add(dbReader.ReadBytes(cardinality * 4));
                    }
                }
            }

            // Sparse values
            for (int i = 0; i < ColumnMeta.Count; i++)
            {
                if (ColumnMeta[i].CompressionType == CompressionType.Sparse)
                {
                    ColumnMeta[i].SparseValues = new Dictionary <int, byte[]>();
                    for (int j = 0; j < ColumnMeta[i].AdditionalDataSize / 8; j++)
                    {
                        ColumnMeta[i].SparseValues[dbReader.ReadInt32()] = dbReader.ReadBytes(4);
                    }
                }
            }

            // Relationships
            if (RelationshipDataSize > 0)
            {
                RelationShipData = new RelationShipData()
                {
                    Records = dbReader.ReadUInt32(),
                    MinId   = dbReader.ReadUInt32(),
                    MaxId   = dbReader.ReadUInt32(),
                    Entries = new List <RelationShipEntry>()
                };

                for (int i = 0; i < RelationShipData.Records; i++)
                {
                    RelationShipData.Entries.Add(new RelationShipEntry(dbReader.ReadUInt32(), dbReader.ReadUInt32()));
                }

                FieldStructure.Add(new FieldStructureEntry(0, 0));
                ColumnMeta.Add(new ColumnStructureEntry());
            }

            // Record Data
            BitStream bitStream = new BitStream(recordData);

            for (int i = 0; i < RecordCount; i++)
            {
                int id = 0;

                if (HasOffsetTable && HasIndexTable)
                {
                    id = m_indexes[CopyTable.Count];
                    var map = offsetmap[i];

                    if (CopyTableSize == 0 && firstindex[map.Item1].HiddenIndex != i)                     // ignore duplicates
                    {
                        continue;
                    }

                    dbReader.BaseStream.Position = map.Item1;

                    byte[] data = dbReader.ReadBytes(map.Item2);

                    IEnumerable <byte> recordbytes = BitConverter.GetBytes(id).Concat(data);
                    if (RelationShipData != null)
                    {
                        byte[] relation = BitConverter.GetBytes(RelationShipData.Entries.First(x => x.Index == i).Id);
                        recordbytes = recordbytes.Concat(relation);
                    }


                    CopyTable.Add(id, recordbytes.ToArray());

                    if (Copies.ContainsKey(id))
                    {
                        foreach (var copy in Copies[id])
                        {
                            CopyTable.Add(copy, BitConverter.GetBytes(copy).Concat(data).ToArray());
                        }
                    }
                }
                else
                {
                    bitStream.Seek(i * RecordSize, 0);
                    int idOffset = 0;

                    List <byte> data = new List <byte>();

                    if (HasIndexTable)
                    {
                        id = m_indexes[i];
                        data.AddRange(BitConverter.GetBytes(id));
                    }

                    for (int f = 0; f < FieldCount; f++)
                    {
                        int  bitOffset   = ColumnMeta[f].BitOffset;
                        int  bitWidth    = ColumnMeta[f].BitWidth;
                        int  cardinality = ColumnMeta[f].Cardinality;
                        uint palletIndex;

                        switch (ColumnMeta[f].CompressionType)
                        {
                        case CompressionType.None:
                            int bitSize = FieldStructure[f].BitCount;
                            if (!HasIndexTable && f == IdIndex)
                            {
                                idOffset = data.Count;
                                id       = (int)bitStream.ReadUInt32(bitSize);                                   // always read Ids as ints
                                data.AddRange(BitConverter.GetBytes(id));
                            }
                            else
                            {
                                for (int x = 0; x < ColumnMeta[f].ArraySize; x++)
                                {
                                    data.AddRange(bitStream.ReadBytesPadded(bitSize));
                                }
                            }
                            break;

                        case CompressionType.Immediate:
                            if (!HasIndexTable && f == IdIndex)
                            {
                                idOffset = data.Count;
                                id       = (int)bitStream.ReadUInt32(bitWidth);                                   // always read Ids as ints
                                data.AddRange(BitConverter.GetBytes(id));
                                continue;
                            }
                            else
                            {
                                data.AddRange(bitStream.ReadBytesPadded(bitWidth));
                            }
                            break;

                        case CompressionType.Sparse:
                            if (ColumnMeta[f].SparseValues.TryGetValue(id, out byte[] valBytes))