Example #1
        public new Dictionary <int, byte[]> ReadOffsetData(BinaryReader dbReader, long pos)
        {
            Dictionary <int, byte[]>          CopyTable  = new Dictionary <int, byte[]>();
            List <Tuple <int, short> >        offsetmap  = new List <Tuple <int, short> >();
            Dictionary <int, OffsetDuplicate> firstindex = new Dictionary <int, OffsetDuplicate>();

            long commonDataTablePos = dbReader.BaseStream.Length - CommonDataTableSize;
            long copyTablePos       = commonDataTablePos - CopyTableSize;
            long indexTablePos      = copyTablePos - (HasIndexTable ? RecordCount * 4 : 0);

            int[] m_indexes = null;

            //Offset Map
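            //Scrub is a helper defined elsewhere in this reader; it seeks the underlying stream to an absolute position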
            if (HasOffsetTable)
            {
                // Records table
                dbReader.Scrub(StringBlockSize);

                for (int i = 0; i < (MaxId - MinId + 1); i++)
                {
                    int   offset = dbReader.ReadInt32();
                    short length = dbReader.ReadInt16();

                    if (offset == 0 || length == 0)
                    {
                        continue;
                    }

                    //Special case, may contain duplicates in the offset map that we don't want
                    if (CopyTableSize == 0)
                    {
                        if (!firstindex.ContainsKey(offset))
                        {
                            firstindex.Add(offset, new OffsetDuplicate(offsetmap.Count, firstindex.Count));
                        }
                        else
                        {
                            OffsetDuplicates.Add(MinId + i, firstindex[offset].VisibleIndex);
                        }
                    }

                    offsetmap.Add(new Tuple <int, short>(offset, length));
                }
            }

            //Index table
            if (HasIndexTable)
            {
                //After the offset map the stream already sits at the index table; other layouts need an explicit seek
                if (!HasOffsetTable || HasSecondIndex)
                {
                    dbReader.Scrub(indexTablePos);
                }

                m_indexes = new int[RecordCount];
                for (int i = 0; i < RecordCount; i++)
                {
                    m_indexes[i] = dbReader.ReadInt32();
                }
            }

            //Extract record data
            for (int i = 0; i < Math.Max(RecordCount, offsetmap.Count); i++)
            {
                if (HasOffsetTable)
                {
                    int id  = m_indexes[CopyTable.Count];
                    var map = offsetmap[i];

                    if (CopyTableSize == 0 && firstindex[map.Item1].HiddenIndex != i) //Ignore duplicates
                    {
                        continue;
                    }

                    dbReader.Scrub(map.Item1);

                    IEnumerable <byte> recordbytes = BitConverter.GetBytes(id)
                                                     .Concat(dbReader.ReadBytes(map.Item2));

                    CopyTable.Add(id, recordbytes.ToArray());
                }
                else
                {
                    dbReader.Scrub(pos + i * RecordSize);
                    byte[] recordbytes = dbReader.ReadBytes((int)RecordSize);

                    if (HasIndexTable)
                    {
                        IEnumerable <byte> newrecordbytes = BitConverter.GetBytes(m_indexes[i]).Concat(recordbytes);
                        CopyTable.Add(m_indexes[i], newrecordbytes.ToArray());
                    }
                    else
                    {
                        int bytecount = FieldStructure[IdIndex].ByteCount;
                        int offset    = FieldStructure[IdIndex].Offset;

                        //assemble the record id little-endian from ByteCount bytes at the id field's offset within the raw record
                        int id = 0;
                        for (int j = 0; j < bytecount; j++)
                        {
                            id |= (recordbytes[offset + j] << (j * 8));
                        }

                        CopyTable.Add(id, recordbytes);
                    }
                }
            }

            //CopyTable
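            //Each (newId, existingId) pair clones an already-read record under a new id, rewriting the leading id bytes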
            if (CopyTableSize != 0 && copyTablePos != dbReader.BaseStream.Length)
            {
                dbReader.Scrub(copyTablePos);
                while (dbReader.BaseStream.Position != dbReader.BaseStream.Length)
                {
                    int id     = dbReader.ReadInt32();
                    int idcopy = dbReader.ReadInt32();

                    byte[] copyRow = CopyTable[idcopy];
                    byte[] newRow  = new byte[copyRow.Length];
                    Array.Copy(copyRow, newRow, newRow.Length);
                    Array.Copy(BitConverter.GetBytes(id), newRow, sizeof(int));

                    CopyTable.Add(id, newRow);
                }
            }

            //CommonDataTable
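            //Per-column (id -> value) overrides appended to every record; ids without an override fall back to the column's default value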
            if (CommonDataTableSize > 0)
            {
                dbReader.Scrub(commonDataTablePos);
                int columncount = dbReader.ReadInt32();

                var commondatalookup = new Dictionary <int, byte[]> [columncount];

                //Initial data extraction: each column stores an entry count, a value type, then (id, value) pairs
                for (int i = 0; i < columncount; i++)
                {
                    int   count = dbReader.ReadInt32();
                    byte  type  = dbReader.ReadByte();
                    short bit   = CommonDataBits[type];
                    int   size  = (32 - bit) >> 3;

                    commondatalookup[i] = new Dictionary <int, byte[]>();

                    //New field not defined in header
                    if (i > FieldStructure.Count - 1)
                    {
                        var offset = (ushort)((FieldStructure.Count == 0 ? 0 : FieldStructure[i - 1].Offset + FieldStructure[i - 1].ByteCount));
                        FieldStructure.Add(new FieldStructureEntry(bit, offset, type));

                        if (FieldStructure.Count > 1)
                        {
                            FieldStructure[i - 1].SetLength(FieldStructure[i]);
                        }
                    }

                    for (int x = 0; x < count; x++)
                    {
                        commondatalookup[i].Add(dbReader.ReadInt32(), dbReader.ReadBytes(size));
                    }
                }

                var ids = CopyTable.Keys.ToArray();
                foreach (var id in ids)
                {
                    for (int i = 0; i < commondatalookup.Length; i++)
                    {
                        if (!FieldStructure[i].CommonDataColumn)
                        {
                            continue;
                        }

                        var col          = commondatalookup[i];
                        var defaultValue = TableStructure?.Fields?[i]?.DefaultValue;
                        defaultValue = string.IsNullOrEmpty(defaultValue) ? "0" : defaultValue;

                        var field    = FieldStructure[i];
                        var zeroData = new byte[field.ByteCount];
                        if (defaultValue != "0")
                        {
                            switch (field.CommonDataType)
                            {
                            case 1:
                                zeroData = BitConverter.GetBytes(ushort.Parse(defaultValue));
                                break;

                            case 2:
                                zeroData = new[] { byte.Parse(defaultValue) };
                                break;

                            case 3:
                                zeroData = BitConverter.GetBytes(float.Parse(defaultValue));
                                break;

                            case 4:
                                zeroData = BitConverter.GetBytes(int.Parse(defaultValue));
                                break;
                            }
                        }

                        byte[] currentData = CopyTable[id];
                        byte[] data        = col.ContainsKey(id) ? col[id] : zeroData;
                        Array.Resize(ref currentData, currentData.Length + data.Length);
                        Array.Copy(data, 0, currentData, field.Offset, data.Length);
                        CopyTable[id] = currentData;
                    }
                }

                commondatalookup = null;
                RawRecordSize    = (uint)CopyTable.Values.First().Length;
            }

            return CopyTable;
        }
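
A minimal usage sketch for Example #1, assuming a surrounding reader class that exposes ReadOffsetData together with the header state it relies on (RecordCount, RecordSize, CopyTableSize and the Has* flags); the DB5Reader type, the ReadHeader call and the file name below are illustrative placeholders, not part of the code shown here.

        // hypothetical caller (requires System, System.IO and System.Collections.Generic)
        using (var fs = File.OpenRead("Spell.db2"))                      // placeholder file name
        using (var dbReader = new BinaryReader(fs))
        {
            var reader = new DB5Reader();                                // hypothetical reader type holding the parsed header
            reader.ReadHeader(dbReader);                                 // hypothetical: fills RecordCount, RecordSize, flags and table sizes
            long dataPos = dbReader.BaseStream.Position;                 // record data begins where the header ends
            Dictionary<int, byte[]> rows = reader.ReadOffsetData(dbReader, dataPos);
            Console.WriteLine($"Read {rows.Count} records");
        }
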
Example #2
        public new Dictionary <int, byte[]> ReadOffsetData(BinaryReader dbReader, long pos)
        {
            Dictionary <int, byte[]>          CopyTable  = new Dictionary <int, byte[]>();
            List <Tuple <int, short> >        offsetmap  = new List <Tuple <int, short> >();
            Dictionary <int, OffsetDuplicate> firstindex = new Dictionary <int, OffsetDuplicate>();
            Dictionary <int, List <int> >     Copies     = new Dictionary <int, List <int> >();

            columnOffsets = new List <int>();
            recordOffsets = new List <int>();
            int[] m_indexes = null;

            // OffsetTable
            if (HasOffsetTable && OffsetTableOffset > 0)
            {
                dbReader.BaseStream.Position = OffsetTableOffset;
                for (int i = 0; i < (MaxId - MinId + 1); i++)
                {
                    int   offset = dbReader.ReadInt32();
                    short length = dbReader.ReadInt16();

                    if (offset == 0 || length == 0)
                    {
                        continue;
                    }

                    // special case, may contain duplicates in the offset map that we don't want
                    if (CopyTableSize == 0)
                    {
                        if (!firstindex.ContainsKey(offset))
                        {
                            firstindex.Add(offset, new OffsetDuplicate(offsetmap.Count, firstindex.Count));
                        }
                        else
                        {
                            continue;
                        }
                    }

                    offsetmap.Add(new Tuple <int, short>(offset, length));
                }
            }

            // IndexTable
            if (HasIndexTable)
            {
                m_indexes = new int[RecordCount];
                for (int i = 0; i < RecordCount; i++)
                {
                    m_indexes[i] = dbReader.ReadInt32();
                }
            }

            // Copytable
            if (CopyTableSize > 0)
            {
                long end = dbReader.BaseStream.Position + CopyTableSize;
                while (dbReader.BaseStream.Position < end)
                {
                    int id     = dbReader.ReadInt32();
                    int idcopy = dbReader.ReadInt32();

                    if (!Copies.ContainsKey(idcopy))
                    {
                        Copies.Add(idcopy, new List <int>());
                    }

                    Copies[idcopy].Add(id);
                }
            }

            // Relationships
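            // a parallel (record index -> foreign key) table that gets appended to each record as an extra trailing field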
            if (RelationshipDataSize > 0)
            {
                RelationShipData = new RelationShipData()
                {
                    Records = dbReader.ReadUInt32(),
                    MinId   = dbReader.ReadUInt32(),
                    MaxId   = dbReader.ReadUInt32(),
                    Entries = new Dictionary <uint, byte[]>()
                };

                for (int i = 0; i < RelationShipData.Records; i++)
                {
                    byte[] foreignKey = dbReader.ReadBytes(4);
                    uint   index      = dbReader.ReadUInt32();
                    // has duplicates just like the copy table does... why?
                    if (!RelationShipData.Entries.ContainsKey(index))
                    {
                        RelationShipData.Entries.Add(index, foreignKey);
                    }
                }

                FieldStructure.Add(new FieldStructureEntry(0, 0));
                ColumnMeta.Add(new ColumnStructureEntry());
            }

            // Record Data
            BitStream bitStream = new BitStream(recordData);

            for (int i = 0; i < RecordCount; i++)
            {
                int id = 0;

                if (HasOffsetTable && HasIndexTable)
                {
                    id = m_indexes[CopyTable.Count];
                    var map = offsetmap[i];

                    if (CopyTableSize == 0 && firstindex[map.Item1].HiddenIndex != i)                     //Ignore duplicates
                    {
                        continue;
                    }

                    dbReader.BaseStream.Position = map.Item1;

                    byte[] data = dbReader.ReadBytes(map.Item2);

                    IEnumerable <byte> recordbytes = BitConverter.GetBytes(id).Concat(data);

                    // append relationship id
                    if (RelationShipData != null)
                    {
                        // seen cases of missing indices
                        if (RelationShipData.Entries.TryGetValue((uint)i, out byte[] foreignData))
Example #3
        public new Dictionary <int, byte[]> ReadOffsetData(BinaryReader dbReader, long pos)
        {
            var CopyTable        = new Dictionary <int, byte[]>();
            var offsetmap        = new List <Tuple <int, short> >();
            var firstindex       = new Dictionary <int, OffsetDuplicate>();
            var OffsetDuplicates = new Dictionary <int, int>();
            var Copies           = new Dictionary <int, List <int> >();

            int[] m_indexes = null;

            // OffsetTable
            if (HasOffsetTable && OffsetTableOffset > 0)
            {
                dbReader.BaseStream.Position = OffsetTableOffset;
                for (var i = 0; i < MaxId - MinId + 1; i++)
                {
                    int   offset = dbReader.ReadInt32();
                    short length = dbReader.ReadInt16();

                    if (offset == 0 || length == 0)
                    {
                        continue;
                    }

                    // special case, may contain duplicates in the offset map that we don't want
                    if (CopyTableSize == 0)
                    {
                        if (!firstindex.ContainsKey(offset))
                        {
                            firstindex.Add(offset, new OffsetDuplicate(offsetmap.Count, firstindex.Count));
                        }
                        else
                        {
                            OffsetDuplicates.Add(MinId + i, firstindex[offset].VisibleIndex);
                            continue;
                        }
                    }

                    offsetmap.Add(new Tuple <int, short>(offset, length));
                }
            }

            // IndexTable
            if (HasIndexTable)
            {
                m_indexes = new int[RecordCount];
                for (var i = 0; i < RecordCount; i++)
                {
                    m_indexes[i] = dbReader.ReadInt32();
                }
            }

            // Copytable
            if (CopyTableSize > 0)
            {
                long end = dbReader.BaseStream.Position + CopyTableSize;
                while (dbReader.BaseStream.Position < end)
                {
                    int id     = dbReader.ReadInt32();
                    int idcopy = dbReader.ReadInt32();

                    if (!Copies.ContainsKey(idcopy))
                    {
                        Copies.Add(idcopy, new List <int>());
                    }

                    Copies[idcopy].Add(id);
                }
            }

            // ColumnMeta
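            // per-field compression metadata: record offset/size, compression type, bit-packing info and pallet/sparse payload sizes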
            ColumnMeta = new List <ColumnStructureEntry>();
            for (var i = 0; i < FieldCount; i++)
            {
                ColumnStructureEntry column = new ColumnStructureEntry
                {
                    RecordOffset       = dbReader.ReadUInt16(),
                    Size               = dbReader.ReadUInt16(),
                    AdditionalDataSize = dbReader.ReadUInt32(), // size of pallet / sparse values
                    CompressionType    = (CompressionType)dbReader.ReadUInt32(),
                    BitOffset          = dbReader.ReadInt32(),
                    BitWidth           = dbReader.ReadInt32(),
                    Cardinality        = dbReader.ReadInt32()
                };

                // preload array sizes
                if (column.CompressionType == CompressionType.None)
                {
                    column.ArraySize = Math.Max(column.Size / FieldStructure[i].BitCount, 1);
                }
                else if (column.CompressionType == CompressionType.PalletArray)
                {
                    column.ArraySize = Math.Max(column.Cardinality, 1);
                }

                ColumnMeta.Add(column);
            }

            // Pallet values
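            // AdditionalDataSize / 4 packed uint32 values per pallet; array pallets group them into entries of Cardinality values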
            for (var i = 0; i < ColumnMeta.Count; i++)
            {
                if (ColumnMeta[i].CompressionType == CompressionType.Pallet ||
                    ColumnMeta[i].CompressionType == CompressionType.PalletArray)
                {
                    int elements    = (int)ColumnMeta[i].AdditionalDataSize / 4;
                    int cardinality = Math.Max(ColumnMeta[i].Cardinality, 1);

                    ColumnMeta[i].PalletValues = new List <byte[]>();
                    for (var j = 0; j < elements / cardinality; j++)
                    {
                        ColumnMeta[i].PalletValues.Add(dbReader.ReadBytes(cardinality * 4));
                    }
                }
            }

            // Sparse values
            for (var i = 0; i < ColumnMeta.Count; i++)
            {
                if (ColumnMeta[i].CompressionType == CompressionType.Sparse)
                {
                    ColumnMeta[i].SparseValues = new Dictionary <int, byte[]>();
                    for (var j = 0; j < ColumnMeta[i].AdditionalDataSize / 8; j++)
                    {
                        ColumnMeta[i].SparseValues[dbReader.ReadInt32()] = dbReader.ReadBytes(4);
                    }
                }
            }

            // Relationships
            if (RelationshipDataSize > 0)
            {
                RelationShipData = new RelationShipData
                {
                    Records = dbReader.ReadUInt32(),
                    MinId   = dbReader.ReadUInt32(),
                    MaxId   = dbReader.ReadUInt32(),
                    Entries = new Dictionary <uint, byte[]>()
                };

                for (var i = 0; i < RelationShipData.Records; i++)
                {
                    byte[] foreignKey = dbReader.ReadBytes(4);
                    uint   index      = dbReader.ReadUInt32();
                    // has duplicates just like the copy table does... why?
                    if (!RelationShipData.Entries.ContainsKey(index))
                    {
                        RelationShipData.Entries.Add(index, foreignKey);
                    }
                }

                FieldStructure.Add(new FieldStructureEntry(0, 0));
                ColumnMeta.Add(new ColumnStructureEntry());
            }

            // Record Data
            BitStream bitStream = new(recordData);

            for (var i = 0; i < RecordCount; i++)
            {
                var id = 0;

                if (HasOffsetTable && HasIndexTable)
                {
                    id = m_indexes[CopyTable.Count];
                    var map = offsetmap[i];

                    if (CopyTableSize == 0 && firstindex[map.Item1].HiddenIndex != i) //Ignore duplicates
                    {
                        continue;
                    }

                    dbReader.BaseStream.Position = map.Item1;

                    byte[] data = dbReader.ReadBytes(map.Item2);

                    var recordbytes = BitConverter.GetBytes(id).Concat(data);

                    // append relationship id
                    if (RelationShipData != null)
                    {
                        // seen cases of missing indices
                        if (RelationShipData.Entries.TryGetValue((uint)i, out byte[] foreignData))
Example #4
        public new Dictionary <int, byte[]> ReadOffsetData(BinaryReader dbReader, long pos)
        {
            Dictionary <int, byte[]>          CopyTable        = new Dictionary <int, byte[]>();
            List <Tuple <int, short> >        offsetmap        = new List <Tuple <int, short> >();
            Dictionary <int, OffsetDuplicate> firstindex       = new Dictionary <int, OffsetDuplicate>();
            Dictionary <int, int>             OffsetDuplicates = new Dictionary <int, int>();
            Dictionary <int, List <int> >     Copies           = new Dictionary <int, List <int> >();

            int[] m_indexes = null;

            // OffsetTable
            if (HasOffsetTable && OffsetTableOffset > 0)
            {
                dbReader.BaseStream.Position = OffsetTableOffset;
                for (int i = 0; i < (MaxId - MinId + 1); i++)
                {
                    int   offset = dbReader.ReadInt32();
                    short length = dbReader.ReadInt16();

                    if (offset == 0 || length == 0)
                    {
                        continue;
                    }

                    // special case, may contain duplicates in the offset map that we don't want
                    if (CopyTableSize == 0)
                    {
                        if (!firstindex.ContainsKey(offset))
                        {
                            firstindex.Add(offset, new OffsetDuplicate(offsetmap.Count, firstindex.Count));
                        }
                        else
                        {
                            OffsetDuplicates.Add(MinId + i, firstindex[offset].VisibleIndex);
                        }
                    }

                    offsetmap.Add(new Tuple <int, short>(offset, length));
                }
            }

            // IndexTable
            if (HasIndexTable)
            {
                m_indexes = new int[RecordCount];
                for (int i = 0; i < RecordCount; i++)
                {
                    m_indexes[i] = dbReader.ReadInt32();
                }
            }

            // Copytable
            if (CopyTableSize > 0)
            {
                long end = dbReader.BaseStream.Position + CopyTableSize;
                while (dbReader.BaseStream.Position < end)
                {
                    int id     = dbReader.ReadInt32();
                    int idcopy = dbReader.ReadInt32();

                    if (!Copies.ContainsKey(idcopy))
                    {
                        Copies.Add(idcopy, new List <int>());
                    }

                    Copies[idcopy].Add(id);
                }
            }

            // ColumnMeta
            ColumnMeta = new List <ColumnStructureEntry>();
            for (int i = 0; i < FieldCount; i++)
            {
                var column = new ColumnStructureEntry()
                {
                    RecordOffset       = dbReader.ReadUInt16(),
                    Size               = dbReader.ReadUInt16(),
                    AdditionalDataSize = dbReader.ReadUInt32(),                     // size of pallet / sparse values
                    CompressionType    = (CompressionType)dbReader.ReadUInt32(),
                    BitOffset          = dbReader.ReadInt32(),
                    BitWidth           = dbReader.ReadInt32(),
                    Cardinality        = dbReader.ReadInt32()
                };

                // preload array sizes
                if (column.CompressionType == CompressionType.None)
                {
                    column.ArraySize = Math.Max(column.Size / FieldStructure[i].BitCount, 1);
                }
                else if (column.CompressionType == CompressionType.PalletArray)
                {
                    column.ArraySize = Math.Max(column.Cardinality, 1);
                }

                ColumnMeta.Add(column);
            }

            // Pallet values
            for (int i = 0; i < ColumnMeta.Count; i++)
            {
                if (ColumnMeta[i].CompressionType == CompressionType.Pallet || ColumnMeta[i].CompressionType == CompressionType.PalletArray)
                {
                    int elements    = (int)ColumnMeta[i].AdditionalDataSize / 4;
                    int cardinality = Math.Max(ColumnMeta[i].Cardinality, 1);

                    ColumnMeta[i].PalletValues = new List <byte[]>();
                    for (int j = 0; j < elements / cardinality; j++)
                    {
                        ColumnMeta[i].PalletValues.Add(dbReader.ReadBytes(cardinality * 4));
                    }
                }
            }

            // Sparse values
            for (int i = 0; i < ColumnMeta.Count; i++)
            {
                if (ColumnMeta[i].CompressionType == CompressionType.Sparse)
                {
                    ColumnMeta[i].SparseValues = new Dictionary <int, byte[]>();
                    for (int j = 0; j < ColumnMeta[i].AdditionalDataSize / 8; j++)
                    {
                        ColumnMeta[i].SparseValues[dbReader.ReadInt32()] = dbReader.ReadBytes(4);
                    }
                }
            }

            // Relationships
            if (RelationshipDataSize > 0)
            {
                RelationShipData = new RelationShipData()
                {
                    Records = dbReader.ReadUInt32(),
                    MinId   = dbReader.ReadUInt32(),
                    MaxId   = dbReader.ReadUInt32(),
                    Entries = new List <RelationShipEntry>()
                };

                for (int i = 0; i < RelationShipData.Records; i++)
                {
                    RelationShipData.Entries.Add(new RelationShipEntry(dbReader.ReadUInt32(), dbReader.ReadUInt32()));
                }

                FieldStructure.Add(new FieldStructureEntry(0, 0));
                ColumnMeta.Add(new ColumnStructureEntry());
            }

            // Record Data
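            // recordData and BitStream come from members of this reader defined outside this excerpt; the BitStream walks the bit-packed record block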
            BitStream bitStream = new BitStream(recordData);

            for (int i = 0; i < RecordCount; i++)
            {
                int id = 0;

                if (HasOffsetTable && HasIndexTable)
                {
                    id = m_indexes[CopyTable.Count];
                    var map = offsetmap[i];

                    if (CopyTableSize == 0 && firstindex[map.Item1].HiddenIndex != i)                     // ignore duplicates
                    {
                        continue;
                    }

                    dbReader.BaseStream.Position = map.Item1;

                    byte[] data = dbReader.ReadBytes(map.Item2);

                    IEnumerable <byte> recordbytes = BitConverter.GetBytes(id).Concat(data);
                    if (RelationShipData != null)
                    {
                        byte[] relation = BitConverter.GetBytes(RelationShipData.Entries.First(x => x.Index == i).Id);
                        recordbytes = recordbytes.Concat(relation);
                    }


                    CopyTable.Add(id, recordbytes.ToArray());

                    if (Copies.ContainsKey(id))
                    {
                        foreach (var copy in Copies[id])
                        {
                            CopyTable.Add(copy, BitConverter.GetBytes(copy).Concat(data).ToArray());
                        }
                    }
                }
                else
                {
                    bitStream.Seek(i * RecordSize, 0);
                    int idOffset = 0;

                    List <byte> data = new List <byte>();

                    if (HasIndexTable)
                    {
                        id = m_indexes[i];
                        data.AddRange(BitConverter.GetBytes(id));
                    }

                    for (int f = 0; f < FieldCount; f++)
                    {
                        int  bitOffset   = ColumnMeta[f].BitOffset;
                        int  bitWidth    = ColumnMeta[f].BitWidth;
                        int  cardinality = ColumnMeta[f].Cardinality;
                        uint palletIndex;
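                        // decode the field according to its column compression: raw bytes, bit-packed immediates, sparse (id -> value) lookups, or pallet indices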

                        switch (ColumnMeta[f].CompressionType)
                        {
                        case CompressionType.None:
                            int bitSize = FieldStructure[f].BitCount;
                            if (!HasIndexTable && f == IdIndex)
                            {
                                idOffset = data.Count;
                                id       = (int)bitStream.ReadUInt32(bitSize);                                   // always read Ids as ints
                                data.AddRange(BitConverter.GetBytes(id));
                            }
                            else
                            {
                                for (int x = 0; x < ColumnMeta[f].ArraySize; x++)
                                {
                                    data.AddRange(bitStream.ReadBytesPadded(bitSize));
                                }
                            }
                            break;

                        case CompressionType.Immediate:
                            if (!HasIndexTable && f == IdIndex)
                            {
                                idOffset = data.Count;
                                id       = (int)bitStream.ReadUInt32(bitWidth);                                   // always read Ids as ints
                                data.AddRange(BitConverter.GetBytes(id));
                                continue;
                            }
                            else
                            {
                                data.AddRange(bitStream.ReadBytesPadded(bitWidth));
                            }
                            break;

                        case CompressionType.Sparse:
                            if (ColumnMeta[f].SparseValues.TryGetValue(id, out byte[] valBytes))