Example #1
 public MapBlock( CompressedBlock compressed, AdjacencyTable adjacent )
 {
     //owners = null;
     //tree = null;
     //leafs = null;
     Decompress( compressed, adjacent );
 }
Example #2
        public void Decompress( CompressedBlock compressed, AdjacencyTable adjacent )
        {
            int index = 0;

            // Decompress the ID table from the data stream
            DecompressIdTable( compressed.Data, ref index, adjacent );

            // Decompress the tree from the data stream
            int leafcount = DecompressTree( compressed.Data, ref index );
            // Create the leaf array
            this.leafs = new NodeData[leafcount + (leafcount % 8 == 0 ? 0 : 8-(leafcount % 8))];

            // Decompress the ownership table next
            DecompressOwnership( compressed.Data, ref index );

            // Finally, read the color data for each leaf
            DecompressColorInfo( compressed.Data, ref index );
        }
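The leaf-array sizing above pads leafcount up to the next multiple of 8 (13 leaves allocate 16 slots, for example). A minimal sketch of the same round-up arithmetic as a stand-alone helper; RoundUpToMultipleOf8 is hypothetical and not part of the original class:

        // Sketch only: mirrors the leaf-array sizing expression used in Decompress.
        private static int RoundUpToMultipleOf8( int count )
        {
            return count % 8 == 0 ? count : count + (8 - count % 8);
        }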
Example #3
 public MapBlock( CompressedBlock compressed, AdjacencyTable adjacent )
 {
     Decompress( compressed, adjacent );
 }
Example #4
        public void Decompress( CompressedBlock compressed, AdjacencyTable adjacent )
        {
            int index = 0;

            // Decompress the ID table from the data stream
            Pixel[] owners;
            int ownercount = DecompressIdTable( compressed.Data, ref index, adjacent, out owners );

            // Decompress the tree from the data stream
            tree = DecompressTree( compressed.Data, ref index );
            int leafcount = tree.CalcLeafCount();
            // Create the leaf array
            Pixel[] leafs = new Pixel[leafcount + (leafcount % 8 == 0 ? 0 : 8-(leafcount % 8))];

            // Decompress the ownership table next
            DecompressOwnership( compressed.Data, ref index, leafs, leafcount, owners, ownercount );

            // Finally, read the color data for each leaf
            DecompressColorInfo( compressed.Data, ref index, leafs, leafcount );

            // Assign the leaf data to the nodes
            int leafIndex = 0;
            PopulateNode( tree, leafs, ref leafIndex );
        }
Example #5
        public static ushort[] GetRawIDTable( CompressedBlock compressed )
        {
            byte[] data = compressed.Data;

            // -- Get a list of province ids
            int idCount = -1;
            int index = 0;
            ushort[] idTable = new ushort[IdTableSize];
            do {
                ++idCount;
                idTable[idCount] = (ushort)(data[index] + ((data[index+1] & 127) << 8));
                index += 2;
            } while ( data[index-1] < Terminator );
            ++idCount;

            ushort[] result = new ushort[idCount];
            Array.Copy( idTable, 0, result, 0, idCount );
            return result;
        }
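GetRawIDTable above reads each ID as two bytes, low byte first, keeping only the low 7 bits of the second byte; the loop stops once that second byte reaches Terminator, so the top bit appears to flag the last entry. Assuming Terminator is 128 and IDs fit in 15 bits (implied but not stated by the code), the inverse encoding might look like this hypothetical sketch:

        // Sketch only: the assumed inverse of GetRawIDTable. Assumes Terminator == 128,
        // IDs limited to 15 bits, and at least one entry (as the do/while above implies).
        public static byte[] WriteRawIdTable( ushort[] ids )
        {
            byte[] data = new byte[ids.Length * 2];
            for ( int i = 0; i < ids.Length; ++i ) {
                data[i*2]   = (byte)(ids[i] & 0xFF);        // low byte first
                data[i*2+1] = (byte)((ids[i] >> 8) & 127);  // high 7 bits of the ID
            }
            data[data.Length-1] |= 128;                     // assumed terminator flag on the last entry
            return data;
        }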
Example #6
        public void ReadFrom2( BinaryReader reader )
        {
            // Calculate the number of blocks to read. This depends on the zoomlevel.
            int blockcount = ((BaseWidth >> (5+zoom)) * (BaseHeight >> (BlockFactor+zoom)));

            int[] offsets = new int[blockcount+1];
            for ( int i=0; i<=blockcount; ++i ) {
                offsets[i] = reader.ReadByte() + (reader.ReadByte()<<8) + (reader.ReadByte()<<16);
            }

            // Skip one more byte: the last offset is stored as a 4-byte integer, but only 3 bytes of it were read in the loop above
            reader.ReadByte();

            // Read all the remaining data into a buffer
            MemoryStream stream = new MemoryStream( blockcount * 6 );

            byte[] buffer = new byte[4096];
            int count = 0;
            while ( (count = reader.Read( buffer, 0, 4096 )) > 0 ) {
                stream.Write( buffer, 0, count );
            }

            // We now have a MemoryStream that we can seek into
            blocks = new GenericMapBlock[blockcount];
            for ( int i=0; i<blockcount; ++i ) {
                blocks[i] = new CompressedBlock( offsets[i], offsets[i+1], stream );
            }
            stream.Close();
        }
Example #7
        public void ReadFrom( BinaryReader reader )
        {
            // Calculate the number of blocks to read. This depends on the zoomlevel.
            int blockcount = ((BaseWidth >> (5+zoom)) * (BaseHeight >> (BlockFactor+zoom)));

            blocks = new GenericMapBlock[blockcount];
            int offsetStart = 0, offsetEnd = 0;
            int baseOffset = (int)reader.BaseStream.Position + (blockcount+1) * 3 + 1;
            for ( int i=0; i<blockcount; ++i ) {
                if ( i == 0 ) {
                    offsetStart = reader.ReadByte() + (reader.ReadByte()<<8) + (reader.ReadByte()<<16);
                    offsetEnd = reader.ReadByte() + (reader.ReadByte()<<8) + (reader.ReadByte()<<16);
                }
                else {
                    offsetStart = offsetEnd;
                    offsetEnd = reader.ReadByte() + (reader.ReadByte()<<8) + (reader.ReadByte()<<16);
                }

                blocks[i] = new CompressedBlock( baseOffset+offsetStart, offsetEnd-offsetStart );      // Defer reading the block data till later.
            }

            // Read another byte. The last offset is stored as a 4-byte integer, but only 3 bytes were read in the loop above.
            reader.ReadByte();

            // Read all the actual data afterwards
            for ( int i=0; i<blockcount; ++i ) {
                ((CompressedBlock)blocks[i]).ReadFrom( reader );
            }
        }
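Both ReadFrom and ReadFrom2 assemble each offset from three bytes in little-endian order. A hypothetical helper that makes that format explicit (not part of the original class):

        // Sketch only: a 3-byte little-endian read, equivalent to the inline expressions above.
        private static int ReadUInt24( BinaryReader reader )
        {
            return reader.ReadByte() + (reader.ReadByte() << 8) + (reader.ReadByte() << 16);
        }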
Example #8
 public RawImage DecodeBlockImage( CompressedBlock block, Point location )
 {
     return DecodeBlockImage( new MapBlock( block, adjacent ), location );
 }
Example #9
 public virtual bool Equals( CompressedBlock other )
 {
     return Compare( other ) == CompareResult.Equal;
 }
Example #10
        public virtual CompareResult Compare( CompressedBlock other )
        {
            if ( data.Length != other.data.Length ) return CompareResult.SizeMismatch;

            for ( int i=data.Length-1; i>=0; --i ) {
                if ( data[i] != other.data[i] ) return CompareResult.DataMismatch;
            }

            return CompareResult.Equal;
        }
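A short usage sketch for the comparison above; DescribeDifference is a hypothetical helper that only maps the CompareResult values the method can return:

        // Sketch only: turns the result of Compare() into a short description.
        public static string DescribeDifference( CompressedBlock a, CompressedBlock b )
        {
            switch ( a.Compare( b ) ) {
                case CompareResult.Equal:        return "blocks are identical";
                case CompareResult.SizeMismatch: return "blocks differ in length";
                default:                         return "blocks have equal length but different bytes";
            }
        }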