Example #1
0
        /// <summary>
        /// Serializes this type entry: class id, stripped flag, script type index,
        /// the 16-byte script hash (MonoBehaviour entries only) and the 16-byte
        /// old type hash.
        /// </summary>
        /// <param name="writer">Destination writer; data is appended at its current position.</param>
        public void Write(UnityBinaryWriter writer)
        {
            writer.WriteInt(ClassID);

            // Bools are stored as a single 0/1 byte.
            byte strippedFlag = IsStrippedType ? (byte)1 : (byte)0;
            writer.WriteByte(strippedFlag);
            writer.WriteShort(ScriptTypeIndex);

            // Only MonoBehaviour entries carry a script hash.
            bool isMonoBehaviour = ClassID == (int)ClassIDType.MonoBehaviour;
            if (isMonoBehaviour)
            {
                writer.WriteBytes(ScriptID, 0, 16);
            }

            writer.WriteBytes(OldTypeHash, 0, 16);
        }
 /// <summary>
 /// Serializes this entry: an always-empty leading string, the 16-byte GUID,
 /// the type value, and the null-terminated path name.
 /// </summary>
 /// <param name="writer">Destination writer; data is appended at its current position.</param>
 public void Write(UnityBinaryWriter writer)
 {
     // The leading string field is always written empty here.
     writer.WriteStringToNull("");

     byte[] guidBytes = Guid.ToByteArray();
     writer.WriteBytes(guidBytes);

     writer.WriteInt(Type);
     writer.WriteStringToNull(PathName);
 }
Example #3
0
        /// <summary>
        /// Serializes the type tree: a node_count + strtable_length header,
        /// followed by the node records and the local string table.
        /// </summary>
        /// <param name="writer">Destination writer; data is appended at its current position.</param>
        public void Write(UnityBinaryWriter writer)
        {
            // Skip header since strtable_length is unknown until all nodes are
            // written; it is backpatched at the end (node_count + strtable_length,
            // 4 bytes each = 8 bytes).
            int header_position = writer.Position;

            writer.Position += 8;

            // Collects strings that are not in the common string table.
            StringTableBuilder strtable = new StringTableBuilder();

            // Write Nodes
            for (int i = 0; i < Nodes.Length; i++)
            {
                writer.WriteUShort(Nodes[i].Version);
                writer.WriteByte(Nodes[i].Level);
                writer.WriteByte((byte)(Nodes[i].IsArray ? 1 : 0));

                // Write TypeName as two ushorts: the string offset, then a flag
                // word where 0x8000 marks an offset into the common string table
                // (equivalently, bit 31 of the combined little-endian 32-bit value).
                int TypeNameOffset = GetCommonStringID(Nodes[i].Type);
                if (TypeNameOffset == -1)  // Not a common string
                {
                    writer.WriteUShort(strtable.AddString(Nodes[i].Type));
                    writer.WriteUShort(0);
                }
                else
                {
                    writer.WriteUShort((ushort)TypeNameOffset);
                    writer.WriteUShort(0x8000);
                }

                // Write Name (same string-reference encoding as TypeName above)
                int NameOffset = GetCommonStringID(Nodes[i].Name);
                if (NameOffset == -1)   // Not a common string
                {
                    writer.WriteUShort(strtable.AddString(Nodes[i].Name));
                    writer.WriteUShort(0);
                }
                else
                {
                    writer.WriteUShort((ushort)NameOffset);
                    writer.WriteUShort(0x8000);
                }

                writer.WriteInt(Nodes[i].ByteSize);
                writer.WriteInt(Nodes[i].Index);
                writer.WriteInt(Nodes[i].MetaFlag);
            }

            // Write StringTable (only the non-common strings accumulated above)
            byte[] strtable_bytes = strtable.ToBytes();
            writer.WriteBytes(strtable_bytes);

            // Write node_count and strtable_length by seeking back to the
            // reserved header slot, then restore the write position.
            int final_pos = writer.Position;

            writer.Position = header_position;
            writer.WriteInt(Nodes.Length);
            writer.WriteInt(strtable_bytes.Length);
            writer.Position = final_pos;
        }
Example #4
0
 /// <summary>
 /// Serializes the file header: metadata size, file size, version and data
 /// offset as big-endian ints, then the endianness flag byte and 3 reserved bytes.
 /// </summary>
 /// <param name="writer">Destination writer; data is appended at its current position.</param>
 public void Write(UnityBinaryWriter writer)
 {
     // Header integers are always big-endian.
     writer.WriteIntBE(MetadataSize);
     writer.WriteIntBE(FileSize);
     writer.WriteIntBE(Version);
     writer.WriteIntBE(DataOffset);

     // Endianness flag stored as a 0/1 byte, followed by 3 reserved bytes.
     byte endianFlag = IsBigEndian ? (byte)1 : (byte)0;
     writer.WriteByte(endianFlag);
     writer.WriteBytes(Reserved, 0, 3);
 }
Example #5
0
        /// <summary>
        /// Serializes the whole file: header, metadata (object table, scripts,
        /// externals, user info), then the aligned object body, and finally
        /// backpatches the header with the computed sizes.
        /// </summary>
        /// <param name="writer">Destination writer; data is appended at its current position.</param>
        public void Write(UnityBinaryWriter writer)
        {
            // Skip Header since MetadataSize and DataOffset are unknown
            // until everything else has been written; backpatched at the end.
            int header_pos = writer.Position;

            writer.Position += Header.CalcSize();

            // Write Metadata
            writeMetadata(writer);

            // Write Objects (the object info table goes into writer; the raw
            // object body is returned and appended after alignment below)
            byte[] body = writeObjects(writer);

            // Write Scripts
            writeScripts(writer);

            // Write Externals
            writeExternals(writer);

            // Write UserInformation
            writer.WriteStringToNull(UserInformation);

            // NOTE(review): this (and DataOffset/FileSize below) uses absolute
            // writer positions, which is only correct when header_pos == 0 —
            // confirm callers always write from the start of the stream.
            Header.MetadataSize = writer.Position - Header.CalcSize();

            // Align body: pad out to 0x1000 when metadata is small, otherwise
            // align to a 16-byte boundary.
            if (writer.Position < 0x1000)
            {
                writer.Position = 0x1000;
            }
            else
            {
                writer.Align(16);
            }
            Header.DataOffset = writer.Position;

            // Write body
            writer.WriteBytes(body);

            // Write Header (backpatch now that all sizes are known)
            Header.FileSize = writer.Position;
            writer.Position = header_pos;
            Header.Write(writer);
        }
Example #6
0
        /// <summary>
        /// Writes the object info table (path id, body offset, data length,
        /// type id per object) into <paramref name="writer"/> and returns the
        /// concatenated object data ("body") to be appended by the caller.
        /// </summary>
        /// <param name="writer">Destination for the object info table.</param>
        /// <returns>The 8-byte-aligned concatenation of all object data.</returns>
        private byte[] writeObjects(UnityBinaryWriter writer)
        {
            writer.WriteInt(Objects.Length);
            UnityBinaryWriter objectwriter = new UnityBinaryWriter();

            for (int i = 0; i < Objects.Length; i++)
            {
                // Info entries are 4-byte aligned in the metadata table...
                writer.Align(4);
                writer.WriteLong(Objects[i].PathID);
                // ...while each object's data is 8-byte aligned inside the body,
                // so align before recording its offset.
                objectwriter.Align(8);
                writer.WriteInt(objectwriter.Position);
                writer.WriteInt(Objects[i].Data.Length);
                writer.WriteInt(Objects[i].TypeID);

                objectwriter.WriteBytes(Objects[i].Data);
            }

            // return body
            return(objectwriter.ToBytes());
        }
Example #7
0
        /// <summary>
        /// Splits the concatenated file data into BLOCK_SIZE blocks, optionally
        /// LZ4-compresses them (serially for few blocks, in parallel otherwise),
        /// then writes the bundle info header (block infos + file infos) followed
        /// by the block data.
        /// </summary>
        /// <param name="writer">Destination writer; data is appended at its current position.</param>
        private void writeFiles(UnityBinaryWriter writer)
        {
            // calc total size
            // NOTE(review): totalsize appears unused — totalbytes below recomputes
            // the same sum; confirm and consider removing.
            int totalsize = Files.Sum(f => f.Data.Length);

            // teardown into blocks
            // 1.File is not aligned to block boundary (Simple concatation)
            // 2.Maximum block size is BLOCK_SIZE

            // Calculate block count
            var totalbytes = Files.Sum(f => f.Data.Length);
            var blockcount = totalbytes / BLOCK_SIZE + (totalbytes % BLOCK_SIZE != 0 ? 1 : 0);

            // Build blockinfo (flag 2 = LZ4-compressed block, 0 = stored raw)
            BlockInfo[] blockinfos = new BlockInfo[blockcount];
            short       blockflag  = EnableCompression ? (short)2 : (short)0;

            for (int i = 0; i < blockcount; i++)
            {
                blockinfos[i].uncompressedSize = BLOCK_SIZE;
                blockinfos[i].compressedSize   = BLOCK_SIZE;
                blockinfos[i].flag             = blockflag;
            }
            // The final block may be shorter than BLOCK_SIZE.
            if (totalbytes % BLOCK_SIZE != 0)
            {
                blockinfos[blockcount - 1].uncompressedSize = totalbytes % BLOCK_SIZE;
                blockinfos[blockcount - 1].compressedSize   = totalbytes % BLOCK_SIZE;
            }

            // Seek Writer (Skip Info)
            //int infoheadersize = 4 + 4 + 4;
            // 0x10 reserved bytes + block count + (uncompressed, compressed, flag) per block.
            int blockinfosize = 0x10 + 4 + (4 + 4 + 2) * blockinfos.Length;
            int fileinfosize  = 4 + Files.Sum(f => f.CalcInfoSize());
            int info_offset   = writer.Position;

            // Write Blocks

            // If no compression required, just copy all files
            if (!EnableCompression)
            {
                // Write Header

                // Info Header: compressed size, uncompressed size (equal here),
                // then flags (0x40 = uncompressed info block).
                writer.WriteIntBE(blockinfosize + fileinfosize);
                writer.WriteIntBE(blockinfosize + fileinfosize);
                writer.WriteIntBE(0x40);
                writer.Position += 0x10;
                // BlockInfo
                writer.WriteIntBE(blockcount);
                blockinfos.Write(writer);
                // FileInfo: offset, length, flags(4), null-terminated name per file.
                writer.WriteIntBE(Files.Length);
                int curoffset = 0;
                for (int i = 0; i < Files.Length; i++)
                {
                    writer.WriteLongBE(curoffset);
                    writer.WriteLongBE(Files[i].Data.LongLength);
                    writer.WriteIntBE(4);
                    writer.WriteStringToNull(Files[i].Name);
                    curoffset += Files[i].Data.Length;
                }

                // Write Files
                for (int i = 0; i < Files.Length; i++)
                {
                    writer.WriteBytes(Files[i].Data);
                }
            }
            // In compression mode, try to parallelize the compression
            else
            {
                // First of all, Prepare buffer for compression
                byte[] compbuf = MemoryPool <AssetBundleFile> .GetBuffer(blockcount *BLOCK_SIZE);

                // don't parallelize when block count is small
                if (blockcount < 128)
                {
                    // Scratch buffer for blocks that straddle a file boundary.
                    byte[] boundarybuf = MiniMemoryPool <AssetBundleFile> .GetBuffer(BLOCK_SIZE);

                    // remainlength = bytes of the previous file still waiting in boundarybuf.
                    int remainlength = 0;
                    int curblock     = 0;
                    for (int i = 0; i < Files.Length; i++)
                    {
                        // If previous file has overflow, concat and compress
                        // NOTE(review): assumes Files[i].Data.Length >= BLOCK_SIZE - remainlength;
                        // a file smaller than the remaining gap would over-read — confirm inputs.
                        if (remainlength > 0)
                        {
                            Buffer.BlockCopy(Files[i].Data, 0, boundarybuf, remainlength, BLOCK_SIZE - remainlength);
                            blockinfos[curblock].compressedSize = TryLZ4Compress(boundarybuf, 0, compbuf, curblock * BLOCK_SIZE, BLOCK_SIZE);
                            // Compression gained nothing: clear the compression bits of flag.
                            if (blockinfos[curblock].compressedSize == BLOCK_SIZE)
                            {
                                blockinfos[curblock].flag &= ~0x3F;
                            }
                            curblock++;
                        }

                        // update remainlength
                        int blockstart = 0;
                        if (remainlength > 0)
                        {
                            blockstart = BLOCK_SIZE - remainlength;
                        }

                        // compress fullblocks
                        int fullblockcount = (Files[i].Data.Length - blockstart) / BLOCK_SIZE;
                        for (int j = 0; j < fullblockcount; j++, curblock++)
                        {
                            blockinfos[curblock].compressedSize = TryLZ4Compress(Files[i].Data, blockstart + j * BLOCK_SIZE, compbuf, curblock * BLOCK_SIZE, BLOCK_SIZE);
                            if (blockinfos[curblock].compressedSize == BLOCK_SIZE)
                            {
                                blockinfos[curblock].flag &= ~0x3F;
                            }
                        }

                        // If the file has remaindata, buffer them
                        remainlength = (Files[i].Data.Length - blockstart) % BLOCK_SIZE;
                        if (remainlength > 0)
                        {
                            Buffer.BlockCopy(Files[i].Data, Files[i].Data.Length - remainlength, boundarybuf, 0, remainlength);
                        }
                    }
                    if (remainlength > 0)  // Process last block
                    {
                        blockinfos[curblock].compressedSize = TryLZ4Compress(boundarybuf, 0, compbuf, curblock * BLOCK_SIZE, remainlength);
                        if (blockinfos[curblock].compressedSize == remainlength)
                        {
                            blockinfos[curblock].flag &= ~0x3F;
                        }
                    }
                }
                else
                {
                    // Create CompressionInfo & Compress file boundary
                    // Boundary-straddling blocks are compressed serially here; whole
                    // blocks are described in compinfos and compressed in parallel below.
                    CompressionInfo[] compinfos   = new CompressionInfo[blockcount];
                    byte[]            boundarybuf = MiniMemoryPool <AssetBundleFile> .GetBuffer(BLOCK_SIZE);

                    int curblock     = 0;
                    int remainlength = 0;
                    for (int i = 0; i < Files.Length; i++)
                    {
                        // If previous file has overflow, concat and compress
                        if (remainlength > 0)
                        {
                            Buffer.BlockCopy(Files[i].Data, 0, boundarybuf, remainlength, BLOCK_SIZE - remainlength);
                            blockinfos[curblock].compressedSize = TryLZ4Compress(boundarybuf, 0, compbuf, curblock * BLOCK_SIZE, BLOCK_SIZE);
                            if (blockinfos[curblock].compressedSize == BLOCK_SIZE)
                            {
                                blockinfos[curblock].flag &= ~0x3F;
                            }
                            curblock++;
                        }

                        int blockstart = 0;
                        if (remainlength > 0)
                        {
                            blockstart = BLOCK_SIZE - remainlength;
                        }

                        // Record full blocks for the parallel pass (entries with
                        // data == null are skipped there, i.e. already handled).
                        int fullblockcount = (Files[i].Data.Length - blockstart) / BLOCK_SIZE;
                        for (int j = 0; j < fullblockcount; j++, curblock++)
                        {
                            compinfos[curblock].data   = Files[i].Data;
                            compinfos[curblock].length = BLOCK_SIZE;
                            compinfos[curblock].offset = blockstart + j * BLOCK_SIZE;
                        }

                        // If the file has remaindata, buffer them
                        remainlength = (Files[i].Data.Length - blockstart) % BLOCK_SIZE;
                        if (remainlength > 0)
                        {
                            Buffer.BlockCopy(Files[i].Data, Files[i].Data.Length - remainlength, boundarybuf, 0, remainlength);
                        }
                    }
                    if (remainlength > 0)   // Process last block
                    {
                        // NOTE(review): unlike the serial path this calls Encode
                        // directly; it is assumed to return 0 when the output does
                        // not fit in remainlength bytes — confirm against
                        // TryLZ4Compress's semantics.
                        blockinfos[curblock].compressedSize =
                            LZ4.LZ4Codec.Encode(boundarybuf, 0, remainlength,
                                                compbuf, curblock * BLOCK_SIZE, remainlength);
                        // If compression is no use, just copy
                        if (blockinfos[curblock].compressedSize == 0)
                        {
                            blockinfos[curblock].compressedSize = remainlength;
                            blockinfos[curblock].flag          &= ~0x3F;
                            // NOTE(review): copies BLOCK_SIZE bytes though only
                            // remainlength are meaningful; only compressedSize bytes
                            // are emitted below so the stale tail never leaves the
                            // buffer, but remainlength would be safer here.
                            Buffer.BlockCopy(boundarybuf, 0, compbuf, curblock * BLOCK_SIZE, BLOCK_SIZE);
                        }
                    }

                    // Parallelly compress the data (blocks are independent; each
                    // writes its own disjoint BLOCK_SIZE slice of compbuf)
                    Parallel.For(0, blockcount, i => {
                        if (compinfos[i].data == null)
                        {
                            return;
                        }
                        blockinfos[i].compressedSize = TryLZ4Compress(compinfos[i].data, compinfos[i].offset, compbuf, i * BLOCK_SIZE, compinfos[i].length);
                        if (blockinfos[i].compressedSize == BLOCK_SIZE)
                        {
                            blockinfos[i].flag &= ~0x3F;
                        }
                    });
                }

                // Write Headers
                // The block/file info tables are themselves LZ4-compressed, so
                // build them in a separate in-memory writer first.
                UnityBinaryWriter headerwriter = new UnityBinaryWriter();
                // Info Header
                headerwriter.Position += 0x10;
                // BlockInfo
                headerwriter.WriteIntBE(blockcount);
                blockinfos.Write(headerwriter);
                // FileInfo
                headerwriter.WriteIntBE(Files.Length);
                int curoffset = 0;
                for (int i = 0; i < Files.Length; i++)
                {
                    headerwriter.WriteLongBE(curoffset);
                    headerwriter.WriteLongBE(Files[i].Data.LongLength);
                    headerwriter.WriteIntBE(4);
                    headerwriter.WriteStringToNull(Files[i].Name);
                    curoffset += Files[i].Data.Length;
                }

                // Compress and write header: reserve the three size ints, write
                // the compressed info block, then backpatch the sizes
                // (flag 0x42 = LZ4-compressed info block).
                writer.Position += 4 + 4 + 4;
                int header_compsize = writer.WriteLZ4Data(headerwriter.ToBytes());
                int final_pos       = writer.Position;
                writer.Position = info_offset;
                writer.WriteIntBE(header_compsize);
                writer.WriteIntBE(blockinfosize + fileinfosize);
                writer.WriteIntBE(0x42);
                writer.Position = final_pos;

                // Write Blocks (only compressedSize bytes of each slice are emitted)
                for (int i = 0; i < blockcount; i++)
                {
                    writer.WriteBytes(compbuf, i * BLOCK_SIZE, blockinfos[i].compressedSize);
                }
            }
        }