private ShaderSubProgramBlob ReadSubProgramBlobs(AssetLayout layout, MemoryStream memStream, uint offset, uint compressedLength, uint decompressedLength)
{
	memStream.Position = offset;
	byte[] decompressedBuffer = new byte[decompressedLength];
	using (Lz4DecodeStream lz4Stream = new Lz4DecodeStream(memStream, (int)compressedLength))
	{
		lz4Stream.ReadBuffer(decompressedBuffer, 0, decompressedBuffer.Length);
	}

	using (MemoryStream blobMem = new MemoryStream(decompressedBuffer))
	using (AssetReader blobReader = new AssetReader(blobMem, EndianType.LittleEndian, layout))
	{
		ShaderSubProgramBlob blob = new ShaderSubProgramBlob();
		blob.Read(blobReader);
		return blob;
	}
}
private void ReadBlob(AssetLayout layout, MemoryStream memStream, uint compressedLength, uint decompressedLength, int segment)
{
	byte[] decompressedBuffer = new byte[decompressedLength];
	using (Lz4DecodeStream lz4Stream = new Lz4DecodeStream(memStream, compressedLength))
	{
		lz4Stream.ReadBuffer(decompressedBuffer, 0, decompressedBuffer.Length);
	}

	using (MemoryStream blobMem = new MemoryStream(decompressedBuffer))
	using (AssetReader blobReader = new AssetReader(blobMem, EndianType.LittleEndian, layout))
	{
		if (segment == 0)
		{
			// segment 0 carries the entry table; allocate the sub-program storage once
			Entries = blobReader.ReadAssetArray<ShaderSubProgramEntry>();
			SubPrograms = new ShaderSubProgram[Entries.Length];
		}
		ReadSegment(blobReader, segment);
	}
}
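// A minimal usage sketch (not from the original source): in the segmented layout, the entry
// table exists only in segment 0, so callers are expected to decode the segments in order.
// ReadBlobSegments is a hypothetical wrapper name; it assumes the caller has already
// positioned memStream at the first segment and that segments are stored back to back.
private void ReadBlobSegments(AssetLayout layout, MemoryStream memStream, uint[] compressedLengths, uint[] decompressedLengths)
{
	for (int segment = 0; segment < compressedLengths.Length; segment++)
	{
		// segment 0 allocates Entries and SubPrograms; later segments only fill SubPrograms
		ReadBlob(layout, memStream, compressedLengths[segment], decompressedLengths[segment], segment);
	}
}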
public override void Read(AssetReader reader)
{
	if (IsSerialized(reader.Version))
	{
		ReadBase(reader);

		ParsedForm.Read(reader);

		m_platforms = reader.ReadEnum32Array((t) => (GPUPlatform)t);
		uint[] offsets = reader.ReadUInt32Array();
		uint[] compressedLengths = reader.ReadUInt32Array();
		uint[] decompressedLengths = reader.ReadUInt32Array();
		byte[] compressedBlob = reader.ReadByteArray();
		reader.AlignStream(AlignType.Align4);

		m_subProgramBlobs = new ShaderSubProgramBlob[m_platforms.Length];
		using (MemoryStream memStream = new MemoryStream(compressedBlob))
		{
			for (int i = 0; i < m_platforms.Length; i++)
			{
				uint offset = offsets[i];
				uint compressedLength = compressedLengths[i];
				uint decompressedLength = decompressedLengths[i];

				memStream.Position = offset;
				byte[] decompressedBuffer = new byte[decompressedLength];
				using (Lz4DecodeStream lz4Stream = new Lz4DecodeStream(memStream, (int)compressedLength))
				{
					int read = lz4Stream.Read(decompressedBuffer, 0, decompressedBuffer.Length);
					if (read != decompressedLength)
					{
						throw new Exception($"Can't properly decode shader blob. Read {read} but expected {decompressedLength}");
					}
				}

				using (MemoryStream blobMem = new MemoryStream(decompressedBuffer))
				using (AssetReader blobReader = new AssetReader(blobMem, reader.Version, reader.Platform, reader.Flags))
				{
					ShaderSubProgramBlob blob = new ShaderSubProgramBlob();
					blob.Read(blobReader);
					m_subProgramBlobs[i] = blob;
				}
			}
		}
	}
	else
	{
		base.Read(reader);

		if (IsEncoded(reader.Version))
		{
			uint decompressedSize = reader.ReadUInt32();
			int compressedSize = reader.ReadInt32();

			byte[] subProgramBlob = new byte[compressedSize];
			reader.Read(subProgramBlob, 0, compressedSize);
			reader.AlignStream(AlignType.Align4);

			if (compressedSize > 0 && decompressedSize > 0)
			{
				byte[] decompressedBuffer = new byte[decompressedSize];
				using (MemoryStream memStream = new MemoryStream(subProgramBlob))
				using (Lz4DecodeStream lz4Stream = new Lz4DecodeStream(memStream))
				{
					int read = lz4Stream.Read(decompressedBuffer, 0, decompressedBuffer.Length);
					if (read != decompressedSize)
					{
						throw new Exception($"Can't properly decode sub program blob. Read {read} but expected {decompressedSize}");
					}
				}

				using (MemoryStream memStream = new MemoryStream(decompressedBuffer))
				using (AssetReader blobReader = new AssetReader(memStream, reader.Version, reader.Platform, reader.Flags))
				{
					SubProgramBlob.Read(blobReader);
				}
			}
		}

		if (IsReadFallback(reader.Version))
		{
			Fallback.Read(reader);
		}
		if (IsReadDefaultProperties(reader.Version))
		{
			DefaultProperties.Read(reader);
		}
		if (IsReadStaticProperties(reader.Version))
		{
			StaticProperties.Read(reader);
		}
	}

	if (IsReadDependencies(reader.Version))
	{
		m_dependencies = reader.ReadAssetArray<PPtr<Shader>>();
	}
	if (IsReadNonModifiableTextures(reader.Version))
	{
		m_nonModifiableTextures = reader.ReadAssetArray<PPtr<Texture>>();
	}
	if (IsReadShaderIsBaked(reader.Version))
	{
		ShaderIsBaked = reader.ReadBoolean();
		reader.AlignStream(AlignType.Align4);
	}
}
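// Illustrative sketch (not from the original source): in the serialized path above, all
// per-platform blobs live in one concatenated byte array, and the three parallel arrays
// (offsets, compressedLengths, decompressedLengths) share the platform index. Extracting
// the i-th compressed blob is therefore plain array arithmetic; SliceCompressedBlob is a
// hypothetical helper name.
private static byte[] SliceCompressedBlob(byte[] compressedBlob, uint[] offsets, uint[] compressedLengths, int i)
{
	byte[] slice = new byte[compressedLengths[i]];
	// copy compressedLengths[i] bytes starting at offsets[i] into a standalone buffer
	Buffer.BlockCopy(compressedBlob, (int)offsets[i], slice, 0, (int)compressedLengths[i]);
	return slice;
}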
private void Read530Blocks(SmartStream dataStream)
{
	if (Header.Flags.IsMetadataAtTheEnd())
	{
		dataStream.Position = Header.HeaderSize;
	}
	int cachedBlock = -1;
	long dataOffset = dataStream.Position;

	// If the bundle is backed by a MemoryStream and contains compressed blocks, we need to
	// create an individual stream for each entry and copy its data into it
	bool createIndividualStreams = dataStream.StreamType == SmartStreamType.Memory;
	if (createIndividualStreams)
	{
		// find out if this bundle file has any compressed blocks
		createIndividualStreams = false;
		foreach (BlockInfo block in Metadata.BlockInfos)
		{
			if (block.Flags.GetCompression() != BundleCompressType.None)
			{
				createIndividualStreams = true;
				break;
			}
		}
	}

	using (SmartStream blockStream = SmartStream.CreateNull())
	{
		foreach (BundleFileEntry entry in Metadata.Entries.Values)
		{
			// find the block that contains the beginning of the current entry
			long blockCompressedOffset = 0;
			long blockDecompressedOffset = 0;
			int blockIndex = 0;
			while (blockDecompressedOffset + Metadata.BlockInfos[blockIndex].DecompressedSize <= entry.Offset)
			{
				blockCompressedOffset += Metadata.BlockInfos[blockIndex].CompressedSize;
				blockDecompressedOffset += Metadata.BlockInfos[blockIndex].DecompressedSize;
				blockIndex++;
			}

			// if at least one block of this entry is compressed, or according to the rule above,
			// we should copy the data of the current entry to a separate stream
			bool needToCopy = createIndividualStreams;
			if (!needToCopy)
			{
				// check if this entry has compressed blocks
				long entrySize = 0;
				for (int bi = blockIndex; entrySize < entry.Size; bi++)
				{
					if (Metadata.BlockInfos[bi].Flags.GetCompression() != BundleCompressType.None)
					{
						// it does, so we need to create an individual stream and decompress its data into it
						needToCopy = true;
						break;
					}
					entrySize += Metadata.BlockInfos[bi].DecompressedSize;
				}
			}

			long entryOffsetInsideBlock = entry.Offset - blockDecompressedOffset;
			if (needToCopy)
			{
				// at least one block is compressed, so we should copy the data of the current entry to a separate stream
				using (SmartStream entryStream = CreateStream(entry.Size))
				{
					long left = entry.Size;
					dataStream.Position = dataOffset + blockCompressedOffset;

					// copy the data of all blocks used by the current entry into the new stream
					for (int bi = blockIndex; left > 0; bi++)
					{
						long blockOffset = 0;
						BlockInfo block = Metadata.BlockInfos[bi];
						if (cachedBlock == bi)
						{
							// some data of the previous entry is in the same block as this one,
							// so we don't need to unpack it again. Instead we can use the cached stream
							dataStream.Position += block.CompressedSize;
						}
						else
						{
							BundleCompressType compressType = block.Flags.GetCompression();
							switch (compressType)
							{
								case BundleCompressType.None:
									blockOffset = dataOffset + blockCompressedOffset;
									blockStream.Assign(dataStream);
									break;

								case BundleCompressType.LZMA:
									blockStream.Move(CreateStream(block.DecompressedSize));
									SevenZipHelper.DecompressLZMAStream(dataStream, block.CompressedSize, blockStream, block.DecompressedSize);
									break;

								case BundleCompressType.LZ4:
								case BundleCompressType.LZ4HZ:
									blockStream.Move(CreateStream(block.DecompressedSize));
									using (Lz4DecodeStream lzStream = new Lz4DecodeStream(dataStream, block.CompressedSize))
									{
										long read = lzStream.Read(blockStream, block.DecompressedSize);
										if (read != block.DecompressedSize || lzStream.IsDataLeft)
										{
											throw new Exception($"Read {read} but expected {block.DecompressedSize}");
										}
									}
									break;

								default:
									throw new NotImplementedException($"Bundle compression '{compressType}' isn't supported");
							}
							cachedBlock = bi;
						}

						// consider the next offsets:
						// 1) block - if it is a new stream, the offset is 0; otherwise it is the offset of this block in the bundle file
						// 2) entry - if this is the first block of the current entry, it is the offset of the entry relative to this block;
						//    otherwise 0
						long fragmentSize = block.DecompressedSize - entryOffsetInsideBlock;
						blockStream.Position = blockOffset + entryOffsetInsideBlock;
						entryOffsetInsideBlock = 0;

						long size = Math.Min(fragmentSize, left);
						blockStream.CopyStream(entryStream, size);

						blockCompressedOffset += block.CompressedSize;
						left -= size;
					}
					if (left < 0)
					{
						throw new Exception("Read more than expected");
					}

					FileEntryOffset feOffset = new FileEntryOffset(entryStream.CreateReference(), 0);
					m_entryStreams.Add(entry, feOffset);
				}
			}
			else
			{
				// no compressed blocks were found, so we can use the original bundle stream;
				// since FileEntry.Offset is an offset in decompressed space, we need to translate it into a raw stream position
				FileEntryOffset feOffset = new FileEntryOffset(dataStream.CreateReference(), dataOffset + blockCompressedOffset + entryOffsetInsideBlock);
				m_entryStreams.Add(entry, feOffset);
			}
		}
	}
}
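// Illustrative sketch (not from the original source): entry.Offset is an offset in
// decompressed space, while blocks are stored back to back in compressed space, so the
// while-loop above advances two accumulators in lockstep until the current block's
// decompressed range covers the entry offset. Reduced to plain size arrays (LocateBlock
// is a hypothetical helper name), the search is:
private static (int BlockIndex, long CompressedOffset, long DecompressedOffset) LocateBlock(long[] compressedSizes, long[] decompressedSizes, long entryOffset)
{
	int index = 0;
	long compressed = 0;
	long decompressed = 0;
	// stop at the first block whose decompressed range contains entryOffset
	while (decompressed + decompressedSizes[index] <= entryOffset)
	{
		compressed += compressedSizes[index];
		decompressed += decompressedSizes[index];
		index++;
	}
	return (index, compressed, decompressed);
}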
private SmartStream Read530Metadata(BundleFileReader reader)
{
	if (Header.Flags.IsMetadataAtTheEnd())
	{
		reader.BaseStream.Position = Header.BundleSize - Header.MetadataCompressedSize;
	}

	BundleCompressType metaCompression = Header.Flags.GetCompression();
	switch (metaCompression)
	{
		case BundleCompressType.None:
		{
			Metadata.Read(reader);

			long expectedPosition = Header.Flags.IsMetadataAtTheEnd() ? Header.BundleSize : Header.HeaderSize + Header.MetadataDecompressedSize;
			if (reader.BaseStream.Position != expectedPosition)
			{
				throw new Exception($"Stopped at {reader.BaseStream.Position} but expected {expectedPosition}");
			}
		}
		break;

		case BundleCompressType.LZMA:
		{
			using (MemoryStream stream = new MemoryStream(new byte[Header.MetadataDecompressedSize]))
			{
				SevenZipHelper.DecompressLZMASizeStream(reader.BaseStream, Header.MetadataCompressedSize, stream);
				using (BundleFileReader decompressReader = new BundleFileReader(stream, reader.EndianType, reader.Generation))
				{
					Metadata.Read(decompressReader);
				}
				if (stream.Position != Header.MetadataDecompressedSize)
				{
					throw new Exception($"Read {stream.Position} but expected {Header.MetadataDecompressedSize}");
				}
			}
		}
		break;

		case BundleCompressType.LZ4:
		case BundleCompressType.LZ4HZ:
		{
			using (MemoryStream stream = new MemoryStream(new byte[Header.MetadataDecompressedSize]))
			{
				using (Lz4DecodeStream decodeStream = new Lz4DecodeStream(reader.BaseStream, Header.MetadataCompressedSize))
				{
					long read = decodeStream.Read(stream, Header.MetadataDecompressedSize);
					if (read != Header.MetadataDecompressedSize || decodeStream.IsDataLeft)
					{
						throw new Exception($"Read {read} but expected {Header.MetadataDecompressedSize}");
					}
				}

				stream.Position = 0;
				using (BundleFileReader decompressReader = new BundleFileReader(stream, reader.EndianType, reader.Generation))
				{
					Metadata.Read(decompressReader);
				}
				if (stream.Position != Header.MetadataDecompressedSize)
				{
					throw new Exception($"Read {stream.Position} but expected {Header.MetadataDecompressedSize}");
				}
			}
		}
		break;

		default:
			throw new NotSupportedException($"Bundle compression '{metaCompression}' isn't supported");
	}
	return m_stream.CreateReference();
}
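// Illustrative sketch (not from the original source): both compressed metadata paths follow
// the same contract of "decompress into a buffer of the expected size, then verify the byte
// count". With plain System.IO streams, the checked copy looks like this; CopyChecked is a
// hypothetical helper name.
private static void CopyChecked(Stream source, Stream destination, long expected)
{
	byte[] buffer = new byte[81920];
	long total = 0;
	int read;
	while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
	{
		destination.Write(buffer, 0, read);
		total += read;
	}
	if (total != expected)
	{
		throw new Exception($"Read {total} but expected {expected}");
	}
}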
public SmartStream ReadEntry(BundleFileEntry entry)
{
	if (m_isDisposed)
	{
		throw new ObjectDisposedException(nameof(BundleFileBlockReader));
	}

	// find the block that contains the beginning of the entry
	int blockIndex;
	long blockCompressedOffset = 0;
	long blockDecompressedOffset = 0;
	for (blockIndex = 0; blockDecompressedOffset + m_metadata.BlockInfos[blockIndex].DecompressedSize <= entry.Offset; blockIndex++)
	{
		blockCompressedOffset += m_metadata.BlockInfos[blockIndex].CompressedSize;
		blockDecompressedOffset += m_metadata.BlockInfos[blockIndex].DecompressedSize;
	}
	long entryOffsetInsideBlock = entry.Offset - blockDecompressedOffset;

	using (SmartStream entryStream = CreateStream(entry.Size))
	{
		long left = entry.Size;
		m_stream.Position = m_dataOffset + blockCompressedOffset;

		// copy the data of all blocks used by the current entry into the new stream
		while (left > 0)
		{
			long blockStreamOffset;
			Stream blockStream;
			BlockInfo block = m_metadata.BlockInfos[blockIndex];
			if (m_cachedBlockIndex == blockIndex)
			{
				// the data of the previous entry is in the same block as this one,
				// so we don't need to unpack it again. Instead we can use the cached stream
				blockStreamOffset = 0;
				blockStream = m_cachedBlockStream;
				m_stream.Position += block.CompressedSize;
			}
			else
			{
				BundleCompressType compressType = block.Flags.GetCompression();
				if (compressType == BundleCompressType.None)
				{
					blockStreamOffset = m_dataOffset + blockCompressedOffset;
					blockStream = m_stream;
				}
				else
				{
					blockStreamOffset = 0;
					m_cachedBlockIndex = blockIndex;
					m_cachedBlockStream.Move(CreateStream(block.DecompressedSize));
					switch (compressType)
					{
						case BundleCompressType.LZMA:
							SevenZipHelper.DecompressLZMAStream(m_stream, block.CompressedSize, m_cachedBlockStream, block.DecompressedSize);
							break;

						case BundleCompressType.LZ4:
						case BundleCompressType.LZ4HZ:
							using (Lz4DecodeStream lzStream = new Lz4DecodeStream(m_stream, block.CompressedSize))
							{
								lzStream.ReadBuffer(m_cachedBlockStream, block.DecompressedSize);
							}
							break;

						default:
							throw new NotImplementedException($"Bundle compression '{compressType}' isn't supported");
					}
					blockStream = m_cachedBlockStream;
				}
			}

			// consider the next offsets:
			// 1) block - if it is a new stream, the offset is 0; otherwise it is the offset of this block in the bundle file
			// 2) entry - if this is the first block of the current entry, it is the offset of the entry relative to this block;
			//    otherwise 0
			long blockSize = block.DecompressedSize - entryOffsetInsideBlock;
			blockStream.Position = blockStreamOffset + entryOffsetInsideBlock;
			entryOffsetInsideBlock = 0;

			long size = Math.Min(blockSize, left);
			blockStream.CopyStream(entryStream, size);

			blockIndex++;
			blockCompressedOffset += block.CompressedSize;
			left -= size;
		}
		if (left < 0)
		{
			throw new Exception("Read more than expected");
		}

		entryStream.Position = 0;
		return entryStream.CreateReference();
	}
}
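// Illustrative sketch (not from the original source): ReadEntry caches only the most
// recently decompressed block because, as the comment above notes, the tail block of one
// entry is frequently the head block of the next. The cache reduces to remembering a
// single (index, payload) pair; GetCachedBlock, the fields, and the decompress delegate
// are hypothetical names.
private int m_lastBlockIndex = -1;
private byte[] m_lastBlockPayload;

private byte[] GetCachedBlock(int blockIndex, Func<int, byte[]> decompress)
{
	if (blockIndex != m_lastBlockIndex)
	{
		// cache miss: decompress the block and remember it for the next entry
		m_lastBlockPayload = decompress(blockIndex);
		m_lastBlockIndex = blockIndex;
	}
	return m_lastBlockPayload;
}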
public override void Read(AssetReader reader)
{
	if (IsSerialized(reader.Version))
	{
		ReadNamedObject(reader);

		ParsedForm.Read(reader);

		Platforms = reader.ReadArray((t) => (GPUPlatform)t);
		uint[] offsets = reader.ReadUInt32Array();
		uint[] compressedLengths = reader.ReadUInt32Array();
		uint[] decompressedLengths = reader.ReadUInt32Array();
		byte[] compressedBlob = reader.ReadByteArray();
		reader.AlignStream();

		SubProgramBlobs = new ShaderSubProgramBlob[Platforms.Length];
		using (MemoryStream memStream = new MemoryStream(compressedBlob))
		{
			for (int i = 0; i < Platforms.Length; i++)
			{
				uint offset = offsets[i];
				uint compressedLength = compressedLengths[i];
				uint decompressedLength = decompressedLengths[i];

				memStream.Position = offset;
				byte[] decompressedBuffer = new byte[decompressedLength];
				using (Lz4DecodeStream lz4Stream = new Lz4DecodeStream(memStream, (int)compressedLength))
				{
					lz4Stream.ReadBuffer(decompressedBuffer, 0, decompressedBuffer.Length);
				}

				using (MemoryStream blobMem = new MemoryStream(decompressedBuffer))
				using (AssetReader blobReader = new AssetReader(blobMem, EndianType.LittleEndian, reader.Version, reader.Platform, reader.Flags))
				{
					ShaderSubProgramBlob blob = new ShaderSubProgramBlob();
					blob.Read(blobReader);
					SubProgramBlobs[i] = blob;
				}
			}
		}
	}
	else
	{
		base.Read(reader);

		if (IsEncoded(reader.Version))
		{
			uint decompressedSize = reader.ReadUInt32();
			int compressedSize = reader.ReadInt32();
			if (compressedSize > 0 && decompressedSize > 0)
			{
				byte[] subProgramBlob = new byte[compressedSize];
				reader.ReadBuffer(subProgramBlob, 0, compressedSize);

				byte[] decompressedBuffer = new byte[decompressedSize];
				using (MemoryStream memStream = new MemoryStream(subProgramBlob))
				using (Lz4DecodeStream lz4Stream = new Lz4DecodeStream(memStream))
				{
					lz4Stream.ReadBuffer(decompressedBuffer, 0, decompressedBuffer.Length);
				}

				using (MemoryStream memStream = new MemoryStream(decompressedBuffer))
				using (AssetReader blobReader = new AssetReader(memStream, EndianType.LittleEndian, reader.Version, reader.Platform, reader.Flags))
				{
					SubProgramBlob.Read(blobReader);
				}
			}
			reader.AlignStream();
		}

		if (HasFallback(reader.Version))
		{
			Fallback.Read(reader);
		}
		if (HasDefaultProperties(reader.Version))
		{
			DefaultProperties.Read(reader);
		}
		if (HasStaticProperties(reader.Version))
		{
			StaticProperties.Read(reader);
		}
	}

	if (HasDependencies(reader.Version))
	{
		Dependencies = reader.ReadAssetArray<PPtr<Shader>>();
	}
	if (HasNonModifiableTextures(reader.Version))
	{
		NonModifiableTextures = new Dictionary<string, PPtr<Texture>>();
		NonModifiableTextures.Read(reader);
	}
	if (HasShaderIsBaked(reader.Version))
	{
		ShaderIsBaked = reader.ReadBoolean();
		reader.AlignStream();
	}

#if UNIVERSAL
	if (HasErrors(reader.Version, reader.Flags))
	{
		Errors = reader.ReadAssetArray<ShaderError>();
	}
	if (HasDefaultTextures(reader.Version, reader.Flags))
	{
		DefaultTextures = new Dictionary<string, PPtr<Texture>>();
		DefaultTextures.Read(reader);
	}
	if (HasCompileInfo(reader.Version, reader.Flags))
	{
		CompileInfo.Read(reader);
	}
#endif
}
private void Read530Blocks(SmartStream bundleStream, BlockInfo[] blockInfos)
{
	int cachedBlock = -1;
	long dataOffset = bundleStream.Position;

	BundleFileEntry[] newEntries = new BundleFileEntry[Metadata.Entries.Count];
	using (SmartStream blockStream = SmartStream.CreateNull())
	{
		for (int ei = 0; ei < Metadata.Entries.Count; ei++)
		{
			BundleFileEntry entry = Metadata.Entries[ei];

			// find the block corresponding to the current entry
			int blockIndex = 0;
			long compressedOffset = 0;
			long decompressedOffset = 0;
			while (true)
			{
				BlockInfo block = blockInfos[blockIndex];
				if (decompressedOffset + block.DecompressedSize > entry.Offset)
				{
					break;
				}
				blockIndex++;
				compressedOffset += block.CompressedSize;
				decompressedOffset += block.DecompressedSize;
			}

			// check whether this entry uses any compressed blocks
			long entrySize = 0;
			bool isCompressed = false;
			for (int bi = blockIndex; entrySize < entry.Size; bi++)
			{
				BlockInfo block = blockInfos[bi];
				entrySize += block.DecompressedSize;
				if (block.Flags.GetCompression() != BundleCompressType.None)
				{
					isCompressed = true;
					break;
				}
			}

			if (isCompressed)
			{
				// at least one block is compressed, so we should copy the data of the current entry to a separate stream
				using (SmartStream entryStream = CreateStream(entry.Size))
				{
					long left = entry.Size;
					long entryOffset = entry.Offset - decompressedOffset;
					bundleStream.Position = dataOffset + compressedOffset;

					// copy the data of all blocks used by the current entry into the created stream
					for (int bi = blockIndex; left > 0; bi++)
					{
						long blockOffset = 0;
						BlockInfo block = blockInfos[bi];
						if (cachedBlock == bi)
						{
							// some data of the previous entry is in the same block as this one,
							// so we don't need to unpack it again but can use the cached stream
							bundleStream.Position += block.CompressedSize;
						}
						else
						{
							BundleCompressType compressType = block.Flags.GetCompression();
							switch (compressType)
							{
								case BundleCompressType.None:
									blockOffset = dataOffset + compressedOffset;
									blockStream.Assign(bundleStream);
									break;

								case BundleCompressType.LZMA:
									blockStream.Move(CreateStream(block.DecompressedSize));
									SevenZipHelper.DecompressLZMAStream(bundleStream, block.CompressedSize, blockStream, block.DecompressedSize);
									break;

								case BundleCompressType.LZ4:
								case BundleCompressType.LZ4HZ:
									blockStream.Move(CreateStream(block.DecompressedSize));
									using (Lz4DecodeStream lzStream = new Lz4DecodeStream(bundleStream, block.CompressedSize))
									{
										long read = lzStream.Read(blockStream, block.DecompressedSize);
										if (read != block.DecompressedSize)
										{
											throw new Exception($"Read {read} but expected {block.DecompressedSize}");
										}
									}
									break;

								default:
									throw new NotImplementedException($"Bundle compression '{compressType}' isn't supported");
							}
							cachedBlock = bi;
						}

						// consider the next offsets:
						// 1) block - if it is a new stream, the offset is 0; otherwise it is the offset of this block in the bundle file
						// 2) entry - if this is the first block of the current entry, it is the offset of the entry relative to this block;
						//    otherwise 0
						long fragmentSize = block.DecompressedSize - entryOffset;
						blockStream.Position = blockOffset + entryOffset;
						entryOffset = 0;

						long size = Math.Min(fragmentSize, left);
						blockStream.CopyStream(entryStream, size);

						compressedOffset += block.CompressedSize;
						left -= size;
					}
					if (left < 0)
					{
						throw new Exception("Read more than expected");
					}

					newEntries[ei] = new BundleFileEntry(entryStream, entry.FilePath, entry.Name, 0, entry.Size);
				}
			}
			else
			{
				// no compressed blocks were found, so we can use the original bundle stream
				newEntries[ei] = new BundleFileEntry(entry, dataOffset + entry.Offset);
			}
		}
	}
	Metadata.Dispose();
	Metadata = new BundleMetadata(m_filePath, newEntries);
}
private void Read530Metadata(EndianReader reader, long basePosition)
{
	SmartStream bundleStream = (SmartStream)reader.BaseStream;
	long dataPosition = bundleStream.Position;
	if (Header.Flags.IsMetadataAtTheEnd())
	{
		bundleStream.Position = basePosition + Header.BundleSize - Header.MetadataCompressedSize;
	}
	else
	{
		dataPosition += Header.MetadataCompressedSize;
	}

	BlockInfo[] blockInfos;
	BundleCompressType metaCompression = Header.Flags.GetCompression();
	switch (metaCompression)
	{
		case BundleCompressType.None:
		{
			long metaPosition = bundleStream.Position;

			// unknown 0x10
			bundleStream.Position += 0x10;
			blockInfos = reader.ReadEndianArray<BlockInfo>();
			Metadata = new BundleMetadata(m_filePath);
			Metadata.Read530(reader, bundleStream);

			if (bundleStream.Position != metaPosition + Header.MetadataDecompressedSize)
			{
				throw new Exception($"Read {bundleStream.Position - metaPosition} but expected {Header.MetadataDecompressedSize}");
			}
			break;
		}

		case BundleCompressType.LZMA:
		{
			using (MemoryStream metaStream = new MemoryStream(new byte[Header.MetadataDecompressedSize]))
			{
				SevenZipHelper.DecompressLZMASizeStream(bundleStream, Header.MetadataCompressedSize, metaStream);
				using (EndianReader metaReader = new EndianReader(metaStream, EndianType.BigEndian))
				{
					// unknown 0x10
					metaReader.BaseStream.Position += 0x10;
					blockInfos = metaReader.ReadEndianArray<BlockInfo>();
					Metadata = new BundleMetadata(m_filePath);
					Metadata.Read530(metaReader, bundleStream);
				}
				if (metaStream.Position != metaStream.Length)
				{
					throw new Exception($"Read {metaStream.Position} but expected {metaStream.Length}");
				}
			}
			break;
		}

		case BundleCompressType.LZ4:
		case BundleCompressType.LZ4HZ:
		{
			using (MemoryStream metaStream = new MemoryStream(new byte[Header.MetadataDecompressedSize]))
			{
				using (Lz4DecodeStream lzStream = new Lz4DecodeStream(bundleStream, Header.MetadataCompressedSize))
				{
					long read = lzStream.Read(metaStream, Header.MetadataDecompressedSize);
					metaStream.Position = 0;
					if (read != Header.MetadataDecompressedSize)
					{
						throw new Exception($"Read {read} but expected {Header.MetadataDecompressedSize}");
					}
				}

				using (EndianReader metaReader = new EndianReader(metaStream, EndianType.BigEndian))
				{
					// unknown 0x10
					metaReader.BaseStream.Position += 0x10;
					blockInfos = metaReader.ReadEndianArray<BlockInfo>();
					Metadata = new BundleMetadata(m_filePath);
					Metadata.Read530(metaReader, bundleStream);
				}
				if (metaStream.Position != metaStream.Length)
				{
					throw new Exception($"Read {metaStream.Position} but expected {metaStream.Length}");
				}
			}
			break;
		}

		default:
			throw new NotSupportedException($"Bundle compression '{metaCompression}' isn't supported");
	}

	bundleStream.Position = dataPosition;
	Read530Blocks(bundleStream, blockInfos);
}
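// Illustrative sketch (not from the original source): the position bookkeeping at the top
// of Read530Metadata reduces to simple arithmetic. Assuming the stream stands right after
// the header when the method is entered, the metadata either trails the file or directly
// follows the header, and the entry data begins after whichever region sits up front.
// LocateMetadata is a hypothetical helper name.
private static (long MetadataPosition, long DataPosition) LocateMetadata(long basePosition, long headerSize, long bundleSize, long metadataCompressedSize, bool metadataAtTheEnd)
{
	long afterHeader = basePosition + headerSize;
	return metadataAtTheEnd
		// metadata sits at the tail; entry data starts right after the header
		? (basePosition + bundleSize - metadataCompressedSize, afterHeader)
		// metadata follows the header; entry data starts right after the metadata
		: (afterHeader, afterHeader + metadataCompressedSize);
}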