/// <summary>
/// Reads the data blocks of a 5.3.0+ bundle and registers a stream offset for every file entry.
/// Entries that span only uncompressed blocks reference the original bundle stream directly;
/// entries touching at least one compressed block (or any entry when the bundle lives in a
/// memory stream that contains compressed blocks) get their data copied into an individual stream.
/// </summary>
/// <param name="dataStream">Bundle stream positioned at (or before) the block data.</param>
private void Read530Blocks(SmartStream dataStream)
{
	if (Header.Flags.IsMetadataAtTheEnd())
	{
		// metadata is stored at the end of the file, so block data starts right after the header
		dataStream.Position = Header.HeaderSize;
	}
	int cachedBlock = -1;
	long dataOffset = dataStream.Position;

	// If the bundle is backed by a memory stream AND has compressed blocks then we need to
	// create individual streams for each entry and copy its data into them.
	// BUGFIX: previously this started as (StreamType == Memory) and the scan loop below was a
	// no-op (it only set the flag to true when it was already true), so a memory-backed bundle
	// with NO compressed blocks was still copied entry-by-entry.
	bool createIndividualStreams = false;
	if (dataStream.StreamType == SmartStreamType.Memory)
	{
		// find out if this bundle file has compressed blocks
		foreach (BlockInfo block in Metadata.BlockInfos)
		{
			if (block.Flags.GetCompression() != BundleCompressType.None)
			{
				createIndividualStreams = true;
				break;
			}
		}
	}

	using (SmartStream blockStream = SmartStream.CreateNull())
	{
		foreach (BundleFileEntry entry in Metadata.Entries.Values)
		{
			// find the first block that contains data of the current entry
			long blockCompressedOffset = 0;
			long blockDecompressedOffset = 0;
			int blockIndex = 0;
			while (blockDecompressedOffset + Metadata.BlockInfos[blockIndex].DecompressedSize <= entry.Offset)
			{
				blockCompressedOffset += Metadata.BlockInfos[blockIndex].CompressedSize;
				blockDecompressedOffset += Metadata.BlockInfos[blockIndex].DecompressedSize;
				blockIndex++;
			}

			// if at least one block of this entry is compressed, or according to the rule above,
			// we should copy the data of the current entry to a separate stream
			bool needToCopy = createIndividualStreams;
			if (!needToCopy)
			{
				// check if this entry has compressed blocks
				long entrySize = 0;
				for (int bi = blockIndex; entrySize < entry.Size; bi++)
				{
					if (Metadata.BlockInfos[bi].Flags.GetCompression() != BundleCompressType.None)
					{
						// it does, so we need to create an individual stream and decompress its data into it
						needToCopy = true;
						break;
					}
					entrySize += Metadata.BlockInfos[bi].DecompressedSize;
				}
			}

			long entryOffsetInsideBlock = entry.Offset - blockDecompressedOffset;
			if (needToCopy)
			{
				// at least one block is compressed, so copy the data of the current entry to a separate stream
				using (SmartStream entryStream = CreateStream(entry.Size))
				{
					long left = entry.Size;
					dataStream.Position = dataOffset + blockCompressedOffset;

					// copy data of all blocks used by the current entry into the new stream
					for (int bi = blockIndex; left > 0; bi++)
					{
						long blockOffset = 0;
						BlockInfo block = Metadata.BlockInfos[bi];
						if (cachedBlock == bi)
						{
							// some data of the previous entry is in the same block as this one,
							// so we don't need to unpack it once again; reuse the cached blockStream.
							// NOTE(review): if the cached block was uncompressed, blockStream still
							// aliases dataStream but blockOffset stays 0 here — looks suspicious; confirm
							// that uncompressed blocks are never revisited via this path.
							dataStream.Position += block.CompressedSize;
						}
						else
						{
							BundleCompressType compressType = block.Flags.GetCompression();
							switch (compressType)
							{
								case BundleCompressType.None:
									// read straight from the bundle stream at the block's file offset
									blockOffset = dataOffset + blockCompressedOffset;
									blockStream.Assign(dataStream);
									break;

								case BundleCompressType.LZMA:
									blockStream.Move(CreateStream(block.DecompressedSize));
									SevenZipHelper.DecompressLZMAStream(dataStream, block.CompressedSize, blockStream, block.DecompressedSize);
									break;

								case BundleCompressType.LZ4:
								case BundleCompressType.LZ4HZ:
									blockStream.Move(CreateStream(block.DecompressedSize));
									using (Lz4DecodeStream lzStream = new Lz4DecodeStream(dataStream, block.CompressedSize))
									{
										long read = lzStream.Read(blockStream, block.DecompressedSize);
										if (read != block.DecompressedSize || lzStream.IsDataLeft)
										{
											throw new Exception($"Read {read} but expected {block.DecompressedSize}");
										}
									}
									break;

								default:
									throw new NotImplementedException($"Bundle compression '{compressType}' isn't supported");
							}
							cachedBlock = bi;
						}

						// consider next offsets:
						// 1) block - if it is a new stream then offset is 0, otherwise the offset of this block in the bundle file
						// 2) entry - if this is the first block for the current entry then it is the offset of this entry
						//    relative to this block, otherwise 0
						long fragmentSize = block.DecompressedSize - entryOffsetInsideBlock;
						blockStream.Position = blockOffset + entryOffsetInsideBlock;
						entryOffsetInsideBlock = 0;

						long size = Math.Min(fragmentSize, left);
						blockStream.CopyStream(entryStream, size);

						blockCompressedOffset += block.CompressedSize;
						left -= size;
					}
					if (left < 0)
					{
						throw new Exception($"Read more than expected");
					}

					FileEntryOffset feOffset = new FileEntryOffset(entryStream.CreateReference(), 0);
					m_entryStreams.Add(entry, feOffset);
				}
			}
			else
			{
				// no compressed blocks were found, so we can use the original bundle stream;
				// since FileEntry.Offset contains the decompressed offset, translate it into a
				// physical offset via the compressed offset of the entry's first block
				FileEntryOffset feOffset = new FileEntryOffset(dataStream.CreateReference(), dataOffset + blockCompressedOffset + entryOffsetInsideBlock);
				m_entryStreams.Add(entry, feOffset);
			}
		}
	}
}
/// <summary>
/// Reads the data blocks of a 5.3.0+ bundle. If nothing is compressed the original stream is
/// kept; otherwise all blocks are decompressed into one buffer (a memory stream, or a
/// self-deleting temp file when the decompressed size exceeds int.MaxValue) and the metadata
/// is rebuilt against that buffer.
/// </summary>
/// <param name="stream">Bundle stream positioned at the block data.</param>
/// <param name="isClosable">Whether this method may dispose <paramref name="stream"/> after buffering.</param>
/// <param name="blockInfos">Compressed/decompressed sizes and flags of every block.</param>
/// <param name="metadata">Entry metadata with decompressed-space offsets.</param>
private void Read530Blocks(EndianStream stream, bool isClosable, BlockInfo[] blockInfos, BundleMetadata metadata)
{
	// Special case. If the bundle has no compressed blocks then pass it through as is
	if (blockInfos.All(t => t.Flags.GetCompression() == BundleCompressType.None))
	{
		Metadatas = new BundleMetadata[] { metadata };
		return;
	}

	long dataPosition = stream.BaseStream.Position;
	long decompressedSize = blockInfos.Sum(t => t.DecompressedSize);
	Stream bufferStream;
	if (decompressedSize > int.MaxValue)
	{
		// too large for a MemoryStream: buffer in a temp file that deletes itself on close
		string tempFile = Path.GetTempFileName();
		bufferStream = new FileStream(tempFile, FileMode.Open, FileAccess.ReadWrite, FileShare.None, 4096, FileOptions.DeleteOnClose);
	}
	else
	{
		bufferStream = new MemoryStream((int)decompressedSize);
	}

	// decompress every block sequentially into the buffer
	foreach (BlockInfo blockInfo in blockInfos)
	{
		BundleCompressType compressType = blockInfo.Flags.GetCompression();
		switch (compressType)
		{
			case BundleCompressType.None:
				stream.BaseStream.CopyStream(bufferStream, blockInfo.DecompressedSize);
				break;

			case BundleCompressType.LZMA:
				SevenZipHelper.DecompressLZMAStream(stream.BaseStream, blockInfo.CompressedSize, bufferStream, blockInfo.DecompressedSize);
				break;

			case BundleCompressType.LZ4:
			case BundleCompressType.LZ4HZ:
				using (Lz4Stream lzStream = new Lz4Stream(stream.BaseStream, blockInfo.CompressedSize))
				{
					long read = lzStream.Read(bufferStream, blockInfo.DecompressedSize);
					if (read != blockInfo.DecompressedSize)
					{
						// BUGFIX: message previously reported CompressedSize although the check
						// compares against DecompressedSize
						throw new Exception($"Read {read} but expected {blockInfo.DecompressedSize}");
					}
				}
				break;

			default:
				throw new NotImplementedException($"Bundle compression '{compressType}' isn't supported");
		}
	}

	if (isClosable)
	{
		// everything is buffered now, so the source stream is no longer needed
		stream.Dispose();
	}

	// rebase every entry onto the decompressed buffer (entry offsets are decompressed-space,
	// so subtracting the data start position yields the offset inside the buffer)
	BundleFileEntry[] entries = new BundleFileEntry[metadata.Entries.Count];
	for (int i = 0; i < metadata.Entries.Count; i++)
	{
		BundleFileEntry bundleEntry = metadata.Entries[i];
		string name = bundleEntry.Name;
		long offset = bundleEntry.Offset - dataPosition;
		long size = bundleEntry.Size;
		BundleFileEntry streamEntry = new BundleFileEntry(bufferStream, m_filePath, name, offset, size, true);
		entries[i] = streamEntry;
	}
	BundleMetadata streamMetadata = new BundleMetadata(bufferStream, m_filePath, false, entries);
	Metadatas = new BundleMetadata[] { streamMetadata };
}
/// <summary>
/// Reads (and if needed decompresses) the metadata section of a 5.3.0+ bundle into
/// <c>Metadata</c>, validating that exactly <c>Header.MetadataDecompressedSize</c> bytes
/// were consumed.
/// </summary>
/// <param name="reader">Reader over the bundle stream, positioned at the metadata unless it is stored at the end.</param>
/// <returns>A new reference to the underlying bundle stream (<c>m_stream</c>).</returns>
/// <exception cref="Exception">Metadata read consumed an unexpected number of bytes.</exception>
/// <exception cref="NotSupportedException">Unknown metadata compression type.</exception>
private SmartStream Read530Metadata(BundleFileReader reader)
{
	if (Header.Flags.IsMetadataAtTheEnd())
	{
		// metadata is stored at the tail of the file; seek back from the bundle end
		reader.BaseStream.Position = Header.BundleSize - Header.MetadataCompressedSize;
	}
	BundleCompressType metaCompression = Header.Flags.GetCompression();
	switch (metaCompression)
	{
		case BundleCompressType.None:
		{
			// metadata is stored as-is; read it straight from the bundle stream
			Metadata.Read(reader);
			// expected end position depends on where the metadata lives in the file
			long expectedPosition = Header.Flags.IsMetadataAtTheEnd() ? Header.BundleSize : Header.HeaderSize + Header.MetadataDecompressedSize;
			if (reader.BaseStream.Position != expectedPosition)
			{
				throw new Exception($"Read {reader.BaseStream.Position - Header.HeaderSize} but expected {Header.MetadataDecompressedSize}");
			}
		}
		break;

		case BundleCompressType.LZMA:
		{
			// decompress the LZMA metadata into a fixed-size buffer, then parse from there
			using (MemoryStream stream = new MemoryStream(new byte[Header.MetadataDecompressedSize]))
			{
				SevenZipHelper.DecompressLZMASizeStream(reader.BaseStream, Header.MetadataCompressedSize, stream);
				using (BundleFileReader decompressReader = new BundleFileReader(stream, reader.EndianType, reader.Generation))
				{
					Metadata.Read(decompressReader);
				}
				// the whole decompressed buffer must have been consumed
				if (stream.Position != Header.MetadataDecompressedSize)
				{
					throw new Exception($"Read {stream.Position} but expected {Header.MetadataDecompressedSize}");
				}
			}
		}
		break;

		case BundleCompressType.LZ4:
		case BundleCompressType.LZ4HZ:
		{
			// decompress the LZ4 metadata into a fixed-size buffer, then parse from there
			using (MemoryStream stream = new MemoryStream(new byte[Header.MetadataDecompressedSize]))
			{
				using (Lz4DecodeStream decodeStream = new Lz4DecodeStream(reader.BaseStream, Header.MetadataCompressedSize))
				{
					long read = decodeStream.Read(stream, Header.MetadataDecompressedSize);
					if (read != Header.MetadataDecompressedSize || decodeStream.IsDataLeft)
					{
						throw new Exception($"Read {read} but expected {Header.MetadataDecompressedSize}");
					}
				}
				// rewind before parsing: the decode above left the buffer positioned at its end
				stream.Position = 0;
				using (BundleFileReader decompressReader = new BundleFileReader(stream, reader.EndianType, reader.Generation))
				{
					Metadata.Read(decompressReader);
				}
				// the whole decompressed buffer must have been consumed
				if (stream.Position != Header.MetadataDecompressedSize)
				{
					throw new Exception($"Read {stream.Position} but expected {Header.MetadataDecompressedSize}");
				}
			}
		}
		break;

		default:
			throw new NotSupportedException($"Bundle compression '{metaCompression}' isn't supported");
	}
	return(m_stream.CreateReference());
}
/// <summary>
/// Reads the data blocks of a 5.3.0+ bundle. If nothing is compressed the original stream is
/// kept; otherwise all blocks are decompressed into one memory buffer and the metadata is
/// rebuilt against that buffer.
/// </summary>
/// <param name="stream">Bundle stream positioned at the block data.</param>
/// <param name="isClosable">Whether this method may dispose <paramref name="stream"/> after buffering.</param>
/// <param name="blockInfos">Compressed/decompressed sizes and flags of every block.</param>
/// <param name="metadata">Entry metadata with decompressed-space offsets.</param>
private void Read530Blocks(EndianStream stream, bool isClosable, BlockInfo[] blockInfos, BundleMetadata metadata)
{
	// Special case. If the bundle has no compressed blocks then pass it through as a stream
	if (blockInfos.All(t => t.Flags.GetCompression() == BundleCompressType.None))
	{
		Metadatas = new BundleMetadata[] { metadata };
		// BUGFIX: this return was missing, so an uncompressed bundle was decompressed anyway
		// and Metadatas was immediately overwritten below
		return;
	}

	long dataPosition = stream.BaseStream.Position;
	long decompressedSize = blockInfos.Sum(t => t.DecompressedSize);
	if (decompressedSize > int.MaxValue)
	{
		// MemoryStream is limited to int.MaxValue bytes; larger bundles are not supported here
		throw new Exception("How to read such big data? Save to file and then read?");
	}
	MemoryStream memStream = new MemoryStream((int)decompressedSize);

	// decompress every block sequentially into the buffer
	foreach (BlockInfo blockInfo in blockInfos)
	{
		BundleCompressType compressType = blockInfo.Flags.GetCompression();
		switch (compressType)
		{
			case BundleCompressType.None:
				stream.BaseStream.CopyStream(memStream, blockInfo.DecompressedSize);
				break;

			case BundleCompressType.LZMA:
				SevenZipHelper.DecompressLZMAStream(stream.BaseStream, blockInfo.CompressedSize, memStream, blockInfo.DecompressedSize);
				break;

			case BundleCompressType.LZ4:
			case BundleCompressType.LZ4HZ:
				using (Lz4Stream lzStream = new Lz4Stream(stream.BaseStream, blockInfo.CompressedSize))
				{
					long read = lzStream.Read(memStream, blockInfo.DecompressedSize);
					if (read != blockInfo.DecompressedSize)
					{
						// BUGFIX: message previously reported CompressedSize although the check
						// compares against DecompressedSize
						throw new Exception($"Read {read} but expected {blockInfo.DecompressedSize}");
					}
				}
				break;

			default:
				throw new NotImplementedException($"Bundle compression '{compressType}' isn't supported");
		}
	}

	if (isClosable)
	{
		// everything is buffered now, so the source stream is no longer needed
		stream.Dispose();
	}

	// rebase every entry onto the decompressed buffer (entry offsets are decompressed-space,
	// so subtracting the data start position yields the offset inside the buffer)
	BundleFileEntry[] entries = new BundleFileEntry[metadata.Entries.Count];
	for (int i = 0; i < metadata.Entries.Count; i++)
	{
		BundleFileEntry bundleEntry = metadata.Entries[i];
		string name = bundleEntry.Name;
		long offset = bundleEntry.Offset - dataPosition;
		long size = bundleEntry.Size;
		BundleFileEntry streamEntry = new BundleFileEntry(memStream, m_filePath, name, offset, size);
		entries[i] = streamEntry;
	}
	BundleMetadata streamMetadata = new BundleMetadata(memStream, m_filePath, true, entries);
	Metadatas = new BundleMetadata[] { streamMetadata };
}
/// <summary>
/// Reads (and if needed decompresses) the metadata of a 5.3.0+ bundle, extracts its block
/// table and entry metadata, then seeks to the data section and delegates to Read530Blocks.
/// </summary>
/// <param name="stream">Bundle stream positioned right after the header.</param>
/// <param name="isClosable">Whether the created metadata may dispose the bundle stream.</param>
/// <param name="basePosition">Absolute position of the bundle start within the stream.</param>
private void Read530Metadata(EndianStream stream, bool isClosable, long basePosition)
{
	long dataPosition = stream.BaseStream.Position;
	if (Header.Flags.IsMetadataAtTheEnd())
	{
		// metadata lives at the tail of the bundle; block data starts right here
		stream.BaseStream.Position = basePosition + Header.BundleSize - Header.MetadataCompressedSize;
	}
	else
	{
		// metadata precedes the data; block data starts right after it
		dataPosition += Header.MetadataCompressedSize;
	}
	BlockInfo[] blockInfos;
	BundleMetadata metadata;
	BundleCompressType metaCompress = Header.Flags.GetCompression();
	switch (metaCompress)
	{
		case BundleCompressType.None:
		{
			long metaPosition = stream.BaseStream.Position;
			// unknown 0x10 bytes (presumably a hash/GUID — TODO confirm)
			stream.BaseStream.Position += 0x10;
			blockInfos = stream.ReadArray<BlockInfo>();
			metadata = new BundleMetadata(stream.BaseStream, m_filePath, isClosable);
			metadata.Read530(stream, dataPosition);
			// verify the entire metadata section was consumed
			if (stream.BaseStream.Position != metaPosition + Header.MetadataDecompressedSize)
			{
				throw new Exception($"Read {stream.BaseStream.Position - metaPosition} but expected {Header.MetadataDecompressedSize}");
			}
			break;
		}

		case BundleCompressType.LZMA:
		{
			// decompress the LZMA metadata into a memory buffer, then parse from there
			using (MemoryStream memStream = new MemoryStream(Header.MetadataDecompressedSize))
			{
				SevenZipHelper.DecompressLZMASizeStream(stream.BaseStream, Header.MetadataCompressedSize, memStream);
				// rewind before parsing the decompressed buffer
				memStream.Position = 0;
				using (EndianStream metadataStream = new EndianStream(memStream, EndianType.BigEndian))
				{
					// unknown 0x10 bytes (presumably a hash/GUID — TODO confirm)
					metadataStream.BaseStream.Position += 0x10;
					blockInfos = metadataStream.ReadArray<BlockInfo>();
					// note: metadata wraps the original bundle stream, not the temp buffer
					metadata = new BundleMetadata(stream.BaseStream, m_filePath, isClosable);
					metadata.Read530(metadataStream, dataPosition);
					// verify the whole decompressed buffer was consumed
					if (memStream.Position != memStream.Length)
					{
						throw new Exception($"Read {memStream.Position} but expected {memStream.Length}");
					}
				}
			}
			break;
		}

		case BundleCompressType.LZ4:
		case BundleCompressType.LZ4HZ:
		{
			// decompress the LZ4 metadata into a memory buffer, then parse from there
			using (MemoryStream memStream = new MemoryStream(Header.MetadataDecompressedSize))
			{
				using (Lz4Stream lzStream = new Lz4Stream(stream.BaseStream, Header.MetadataCompressedSize))
				{
					long read = lzStream.Read(memStream, Header.MetadataDecompressedSize);
					// rewind before parsing the decompressed buffer
					memStream.Position = 0;
					if (read != Header.MetadataDecompressedSize)
					{
						throw new Exception($"Read {read} but expected {Header.MetadataDecompressedSize}");
					}
				}
				using (EndianStream metadataStream = new EndianStream(memStream, EndianType.BigEndian))
				{
					// unknown 0x10 bytes (presumably a hash/GUID — TODO confirm)
					metadataStream.BaseStream.Position += 0x10;
					blockInfos = metadataStream.ReadArray<BlockInfo>();
					// note: metadata wraps the original bundle stream, not the temp buffer
					metadata = new BundleMetadata(stream.BaseStream, m_filePath, isClosable);
					metadata.Read530(metadataStream, dataPosition);
					// verify the whole decompressed buffer was consumed
					if (memStream.Position != memStream.Length)
					{
						throw new Exception($"Read {memStream.Position} but expected {memStream.Length}");
					}
				}
			}
			break;
		}

		default:
			throw new NotSupportedException($"Bundle compression '{metaCompress}' isn't supported");
	}
	// seek to the block data and parse it
	stream.BaseStream.Position = dataPosition;
	Read530Blocks(stream, isClosable, blockInfos, metadata);
}
/// <summary>
/// Reads the data blocks of a 5.3.0+ bundle and rebuilds the metadata entry table.
/// Entries that span only uncompressed blocks keep referencing the original bundle stream;
/// entries touching at least one compressed block get their data copied into an individual
/// stream. A single decompressed block is cached across consecutive entries that share it.
/// </summary>
/// <param name="bundleStream">Bundle stream positioned at the block data.</param>
/// <param name="blockInfos">Compressed/decompressed sizes and flags of every block.</param>
private void Read530Blocks(SmartStream bundleStream, BlockInfo[] blockInfos)
{
	int cachedBlock = -1;
	long dataOffset = bundleStream.Position;
	BundleFileEntry[] newEntries = new BundleFileEntry[Metadata.Entries.Count];
	using (SmartStream blockStream = SmartStream.CreateNull())
	{
		for (int ei = 0; ei < Metadata.Entries.Count; ei++)
		{
			BundleFileEntry entry = Metadata.Entries[ei];

			// find the block corresponding to the current entry
			int blockIndex = 0;
			long compressedOffset = 0;
			long decompressedOffset = 0;
			while (true)
			{
				BlockInfo block = blockInfos[blockIndex];
				if (decompressedOffset + block.DecompressedSize > entry.Offset)
				{
					break;
				}
				blockIndex++;
				compressedOffset += block.CompressedSize;
				decompressedOffset += block.DecompressedSize;
			}

			// check whether this entry uses any compressed blocks
			long entrySize = 0;
			bool isCompressed = false;
			for (int bi = blockIndex; entrySize < entry.Size; bi++)
			{
				BlockInfo block = blockInfos[bi];
				entrySize += block.DecompressedSize;
				if (block.Flags.GetCompression() != BundleCompressType.None)
				{
					isCompressed = true;
					break;
				}
			}

			if (isCompressed)
			{
				// at least one block is compressed, so copy the data of the current entry to a separate stream
				using (SmartStream entryStream = CreateStream(entry.Size))
				{
					long left = entry.Size;
					long entryOffset = entry.Offset - decompressedOffset;
					bundleStream.Position = dataOffset + compressedOffset;

					// copy data of all blocks used by the current entry into the created stream
					for (int bi = blockIndex; left > 0; bi++)
					{
						long blockOffset = 0;
						BlockInfo block = blockInfos[bi];
						if (cachedBlock == bi)
						{
							// some data of the previous entry is in the same block as this one,
							// so we don't need to unpack it again but can use the cached stream.
							// NOTE(review): if the cached block was uncompressed, blockStream still
							// aliases bundleStream while blockOffset stays 0 — confirm uncompressed
							// blocks are never revisited via this path.
							bundleStream.Position += block.CompressedSize;
						}
						else
						{
							BundleCompressType compressType = block.Flags.GetCompression();
							switch (compressType)
							{
								case BundleCompressType.None:
									// read straight from the bundle stream at the block's file offset
									blockOffset = dataOffset + compressedOffset;
									blockStream.Assign(bundleStream);
									break;

								case BundleCompressType.LZMA:
									blockStream.Move(CreateStream(block.DecompressedSize));
									SevenZipHelper.DecompressLZMAStream(bundleStream, block.CompressedSize, blockStream, block.DecompressedSize);
									break;

								case BundleCompressType.LZ4:
								case BundleCompressType.LZ4HZ:
									blockStream.Move(CreateStream(block.DecompressedSize));
									using (Lz4Stream lzStream = new Lz4Stream(bundleStream, block.CompressedSize))
									{
										long read = lzStream.Read(blockStream, block.DecompressedSize);
										if (read != block.DecompressedSize)
										{
											// BUGFIX: message previously reported CompressedSize although
											// the check compares against DecompressedSize
											throw new Exception($"Read {read} but expected {block.DecompressedSize}");
										}
									}
									break;

								default:
									throw new NotImplementedException($"Bundle compression '{compressType}' isn't supported");
							}
							cachedBlock = bi;
						}

						// consider next offsets:
						// 1) block - if it is a new stream then offset is 0, otherwise the offset of this block in the bundle file
						// 2) entry - if this is the first block for the current entry then it is the offset of this entry
						//    relative to this block, otherwise 0
						long fragmentSize = block.DecompressedSize - entryOffset;
						blockStream.Position = blockOffset + entryOffset;
						entryOffset = 0;

						long size = Math.Min(fragmentSize, left);
						blockStream.CopyStream(entryStream, size);

						compressedOffset += block.CompressedSize;
						left -= size;
					}
					if (left < 0)
					{
						throw new Exception($"Read more than expected");
					}

					newEntries[ei] = new BundleFileEntry(entryStream, entry.FilePath, entry.Name, 0, entry.Size);
				}
			}
			else
			{
				// no compressed blocks were found, so we can use the original bundle stream.
				// NOTE(review): this adds the decompressed-space entry.Offset directly to dataOffset,
				// which is only correct when all preceding blocks are uncompressed too — verify.
				newEntries[ei] = new BundleFileEntry(entry, dataOffset + entry.Offset);
			}
		}
	}
	// swap the metadata for one whose entries reference the prepared streams
	Metadata.Dispose();
	Metadata = new BundleMetadata(m_filePath, newEntries);
}
/// <summary>
/// Reads (and if needed decompresses) the metadata of a 5.3.0+ bundle, extracts its block
/// table and entry metadata into <c>Metadata</c>, then seeks to the data section and delegates
/// to Read530Blocks.
/// </summary>
/// <param name="reader">Reader whose BaseStream is the bundle SmartStream, positioned right after the header.</param>
/// <param name="basePosition">Absolute position of the bundle start within the stream.</param>
private void Read530Metadata(EndianReader reader, long basePosition)
{
	SmartStream bundleStream = (SmartStream)reader.BaseStream;
	long dataPosition = bundleStream.Position;
	if (Header.Flags.IsMetadataAtTheEnd())
	{
		// metadata lives at the tail of the bundle; block data starts right here
		bundleStream.Position = basePosition + Header.BundleSize - Header.MetadataCompressedSize;
	}
	else
	{
		// metadata precedes the data; block data starts right after it
		dataPosition += Header.MetadataCompressedSize;
	}
	BlockInfo[] blockInfos;
	BundleCompressType metaCompression = Header.Flags.GetCompression();
	switch (metaCompression)
	{
		case BundleCompressType.None:
		{
			long metaPosition = bundleStream.Position;
			// unknown 0x10 bytes (presumably a hash/GUID — TODO confirm)
			bundleStream.Position += 0x10;
			blockInfos = reader.ReadArray<BlockInfo>();
			Metadata = new BundleMetadata(m_filePath);
			Metadata.Read530(reader, bundleStream);
			// verify the entire metadata section was consumed
			if (bundleStream.Position != metaPosition + Header.MetadataDecompressedSize)
			{
				throw new Exception($"Read {bundleStream.Position - metaPosition} but expected {Header.MetadataDecompressedSize}");
			}
			break;
		}

		case BundleCompressType.LZMA:
		{
			// decompress the LZMA metadata into a fixed-size buffer, then parse from there
			using (MemoryStream metaStream = new MemoryStream(new byte[Header.MetadataDecompressedSize]))
			{
				SevenZipHelper.DecompressLZMASizeStream(bundleStream, Header.MetadataCompressedSize, metaStream);
				// NOTE(review): unlike the LZ4 case below, the position is not reset to 0 here
				// before reading — this assumes DecompressLZMASizeStream leaves the destination
				// positioned at 0; confirm against that helper's implementation.
				using (EndianReader metaReader = new EndianReader(metaStream, EndianType.BigEndian))
				{
					// unknown 0x10 bytes (presumably a hash/GUID — TODO confirm)
					metaReader.BaseStream.Position += 0x10;
					blockInfos = metaReader.ReadArray<BlockInfo>();
					Metadata = new BundleMetadata(m_filePath);
					Metadata.Read530(metaReader, bundleStream);
				}
				// verify the whole decompressed buffer was consumed
				if (metaStream.Position != metaStream.Length)
				{
					throw new Exception($"Read {metaStream.Position} but expected {metaStream.Length}");
				}
			}
			break;
		}

		case BundleCompressType.LZ4:
		case BundleCompressType.LZ4HZ:
		{
			// decompress the LZ4 metadata into a fixed-size buffer, then parse from there
			using (MemoryStream metaStream = new MemoryStream(new byte[Header.MetadataDecompressedSize]))
			{
				using (Lz4Stream lzStream = new Lz4Stream(bundleStream, Header.MetadataCompressedSize))
				{
					long read = lzStream.Read(metaStream, Header.MetadataDecompressedSize);
					// rewind before parsing the decompressed buffer
					metaStream.Position = 0;
					if (read != Header.MetadataDecompressedSize)
					{
						throw new Exception($"Read {read} but expected {Header.MetadataDecompressedSize}");
					}
				}
				using (EndianReader metaReader = new EndianReader(metaStream, EndianType.BigEndian))
				{
					// unknown 0x10 bytes (presumably a hash/GUID — TODO confirm)
					metaReader.BaseStream.Position += 0x10;
					blockInfos = metaReader.ReadArray<BlockInfo>();
					Metadata = new BundleMetadata(m_filePath);
					Metadata.Read530(metaReader, bundleStream);
				}
				// verify the whole decompressed buffer was consumed
				if (metaStream.Position != metaStream.Length)
				{
					throw new Exception($"Read {metaStream.Position} but expected {metaStream.Length}");
				}
			}
			break;
		}

		default:
			throw new NotSupportedException($"Bundle compression '{metaCompression}' isn't supported");
	}
	// seek to the block data and parse it
	bundleStream.Position = dataPosition;
	Read530Blocks(bundleStream, blockInfos);
}
/// <summary>
/// Extracts the data of a single bundle entry into a new stream.
/// Uncompressed blocks are copied straight from the bundle stream; compressed blocks are
/// decompressed into <c>m_cachedBlockStream</c>, which is reused when consecutive entries
/// share the same block (only compressed blocks are ever cached).
/// </summary>
/// <param name="entry">Entry to extract; Offset is in decompressed space, Size in bytes.</param>
/// <returns>A reference to a stream containing the entry data, positioned at 0.</returns>
/// <exception cref="ObjectDisposedException">The reader has been disposed.</exception>
public SmartStream ReadEntry(BundleFileEntry entry)
{
	if (m_isDisposed)
	{
		throw new ObjectDisposedException(nameof(BundleFileBlockReader));
	}

	// find the first block containing the entry, accumulating both compressed (file-space)
	// and decompressed (logical-space) offsets along the way
	int blockIndex;
	long blockCompressedOffset = 0;
	long blockDecompressedOffset = 0;
	for (blockIndex = 0; blockDecompressedOffset + m_metadata.BlockInfos[blockIndex].DecompressedSize <= entry.Offset; blockIndex++)
	{
		blockCompressedOffset += m_metadata.BlockInfos[blockIndex].CompressedSize;
		blockDecompressedOffset += m_metadata.BlockInfos[blockIndex].DecompressedSize;
	}
	// offset of the entry's first byte inside its first block
	long entryOffsetInsideBlock = entry.Offset - blockDecompressedOffset;

	using (SmartStream entryStream = CreateStream(entry.Size))
	{
		long left = entry.Size;
		m_stream.Position = m_dataOffset + blockCompressedOffset;

		// copy data of all blocks used by the current entry into the new stream
		while (left > 0)
		{
			long blockStreamOffset;
			Stream blockStream;
			BlockInfo block = m_metadata.BlockInfos[blockIndex];
			if (m_cachedBlockIndex == blockIndex)
			{
				// data of the previous entry is in the same block as this one, so we don't
				// need to unpack it once again; instead reuse the cached (decompressed) stream
				blockStreamOffset = 0;
				blockStream = m_cachedBlockStream;
				// still advance past the block's compressed bytes in the bundle stream
				m_stream.Position += block.CompressedSize;
			}
			else
			{
				BundleCompressType compressType = block.Flags.GetCompression();
				if (compressType == BundleCompressType.None)
				{
					// uncompressed: read straight from the bundle stream at the block's file offset
					blockStreamOffset = m_dataOffset + blockCompressedOffset;
					blockStream = m_stream;
				}
				else
				{
					// compressed: decompress into the cache and remember which block it holds
					blockStreamOffset = 0;
					m_cachedBlockIndex = blockIndex;
					m_cachedBlockStream.Move(CreateStream(block.DecompressedSize));
					switch (compressType)
					{
						case BundleCompressType.LZMA:
							SevenZipHelper.DecompressLZMAStream(m_stream, block.CompressedSize, m_cachedBlockStream, block.DecompressedSize);
							break;

						case BundleCompressType.LZ4:
						case BundleCompressType.LZ4HZ:
							using (Lz4DecodeStream lzStream = new Lz4DecodeStream(m_stream, block.CompressedSize))
							{
								lzStream.ReadBuffer(m_cachedBlockStream, block.DecompressedSize);
							}
							break;

						default:
							throw new NotImplementedException($"Bundle compression '{compressType}' isn't supported");
					}
					blockStream = m_cachedBlockStream;
				}
			}

			// consider next offsets:
			// 1) block - if it is a new stream then offset is 0, otherwise the offset of this block in the bundle file
			// 2) entry - if this is the first block for the current entry then it is the offset of this entry
			//    relative to this block, otherwise 0
			long blockSize = block.DecompressedSize - entryOffsetInsideBlock;
			blockStream.Position = blockStreamOffset + entryOffsetInsideBlock;
			entryOffsetInsideBlock = 0;

			long size = Math.Min(blockSize, left);
			blockStream.CopyStream(entryStream, size);

			blockIndex++;
			blockCompressedOffset += block.CompressedSize;
			left -= size;
		}
		if (left < 0)
		{
			throw new Exception($"Read more than expected");
		}

		entryStream.Position = 0;
		return(entryStream.CreateReference());
	}
}
/// <summary>
/// Read block infos and create an uncompressed stream with the corresponding data.
/// If the first block is uncompressed the original data stream is returned directly
/// (<c>m_isNewDataStream</c> = false); otherwise the blocks are decompressed into a new
/// memory-backed stream (<c>m_isNewDataStream</c> = true).
/// </summary>
/// <param name="blockStream">Stream with block infos</param>
/// <param name="blockPosition">BlockInfos position within block stream; updated to the position after the block table</param>
/// <param name="dataStream">Stream with compressed data</param>
/// <param name="dataPosition">CompressedData position within data stream; updated to the data position within the RESULT stream</param>
/// <returns>Uncompressed data stream</returns>
/// <exception cref="NotImplementedException">Compressed block after an uncompressed first block, or unknown compression.</exception>
private EndianStream ParseBundle6BlockInfo(EndianStream blockStream, ref long blockPosition, EndianStream dataStream, ref long dataPosition)
{
	// dataStream and blockStream could be the same stream, so we should handle this situation
	// properly: positions are saved/restored around every read of either stream
	MemoryStream memStream = null;
	EndianStream resultStream = null;
	long read = 0;
	blockStream.BaseStream.Position = blockPosition;
	// unknown 0x10 bytes (presumably a hash/GUID — TODO confirm)
	blockStream.BaseStream.Position += 0x10;
	int blockCount = blockStream.ReadInt32();
	blockPosition = blockStream.BaseStream.Position;
	for (int i = 0; i < blockCount; i++)
	{
		// restore block-table position (dataStream reads below may have moved a shared stream)
		blockStream.BaseStream.Position = blockPosition;
		int decompressSize = blockStream.ReadInt32();
		int compressSize = blockStream.ReadInt32();
		int flag = blockStream.ReadInt16();
		blockPosition = blockStream.BaseStream.Position;
		// seek to this block's compressed data
		dataStream.BaseStream.Position = dataPosition;
		// low 6 bits of the flag hold the compression type
		BundleCompressType compressType = (BundleCompressType)(flag & 0x3F);
		if (i == 0)
		{
			// the first block decides whether a new output stream is needed
			if (compressType == BundleCompressType.None)
			{
				resultStream = dataStream;
				m_isNewDataStream = false;
			}
			else
			{
				memStream = new MemoryStream();
				resultStream = new EndianStream(memStream, EndianType.BigEndian);
				m_isNewDataStream = true;
			}
		}
		else
		{
			if (compressType != BundleCompressType.None && !m_isNewDataStream)
			{
				// TODO: if first block is none compressed then we should create stream and copy all previous blocks into it
				// but for now just throw exception
				throw new NotImplementedException("None compression");
			}
		}
		switch (compressType)
		{
			case BundleCompressType.None:
				if (m_isNewDataStream)
				{
					// uncompressed block copied verbatim into the new stream; sizes must match
					if (decompressSize != compressSize)
					{
						throw new Exception($"Compressed {compressSize} and decompressed {decompressSize} sizes differ");
					}
					dataStream.BaseStream.CopyStream(memStream, compressSize);
				}
				break;

			case BundleCompressType.LZMA:
				SevenZipHelper.DecompressLZMAStream(dataStream.BaseStream, compressSize, memStream, decompressSize);
				break;

			case BundleCompressType.LZ4:
			case BundleCompressType.LZ4HZ:
				using (Lz4Stream lzStream = new Lz4Stream(dataStream.BaseStream, compressSize))
				{
					lzStream.Read(memStream, decompressSize);
				}
				break;

			default:
				throw new NotImplementedException($"Bundle compression '{compressType}' isn't supported");
		}
		// advance past this block's compressed bytes and track the total consumed
		dataPosition += compressSize;
		read += compressSize;
		if (m_isNewDataStream)
		{
			// sanity check: the decoder must have consumed exactly compressSize bytes
			if (dataPosition != dataStream.BaseStream.Position)
			{
				throw new Exception($"Read data length is differ from compressed size for {i}th block");
			}
		}
	}
	// update position according to the result stream:
	// a new stream starts at 0; the original stream's data starts where it did before reading
	if (m_isNewDataStream)
	{
		dataPosition = 0;
	}
	else
	{
		dataPosition -= read;
	}
	return(resultStream);
}
/// <summary>
/// Parses a format-6 bundle: reads the compact header, locates the block-info table
/// (which may live at the end of the file), decompresses it if needed, and then parses
/// block infos and file entries.
/// </summary>
/// <param name="stream">Bundle stream positioned right after the signature/version fields.</param>
/// <param name="isPadding">Whether a single alignment byte follows the header flags.</param>
private void ParseBundleFormat6(EndianStream stream, bool isPadding)
{
	long bundleSize = stream.ReadInt64();
	int compressSize = stream.ReadInt32();
	int decompressSize = stream.ReadInt32();
	int flag = stream.ReadInt32();
	if (isPadding)
	{
		// skip one alignment byte
		stream.BaseStream.Position++;
	}

	long blockPosition;
	long dataPosition;
	// bit 0x80: data comes first and the block-info table is stored at the end of the file
	int isDataFirst = flag & 0x80;
	if (isDataFirst != 0)
	{
		blockPosition = stream.BaseStream.Length - compressSize;
		dataPosition = stream.BaseStream.Position;
	}
	else
	{
		blockPosition = stream.BaseStream.Position;
		dataPosition = stream.BaseStream.Position + compressSize;
	}
	stream.BaseStream.Position = blockPosition;

	// low 6 bits of the flag hold the block-info compression type
	BundleCompressType compressType = (BundleCompressType)(flag & 0x3F);
	switch (compressType)
	{
		case BundleCompressType.None:
			// block infos are readable in place
			ParseBundle6BlockInfo(stream, ref blockPosition, stream, ref dataPosition);
			ParseBundle6Files(stream, ref blockPosition, stream, ref dataPosition);
			break;

		case BundleCompressType.LZMA:
			// NOTE(review): 'DecompressLZMASSizeStream' (double S) differs from the
			// 'DecompressLZMASizeStream' used elsewhere in this file; it also has a different
			// signature (returns a stream), so it may be a distinct helper — confirm the name.
			using (MemoryStream memStream = SevenZipHelper.DecompressLZMASSizeStream(stream.BaseStream, compressSize))
			{
				using (EndianStream decBlockStream = new EndianStream(memStream, EndianType.BigEndian))
				{
					// update position according to the newly created stream
					blockPosition = 0;
					using (EndianStream decDataStream = ParseBundle6BlockInfo(decBlockStream, ref blockPosition, stream, ref dataPosition))
					{
						ParseBundle6Files(decBlockStream, ref blockPosition, decDataStream, ref dataPosition);
					}
				}
			}
			break;

		case BundleCompressType.LZ4:
		case BundleCompressType.LZ4HZ:
			// decompress the block-info table into a memory buffer first
			using (MemoryStream memStream = new MemoryStream(decompressSize))
			{
				using (Lz4Stream lzStream = new Lz4Stream(stream.BaseStream, compressSize))
				{
					lzStream.Read(memStream, decompressSize);
				}
				using (EndianStream decBlockStream = new EndianStream(memStream, EndianType.BigEndian))
				{
					// update position according to the newly created stream
					blockPosition = 0;
					using (EndianStream decDataStream = ParseBundle6BlockInfo(decBlockStream, ref blockPosition, stream, ref dataPosition))
					{
						ParseBundle6Files(decBlockStream, ref blockPosition, decDataStream, ref dataPosition);
					}
				}
			}
			break;

		default:
			throw new NotImplementedException($"Bundle compression '{compressType}' isn't supported");
	}
}