/// <summary>
/// Creates a bundle file data holder around a single, already-read metadata entry.
/// </summary>
/// <param name="metadata">The sole metadata entry of the bundle.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="metadata"/> is null.</exception>
public BundleFileData(BundleMetadata metadata)
{
    if (metadata == null)
    {
        throw new ArgumentNullException(nameof(metadata));
    }

    m_metadatas = new BundleMetadata[] { metadata };
}
/// <summary>
/// Reads bundle metadata for pre-5.3.0 generation bundles from the given stream.
/// Raw bundles are read in place; web/hex bundles have every LZMA chunk inflated
/// into its own in-memory copy.
/// </summary>
/// <param name="stream">Bundle stream positioned at the metadata.</param>
/// <param name="isClosable">Whether ownership of <paramref name="stream"/> is passed to the metadata (raw) or disposed here (web/hex).</param>
private void ReadPre530Metadata(EndianStream stream, bool isClosable)
{
    switch (Header.Type)
    {
        case BundleType.UnityRaw:
        {
            if (Header.ChunkInfos.Count > 1)
            {
                throw new NotSupportedException($"Raw data with several chunks {Header.ChunkInfos.Count} isn't supported");
            }

            BundleMetadata rawMetadata = new BundleMetadata(stream.BaseStream, m_filePath, isClosable);
            rawMetadata.ReadPre530(stream);
            Metadatas = new BundleMetadata[] { rawMetadata };
        }
        break;

        case BundleType.UnityWeb:
        case BundleType.HexFA:
        {
            int chunkCount = Header.ChunkInfos.Count;
            BundleMetadata[] chunkMetadatas = new BundleMetadata[chunkCount];
            for (int ci = 0; ci < chunkCount; ci++)
            {
                ChunkInfo chunk = Header.ChunkInfos[ci];

                // inflate the LZMA chunk into a dedicated in-memory buffer owned by the metadata
                MemoryStream chunkData = new MemoryStream(new byte[chunk.DecompressedSize]);
                SevenZipHelper.DecompressLZMASizeStream(stream.BaseStream, chunk.CompressedSize, chunkData);

                BundleMetadata chunkMetadata = new BundleMetadata(chunkData, m_filePath, true);
                using (EndianStream chunkStream = new EndianStream(chunkData, EndianType.BigEndian))
                {
                    chunkMetadata.ReadPre530(chunkStream);
                }
                chunkMetadatas[ci] = chunkMetadata;
            }
            Metadatas = chunkMetadatas;

            if (isClosable)
            {
                stream.Dispose();
            }
        }
        break;

        default:
            throw new NotSupportedException($"Bundle type {Header.Type} isn't supported before 530 generation");
    }
}
/// <summary>
/// Reads bundle metadata for pre-5.3.0 generation bundles (reader-based variant).
/// </summary>
/// <param name="reader">Reader positioned at the metadata within the bundle stream.</param>
private void ReadPre530Metadata(EndianReader reader)
{
    switch (Header.Type)
    {
        case BundleType.UnityRaw:
        {
            if (Header.ChunkInfos.Count > 1)
            {
                throw new NotSupportedException($"Raw data with several chunks {Header.ChunkInfos.Count} isn't supported");
            }

            Metadata = new BundleMetadata(m_filePath);
            Metadata.ReadPre530(reader);
            break;
        }

        case BundleType.UnityWeb:
        case BundleType.HexFA:
        {
            // read only last chunk. wtf?
            ChunkInfo lastChunk = Header.ChunkInfos[Header.ChunkInfos.Count - 1];
            using (SmartStream chunkData = SmartStream.CreateMemory(new byte[lastChunk.DecompressedSize]))
            {
                SevenZipHelper.DecompressLZMASizeStream(reader.BaseStream, lastChunk.CompressedSize, chunkData);

                Metadata = new BundleMetadata(m_filePath);
                using (EndianReader chunkReader = new EndianReader(chunkData, EndianType.BigEndian))
                {
                    Metadata.ReadPre530(chunkReader);
                }
            }
            break;
        }

        default:
            throw new NotSupportedException($"Bundle type {Header.Type} isn't supported before 530 generation");
    }
}
/// <summary>
/// Decompresses the data blocks of a 5.3.0+ bundle into a single buffer stream and
/// rebuilds the metadata entries so they point into that buffer.
/// </summary>
/// <param name="stream">Bundle stream positioned at the first data block.</param>
/// <param name="isClosable">Whether this method owns <paramref name="stream"/> and should dispose it after decompression.</param>
/// <param name="blockInfos">Size/compression descriptor for each data block.</param>
/// <param name="metadata">Metadata whose entry offsets refer to the decompressed data.</param>
private void Read530Blocks(EndianStream stream, bool isClosable, BlockInfo[] blockInfos, BundleMetadata metadata)
{
    // Special case. If bundle has no compressed blocks then pass it as is
    if (blockInfos.All(t => t.Flags.GetCompression() == BundleCompressType.None))
    {
        Metadatas = new BundleMetadata[] { metadata };
        return;
    }

    long dataPosition = stream.BaseStream.Position;
    long decompressedSize = blockInfos.Sum(t => t.DecompressedSize);
    Stream bufferStream;
    if (decompressedSize > int.MaxValue)
    {
        // too big for a MemoryStream; spill to a self-deleting temp file instead
        string tempFile = Path.GetTempFileName();
        bufferStream = new FileStream(tempFile, FileMode.Open, FileAccess.ReadWrite, FileShare.None, 4096, FileOptions.DeleteOnClose);
    }
    else
    {
        bufferStream = new MemoryStream((int)decompressedSize);
    }

    foreach (BlockInfo blockInfo in blockInfos)
    {
        BundleCompressType compressType = blockInfo.Flags.GetCompression();
        switch (compressType)
        {
            case BundleCompressType.None:
                stream.BaseStream.CopyStream(bufferStream, blockInfo.DecompressedSize);
                break;

            case BundleCompressType.LZMA:
                SevenZipHelper.DecompressLZMAStream(stream.BaseStream, blockInfo.CompressedSize, bufferStream, blockInfo.DecompressedSize);
                break;

            case BundleCompressType.LZ4:
            case BundleCompressType.LZ4HZ:
                using (Lz4Stream lzStream = new Lz4Stream(stream.BaseStream, blockInfo.CompressedSize))
                {
                    long read = lzStream.Read(bufferStream, blockInfo.DecompressedSize);
                    if (read != blockInfo.DecompressedSize)
                    {
                        // fixed: the message previously reported CompressedSize although the check is against DecompressedSize
                        throw new Exception($"Read {read} but expected {blockInfo.DecompressedSize}");
                    }
                }
                break;

            default:
                throw new NotImplementedException($"Bundle compression '{compressType}' isn't supported");
        }
    }

    if (isClosable)
    {
        stream.Dispose();
    }

    // Entry offsets are absolute within the bundle stream; rebase them onto the
    // decompressed buffer before handing them out.
    BundleFileEntry[] entries = new BundleFileEntry[metadata.Entries.Count];
    for (int i = 0; i < metadata.Entries.Count; i++)
    {
        BundleFileEntry bundleEntry = metadata.Entries[i];
        string name = bundleEntry.Name;
        long offset = bundleEntry.Offset - dataPosition;
        long size = bundleEntry.Size;
        BundleFileEntry streamEntry = new BundleFileEntry(bufferStream, m_filePath, name, offset, size, true);
        entries[i] = streamEntry;
    }
    BundleMetadata streamMetadata = new BundleMetadata(bufferStream, m_filePath, false, entries);
    Metadatas = new BundleMetadata[] { streamMetadata };
}
// Reads the 5.3.0+ metadata section (the block list plus the entry table) and then
// dispatches to Read530Blocks to load the actual data blocks.
//
//   stream       - bundle stream positioned right after the header
//   isClosable   - whether stream ownership is passed down to the metadata/blocks
//   basePosition - absolute position where this bundle starts inside the stream
private void Read530Metadata(EndianStream stream, bool isClosable, long basePosition)
{
    long dataPosition = stream.BaseStream.Position;
    if (Header.Flags.IsMetadataAtTheEnd())
    {
        // metadata is stored after the data blocks; seek to it (data starts at the current position)
        stream.BaseStream.Position = basePosition + Header.BundleSize - Header.MetadataCompressedSize;
    }
    else
    {
        // metadata precedes the data, so the data starts right after it
        dataPosition += Header.MetadataCompressedSize;
    }

    BlockInfo[] blockInfos;
    BundleMetadata metadata;
    BundleCompressType metaCompress = Header.Flags.GetCompression();
    switch (metaCompress)
    {
        case BundleCompressType.None:
        {
            long metaPosition = stream.BaseStream.Position;

            // unknown 0x10
            stream.BaseStream.Position += 0x10;
            blockInfos = stream.ReadArray<BlockInfo>();
            metadata = new BundleMetadata(stream.BaseStream, m_filePath, isClosable);
            metadata.Read530(stream, dataPosition);

            // sanity check: the whole declared metadata size must have been consumed
            if (stream.BaseStream.Position != metaPosition + Header.MetadataDecompressedSize)
            {
                throw new Exception($"Read {stream.BaseStream.Position - metaPosition} but expected {Header.MetadataDecompressedSize}");
            }
            break;
        }

        case BundleCompressType.LZMA:
        {
            // inflate the whole metadata section into memory, then parse it big-endian
            using (MemoryStream memStream = new MemoryStream(Header.MetadataDecompressedSize))
            {
                SevenZipHelper.DecompressLZMASizeStream(stream.BaseStream, Header.MetadataCompressedSize, memStream);
                memStream.Position = 0;
                using (EndianStream metadataStream = new EndianStream(memStream, EndianType.BigEndian))
                {
                    // unknown 0x10
                    metadataStream.BaseStream.Position += 0x10;
                    blockInfos = metadataStream.ReadArray<BlockInfo>();
                    // note: the metadata wraps the original bundle stream, not the decompressed copy
                    metadata = new BundleMetadata(stream.BaseStream, m_filePath, isClosable);
                    metadata.Read530(metadataStream, dataPosition);

                    // sanity check: the decompressed metadata must be fully consumed
                    if (memStream.Position != memStream.Length)
                    {
                        throw new Exception($"Read {memStream.Position} but expected {memStream.Length}");
                    }
                }
            }
            break;
        }

        case BundleCompressType.LZ4:
        case BundleCompressType.LZ4HZ:
        {
            using (MemoryStream memStream = new MemoryStream(Header.MetadataDecompressedSize))
            {
                using (Lz4Stream lzStream = new Lz4Stream(stream.BaseStream, Header.MetadataCompressedSize))
                {
                    long read = lzStream.Read(memStream, Header.MetadataDecompressedSize);
                    memStream.Position = 0;
                    if (read != Header.MetadataDecompressedSize)
                    {
                        throw new Exception($"Read {read} but expected {Header.MetadataDecompressedSize}");
                    }
                }

                using (EndianStream metadataStream = new EndianStream(memStream, EndianType.BigEndian))
                {
                    // unknown 0x10
                    metadataStream.BaseStream.Position += 0x10;
                    blockInfos = metadataStream.ReadArray<BlockInfo>();
                    // note: the metadata wraps the original bundle stream, not the decompressed copy
                    metadata = new BundleMetadata(stream.BaseStream, m_filePath, isClosable);
                    metadata.Read530(metadataStream, dataPosition);

                    // sanity check: the decompressed metadata must be fully consumed
                    if (memStream.Position != memStream.Length)
                    {
                        throw new Exception($"Read {memStream.Position} but expected {memStream.Length}");
                    }
                }
            }
            break;
        }

        default:
            throw new NotSupportedException($"Bundle compression '{metaCompress}' isn't supported");
    }

    // jump back to the data blocks and read them
    stream.BaseStream.Position = dataPosition;
    Read530Blocks(stream, isClosable, blockInfos, metadata);
}
/// <summary>
/// Decompresses the data blocks of a 5.3.0+ bundle into a single in-memory stream and
/// rebuilds the metadata entries so they point into that stream.
/// </summary>
/// <param name="stream">Bundle stream positioned at the first data block.</param>
/// <param name="isClosable">Whether this method owns <paramref name="stream"/> and should dispose it after decompression.</param>
/// <param name="blockInfos">Size/compression descriptor for each data block.</param>
/// <param name="metadata">Metadata whose entry offsets refer to the decompressed data.</param>
private void Read530Blocks(EndianStream stream, bool isClosable, BlockInfo[] blockInfos, BundleMetadata metadata)
{
    // Special case. If bundle has no compressed blocks then pass it as a stream
    if (blockInfos.All(t => t.Flags.GetCompression() == BundleCompressType.None))
    {
        Metadatas = new BundleMetadata[] { metadata };
        // fixed: the early return was missing, so the assignment above was
        // silently overwritten and the data was pointlessly recopied below
        return;
    }

    long dataPosition = stream.BaseStream.Position;
    long decompressedSize = blockInfos.Sum(t => t.DecompressedSize);
    if (decompressedSize > int.MaxValue)
    {
        throw new Exception("How to read such big data? Save to file and then read?");
    }

    MemoryStream memStream = new MemoryStream((int)decompressedSize);
    foreach (BlockInfo blockInfo in blockInfos)
    {
        BundleCompressType compressType = blockInfo.Flags.GetCompression();
        switch (compressType)
        {
            case BundleCompressType.None:
                stream.BaseStream.CopyStream(memStream, blockInfo.DecompressedSize);
                break;

            case BundleCompressType.LZMA:
                SevenZipHelper.DecompressLZMAStream(stream.BaseStream, blockInfo.CompressedSize, memStream, blockInfo.DecompressedSize);
                break;

            case BundleCompressType.LZ4:
            case BundleCompressType.LZ4HZ:
                using (Lz4Stream lzStream = new Lz4Stream(stream.BaseStream, blockInfo.CompressedSize))
                {
                    long read = lzStream.Read(memStream, blockInfo.DecompressedSize);
                    if (read != blockInfo.DecompressedSize)
                    {
                        // fixed: the message previously reported CompressedSize although the check is against DecompressedSize
                        throw new Exception($"Read {read} but expected {blockInfo.DecompressedSize}");
                    }
                }
                break;

            default:
                throw new NotImplementedException($"Bundle compression '{compressType}' isn't supported");
        }
    }

    if (isClosable)
    {
        stream.Dispose();
    }

    // Entry offsets are absolute within the bundle stream; rebase them onto the
    // decompressed buffer before handing them out.
    BundleFileEntry[] entries = new BundleFileEntry[metadata.Entries.Count];
    for (int i = 0; i < metadata.Entries.Count; i++)
    {
        BundleFileEntry bundleEntry = metadata.Entries[i];
        string name = bundleEntry.Name;
        long offset = bundleEntry.Offset - dataPosition;
        long size = bundleEntry.Size;
        BundleFileEntry streamEntry = new BundleFileEntry(memStream, m_filePath, name, offset, size);
        entries[i] = streamEntry;
    }
    BundleMetadata streamMetadata = new BundleMetadata(memStream, m_filePath, true, entries);
    Metadatas = new BundleMetadata[] { streamMetadata };
}
/// <summary>
/// Reads the data blocks of a 5.3.0+ bundle, decompressing only the blocks that
/// compressed entries actually use; entries that lie entirely in uncompressed
/// blocks keep referencing the original bundle stream.
/// </summary>
/// <param name="bundleStream">Bundle stream positioned at the first data block.</param>
/// <param name="blockInfos">Size/compression descriptor for each data block.</param>
private void Read530Blocks(SmartStream bundleStream, BlockInfo[] blockInfos)
{
    int cachedBlock = -1;
    long dataOffset = bundleStream.Position;

    BundleFileEntry[] newEntries = new BundleFileEntry[Metadata.Entries.Count];
    using (SmartStream blockStream = SmartStream.CreateNull())
    {
        for (int ei = 0; ei < Metadata.Entries.Count; ei++)
        {
            BundleFileEntry entry = Metadata.Entries[ei];

            // find block corresponding to current entry
            int blockIndex = 0;
            long compressedOffset = 0;
            long decompressedOffset = 0;
            while (true)
            {
                BlockInfo block = blockInfos[blockIndex];
                if (decompressedOffset + block.DecompressedSize > entry.Offset)
                {
                    break;
                }
                blockIndex++;
                compressedOffset += block.CompressedSize;
                decompressedOffset += block.DecompressedSize;
            }

            // check whether this entry uses any compressed blocks
            long entrySize = 0;
            bool isCompressed = false;
            for (int bi = blockIndex; entrySize < entry.Size; bi++)
            {
                BlockInfo block = blockInfos[bi];
                entrySize += block.DecompressedSize;
                if (block.Flags.GetCompression() != BundleCompressType.None)
                {
                    isCompressed = true;
                    break;
                }
            }

            if (isCompressed)
            {
                // well, at least one block is compressed so we should copy data of the current entry to a separate stream
                using (SmartStream entryStream = CreateStream(entry.Size))
                {
                    long left = entry.Size;
                    long entryOffset = entry.Offset - decompressedOffset;
                    bundleStream.Position = dataOffset + compressedOffset;

                    // copy data of all blocks used by current entry to the created stream
                    for (int bi = blockIndex; left > 0; bi++)
                    {
                        long blockOffset = 0;
                        BlockInfo block = blockInfos[bi];
                        if (cachedBlock == bi)
                        {
                            // some data of the previous entry is in the same block as this one,
                            // so we don't need to unpack it once again and can use the cached stream
                            // NOTE(review): on cached reuse blockOffset stays 0, but the None branch
                            // below originally positions via dataOffset + compressedOffset — verify
                            // cached reuse of an uncompressed block seeks correctly.
                            bundleStream.Position += block.CompressedSize;
                        }
                        else
                        {
                            BundleCompressType compressType = block.Flags.GetCompression();
                            switch (compressType)
                            {
                                case BundleCompressType.None:
                                    blockOffset = dataOffset + compressedOffset;
                                    blockStream.Assign(bundleStream);
                                    break;

                                case BundleCompressType.LZMA:
                                    blockStream.Move(CreateStream(block.DecompressedSize));
                                    SevenZipHelper.DecompressLZMAStream(bundleStream, block.CompressedSize, blockStream, block.DecompressedSize);
                                    break;

                                case BundleCompressType.LZ4:
                                case BundleCompressType.LZ4HZ:
                                    blockStream.Move(CreateStream(block.DecompressedSize));
                                    using (Lz4Stream lzStream = new Lz4Stream(bundleStream, block.CompressedSize))
                                    {
                                        long read = lzStream.Read(blockStream, block.DecompressedSize);
                                        if (read != block.DecompressedSize)
                                        {
                                            // fixed: the message previously reported CompressedSize although the check is against DecompressedSize
                                            throw new Exception($"Read {read} but expected {block.DecompressedSize}");
                                        }
                                    }
                                    break;

                                default:
                                    throw new NotImplementedException($"Bundle compression '{compressType}' isn't supported");
                            }
                            cachedBlock = bi;
                        }

                        // consider next offsets:
                        // 1) block - if it is a new stream then the offset is 0, otherwise the offset of this block in the bundle file
                        // 2) entry - if this is the first block for the current entry then it is the offset of this entry relative to this block,
                        //            otherwise 0
                        long fragmentSize = block.DecompressedSize - entryOffset;
                        blockStream.Position = blockOffset + entryOffset;
                        entryOffset = 0;

                        long size = Math.Min(fragmentSize, left);
                        blockStream.CopyStream(entryStream, size);

                        compressedOffset += block.CompressedSize;
                        left -= size;
                    }
                    if (left < 0)
                    {
                        // fixed: dropped a pointless interpolation on a constant message
                        throw new Exception("Read more than expected");
                    }
                    newEntries[ei] = new BundleFileEntry(entryStream, entry.FilePath, entry.Name, 0, entry.Size);
                }
            }
            else
            {
                // no compressed blocks were found, so we can use the original bundle stream
                newEntries[ei] = new BundleFileEntry(entry, dataOffset + entry.Offset);
            }
        }
    }
    Metadata.Dispose();
    Metadata = new BundleMetadata(m_filePath, newEntries);
}
// Reads the 5.3.0+ metadata section (block list + entry table) from a SmartStream-backed
// reader, then dispatches to Read530Blocks to load the actual data blocks.
//
//   reader       - reader over the bundle SmartStream, positioned right after the header
//   basePosition - absolute position where this bundle starts inside the stream
private void Read530Metadata(EndianReader reader, long basePosition)
{
    SmartStream bundleStream = (SmartStream)reader.BaseStream;
    long dataPosition = bundleStream.Position;
    if (Header.Flags.IsMetadataAtTheEnd())
    {
        // metadata is stored after the data blocks; seek to it (data starts at the current position)
        bundleStream.Position = basePosition + Header.BundleSize - Header.MetadataCompressedSize;
    }
    else
    {
        // metadata precedes the data, so the data starts right after it
        dataPosition += Header.MetadataCompressedSize;
    }

    BlockInfo[] blockInfos;
    BundleCompressType metaCompression = Header.Flags.GetCompression();
    switch (metaCompression)
    {
        case BundleCompressType.None:
        {
            long metaPosition = bundleStream.Position;

            // unknown 0x10
            bundleStream.Position += 0x10;
            blockInfos = reader.ReadArray<BlockInfo>();
            Metadata = new BundleMetadata(m_filePath);
            Metadata.Read530(reader, bundleStream);

            // sanity check: the whole declared metadata size must have been consumed
            if (bundleStream.Position != metaPosition + Header.MetadataDecompressedSize)
            {
                throw new Exception($"Read {bundleStream.Position - metaPosition} but expected {Header.MetadataDecompressedSize}");
            }
            break;
        }

        case BundleCompressType.LZMA:
        {
            // inflate the whole metadata section into memory, then parse it big-endian
            using (MemoryStream metaStream = new MemoryStream(new byte[Header.MetadataDecompressedSize]))
            {
                SevenZipHelper.DecompressLZMASizeStream(bundleStream, Header.MetadataCompressedSize, metaStream);
                // NOTE(review): unlike the LZ4 branch below there is no explicit Position = 0 reset
                // here; presumably DecompressLZMASizeStream restores the destination position — confirm.
                using (EndianReader metaReader = new EndianReader(metaStream, EndianType.BigEndian))
                {
                    // unknown 0x10
                    metaReader.BaseStream.Position += 0x10;
                    blockInfos = metaReader.ReadArray<BlockInfo>();
                    Metadata = new BundleMetadata(m_filePath);
                    Metadata.Read530(metaReader, bundleStream);
                }

                // sanity check: the decompressed metadata must be fully consumed
                if (metaStream.Position != metaStream.Length)
                {
                    throw new Exception($"Read {metaStream.Position} but expected {metaStream.Length}");
                }
            }
            break;
        }

        case BundleCompressType.LZ4:
        case BundleCompressType.LZ4HZ:
        {
            using (MemoryStream metaStream = new MemoryStream(new byte[Header.MetadataDecompressedSize]))
            {
                using (Lz4Stream lzStream = new Lz4Stream(bundleStream, Header.MetadataCompressedSize))
                {
                    long read = lzStream.Read(metaStream, Header.MetadataDecompressedSize);
                    metaStream.Position = 0;
                    if (read != Header.MetadataDecompressedSize)
                    {
                        throw new Exception($"Read {read} but expected {Header.MetadataDecompressedSize}");
                    }
                }

                using (EndianReader metaReader = new EndianReader(metaStream, EndianType.BigEndian))
                {
                    // unknown 0x10
                    metaReader.BaseStream.Position += 0x10;
                    blockInfos = metaReader.ReadArray<BlockInfo>();
                    Metadata = new BundleMetadata(m_filePath);
                    Metadata.Read530(metaReader, bundleStream);
                }

                // sanity check: the decompressed metadata must be fully consumed
                if (metaStream.Position != metaStream.Length)
                {
                    throw new Exception($"Read {metaStream.Position} but expected {metaStream.Length}");
                }
            }
            break;
        }

        default:
            throw new NotSupportedException($"Bundle compression '{metaCompression}' isn't supported");
    }

    // jump back to the data blocks and read them
    bundleStream.Position = dataPosition;
    Read530Blocks(bundleStream, blockInfos);
}