internal static WebFileScheme ReadScheme(SmartStream stream, long offset, long size, string filePath, string fileName)
{
	WebFileScheme scheme = new WebFileScheme(stream, offset, size, filePath, fileName);
	scheme.ReadScheme();
	scheme.ProcessEntries();
	return scheme;
}
private SerializedFileScheme(SmartStream stream, long offset, long size, string filePath, string fileName, TransferInstructionFlags flags) :
	base(stream, offset, size, filePath, fileName)
{
	Flags = flags;
	Header = new SerializedFileHeader(Name);
	Metadata = new SerializedFileMetadata(Name);
}
private SmartStream ReadBrotli(EndianReader reader)
{
	using (SmartStream stream = SmartStream.CreateMemory())
	{
		using (BrotliInputStream brotliStream = new BrotliInputStream(reader.BaseStream))
		{
			brotliStream.CopyTo(stream);
		}
		return stream.CreateReference();
	}
}
private SmartStream ReadGZip(EndianReader reader)
{
	using (SmartStream stream = SmartStream.CreateMemory())
	{
		using (GZipStream gzipStream = new GZipStream(reader.BaseStream, CompressionMode.Decompress))
		{
			gzipStream.CopyTo(stream);
		}
		return stream.CreateReference();
	}
}
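// A minimal usage sketch, not part of the original sources: it shows the SmartStream
// reference-counting pattern that ReadBrotli and ReadGZip above rely on. The assumption
// (consistent with how CreateReference() is used throughout this code) is that SmartStream
// is reference counted, so a handle returned by CreateReference() keeps the backing buffer
// alive after the local 'using' handle is disposed.
private static SmartStream CopyToMemory(Stream source)
{
	using (SmartStream stream = SmartStream.CreateMemory())
	{
		// fill the in-memory stream with the source data
		source.CopyTo(stream);
		// hand out a second handle to the same backing store; disposing the local
		// handle at the end of the using block then doesn't free the copied data
		return stream.CreateReference();
	}
}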
public static SerializedFileScheme LoadScheme(string filePath, string fileName, TransferInstructionFlags flags)
{
	if (!MultiFileStream.Exists(filePath))
	{
		throw new Exception($"Serialized file at path '{filePath}' doesn't exist");
	}
	using (SmartStream fileStream = SmartStream.OpenRead(filePath))
	{
		return ReadScheme(fileStream, 0, fileStream.Length, filePath, fileName, flags);
	}
}
public static ResourceFileScheme LoadScheme(string filePath, string fileName)
{
	if (!FileMultiStream.Exists(filePath))
	{
		throw new Exception($"Resource file at path '{filePath}' doesn't exist");
	}
	using (SmartStream stream = SmartStream.OpenRead(filePath))
	{
		return ReadScheme(stream, 0, stream.Length, filePath, fileName);
	}
}
public FileScheme(SmartStream stream, string filePath)
{
	if (stream == null)
	{
		throw new ArgumentNullException(nameof(stream));
	}
	if (string.IsNullOrEmpty(filePath))
	{
		throw new ArgumentNullException(nameof(filePath));
	}
	m_stream = stream;
	FilePath = filePath;
}
public static BundleFileScheme LoadScheme(string filePath)
{
	if (!FileUtils.Exists(filePath))
	{
		throw new Exception($"Bundle file at path '{filePath}' doesn't exist");
	}
	string fileName = Path.GetFileNameWithoutExtension(filePath);
	using (SmartStream stream = SmartStream.OpenRead(filePath))
	{
		return ReadScheme(stream, 0, stream.Length, filePath, fileName);
	}
}
private SerializedFileScheme(SmartStream stream, string filePath, string fileName) :
	base(filePath, fileName)
{
	if (stream.Length <= int.MaxValue)
	{
		// the file fits into a single managed array: copy it into a read-only MemoryStream
		// so the source stream doesn't have to be kept alive
		byte[] buffer = new byte[stream.Length];
		stream.ReadBuffer(buffer, 0, buffer.Length);
		Stream = new MemoryStream(buffer, 0, buffer.Length, false);
	}
	else
	{
		// too large for one array: keep a counted reference to the original stream instead
		Stream = stream.CreateReference();
	}
}
public FileScheme(SmartStream stream, long offset, long size, string filePath, string fileName)
{
	if (stream == null)
	{
		throw new ArgumentNullException(nameof(stream));
	}
	if (string.IsNullOrEmpty(filePath))
	{
		throw new ArgumentNullException(nameof(filePath));
	}
	m_stream = stream.CreateReference();
	m_offset = offset;
	m_size = size;
	FilePath = filePath;
	NameOrigin = fileName;
	Name = FilenameUtils.FixFileIdentifier(fileName);
}
private void Read530Data(Stream stream, long headerSize)
{
	if (Header.Flags.IsMetadataAtTheEnd())
	{
		stream.Position = headerSize;
	}
	using (BundleFileBlockReader blockReader = new BundleFileBlockReader(stream, Metadata))
	{
		foreach (BundleFileEntry entry in Metadata.Entries)
		{
			SmartStream entryStream = blockReader.ReadEntry(entry);
			FileScheme scheme = GameCollection.ReadScheme(entryStream, FilePath, entry.NameOrigin);
			AddScheme(scheme);
		}
	}
}
private void ReadFileStreamData(Stream stream, long basePosition, long headerSize)
{
	if (Header.FileStream.Flags.IsBlocksInfoAtTheEnd())
	{
		stream.Position = basePosition + headerSize;
	}
	using (BundleFileBlockReader blockReader = new BundleFileBlockReader(stream, Metadata.BlocksInfo))
	{
		foreach (Node entry in Metadata.DirectoryInfo.Nodes)
		{
			SmartStream entryStream = blockReader.ReadEntry(entry);
			FileScheme scheme = GameCollection.ReadScheme(entryStream, FilePath, entry.PathOrigin);
			AddScheme(scheme);
		}
	}
}
public void ReadResourcesFile(FileCollection collection)
{
	if (m_stream.StreamType == SmartStreamType.Memory && m_stream.Length > m_size)
	{
		// create a separate stream so the resource file doesn't refer to the huge memory blob
		// and the blob can be released
		using (SmartStream resStream = SmartStream.CreateMemory(new byte[m_size]))
		{
			using (PartialStream partStream = new PartialStream(m_stream, m_offset, m_size))
			{
				partStream.CopyTo(resStream);
			}
			collection.ReadResourceFile(resStream, FilePath, Name, 0, m_size);
		}
	}
	else
	{
		collection.ReadResourceFile(m_stream, FilePath, Name, m_offset, m_size);
	}
}
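// Design note: the copy above trades a one-off allocation of m_size bytes for releasing the
// much larger parent blob; without it, the resource file would keep the whole in-memory
// bundle alive for as long as its resource data is referenced.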
public void Read(SmartStream stream, string filePath)
{
	if (BundleFile.IsBundleFile(stream))
	{
		ReadBundleFile(stream, filePath);
	}
	else if (ArchiveFile.IsArchiveFile(stream))
	{
		ReadArchiveFile(stream, filePath);
	}
	else if (WebFile.IsWebFile(stream))
	{
		ReadWebFile(stream, filePath);
	}
	else
	{
		string fileName = Path.GetFileName(filePath);
		ReadSerializedFile(stream, filePath, fileName, OnRequestDependency);
	}
}
public static FileScheme ReadScheme(SmartStream stream, string filePath, string fileName)
{
	if (BundleFile.IsBundleFile(stream))
	{
		return BundleFile.ReadScheme(stream, filePath, fileName);
	}
	if (ArchiveFile.IsArchiveFile(stream))
	{
		return ArchiveFile.ReadScheme(stream, filePath, fileName);
	}
	if (WebFile.IsWebFile(stream))
	{
		return WebFile.ReadScheme(stream, filePath);
	}
	if (SerializedFile.IsSerializedFile(stream))
	{
		return SerializedFile.ReadScheme(stream, filePath, fileName);
	}
	return ResourceFile.ReadScheme(stream, filePath, fileName);
}
internal ResourceFile(SmartStream stream, long offset, long size, string filePath, string fileName)
{
	if (stream == null)
	{
		throw new ArgumentNullException(nameof(stream));
	}
	if (string.IsNullOrEmpty(filePath))
	{
		throw new ArgumentNullException(nameof(filePath));
	}
	if (string.IsNullOrEmpty(fileName))
	{
		throw new ArgumentNullException(nameof(fileName));
	}
	FilePath = filePath;
	Name = fileName;
	m_stream = stream.CreateReference();
	Offset = offset;
	Size = size;
}
private void ReadPre530Blocks(SmartStream dataStream)
{
	long baseOffset;
	switch (Header.Type)
	{
		case BundleType.UnityRaw:
			baseOffset = Header.HeaderSize;
			break;

		case BundleType.UnityWeb:
			baseOffset = 0;
			break;

		default:
			throw new NotSupportedException($"Bundle type {Header.Type} isn't supported before 530 generation");
	}

	foreach (BundleFileEntry entry in Metadata.Entries.Values)
	{
		FileEntryOffset feOffset = new FileEntryOffset(dataStream.CreateReference(), baseOffset + entry.Offset);
		m_entryStreams.Add(entry, feOffset);
	}
}
public static FileScheme ReadScheme(SmartStream stream, long offset, long size, string filePath, string fileName)
{
	if (BundleFile.IsBundleFile(stream, offset, size))
	{
		return BundleFile.ReadScheme(stream, offset, size, filePath, fileName);
	}
	if (ArchiveFile.IsArchiveFile(stream, offset, size))
	{
		return ArchiveFile.ReadScheme(stream, offset, size, filePath, fileName);
	}
	if (WebFile.IsWebFile(stream, offset, size))
	{
		return WebFile.ReadScheme(stream, offset, size, filePath, fileName);
	}
	// default resource files are matched by name before the serialized file signature is probed
	if (ResourceFile.IsDefaultResourceFile(fileName))
	{
		return ResourceFile.ReadScheme(stream, offset, size, filePath, fileName);
	}
	if (SerializedFile.IsSerializedFile(stream, offset, size))
	{
		return SerializedFile.ReadScheme(stream, offset, size, filePath, fileName);
	}
	return ResourceFile.ReadScheme(stream, offset, size, filePath, fileName);
}
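// A hedged usage sketch (the helper name is illustrative, not from the original sources):
// probing an arbitrary file on disk with the signature checks above. Only members that
// appear elsewhere in this code (SmartStream.OpenRead, Path helpers, ReadScheme) are used.
private static FileScheme LoadAnyScheme(string filePath)
{
	string fileName = Path.GetFileNameWithoutExtension(filePath);
	using (SmartStream stream = SmartStream.OpenRead(filePath))
	{
		// the result is one of the concrete schemes matched above: bundle, archive,
		// web, serialized file, or resource file as the fallback
		return ReadScheme(stream, 0, stream.Length, filePath, fileName);
	}
}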
public static BundleFileScheme ReadScheme(SmartStream stream, long offset, long size, string filePath, string fileName)
{
	return BundleFileScheme.ReadScheme(stream, offset, size, filePath, fileName);
}
public static WebFileScheme ReadScheme(SmartStream stream, string filePath)
{
	return WebFileScheme.ReadScheme(stream, filePath);
}
protected FileSchemeList(SmartStream stream, long offset, long size, string filePath, string fileName) :
	base(stream, offset, size, filePath, fileName)
{
}
internal void ReadResourceFile(SmartStream stream, string filePath, string fileName, long offset, long size)
{
	ResourcesFile resource = new ResourcesFile(stream, filePath, fileName, offset, size);
	AddResourceFile(resource);
}
public static SerializedFileScheme ReadScheme(SmartStream stream, long offset, long size, string filePath, string fileName)
{
	return SerializedFileScheme.ReadSceme(stream, offset, size, filePath, fileName);
}
public static SerializedFileScheme ReadScheme(SmartStream stream, long offset, long size, string filePath, string fileName, TransferInstructionFlags flags)
{
	return SerializedFileScheme.ReadSceme(stream, offset, size, filePath, fileName, flags);
}
public FileEntryOffset(SmartStream stream, long baseOffset)
{
	Stream = stream;
	Offset = baseOffset;
}
private WebFileScheme(SmartStream stream, long offset, long size, string filePath, string fileName) :
	base(stream, offset, size, filePath, fileName)
{
}
internal static SerializedFileScheme ReadSceme(SmartStream stream, long offset, long size, string filePath, string fileName)
{
	return ReadSceme(stream, offset, size, filePath, fileName, DefaultFlags);
}
private void Read530Blocks(SmartStream dataStream, long headerSize)
{
	if (Header.Flags.IsMetadataAtTheEnd())
	{
		dataStream.Position = headerSize;
	}
	int cachedBlock = -1;
	long dataOffset = dataStream.Position;

	// If the bundle data lives in a MemoryStream and contains compressed blocks, then we need
	// to create an individual stream for each entry and copy the entry's data into it
	bool createIndividualStreams = false;
	if (dataStream.StreamType == SmartStreamType.Memory)
	{
		// find out if this bundle file has compressed blocks
		foreach (BlockInfo block in Metadata.BlockInfos)
		{
			if (block.Flags.GetCompression() != BundleCompressType.None)
			{
				createIndividualStreams = true;
				break;
			}
		}
	}

	using (SmartStream blockStream = SmartStream.CreateNull())
	{
		foreach (BundleFileEntry entry in Metadata.Entries.Values)
		{
			// find the first block of this entry and its compressed/decompressed offsets
			long blockCompressedOffset = 0;
			long blockDecompressedOffset = 0;
			int blockIndex = 0;
			while (blockDecompressedOffset + Metadata.BlockInfos[blockIndex].DecompressedSize <= entry.Offset)
			{
				blockCompressedOffset += Metadata.BlockInfos[blockIndex].CompressedSize;
				blockDecompressedOffset += Metadata.BlockInfos[blockIndex].DecompressedSize;
				blockIndex++;
			}

			// if at least one block of this entry is compressed, or the rule above applies,
			// we should copy the data of the current entry to a separate stream
			bool needToCopy = createIndividualStreams;
			if (!needToCopy)
			{
				// check if this entry has compressed blocks
				long entrySize = 0;
				for (int bi = blockIndex; entrySize < entry.Size; bi++)
				{
					if (Metadata.BlockInfos[bi].Flags.GetCompression() != BundleCompressType.None)
					{
						// it does, so we need to create an individual stream and decompress the data into it
						needToCopy = true;
						break;
					}
					entrySize += Metadata.BlockInfos[bi].DecompressedSize;
				}
			}

			long entryOffsetInsideBlock = entry.Offset - blockDecompressedOffset;
			if (needToCopy)
			{
				// at least one block is compressed, so copy the data of the current entry to a separate stream
				using (SmartStream entryStream = CreateStream(entry.Size))
				{
					long left = entry.Size;
					dataStream.Position = dataOffset + blockCompressedOffset;

					// copy the data of all blocks used by the current entry into the new stream
					for (int bi = blockIndex; left > 0; bi++)
					{
						long blockOffset = 0;
						BlockInfo block = Metadata.BlockInfos[bi];
						if (cachedBlock == bi)
						{
							// some data of the previous entry is in the same block as this one,
							// so we don't need to unpack it again; instead we can reuse the cached stream
							dataStream.Position += block.CompressedSize;
						}
						else
						{
							BundleCompressType compressType = block.Flags.GetCompression();
							switch (compressType)
							{
								case BundleCompressType.None:
									blockOffset = dataOffset + blockCompressedOffset;
									blockStream.Assign(dataStream);
									break;

								case BundleCompressType.LZMA:
									blockStream.Move(CreateStream(block.DecompressedSize));
									SevenZipHelper.DecompressLZMAStream(dataStream, block.CompressedSize, blockStream, block.DecompressedSize);
									break;

								case BundleCompressType.LZ4:
								case BundleCompressType.LZ4HZ:
									blockStream.Move(CreateStream(block.DecompressedSize));
									using (Lz4DecodeStream lzStream = new Lz4DecodeStream(dataStream, block.CompressedSize))
									{
										long read = lzStream.Read(blockStream, block.DecompressedSize);
										if (read != block.DecompressedSize)
										{
											throw new Exception($"Read {read} but expected {block.DecompressedSize}");
										}
										if (lzStream.IsDataLeft)
										{
											throw new Exception("LZ4 stream still has some data");
										}
									}
									break;

								default:
									throw new NotImplementedException($"Bundle compression '{compressType}' isn't supported");
							}
							cachedBlock = bi;
						}

						// consider the next offsets:
						// 1) block: 0 if the block was unpacked into a fresh stream, otherwise the offset of this block in the bundle file
						// 2) entry: for the first block of the current entry, the offset of the entry relative to the block; 0 afterwards
						long fragmentSize = block.DecompressedSize - entryOffsetInsideBlock;
						blockStream.Position = blockOffset + entryOffsetInsideBlock;
						entryOffsetInsideBlock = 0;

						long size = Math.Min(fragmentSize, left);
						blockStream.CopyStream(entryStream, size);

						blockCompressedOffset += block.CompressedSize;
						left -= size;
					}
					if (left < 0)
					{
						throw new Exception("Read more than expected");
					}

					FileEntryOffset feOffset = new FileEntryOffset(entryStream.CreateReference(), 0);
					m_entryStreams.Add(entry, feOffset);
				}
			}
			else
			{
				// no compressed blocks were found, so we can use the original bundle stream;
				// FileEntry.Offset is a decompressed offset, so the decompressed block offset was
				// subtracted above and the compressed block offset is added back here
				FileEntryOffset feOffset = new FileEntryOffset(dataStream.CreateReference(), dataOffset + blockCompressedOffset + entryOffsetInsideBlock);
				m_entryStreams.Add(entry, feOffset);
			}
		}
	}
}
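// A hedged sketch (the helper name and tuple shape are hypothetical, not from the original
// sources): the while loop at the top of Read530Blocks, extracted on its own. Blocks are
// laid out back to back, so summing their sizes until the running decompressed offset reaches
// entry.Offset yields both the index of the first block of the entry and the matching
// compressed offset into the bundle payload.
private static (int index, long compressedOffset, long decompressedOffset) FindFirstBlock(IReadOnlyList<BlockInfo> blocks, long entryOffset)
{
	long compressedOffset = 0;
	long decompressedOffset = 0;
	int index = 0;
	while (decompressedOffset + blocks[index].DecompressedSize <= entryOffset)
	{
		compressedOffset += blocks[index].CompressedSize;
		decompressedOffset += blocks[index].DecompressedSize;
		index++;
	}
	return (index, compressedOffset, decompressedOffset);
}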
public static SerializedFileScheme ReadScheme(SmartStream stream, string filePath, string fileName)
{
	return SerializedFileScheme.ReadSceme(stream, filePath, fileName);
}
private SmartStream CreateStream(long decompressedSize)
{
	return decompressedSize > int.MaxValue ? SmartStream.CreateTemp() : SmartStream.CreateMemory(new byte[decompressedSize]);
}
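// Design note: a managed byte[] cannot exceed int.MaxValue elements, and
// SmartStream.CreateMemory(new byte[decompressedSize]) would overflow for larger payloads,
// so sizes above that threshold fall back to a temporary file-backed stream instead.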