// Reads a class database ("cldb") file, transparently decompressing the
// class table when the header marks it as LZ4- or LZMA-compressed.
// Returns true (and sets `valid`) on success; false for a bad magic,
// an unsupported file version, or an unknown compression type.
public bool Read(AssetsFileReader reader)
{
    header = new ClassDatabaseFileHeader();
    header.Read(reader);
    // Only "cldb" files with versions 1-4 are supported.
    if (header.header != "cldb" || header.fileVersion > 4 || header.fileVersion < 1)
    {
        valid = false;
        return(valid);
    }
    classes = new List <ClassDatabaseType>();
    long classTablePos = reader.Position;
    AssetsFileReader newReader = reader;
    if (header.compressionType != 0)
    {
        // The decompressed payload begins with the class table, so its
        // position within the replacement stream is 0.
        classTablePos = 0;
        MemoryStream ms;
        if (header.compressionType == 1) //lz4
        {
            byte[] uncompressedBytes = new byte[header.uncompressedSize];
            using (MemoryStream tempMs = new MemoryStream(reader.ReadBytes((int)header.compressedSize)))
            {
                Lz4DecoderStream decoder = new Lz4DecoderStream(tempMs);
                decoder.Read(uncompressedBytes, 0, (int)header.uncompressedSize);
                decoder.Dispose();
            }
            ms = new MemoryStream(uncompressedBytes);
        }
        else if (header.compressionType == 2) //lzma
        {
            using (MemoryStream tempMs = new MemoryStream(reader.ReadBytes((int)header.compressedSize)))
            {
                ms = SevenZipHelper.StreamDecompress(tempMs);
            }
        }
        else
        {
            // Unknown compression type: bail out.
            valid = false;
            return(valid);
        }
        // All remaining reads go through the decompressed stream.
        newReader = new AssetsFileReader(ms);
        newReader.bigEndian = false;
    }
    // Read the string table first, then seek back to the class table.
    newReader.Position = header.stringTablePos;
    stringTable = newReader.ReadBytes((int)header.stringTableLen);
    newReader.Position = classTablePos;
    uint size = newReader.ReadUInt32();
    for (int i = 0; i < size; i++)
    {
        ClassDatabaseType cdt = new ClassDatabaseType();
        cdt.Read(newReader, header.fileVersion, header.flags);
        classes.Add(cdt);
    }
    valid = true;
    return(valid);
}
// Reads a class database package: a header, a series of embedded
// ClassDatabaseFiles, and a shared string table that is patched into
// each loaded file afterwards.
// NOTE(review): only packages with bit 0x20 set in compressionType are
// accepted — presumably that bit marks an uncompressed package; confirm
// against the corresponding writer/packer.
public void Read(AssetsFileReader reader)
{
    header = new ClassDatabasePackageHeader();
    header.Read(reader);
    if ((header.compressionType & 0x20) == 0x00)
    {
        throw new NotImplementedException("Please uncompress the package before loading.");
    }
    files = new ClassDatabaseFile[header.fileCount];
    // File offsets in the header are relative to the position right after it.
    long firstFile = reader.Position;
    for (int i = 0; i < header.fileCount; i++)
    {
        reader.Position = firstFile + header.files[i].offset;
        byte[] data = reader.ReadBytes((int)header.files[i].length);
        using (MemoryStream ms = new MemoryStream(data))
        using (AssetsFileReader r = new AssetsFileReader(ms))
        {
            files[i] = new ClassDatabaseFile();
            files[i].Read(r);
        }
    }
    // Shared string table is read from the current position (after the last
    // file's data); every embedded file references this single copy.
    stringTable = reader.ReadBytes((int)header.stringTableLenUncompressed);
    for (int i = 0; i < header.fileCount; i++)
    {
        files[i].stringTable = stringTable;
    }
}
// Reads one asset file info entry for the given metadata version.
public void Read(uint version, AssetsFileReader reader)
{
    // Path IDs widened from 32 to 64 bits in metadata version 0x0E.
    if (version < 0x0E)
    {
        index = reader.ReadUInt32();
    }
    else
    {
        index = reader.ReadInt64();
    }

    curFileOffset = reader.ReadUInt32();
    curFileSize = reader.ReadUInt32();
    curFileTypeOrIndex = reader.ReadInt32();

    // Older metadata carries the base class id and script index inline.
    if (version < 0x10)
    {
        inheritedUnityClass = reader.ReadUInt16();
    }
    if (version <= 0x10)
    {
        scriptIndex = reader.ReadUInt16();
    }

    // Versions 0x0F..0x10 add one extra byte followed by 3 bytes of padding.
    if (version >= 0x0F && version <= 0x10)
    {
        unknown1 = reader.ReadByte();
        reader.ReadBytes(3);
    }
}
// Reads an old-format class database ("cldb") file. Only uncompressed
// files of version 1 or 3 are accepted; anything else marks the file
// invalid. Returns the value of `valid`.
public bool Read(AssetsFileReader reader)
{
    header = new ClassDatabaseFileHeader();
    header.Read(reader, 0);
    if (header.header != "cldb" || !(header.fileVersion == 3 || header.fileVersion == 1) || header.compressionType != 0)
    {
        valid = false;
        return(valid);
    }
    classes = new List <ClassDatabaseType>();
    // Remember where the class table starts, read the string table, then
    // seek back. NOTE(review): the seek goes through reader.BaseStream
    // while the restore uses reader.Position — presumably equivalent on
    // AssetsFileReader; confirm they track the same position.
    ulong classTablePos = reader.Position;
    reader.BaseStream.Position = header.stringTablePos;
    stringTable = reader.ReadBytes((int)header.stringTableLen);
    reader.Position = classTablePos;
    uint size = reader.ReadUInt32();
    for (int i = 0; i < size; i++)
    {
        ClassDatabaseType cdt = new ClassDatabaseType();
        cdt.Read(reader, reader.Position, header.fileVersion);
        classes.Add(cdt);
    }
    valid = true;
    return(valid);
}
// Reads one asset file info entry and returns the reader position after it.
// `pos` and `bigEndian` are unused here; kept for signature compatibility.
public ulong Read(uint version, ulong pos, AssetsFileReader reader, bool bigEndian)
{
    // Path IDs are 64-bit from metadata version 0x0E onward.
    if (version < 0x0E)
    {
        index = reader.ReadUInt32();
    }
    else
    {
        index = reader.ReadUInt64();
    }

    offs_curFile = reader.ReadUInt32();
    curFileSize = reader.ReadUInt32();
    curFileTypeOrIndex = reader.ReadUInt32();

    // Older metadata stores the base class and script index inline.
    if (version < 0x10)
    {
        inheritedUnityClass = reader.ReadUInt16();
    }
    if (version <= 0x10)
    {
        scriptIndex = reader.ReadUInt16();
    }

    // Versions 0x0F..0x10 add one extra byte followed by 3 bytes of padding.
    if (version >= 0x0F && version <= 0x10)
    {
        unknown1 = reader.ReadByte();
        reader.ReadBytes(3);
    }

    return reader.Position;
}
public byte[] unknown; //0x11, for format >= 9
///public uint GetSizeBytes();
// Reads the assets file header fields and switches the reader's
// endianness to match the file's declared byte order.
public void Read(AssetsFileReader reader)
{
    metadataSize = reader.ReadUInt32();
    fileSize = reader.ReadUInt32();
    format = reader.ReadUInt32();
    firstFileOffset = reader.ReadUInt32();
    endianness = reader.ReadByte();
    // 1 = big endian, 0 = little endian. (Redundant `? true : false`
    // removed; matches the style of the other header readers.)
    reader.bigEndian = endianness == 1;
    unknown = reader.ReadBytes(3);
}
public byte[] unknown; //0x11, for format >= 9
///public uint GetSizeBytes();
// Reads the assets file header and returns the reader position after it.
// `absFilePos` is unused; kept for signature compatibility.
public ulong Read(ulong absFilePos, AssetsFileReader reader)
{
    metadataSize = reader.ReadUInt32();
    fileSize = reader.ReadUInt32();
    format = reader.ReadUInt32();
    offs_firstFile = reader.ReadUInt32();
    endianness = reader.ReadByte();
    // 1 = big endian, 0 = little endian (redundant ternary removed).
    reader.bigEndian = endianness == 1;
    unknown = reader.ReadBytes(3);
    return reader.Position;
}
// Reads the assets file header and aligns the reader afterwards.
public void Read(AssetsFileReader reader)
{
    metadataSize = reader.ReadUInt32();
    fileSize = reader.ReadUInt32();
    format = reader.ReadUInt32();
    firstFileOffset = reader.ReadUInt32();
    endianness = reader.ReadByte(); //todo "fileSize - metadataSize" for v<9 but I have no files to test on
    // 1 = big endian, 0 = little endian (redundant ternary removed).
    reader.bigEndian = endianness == 1;
    unknown = reader.ReadBytes(3);
    reader.Align();
}
// Reads one serialized type entry. `hasTypeTree` selects whether the
// embedded type tree (field list + string table) follows the hashes.
public void Read(bool hasTypeTree, AssetsFileReader reader, uint version)
{
    classId = reader.ReadInt32();
    if (version >= 0x10)
    {
        unknown16_1 = reader.ReadByte();
    }
    if (version >= 0x11)
    {
        scriptIndex = reader.ReadUInt16();
    }
    else
    {
        // Older formats have no script index; use the "none" sentinel.
        scriptIndex = 0xffff;
    }
    // Script types (negative class id before 0x11; class id 0x72 / 114,
    // i.e. MonoBehaviour, from 0x11 on) carry an extra script hash.
    if ((version < 0x11 && classId < 0) || (version >= 0x11 && classId == 0x72))
    {
        scriptHash1 = reader.ReadUInt32();
        scriptHash2 = reader.ReadUInt32();
        scriptHash3 = reader.ReadUInt32();
        scriptHash4 = reader.ReadUInt32();
    }
    typeHash1 = reader.ReadUInt32();
    typeHash2 = reader.ReadUInt32();
    typeHash3 = reader.ReadUInt32();
    typeHash4 = reader.ReadUInt32();
    if (hasTypeTree)
    {
        typeFieldsExCount = reader.ReadUInt32();
        stringTableLen = reader.ReadUInt32();
        typeFieldsEx = new TypeField_0D[typeFieldsExCount];
        for (int i = 0; i < typeFieldsExCount; i++)
        {
            TypeField_0D typefield0d = new TypeField_0D();
            typefield0d.Read(reader, version);
            typeFieldsEx[i] = typefield0d;
        }
        // Field name/type offsets point into this UTF-8 string table.
        stringTable = Encoding.UTF8.GetString(reader.ReadBytes((int)stringTableLen));
        // Version 0x15+ appends a list of type dependencies.
        if (version >= 0x15)
        {
            dependenciesCount = reader.ReadInt32();
            dependencies = new int[dependenciesCount];
            for (int i = 0; i < dependenciesCount; i++)
            {
                dependencies[i] = reader.ReadInt32();
            }
        }
    }
}
// Reads the assets file header, including the large-file variant
// (format >= 0x16) where sizes/offsets are re-read as 64-bit values.
public void Read(AssetsFileReader reader)
{
    metadataSize = reader.ReadUInt32();
    fileSize = reader.ReadUInt32();
    format = reader.ReadUInt32();
    firstFileOffset = reader.ReadUInt32();
    endianness = reader.ReadByte(); //todo "fileSize - metadataSize" for v<9 but I have no files to test on
    unknown = reader.ReadBytes(3);
    reader.Align();
    if (format >= 0x16)
    {
        // Format 0x16+ repeats the header with 64-bit sizes/offsets;
        // the 32-bit values read above are superseded.
        metadataSize = reader.ReadUInt32();
        fileSize = reader.ReadInt64();
        firstFileOffset = reader.ReadInt64();
        reader.Position += 8; //skip 8 bytes (purpose unknown)
    }
    // NOTE(review): the reader's endianness is only switched after the
    // format >= 0x16 fields were read — confirm those fields are always
    // little endian on disk.
    reader.bigEndian = endianness == 1;
}
// Reads one type tree field entry for the given metadata format.
public void Read(AssetsFileReader reader, uint format)
{
    version = reader.ReadUInt16();
    depth = reader.ReadByte();
    isArray = reader.ReadByte();
    typeStringOffset = reader.ReadUInt32();
    nameStringOffset = reader.ReadUInt32();
    size = reader.ReadInt32();
    index = reader.ReadUInt32();
    flags = reader.ReadUInt32();
    // Format 0x12+ appends 8 extra bytes per field; older formats have none.
    unknown = format < 0x12 ? new byte[0] : reader.ReadBytes(8);
}
public byte[] unknown; //0x18
// Reads one type tree field entry and returns the reader position after it.
// `absFilePos` and `bigEndian` are unused; kept for signature compatibility.
public ulong Read(ulong absFilePos, AssetsFileReader reader, uint format, bool bigEndian)
{
    version = reader.ReadUInt16();
    depth = reader.ReadByte();
    isArray = reader.ReadByte();
    typeStringOffset = reader.ReadUInt32();
    nameStringOffset = reader.ReadUInt32();
    size = reader.ReadUInt32();
    index = reader.ReadUInt32();
    flags = reader.ReadUInt32();
    // Format 0x12+ appends 8 extra bytes per field; older formats have none.
    unknown = format < 0x12 ? new byte[0] : reader.ReadBytes(8);
    return reader.Position;
}
// Reads the assets file header. The endianness byte is only stored here
// for format < 9; newer formats default to little endian at this point,
// and the 3 unknown bytes only exist for format >= 9.
public void Read(AssetsFileReader reader)
{
    metadataSize = reader.ReadUInt32();
    fileSize = reader.ReadUInt32();
    format = reader.ReadUInt32();
    firstFileOffset = reader.ReadUInt32();
    if (format < 9)
    {
        endianness = reader.ReadByte();
    }
    else
    {
        endianness = 0;
    }
    // 1 = big endian, 0 = little endian (redundant ternary removed).
    reader.bigEndian = endianness == 1;
    if (format >= 9)
    {
        unknown = reader.ReadBytes(3);
    }
    reader.Align();
}
// Reads the assets file header, including the format >= 0x16 extension
// with 64-bit sizes and two trailing unknown fields.
public void Read(AssetsFileReader reader)
{
    metadataSize = reader.ReadUInt32();
    fileSize = reader.ReadUInt32();
    format = reader.ReadUInt32();
    firstFileOffset = reader.ReadUInt32();
    endianness = reader.ReadByte(); //todo "fileSize - metadataSize" for v<9 but I have no files to test on
    unknown = reader.ReadBytes(3);
    reader.Align();
    if (format >= 0x16)
    {
        // 64-bit header fields supersede the 32-bit values read above.
        metadataSize = reader.ReadUInt32();
        fileSize = reader.ReadInt64();
        firstFileOffset = reader.ReadInt64();
    }
    reader.bigEndian = endianness == 1;
    if (format >= 0x16)
    {
        unknown1 = reader.ReadUInt32(); //seen as 0x00 everywhere
        unknown2 = reader.ReadUInt32(); //seen as 0x1b in bundles and 0x00 everywhere else
    }
}
// Loads the assets file entry at `entryPos`/`entrySize` from `entryReader`
// into an in-memory AssetsFile. Returns false if already initialized.
public override bool Init(AssetsFileReader entryReader, long entryPos, long entrySize, ClassDatabaseFile typeMeta = null)
{
    // Refuse to initialize twice.
    if (assetsFile != null)
    {
        return false;
    }

    this.typeMeta = typeMeta;
    entryReader.Position = entryPos;

    // Copy the entry into a private MemoryStream first (works around
    // alignment issues when reading straight from the source stream).
    MemoryStream memStream = new MemoryStream();
    AssetsFileReader fileReader = new AssetsFileReader(memStream);
    AssetsFileWriter fileWriter = new AssetsFileWriter(memStream);
    fileWriter.Write(entryReader.ReadBytes((int)entrySize));
    memStream.Position = 0;
    assetsFile = new AssetsFile(fileReader);
    return true;
}
// Reads one serialized type entry and returns the reader position after
// it. `absFilePos` and `typeVersion` are unused; kept for compatibility.
public ulong Read(bool hasTypeTree, ulong absFilePos, AssetsFileReader reader, uint version, uint typeVersion, bool bigEndian)
{
    classId = reader.ReadInt32();
    if (version >= 0x10)
    {
        unknown16_1 = reader.ReadByte();
    }
    if (version >= 0x11)
    {
        scriptIndex = reader.ReadUInt16();
    }
    //if ((version < 0x11 && classId < 0) || (version >= 0x11 && scriptIndex != 0xFFFF)) //original is if (classId == 114)
    //if ((version < 0x11 && classId < 0) || (version >= 0x11 && scriptIndex != 0xFFFF))
    // Script types (class id 114 = MonoBehaviour) carry an extra hash block.
    if ((version < 0x11 && classId < 0) || (version >= 0x11 && classId == 114))
    {
        unknown1 = reader.ReadUInt32();
        unknown2 = reader.ReadUInt32();
        unknown3 = reader.ReadUInt32();
        unknown4 = reader.ReadUInt32();
    }
    unknown5 = reader.ReadUInt32();
    unknown6 = reader.ReadUInt32();
    unknown7 = reader.ReadUInt32();
    unknown8 = reader.ReadUInt32();
    if (hasTypeTree)
    {
        // Type tree: field count, string table length, the fields, then
        // the raw string table decoded as UTF-8.
        typeFieldsExCount = reader.ReadUInt32();
        stringTableLen = reader.ReadUInt32();
        pTypeFieldsEx = new TypeField_0D[typeFieldsExCount];
        for (int i = 0; i < typeFieldsExCount; i++)
        {
            TypeField_0D typefield0d = new TypeField_0D();
            typefield0d.Read(reader.Position, reader, version, bigEndian);
            pTypeFieldsEx[i] = typefield0d;
        }
        pStringTable = Encoding.UTF8.GetString(reader.ReadBytes((int)stringTableLen));
    }
    return(reader.Position);
}
// Repacks the bundle read from `reader` into `writer`, compressing the
// file data with the requested algorithm (LZMA single-block, LZ4 in
// 0x20000-byte blocks, or stored uncompressed). Returns false if the
// bundle cannot be read or the compression type is unsupported.
public bool Pack(AssetsFileReader reader, AssetsFileWriter writer, AssetBundleCompressionType compType)
{
    reader.Position = 0;
    writer.Position = 0;
    if (Read(reader, false))
    {
        // NOTE(review): flags 0x43 presumably marks an LZ4-compressed block
        // listing with directory info present — confirm against the format spec.
        AssetBundleHeader06 newHeader = new AssetBundleHeader06()
        {
            signature = bundleHeader6.signature,
            fileVersion = bundleHeader6.fileVersion,
            minPlayerVersion = bundleHeader6.minPlayerVersion,
            fileEngineVersion = bundleHeader6.fileEngineVersion,
            totalFileSize = 0,
            compressedSize = 0,
            decompressedSize = 0,
            flags = 0x43
        };
        AssetBundleBlockAndDirectoryList06 newBlockAndDirList = new AssetBundleBlockAndDirectoryList06()
        {
            checksumLow = 0,
            checksumHigh = 0,
            blockCount = 0,
            blockInf = null,
            directoryCount = bundleInf6.directoryCount,
            dirInf = bundleInf6.dirInf
        };
        List <AssetBundleBlockInfo06> newBlocks = new List <AssetBundleBlockInfo06>();
        // Pull the entire (uncompressed) file data section into memory.
        reader.Position = bundleHeader6.GetFileDataOffset();
        int fileDataLength = (int)(bundleHeader6.totalFileSize - reader.Position);
        byte[] fileData = reader.ReadBytes(fileDataLength);
        //todo, we just write everything to memory and then write to file
        //we could calculate the blocks we need ahead of time and correctly
        //size the block listing before this so we can write directly to file
        byte[] compressedFileData;
        switch (compType)
        {
            case AssetBundleCompressionType.LZMA:
            {
                // LZMA compresses the whole data section as one block.
                compressedFileData = SevenZipHelper.Compress(fileData);
                newBlocks.Add(new AssetBundleBlockInfo06()
                {
                    compressedSize = (uint)compressedFileData.Length,
                    decompressedSize = (uint)fileData.Length,
                    flags = 0x41
                });
                break;
            }
            case AssetBundleCompressionType.LZ4:
            {
                using (var memStreamCom = new MemoryStream())
                using (var binaryWriter = new BinaryWriter(memStreamCom))
                {
                    using (var memStreamUnc = new MemoryStream(fileData))
                    using (var binaryReader = new BinaryReader(memStreamUnc))
                    {
                        //compress into 0x20000 blocks
                        byte[] uncompressedBlock = binaryReader.ReadBytes(131072);
                        while (uncompressedBlock.Length != 0)
                        {
                            byte[] compressedBlock = LZ4Codec.Encode32HC(uncompressedBlock, 0, uncompressedBlock.Length);
                            // If compression didn't help, store the block raw
                            // (flags 0x0); otherwise store it LZ4HC (flags 0x3).
                            if (compressedBlock.Length > uncompressedBlock.Length)
                            {
                                newBlocks.Add(new AssetBundleBlockInfo06()
                                {
                                    compressedSize = (uint)uncompressedBlock.Length,
                                    decompressedSize = (uint)uncompressedBlock.Length,
                                    flags = 0x0
                                });
                                binaryWriter.Write(uncompressedBlock);
                            }
                            else
                            {
                                newBlocks.Add(new AssetBundleBlockInfo06()
                                {
                                    compressedSize = (uint)compressedBlock.Length,
                                    decompressedSize = (uint)uncompressedBlock.Length,
                                    flags = 0x3
                                });
                                binaryWriter.Write(compressedBlock);
                            }
                            uncompressedBlock = binaryReader.ReadBytes(131072);
                        }
                    }
                    compressedFileData = memStreamCom.ToArray();
                }
                break;
            }
            case AssetBundleCompressionType.NONE:
            {
                compressedFileData = fileData;
                newBlocks.Add(new AssetBundleBlockInfo06()
                {
                    compressedSize = (uint)fileData.Length,
                    decompressedSize = (uint)fileData.Length,
                    flags = 0x00
                });
                break;
            }
            default:
            {
                return(false);
            }
        }
        newBlockAndDirList.blockInf = newBlocks.ToArray();
        // Serialize the block/directory listing so it can be compressed.
        byte[] bundleInfoBytes;
        using (var memStream = new MemoryStream())
        {
            var afw = new AssetsFileWriter(memStream);
            newBlockAndDirList.Write(afw);
            bundleInfoBytes = memStream.ToArray();
        }
        if (bundleInfoBytes == null || bundleInfoBytes.Length == 0)
        {
            return(false);
        }
        //listing is usually lz4 even if the data blocks are lzma
        byte[] bundleInfoBytesCom = LZ4Codec.Encode32HC(bundleInfoBytes, 0, bundleInfoBytes.Length);
        // Serialize the header once just to learn its size for totalFileSize.
        byte[] bundleHeaderBytes = null;
        using (var memStream = new MemoryStream())
        {
            var afw = new AssetsFileWriter(memStream);
            newHeader.Write(afw);
            bundleHeaderBytes = memStream.ToArray();
        }
        if (bundleHeaderBytes == null || bundleHeaderBytes.Length == 0)
        {
            return(false);
        }
        uint totalFileSize = (uint)(bundleHeaderBytes.Length + bundleInfoBytesCom.Length + compressedFileData.Length);
        newHeader.totalFileSize = totalFileSize;
        newHeader.decompressedSize = (uint)bundleInfoBytes.Length;
        newHeader.compressedSize = (uint)bundleInfoBytesCom.Length;
        // Final layout: header, (aligned) compressed listing, compressed data.
        newHeader.Write(writer);
        if (newHeader.fileVersion >= 7)
        {
            writer.Align16();
        }
        writer.Write(bundleInfoBytesCom);
        writer.Write(compressedFileData);
        return(true);
    }
    return(false);
}
// Rewrites the bundle read from `reader` into `writer` with every block
// decompressed and the compression flags cleared. Returns false if the
// bundle cannot be read.
public bool Unpack(AssetsFileReader reader, AssetsFileWriter writer)
{
    reader.Position = 0;
    if (Read(reader, true))
    {
        reader.Position = bundleHeader6.GetBundleInfoOffset();
        MemoryStream blocksInfoStream;
        AssetsFileReader memReader;
        int compressedSize = (int)bundleHeader6.compressedSize;
        // Decompress the block/directory listing (1 = LZMA, 2/3 = LZ4).
        switch (bundleHeader6.GetCompressionType())
        {
            case 1:
                using (MemoryStream mstream = new MemoryStream(reader.ReadBytes(compressedSize)))
                {
                    blocksInfoStream = SevenZipHelper.StreamDecompress(mstream);
                }
                break;
            case 2:
            case 3:
                byte[] uncompressedBytes = new byte[bundleHeader6.decompressedSize];
                using (MemoryStream mstream = new MemoryStream(reader.ReadBytes(compressedSize)))
                {
                    var decoder = new Lz4DecoderStream(mstream);
                    decoder.Read(uncompressedBytes, 0, (int)bundleHeader6.decompressedSize);
                    decoder.Dispose();
                }
                blocksInfoStream = new MemoryStream(uncompressedBytes);
                break;
            default:
                blocksInfoStream = null;
                break;
        }
        // If the listing was compressed, re-read it from the decompressed stream.
        if (bundleHeader6.GetCompressionType() != 0)
        {
            using (memReader = new AssetsFileReader(blocksInfoStream))
            {
                memReader.Position = 0;
                bundleInf6.Read(0, memReader);
            }
        }
        AssetBundleHeader06 newBundleHeader6 = new AssetBundleHeader06()
        {
            signature = bundleHeader6.signature,
            fileVersion = bundleHeader6.fileVersion,
            minPlayerVersion = bundleHeader6.minPlayerVersion,
            fileEngineVersion = bundleHeader6.fileEngineVersion,
            totalFileSize = 0,
            compressedSize = bundleHeader6.decompressedSize,
            decompressedSize = bundleHeader6.decompressedSize,
            flags = bundleHeader6.flags & 0x40 //set compression and block position to 0
        };
        // New total size = data offset + sum of decompressed block sizes.
        long fileSize = newBundleHeader6.GetFileDataOffset();
        for (int i = 0; i < bundleInf6.blockCount; i++)
        {
            fileSize += bundleInf6.blockInf[i].decompressedSize;
        }
        newBundleHeader6.totalFileSize = fileSize;
        AssetBundleBlockAndDirectoryList06 newBundleInf6 = new AssetBundleBlockAndDirectoryList06()
        {
            checksumLow = 0, //-todo, figure out how to make real checksums, uabe sets these to 0 too
            checksumHigh = 0,
            blockCount = bundleInf6.blockCount,
            directoryCount = bundleInf6.directoryCount
        };
        // Blocks keep their decompressed size on both sides; compression
        // bits in the flags are cleared.
        newBundleInf6.blockInf = new AssetBundleBlockInfo06[newBundleInf6.blockCount];
        for (int i = 0; i < newBundleInf6.blockCount; i++)
        {
            newBundleInf6.blockInf[i] = new AssetBundleBlockInfo06()
            {
                compressedSize = bundleInf6.blockInf[i].decompressedSize,
                decompressedSize = bundleInf6.blockInf[i].decompressedSize,
                flags = (ushort)(bundleInf6.blockInf[i].flags & 0xC0) //set compression to none
            };
        }
        newBundleInf6.dirInf = new AssetBundleDirectoryInfo06[newBundleInf6.directoryCount];
        for (int i = 0; i < newBundleInf6.directoryCount; i++)
        {
            newBundleInf6.dirInf[i] = new AssetBundleDirectoryInfo06()
            {
                offset = bundleInf6.dirInf[i].offset,
                decompressedSize = bundleInf6.dirInf[i].decompressedSize,
                flags = bundleInf6.dirInf[i].flags,
                name = bundleInf6.dirInf[i].name
            };
        }
        // Write header + listing, then stream each block out decompressed.
        newBundleHeader6.Write(writer);
        if (newBundleHeader6.fileVersion >= 7)
        {
            writer.Align16();
        }
        newBundleInf6.Write(writer);
        reader.Position = bundleHeader6.GetFileDataOffset();
        for (int i = 0; i < newBundleInf6.blockCount; i++)
        {
            AssetBundleBlockInfo06 info = bundleInf6.blockInf[i];
            switch (info.GetCompressionType())
            {
                case 0:
                    // Already uncompressed; copy straight through.
                    reader.BaseStream.CopyToCompat(writer.BaseStream, info.compressedSize);
                    break;
                case 1:
                    SevenZipHelper.StreamDecompress(reader.BaseStream, writer.BaseStream, info.compressedSize, info.decompressedSize);
                    break;
                case 2:
                case 3:
                    using (MemoryStream tempMs = new MemoryStream())
                    {
                        reader.BaseStream.CopyToCompat(tempMs, info.compressedSize);
                        tempMs.Position = 0;
                        using (Lz4DecoderStream decoder = new Lz4DecoderStream(tempMs))
                        {
                            decoder.CopyToCompat(writer.BaseStream, info.decompressedSize);
                        }
                    }
                    break;
            }
        }
        return(true);
    }
    return(false);
}
//set fileID to -1 if all replacers are for this .assets file but don't have the fileID set to the same one
//typeMeta is used to add the type information (hash and type fields) for format >= 0x10 if necessary
// Writes a modified copy of this .assets file to `writer`, applying the
// given replacers (add/modify/remove). Returns the final writer position.
public ulong Write(AssetsFileWriter writer, ulong filePos, AssetsReplacer[] pReplacers, uint fileID, ClassDatabaseFile typeMeta = null)
{
    header.Write(writer.Position, writer);

    // Make sure every replacer's class id has an entry in the type tree.
    for (int i = 0; i < pReplacers.Length; i++)
    {
        AssetsReplacer replacer = pReplacers[i];
        if (!typeTree.pTypes_Unity5.Any(t => t.classId == replacer.GetClassID()))
        {
            Type_0D type = new Type_0D()
            {
                classId = replacer.GetClassID(),
                unknown16_1 = 0,
                scriptIndex = 0xFFFF,
                unknown5 = 0,
                unknown6 = 0,
                unknown7 = 0,
                unknown8 = 0,
                typeFieldsExCount = 0,
                stringTableLen = 0,
                pStringTable = ""
            };
            // FIX: Concat returns a new sequence and does not mutate the array;
            // the previous code discarded the result, so the type was never added.
            typeTree.pTypes_Unity5 = typeTree.pTypes_Unity5.Concat(new Type_0D[] { type }).ToArray();
        }
    }
    typeTree.Write(writer.Position, writer, header.format);

    int initialSize = (int)(AssetFileInfo.GetSize(header.format) * AssetCount);
    int newSize = (int)(AssetFileInfo.GetSize(header.format) * (AssetCount + pReplacers.Length));
    int appendedSize = newSize - initialSize;
    reader.Position = AssetTablePos;
    List <AssetFileInfo> originalAssetInfos = new List <AssetFileInfo>();
    List <AssetFileInfo> assetInfos = new List <AssetFileInfo>();
    List <AssetsReplacer> currentReplacers = pReplacers.ToList();
    uint currentOffset = 0;

    //-write all original assets, modify sizes if needed and skip those to be removed
    for (int i = 0; i < AssetCount; i++)
    {
        AssetFileInfo info = new AssetFileInfo();
        info.Read(header.format, reader.Position, reader, reader.bigEndian);
        originalAssetInfos.Add(info);
        AssetsReplacer replacer = currentReplacers.FirstOrDefault(n => n.GetPathID() == info.index);
        if (replacer != null)
        {
            if (replacer.GetReplacementType() == AssetsReplacementType.AssetsReplacement_AddOrModify)
            {
                int classIndex = Array.FindIndex(typeTree.pTypes_Unity5, t => t.classId == replacer.GetClassID());
                // FIX: curFileSize/curFileTypeOrIndex were swapped here (size was
                // set to the type index and the type index to the class id);
                // now matches the new-asset branch below and the newer Write.
                info = new AssetFileInfo()
                {
                    index = replacer.GetPathID(),
                    offs_curFile = currentOffset,
                    curFileSize = (uint)replacer.GetSize(),
                    curFileTypeOrIndex = (uint)classIndex,
                    inheritedUnityClass = (ushort)replacer.GetClassID(), //-what is this
                    scriptIndex = replacer.GetMonoScriptID(),
                    unknown1 = 0
                };
            }
            else if (replacer.GetReplacementType() == AssetsReplacementType.AssetsReplacement_Remove)
            {
                continue;
            }
        }
        // Keep every asset 8-byte aligned within the data section.
        currentOffset += info.curFileSize;
        uint pad = 8 - (currentOffset % 8);
        if (pad != 8) { currentOffset += pad; }
        assetInfos.Add(info);
    }

    //-write new assets
    while (currentReplacers.Count > 0)
    {
        AssetsReplacer replacer = currentReplacers.First();
        if (replacer.GetReplacementType() == AssetsReplacementType.AssetsReplacement_AddOrModify)
        {
            int classIndex = Array.FindIndex(typeTree.pTypes_Unity5, t => t.classId == replacer.GetClassID());
            AssetFileInfo info = new AssetFileInfo()
            {
                index = replacer.GetPathID(),
                offs_curFile = currentOffset,
                curFileSize = (uint)replacer.GetSize(),
                curFileTypeOrIndex = (uint)classIndex,
                inheritedUnityClass = (ushort)replacer.GetClassID(),
                scriptIndex = replacer.GetMonoScriptID(),
                unknown1 = 0
            };
            currentOffset += info.curFileSize;
            uint pad = 8 - (currentOffset % 8);
            if (pad != 8) { currentOffset += pad; }
            assetInfos.Add(info);
        }
        currentReplacers.Remove(replacer);
    }

    writer.Write(assetInfos.Count);
    writer.Align();
    for (int i = 0; i < assetInfos.Count; i++)
    {
        assetInfos[i].Write(header.format, writer.Position, writer);
    }
    preloadTable.Write(writer.Position, writer, header.format);
    dependencies.Write(writer.Position, writer, header.format);
    uint metadataSize = (uint)writer.Position - 0x13;

    //-for padding only. if all initial data before assetData is more than 0x1000, this is skipped
    while (writer.Position < 0x1000 /*header.offs_firstFile*/)
    {
        writer.Write((byte)0x00);
    }
    // FIX: remember the new data offset but keep header.offs_firstFile at its
    // original value until all original asset data has been copied; the copy
    // below reads relative to the ORIGINAL offset (mirrors the newer Write).
    uint newOffs_firstFile = (uint)writer.Position;
    for (int i = 0; i < assetInfos.Count; i++)
    {
        AssetFileInfo info = assetInfos[i];
        AssetsReplacer replacer = pReplacers.FirstOrDefault(n => n.GetPathID() == info.index);
        if (replacer != null)
        {
            if (replacer.GetReplacementType() == AssetsReplacementType.AssetsReplacement_AddOrModify)
            {
                replacer.Write(writer.Position, writer);
                writer.Align8();
            }
            else if (replacer.GetReplacementType() == AssetsReplacementType.AssetsReplacement_Remove)
            {
                continue;
            }
        }
        else
        {
            // Untouched asset: copy its bytes from the source file.
            AssetFileInfo originalInfo = originalAssetInfos.FirstOrDefault(n => n.index == info.index);
            if (originalInfo != null)
            {
                reader.Position = header.offs_firstFile + originalInfo.offs_curFile;
                byte[] assetData = reader.ReadBytes((int)originalInfo.curFileSize);
                writer.Write(assetData);
                writer.Align8();
            }
        }
    }
    header.offs_firstFile = newOffs_firstFile;
    ulong fileSizeMarker = writer.Position;
    reader.Position = header.offs_firstFile;
    writer.Position = 0;
    header.metadataSize = metadataSize;
    header.fileSize = (uint)fileSizeMarker;
    // Rewrite the header with the final sizes filled in.
    header.Write(writer.Position, writer);
    return(writer.Position);
}
// Writes a modified copy of this bundle to `writer`, applying the given
// replacers (add/modify/rename/remove). All data is written uncompressed
// in a single stored block; sizes are patched in afterwards.
public bool Write(AssetsFileWriter writer, List <BundleReplacer> replacers, ClassDatabaseFile typeMeta = null)
{
    bundleHeader6.Write(writer);
    if (bundleHeader6.fileVersion >= 7)
    {
        writer.Align16();
    }
    AssetBundleBlockAndDirectoryList06 newBundleInf6 = new AssetBundleBlockAndDirectoryList06()
    {
        checksumLow = 0,
        checksumHigh = 0
    };
    //I could map the assets to their blocks but I don't
    //have any more-than-1-block files to test on
    //this should work just fine as far as I know
    newBundleInf6.blockInf = new AssetBundleBlockInfo06[]
    {
        new AssetBundleBlockInfo06
        {
            compressedSize = 0,
            decompressedSize = 0,
            flags = 0x40
        }
    };
    //assets that did not have their data modified but need
    //the original info to read from the original file
    var newToOriginalDirInfoLookup = new Dictionary <AssetBundleDirectoryInfo06, AssetBundleDirectoryInfo06>();
    List <AssetBundleDirectoryInfo06> originalDirInfos = new List <AssetBundleDirectoryInfo06>();
    List <AssetBundleDirectoryInfo06> dirInfos = new List <AssetBundleDirectoryInfo06>();
    List <BundleReplacer> currentReplacers = replacers.ToList();
    //this is kind of useless at the moment but leaving it here
    //because if the AssetsFile size can be precalculated in the
    //future, we can use this to skip rewriting sizes
    long currentOffset = 0;
    //write all original files, modify sizes if needed and skip those to be removed
    for (int i = 0; i < bundleInf6.directoryCount; i++)
    {
        AssetBundleDirectoryInfo06 info = bundleInf6.dirInf[i];
        originalDirInfos.Add(info);
        AssetBundleDirectoryInfo06 newInfo = new AssetBundleDirectoryInfo06()
        {
            offset = currentOffset,
            decompressedSize = info.decompressedSize,
            flags = info.flags,
            name = info.name
        };
        BundleReplacer replacer = currentReplacers.FirstOrDefault(n => n.GetOriginalEntryName() == newInfo.name);
        if (replacer != null)
        {
            currentReplacers.Remove(replacer);
            if (replacer.GetReplacementType() == BundleReplacementType.AddOrModify)
            {
                newInfo = new AssetBundleDirectoryInfo06()
                {
                    offset = currentOffset,
                    decompressedSize = replacer.GetSize(),
                    flags = info.flags,
                    name = replacer.GetEntryName()
                };
            }
            else if (replacer.GetReplacementType() == BundleReplacementType.Rename)
            {
                newInfo = new AssetBundleDirectoryInfo06()
                {
                    offset = currentOffset,
                    decompressedSize = info.decompressedSize,
                    flags = info.flags,
                    name = replacer.GetEntryName()
                };
                // Renamed entries keep their original data; remember where it is.
                newToOriginalDirInfoLookup[newInfo] = info;
            }
            else if (replacer.GetReplacementType() == BundleReplacementType.Remove)
            {
                continue;
            }
        }
        else
        {
            // Untouched entry: remember the original info so the data can be
            // copied from the source file later.
            newToOriginalDirInfoLookup[newInfo] = info;
        }
        if (newInfo.decompressedSize != -1)
        {
            currentOffset += newInfo.decompressedSize;
        }
        dirInfos.Add(newInfo);
    }
    //write new files
    while (currentReplacers.Count > 0)
    {
        BundleReplacer replacer = currentReplacers[0];
        if (replacer.GetReplacementType() == BundleReplacementType.AddOrModify)
        {
            AssetBundleDirectoryInfo06 info = new AssetBundleDirectoryInfo06()
            {
                offset = currentOffset,
                decompressedSize = replacer.GetSize(),
                flags = 0x04, //idk it just works (tm)
                name = replacer.GetEntryName()
            };
            currentOffset += info.decompressedSize;
            dirInfos.Add(info);
        }
        currentReplacers.Remove(replacer);
    }
    //write the listings
    long bundleInfPos = writer.Position;
    //this is only here to allocate enough space so it's fine if it's inaccurate
    newBundleInf6.dirInf = dirInfos.ToArray();
    newBundleInf6.Write(writer);
    long assetDataPos = writer.Position;
    //actually write the file data to the bundle now
    for (int i = 0; i < dirInfos.Count; i++)
    {
        AssetBundleDirectoryInfo06 info = dirInfos[i];
        BundleReplacer replacer = replacers.FirstOrDefault(n => n.GetOriginalEntryName() == info.name);
        if (replacer != null)
        {
            if (replacer.GetReplacementType() == BundleReplacementType.AddOrModify)
            {
                // The replacer reports its end position; record actual size/offset.
                long startPos = writer.Position;
                long endPos = replacer.Write(writer);
                long size = endPos - startPos;
                dirInfos[i].decompressedSize = size;
                dirInfos[i].offset = startPos - assetDataPos;
            }
            else if (replacer.GetReplacementType() == BundleReplacementType.Remove)
            {
                continue;
            }
        }
        else
        {
            if (newToOriginalDirInfoLookup.TryGetValue(info, out AssetBundleDirectoryInfo06 originalInfo))
            {
                long startPos = writer.Position;
                reader.Position = bundleHeader6.GetFileDataOffset() + originalInfo.offset;
                byte[] assetData = reader.ReadBytes((int)originalInfo.decompressedSize);
                writer.Write(assetData);
                dirInfos[i].offset = startPos - assetDataPos;
            }
        }
    }
    //now that we know what the sizes are of the written files let's go back and fix them
    long finalSize = writer.Position;
    uint assetSize = (uint)(finalSize - assetDataPos);
    writer.Position = bundleInfPos;
    newBundleInf6.blockInf[0].decompressedSize = assetSize;
    newBundleInf6.blockInf[0].compressedSize = assetSize;
    newBundleInf6.dirInf = dirInfos.ToArray();
    newBundleInf6.Write(writer);
    uint infoSize = (uint)(assetDataPos - bundleInfPos);
    writer.Position = 0;
    AssetBundleHeader06 newBundleHeader6 = new AssetBundleHeader06()
    {
        signature = bundleHeader6.signature,
        fileVersion = bundleHeader6.fileVersion,
        minPlayerVersion = bundleHeader6.minPlayerVersion,
        fileEngineVersion = bundleHeader6.fileEngineVersion,
        totalFileSize = finalSize,
        compressedSize = infoSize,
        decompressedSize = infoSize,
        flags = bundleHeader6.flags & unchecked ((uint)~0x80) //unset info at end flag
    };
    newBundleHeader6.Write(writer);
    return(true);
}
//set fileID to -1 if all replacers are for this .assets file but don't have the fileID set to the same one
//typeMeta is used to add the type information (hash and type fields) for format >= 0x10 if necessary
// Writes a modified copy of this .assets file to `writer`, applying the
// given replacers (add/modify/remove).
public void Write(AssetsFileWriter writer, ulong filePos, List <AssetsReplacer> replacers, uint fileID, ClassDatabaseFile typeMeta = null)
{
    header.Write(writer);

    // Make sure every replacer's class id has an entry in the type tree.
    for (int i = 0; i < replacers.Count; i++)
    {
        AssetsReplacer replacer = replacers[i];
        if (!typeTree.unity5Types.Any(t => t.classId == replacer.GetClassID()))
        {
            Type_0D type = new Type_0D()
            {
                classId = replacer.GetClassID(),
                unknown16_1 = 0,
                scriptIndex = 0xFFFF,
                typeHash1 = 0,
                typeHash2 = 0,
                typeHash3 = 0,
                typeHash4 = 0,
                typeFieldsExCount = 0,
                stringTableLen = 0,
                stringTable = ""
            };
            // FIX: Concat returns a new sequence and does not mutate the list;
            // the previous code discarded the result, so the type was never added.
            typeTree.unity5Types.Add(type);
        }
    }
    typeTree.Write(writer, header.format);

    int initialSize = (int)(AssetFileInfo.GetSize(header.format) * AssetCount);
    int newSize = (int)(AssetFileInfo.GetSize(header.format) * (AssetCount + replacers.Count));
    int appendedSize = newSize - initialSize;
    reader.Position = AssetTablePos;
    List <AssetFileInfo> originalAssetInfos = new List <AssetFileInfo>();
    List <AssetFileInfo> assetInfos = new List <AssetFileInfo>();
    List <AssetsReplacer> currentReplacers = replacers.ToList();
    uint currentOffset = 0;

    //-write all original assets, modify sizes if needed and skip those to be removed
    for (int i = 0; i < AssetCount; i++)
    {
        AssetFileInfo info = new AssetFileInfo();
        info.Read(header.format, reader);
        originalAssetInfos.Add(info);
        AssetFileInfo newInfo = new AssetFileInfo()
        {
            index = info.index,
            curFileOffset = currentOffset,
            curFileSize = info.curFileSize,
            curFileTypeOrIndex = info.curFileTypeOrIndex,
            inheritedUnityClass = info.inheritedUnityClass,
            scriptIndex = info.scriptIndex,
            unknown1 = info.unknown1
        };
        AssetsReplacer replacer = currentReplacers.FirstOrDefault(n => n.GetPathID() == newInfo.index);
        if (replacer != null)
        {
            currentReplacers.Remove(replacer);
            if (replacer.GetReplacementType() == AssetsReplacementType.AssetsReplacement_AddOrModify)
            {
                // Look the type up by class id alone, or by class id + script
                // index for script (MonoBehaviour) types.
                int classIndex;
                if (replacer.GetMonoScriptID() == 0xFFFF)
                {
                    classIndex = typeTree.unity5Types.FindIndex(t => t.classId == replacer.GetClassID());
                }
                else
                {
                    classIndex = typeTree.unity5Types.FindIndex(t => t.classId == replacer.GetClassID() && t.scriptIndex == replacer.GetMonoScriptID());
                }
                newInfo = new AssetFileInfo()
                {
                    index = replacer.GetPathID(),
                    curFileOffset = currentOffset,
                    curFileSize = (uint)replacer.GetSize(),
                    curFileTypeOrIndex = classIndex,
                    inheritedUnityClass = (ushort)replacer.GetClassID(), //for older unity versions
                    scriptIndex = replacer.GetMonoScriptID(),
                    unknown1 = 0
                };
            }
            else if (replacer.GetReplacementType() == AssetsReplacementType.AssetsReplacement_Remove)
            {
                continue;
            }
        }
        // Keep every asset 8-byte aligned within the data section.
        currentOffset += newInfo.curFileSize;
        uint pad = 8 - (currentOffset % 8);
        if (pad != 8) { currentOffset += pad; }
        assetInfos.Add(newInfo);
    }

    //-write new assets
    while (currentReplacers.Count > 0)
    {
        AssetsReplacer replacer = currentReplacers.First();
        if (replacer.GetReplacementType() == AssetsReplacementType.AssetsReplacement_AddOrModify)
        {
            int classIndex;
            if (replacer.GetMonoScriptID() == 0xFFFF)
            {
                classIndex = typeTree.unity5Types.FindIndex(t => t.classId == replacer.GetClassID());
            }
            else
            {
                classIndex = typeTree.unity5Types.FindIndex(t => t.classId == replacer.GetClassID() && t.scriptIndex == replacer.GetMonoScriptID());
            }
            AssetFileInfo info = new AssetFileInfo()
            {
                index = replacer.GetPathID(),
                curFileOffset = currentOffset,
                curFileSize = (uint)replacer.GetSize(),
                curFileTypeOrIndex = classIndex,
                inheritedUnityClass = (ushort)replacer.GetClassID(),
                scriptIndex = replacer.GetMonoScriptID(),
                unknown1 = 0
            };
            currentOffset += info.curFileSize;
            uint pad = 8 - (currentOffset % 8);
            if (pad != 8) { currentOffset += pad; }
            assetInfos.Add(info);
        }
        currentReplacers.Remove(replacer);
    }

    writer.Write(assetInfos.Count);
    writer.Align();
    for (int i = 0; i < assetInfos.Count; i++)
    {
        assetInfos[i].Write(header.format, writer);
    }
    preloadTable.Write(writer);
    dependencies.Write(writer);
    //temporary fix for secondarytypecount and friends
    if (header.format >= 14)
    {
        writer.Write(0); //secondaryTypeCount
        writer.Write((byte)0); //unknownString length
    }
    uint metadataSize = (uint)writer.Position - 0x14;

    //-for padding only. if all initial data before assetData is more than 0x1000, this is skipped
    while (writer.Position < 0x1000 /*header.offs_firstFile*/)
    {
        writer.Write((byte)0x00);
    }
    writer.Align16();
    uint offs_firstFile = (uint)writer.Position;
    for (int i = 0; i < assetInfos.Count; i++)
    {
        AssetFileInfo info = assetInfos[i];
        AssetsReplacer replacer = replacers.FirstOrDefault(n => n.GetPathID() == info.index);
        if (replacer != null)
        {
            if (replacer.GetReplacementType() == AssetsReplacementType.AssetsReplacement_AddOrModify)
            {
                replacer.Write(writer);
                if (i != assetInfos.Count - 1) { writer.Align8(); }
            }
            else if (replacer.GetReplacementType() == AssetsReplacementType.AssetsReplacement_Remove)
            {
                continue;
            }
        }
        else
        {
            // Untouched asset: copy its bytes from the source file.
            // header.firstFileOffset still holds the ORIGINAL data offset here.
            AssetFileInfo originalInfo = originalAssetInfos.FirstOrDefault(n => n.index == info.index);
            if (originalInfo != null)
            {
                reader.Position = header.firstFileOffset + originalInfo.curFileOffset;
                byte[] assetData = reader.ReadBytes((int)originalInfo.curFileSize);
                writer.Write(assetData);
                if (i != assetInfos.Count - 1) { writer.Align8(); }
            }
        }
    }
    header.firstFileOffset = offs_firstFile;
    long fileSizeMarker = writer.Position;
    reader.Position = header.firstFileOffset;
    writer.Position = 0;
    header.metadataSize = metadataSize;
    header.fileSize = (uint)fileSizeMarker;
    // Rewrite the header with the final sizes filled in.
    header.Write(writer);
}
//-todo, use a faster custom bundle decompressor. currently a copy paste of unity studio's
/// <summary>
/// Reads a (possibly compressed) bundle from <paramref name="reader"/> and writes a fully
/// uncompressed copy to <paramref name="writer"/>: the blocks-info table is decompressed,
/// every data block is decompressed, and a new header/block list is emitted with the
/// compression bits cleared.
/// </summary>
/// <param name="reader">Reader positioned over the source bundle.</param>
/// <param name="writer">Writer that receives the uncompressed bundle.</param>
/// <returns>true if the bundle was read and unpacked; false if Read() rejected it.</returns>
public bool Unpack(AssetsFileReader reader, AssetsFileWriter writer)
{
    if (Read(reader, true))
    {
        reader.Position = bundleHeader6.GetBundleInfoOffset();
        MemoryStream blocksInfoStream;
        AssetsFileReader memReader;
        // Low 6 bits of the header flags select the blocks-info compression scheme.
        switch (bundleHeader6.flags & 0x3F)
        {
            case 1: //lzma
                blocksInfoStream = SevenZipHelper.StreamDecompress(new MemoryStream(reader.ReadBytes((int)bundleHeader6.compressedSize)));
                break;
            case 2: //lz4
            case 3: //lz4hc
                byte[] uncompressedBytes = new byte[bundleHeader6.decompressedSize];
                using (MemoryStream mstream = new MemoryStream(reader.ReadBytes((int)bundleHeader6.compressedSize)))
                {
                    Lz4DecoderStream decoder = new Lz4DecoderStream(mstream);
                    decoder.Read(uncompressedBytes, 0, (int)bundleHeader6.decompressedSize);
                    decoder.Dispose();
                }
                blocksInfoStream = new MemoryStream(uncompressedBytes);
                break;
            default:
                // NOTE(review): an uncompressed blocks-info table (flags & 0x3F == 0) is never
                // read here, so bundleInf6 keeps whatever Read() left in it — confirm intended.
                blocksInfoStream = null;
                break;
        }
        if ((bundleHeader6.flags & 0x3F) != 0)
        {
            using (memReader = new AssetsFileReader(blocksInfoStream))
            {
                bundleInf6.Read(0, memReader);
            }
        }
        // Decompress every data block into memory, per-block compression in the low 6 flag bits.
        reader.Position = bundleHeader6.GetFileDataOffset();
        byte[][] blocksData = new byte[bundleInf6.blockCount][];
        for (int i = 0; i < bundleInf6.blockCount; i++)
        {
            AssetsBundleBlockInfo06 info = bundleInf6.blockInf[i];
            byte[] data = reader.ReadBytes((int)info.compressedSize);
            switch (info.flags & 0x3F)
            {
                case 0: //uncompressed
                    blocksData[i] = data;
                    break;
                case 1: //lzma
                    blocksData[i] = new byte[info.decompressedSize];
                    using (MemoryStream mstream = new MemoryStream(data))
                    {
                        MemoryStream decoder = SevenZipHelper.StreamDecompress(mstream, info.decompressedSize);
                        decoder.Read(blocksData[i], 0, (int)info.decompressedSize);
                        decoder.Dispose();
                    }
                    break;
                case 2: //lz4
                case 3: //lz4hc
                    blocksData[i] = new byte[info.decompressedSize];
                    using (MemoryStream mstream = new MemoryStream(data))
                    {
                        Lz4DecoderStream decoder = new Lz4DecoderStream(mstream);
                        decoder.Read(blocksData[i], 0, (int)info.decompressedSize);
                        decoder.Dispose();
                    }
                    break;
            }
        }
        // Build the replacement header; both size fields hold the uncompressed size now.
        AssetsBundleHeader06 newBundleHeader6 = new AssetsBundleHeader06()
        {
            signature = bundleHeader6.signature,
            fileVersion = bundleHeader6.fileVersion,
            minPlayerVersion = bundleHeader6.minPlayerVersion,
            fileEngineVersion = bundleHeader6.fileEngineVersion,
            totalFileSize = 0, //patched below once the block sizes are summed
            compressedSize = bundleHeader6.decompressedSize,
            decompressedSize = bundleHeader6.decompressedSize,
            flags = bundleHeader6.flags & 0x40 //set compression and block position to 0
        };
        ulong fileSize = newBundleHeader6.GetFileDataOffset();
        for (int i = 0; i < bundleInf6.blockCount; i++)
        {
            fileSize += bundleInf6.blockInf[i].decompressedSize;
        }
        newBundleHeader6.totalFileSize = fileSize;
        AssetsBundleBlockAndDirectoryList06 newBundleInf6 = new AssetsBundleBlockAndDirectoryList06()
        {
            checksumLow = 0, //-todo, figure out how to make real checksums, uabe sets these to 0 too
            checksumHigh = 0,
            blockCount = bundleInf6.blockCount,
            directoryCount = bundleInf6.directoryCount
        };
        newBundleInf6.blockInf = new AssetsBundleBlockInfo06[newBundleInf6.blockCount];
        for (int i = 0; i < newBundleInf6.blockCount; i++)
        {
            newBundleInf6.blockInf[i] = new AssetsBundleBlockInfo06();
            newBundleInf6.blockInf[i].compressedSize = bundleInf6.blockInf[i].decompressedSize;
            newBundleInf6.blockInf[i].decompressedSize = bundleInf6.blockInf[i].decompressedSize;
            newBundleInf6.blockInf[i].flags = (ushort)(bundleInf6.blockInf[i].flags & 0xC0); //set compression to none
        }
        newBundleInf6.dirInf = new AssetsBundleDirectoryInfo06[newBundleInf6.directoryCount];
        for (int i = 0; i < newBundleInf6.directoryCount; i++)
        {
            // bugfix: the directory entries were never constructed before their fields were
            // assigned, throwing NullReferenceException; mirror the blockInf loop above.
            newBundleInf6.dirInf[i] = new AssetsBundleDirectoryInfo06();
            newBundleInf6.dirInf[i].offset = bundleInf6.dirInf[i].offset;
            newBundleInf6.dirInf[i].decompressedSize = bundleInf6.dirInf[i].decompressedSize;
            newBundleInf6.dirInf[i].flags = bundleInf6.dirInf[i].flags;
            newBundleInf6.dirInf[i].name = bundleInf6.dirInf[i].name;
        }
        // Emit header, block/directory list, then the raw block data in order.
        newBundleHeader6.Write(writer, 0);
        newBundleInf6.Write(writer, writer.Position);
        for (int i = 0; i < newBundleInf6.blockCount; i++)
        {
            writer.Write(blocksData[i]);
        }
        return true;
    }
    return false;
}
/// <summary>
/// Deserializes one field — and, recursively, all of its children — from <paramref name="reader"/>
/// into <paramref name="valueField"/>, guided by its templateField.
/// </summary>
/// <param name="reader">Positioned at the start of this field's serialized data; advanced past it on return.</param>
/// <param name="valueField">Field whose templateField describes the layout to read; populated in place.</param>
/// <returns>The same <paramref name="valueField"/> instance, populated.</returns>
/// <exception cref="Exception">Thrown when an array template is malformed (wrong child count or size type).</exception>
public AssetTypeValueField ReadType(AssetsFileReader reader, AssetTypeValueField valueField)
{
    if (valueField.templateField.isArray)
    {
        // An array template must have exactly two children: [0] the size field, [1] the element template.
        if (valueField.templateField.childrenCount == 2)
        {
            EnumValueTypes sizeType = valueField.templateField.children[0].valueType;
            if (sizeType == EnumValueTypes.Int32 || sizeType == EnumValueTypes.UInt32)
            {
                if (valueField.templateField.valueType == EnumValueTypes.ByteArray)
                {
                    // Byte arrays are stored raw (length prefix + bytes), not as per-element children.
                    valueField.childrenCount = 0;
                    valueField.children = new AssetTypeValueField[0];
                    int size = reader.ReadInt32();
                    byte[] data = reader.ReadBytes(size);
                    if (valueField.templateField.align)
                    {
                        reader.Align();
                    }
                    AssetTypeByteArray atba = new AssetTypeByteArray();
                    atba.size = (uint)size;
                    atba.data = data;
                    valueField.value = new AssetTypeValue(EnumValueTypes.ByteArray, atba);
                }
                else
                {
                    // Generic array: length prefix, then each element read recursively
                    // against the element template (children[1]).
                    valueField.childrenCount = reader.ReadInt32();
                    valueField.children = new AssetTypeValueField[valueField.childrenCount];
                    for (int i = 0; i < valueField.childrenCount; i++)
                    {
                        valueField.children[i] = new AssetTypeValueField();
                        valueField.children[i].templateField = valueField.templateField.children[1];
                        valueField.children[i] = ReadType(reader, valueField.children[i]);
                    }
                    if (valueField.templateField.align)
                    {
                        reader.Align();
                    }
                    AssetTypeArray ata = new AssetTypeArray();
                    ata.size = valueField.childrenCount;
                    valueField.value = new AssetTypeValue(EnumValueTypes.Array, ata);
                }
            }
            else
            {
                throw new Exception("Invalid array value type! Found an unexpected " + sizeType.ToString() + " type instead!");
            }
        }
        else
        {
            throw new Exception("Invalid array!");
        }
    }
    else
    {
        EnumValueTypes type = valueField.templateField.valueType;
        // Only typed fields get a value holder; untyped containers keep value == null.
        if (type != 0)
        {
            valueField.value = new AssetTypeValue(type, null);
        }
        if (type == EnumValueTypes.String)
        {
            // Strings: int32 length prefix, raw bytes, then align to 4 bytes.
            int length = reader.ReadInt32();
            valueField.value.Set(reader.ReadBytes(length));
            reader.Align();
        }
        else
        {
            valueField.childrenCount = valueField.templateField.childrenCount;
            if (valueField.childrenCount == 0)
            {
                // Leaf primitive: read by template value type. Note that per the serialized
                // layout only sub-4-byte types honor the align flag here.
                valueField.children = new AssetTypeValueField[0];
                switch (valueField.templateField.valueType)
                {
                    case EnumValueTypes.Int8:
                        valueField.value.Set(reader.ReadSByte());
                        if (valueField.templateField.align)
                        {
                            reader.Align();
                        }
                        break;
                    case EnumValueTypes.UInt8:
                    case EnumValueTypes.Bool:
                        valueField.value.Set(reader.ReadByte());
                        if (valueField.templateField.align)
                        {
                            reader.Align();
                        }
                        break;
                    case EnumValueTypes.Int16:
                        valueField.value.Set(reader.ReadInt16());
                        if (valueField.templateField.align)
                        {
                            reader.Align();
                        }
                        break;
                    case EnumValueTypes.UInt16:
                        valueField.value.Set(reader.ReadUInt16());
                        if (valueField.templateField.align)
                        {
                            reader.Align();
                        }
                        break;
                    case EnumValueTypes.Int32:
                        valueField.value.Set(reader.ReadInt32());
                        break;
                    case EnumValueTypes.UInt32:
                        valueField.value.Set(reader.ReadUInt32());
                        break;
                    case EnumValueTypes.Int64:
                        valueField.value.Set(reader.ReadInt64());
                        break;
                    case EnumValueTypes.UInt64:
                        valueField.value.Set(reader.ReadUInt64());
                        break;
                    case EnumValueTypes.Float:
                        valueField.value.Set(reader.ReadSingle());
                        break;
                    case EnumValueTypes.Double:
                        valueField.value.Set(reader.ReadDouble());
                        break;
                }
            }
            else
            {
                // Composite field: read each child recursively against its own template,
                // then align once if the parent requests it.
                valueField.children = new AssetTypeValueField[valueField.childrenCount];
                for (int i = 0; i < valueField.childrenCount; i++)
                {
                    valueField.children[i] = new AssetTypeValueField();
                    valueField.children[i].templateField = valueField.templateField.children[i];
                    valueField.children[i] = ReadType(reader, valueField.children[i]);
                }
                if (valueField.templateField.align)
                {
                    reader.Align();
                }
            }
        }
    }
    return(valueField);
}
/// <summary>
/// Reads a class database package: a header, a block of ClassDatabaseFile entries, and a
/// shared string table, each optionally compressed. Per the flag comments below:
/// compressionType bit 0x80 distinguishes per-file compression from one compressed block,
/// bit 0x20 marks the file block as already uncompressed, bit 0x40 marks the string table
/// as already uncompressed, and the low 5 bits (0x1f) select the codec (1 = LZ4, 2 = LZMA).
/// </summary>
/// <param name="reader">Reader positioned just before the package header.</param>
/// <returns>valid — true on success, false on an unsupported codec value.</returns>
public bool Read(AssetsFileReader reader)
{
    header = new ClassDatabasePackageHeader();
    header.Read(reader);
    files = new List<ClassDatabaseFile>();
    // File offsets in the header are relative to the position right after the header.
    long firstFile = reader.Position;
    AssetsFileReader newReader = reader;
    if ((header.compressionType & 0x80) == 0) //multiple compressed blocks
    {
        //untested!
        //the compression is handled by the cldbs themselves
        for (int i = 0; i < header.fileCount; i++)
        {
            newReader.Position = firstFile + header.files[i].offset;
            byte[] data = newReader.ReadBytes((int)header.files[i].length);
            using (MemoryStream ms = new MemoryStream(data))
            using (AssetsFileReader r = new AssetsFileReader(ms))
            {
                ClassDatabaseFile file = new ClassDatabaseFile();
                file.Read(r);
                files.Add(file);
            }
        }
    }
    else //one compressed block
    {
        if ((header.compressionType & 0x20) == 0) //file block compressed
        {
            // After decompression we read from a fresh in-memory stream whose
            // origin is 0, so the file offsets become absolute within it.
            firstFile = 0;
            // The compressed file block spans from here up to the string table.
            int compressedSize = (int)(header.stringTableOffset - newReader.Position);
            int uncompressedSize = (int)header.fileBlockSize;
            MemoryStream ms;
            if ((header.compressionType & 0x1f) == 1) //lz4
            {
                byte[] uncompressedBytes = new byte[uncompressedSize];
                using (MemoryStream tempMs = new MemoryStream(newReader.ReadBytes(compressedSize)))
                {
                    Lz4DecoderStream decoder = new Lz4DecoderStream(tempMs);
                    decoder.Read(uncompressedBytes, 0, uncompressedSize);
                    decoder.Dispose();
                }
                ms = new MemoryStream(uncompressedBytes);
            }
            else if ((header.compressionType & 0x1f) == 2) //lzma
            {
                byte[] dbg = newReader.ReadBytes(compressedSize);
                using (MemoryStream tempMs = new MemoryStream(dbg))
                {
                    ms = SevenZipHelper.StreamDecompress(tempMs, uncompressedSize);
                }
            }
            else
            {
                // Unknown codec — bail out without touching the rest of the package.
                valid = false;
                return(valid);
            }
            newReader = new AssetsFileReader(ms);
            newReader.bigEndian = false;
        }
        // Read each embedded cldb out of the (possibly decompressed) file block.
        for (int i = 0; i < header.fileCount; i++)
        {
            newReader.Position = firstFile + header.files[i].offset;
            byte[] data = newReader.ReadBytes((int)header.files[i].length);
            using (MemoryStream ms = new MemoryStream(data))
            using (AssetsFileReader r = new AssetsFileReader(ms))
            {
                ClassDatabaseFile file = new ClassDatabaseFile();
                file.Read(r);
                files.Add(file);
            }
        }
    }
    // The string table offset is relative to the original (outer) stream,
    // so switch back to the original reader before seeking.
    newReader = reader;
    newReader.Position = header.stringTableOffset;
    if ((header.compressionType & 0x40) == 0) //string table is compressed
    {
        int compressedSize = (int)header.stringTableLenCompressed;
        int uncompressedSize = (int)header.stringTableLenUncompressed;
        MemoryStream ms;
        if ((header.compressionType & 0x1f) == 1) //lz4
        {
            byte[] uncompressedBytes = new byte[uncompressedSize];
            using (MemoryStream tempMs = new MemoryStream(newReader.ReadBytes(compressedSize)))
            {
                Lz4DecoderStream decoder = new Lz4DecoderStream(tempMs);
                decoder.Read(uncompressedBytes, 0, uncompressedSize);
                decoder.Dispose();
            }
            ms = new MemoryStream(uncompressedBytes);
        }
        else if ((header.compressionType & 0x1f) == 2) //lzma
        {
            using (MemoryStream tempMs = new MemoryStream(newReader.ReadBytes(compressedSize)))
            {
                ms = SevenZipHelper.StreamDecompress(tempMs, uncompressedSize);
            }
        }
        else
        {
            // Unknown codec for the string table — reject the package.
            valid = false;
            return(valid);
        }
        newReader = new AssetsFileReader(ms);
        newReader.bigEndian = false;
    }
    stringTable = newReader.ReadBytes((int)header.stringTableLenUncompressed);
    // All embedded files share the package-level string table.
    for (int i = 0; i < header.fileCount; i++)
    {
        files[i].stringTable = stringTable;
    }
    valid = true;
    return(valid);
}