public void Read(AssetsFileReader reader)
{
    // The package header carries the file table, string table lengths and compression flags.
    header = new ClassDatabasePackageHeader();
    header.Read(reader);

    // Bit 0x20 set means the file block is stored uncompressed; this variant
    // refuses anything still compressed.
    if ((header.compressionType & 0x20) == 0x00)
    {
        throw new NotImplementedException("Please uncompress the package before loading.");
    }

    files = new ClassDatabaseFile[header.fileCount];
    long baseOffset = reader.Position;

    for (int fileIndex = 0; fileIndex < header.fileCount; fileIndex++)
    {
        // Entry offsets in the header are relative to the start of the file block.
        reader.Position = baseOffset + header.files[fileIndex].offset;
        byte[] fileBytes = reader.ReadBytes((int)header.files[fileIndex].length);
        using (MemoryStream blockStream = new MemoryStream(fileBytes))
        using (AssetsFileReader blockReader = new AssetsFileReader(blockStream))
        {
            files[fileIndex] = new ClassDatabaseFile();
            files[fileIndex].Read(blockReader);
        }
    }

    // The shared string table sits directly after the last file's data, and
    // every contained cldb points at the same buffer.
    stringTable = reader.ReadBytes((int)header.stringTableLenUncompressed);
    for (int fileIndex = 0; fileIndex < header.fileCount; fileIndex++)
    {
        files[fileIndex].stringTable = stringTable;
    }
}
public bool ImportFile(AssetsFileReader reader)
{
    // Parse the database from the reader; only register it on success.
    ClassDatabaseFile cldb = new ClassDatabaseFile();
    if (!cldb.Read(reader))
    {
        return false;
    }

    files.Add(cldb);
    // Add a matching placeholder reference to the header's parallel list;
    // offset/length/name are filled in properly when the package is written.
    header.files.Add(new ClassDatabaseFileRef() { offset = 0, length = 0, name = "" });
    return true;
}
// Populates this template field (and, recursively, its children) from a class
// database type entry. fieldIndex is the index of this field within pType.fields.
// Always returns true.
public bool FromClassDatabase(ClassDatabaseFile pFile, ClassDatabaseType pType, uint fieldIndex)
{
    ClassDatabaseTypeField field = pType.fields[(int)fieldIndex];
    name = field.fieldName.GetString(pFile);
    type = field.typeName.GetString(pFile);
    valueType = AssetTypeValueField.GetValueTypeByTypeName(type);
    isArray = field.isArray == 1;
    // 0x4000 in flags2 marks a field whose data must be aligned.
    align = (field.flags2 & 0x4000) != 0x00;
    hasValue = valueType != EnumValueTypes.ValueType_None;

    // Direct children are the following fields at exactly depth + 1; stop at the
    // first field at the same depth or shallower (a sibling/ancestor of this field).
    List<int> childrenIndexes = new List<int>();
    int thisDepth = field.depth;
    for (int i = (int)fieldIndex + 1; i < pType.fields.Count; i++)
    {
        if (pType.fields[i].depth <= thisDepth)
        {
            break;
        }
        if (pType.fields[i].depth == thisDepth + 1)
        {
            childrenCount++;
            childrenIndexes.Add(i);
        }
    }

    // Recurse into each child using the indexes gathered above. (The original
    // rescanned the whole field list a second time to find the same indexes.)
    children = new AssetTypeTemplateField[childrenCount];
    for (int child = 0; child < childrenIndexes.Count; child++)
    {
        children[child] = new AssetTypeTemplateField();
        children[child].FromClassDatabase(pFile, pType, (uint)childrenIndexes[child]);
    }
    return true;
}
// Appends a cldb string to the string table being built and patches the
// ClassDatabaseFileString's stringTableOffset to point at it.
// strMap, when non-null, deduplicates identical strings by remembering the
// offset each unique string was first written at.
//
// BUGFIX: the original tested `strTable != null` instead of `strMap != null`.
// The caller (Write) always passes a non-null StringBuilder but passes a null
// strMap when optimizeStringTable == 0, so the else branch both dereferenced
// a variable the condition claimed was null and the optimize flag could never
// actually disable deduplication.
private void AddStringTableEntry(ClassDatabaseFile cldb, StringBuilder strTable, Dictionary<string, uint> strMap, ref ClassDatabaseFileString str)
{
    string stringValue = str.GetString(cldb);
    if (strMap != null)
    {
        //search for string first and use that index if possible
        if (!strMap.ContainsKey(stringValue))
        {
            strMap[stringValue] = (uint)strTable.Length;
            strTable.Append(stringValue + '\0');
        }
        str.str.stringTableOffset = strMap[stringValue];
    }
    else
    {
        //always add string
        str.str.stringTableOffset = (uint)strTable.Length;
        strTable.Append(stringValue + '\0');
    }
}
public override bool Init(AssetsFileReader entryReader, long entryPos, long entrySize, ClassDatabaseFile typeMeta = null)
{
    // Refuse to initialize twice.
    if (assetsFile != null)
        return false;

    this.typeMeta = typeMeta;

    // Copy the entry into a private MemoryStream (works around an alignment issue)
    // and parse the assets file from that copy. The stream intentionally stays
    // undisposed: it is the backing store of the AssetsFile's reader.
    entryReader.Position = entryPos;
    byte[] entryData = entryReader.ReadBytes((int)entrySize);

    MemoryStream ms = new MemoryStream();
    AssetsFileReader r = new AssetsFileReader(ms);
    AssetsFileWriter w = new AssetsFileWriter(ms);
    w.Write(entryData);
    ms.Position = 0;
    assetsFile = new AssetsFile(r);
    return true;
}
//set fileID to -1 if all replacers are for this .assets file but don't have the fileID set to the same one
//typeMeta is used to add the type information (hash and type fields) for format >= 0x10 if necessary
public void Write(AssetsFileWriter writer, ulong filePos, List<AssetsReplacer> replacers, uint fileID, ClassDatabaseFile typeMeta = null)
{
    header.Write(writer);

    // Make sure every replacer's class id has a matching entry in the type tree,
    // adding a stub Type_0D when it is missing.
    for (int i = 0; i < replacers.Count; i++)
    {
        AssetsReplacer replacer = replacers[i];
        if (!typeTree.unity5Types.Any(t => t.classId == replacer.GetClassID()))
        {
            Type_0D type = new Type_0D()
            {
                classId = replacer.GetClassID(),
                unknown16_1 = 0,
                scriptIndex = 0xFFFF,
                typeHash1 = 0,
                typeHash2 = 0,
                typeHash3 = 0,
                typeHash4 = 0,
                typeFieldsExCount = 0,
                stringTableLen = 0,
                stringTable = ""
            };
            // BUGFIX: the original called typeTree.unity5Types.Concat(...) and discarded
            // the result — Enumerable.Concat returns a new sequence and does not mutate
            // the list, so the stub type was never actually added. Add() mutates in place.
            typeTree.unity5Types.Add(type);
        }
    }
    typeTree.Write(writer, header.format);

    reader.Position = AssetTablePos;
    List<AssetFileInfo> originalAssetInfos = new List<AssetFileInfo>();
    List<AssetFileInfo> assetInfos = new List<AssetFileInfo>();
    List<AssetsReplacer> currentReplacers = replacers.ToList();
    uint currentOffset = 0;

    //-write all original assets, modify sizes if needed and skip those to be removed
    for (int i = 0; i < AssetCount; i++)
    {
        AssetFileInfo info = new AssetFileInfo();
        info.Read(header.format, reader);
        originalAssetInfos.Add(info);

        // Start from a copy of the original info with the offset rebased to the new layout.
        AssetFileInfo newInfo = new AssetFileInfo()
        {
            index = info.index,
            curFileOffset = currentOffset,
            curFileSize = info.curFileSize,
            curFileTypeOrIndex = info.curFileTypeOrIndex,
            inheritedUnityClass = info.inheritedUnityClass,
            scriptIndex = info.scriptIndex,
            unknown1 = info.unknown1
        };

        AssetsReplacer replacer = currentReplacers.FirstOrDefault(n => n.GetPathID() == newInfo.index);
        if (replacer != null)
        {
            currentReplacers.Remove(replacer);
            if (replacer.GetReplacementType() == AssetsReplacementType.AssetsReplacement_AddOrModify)
            {
                int classIndex;
                if (replacer.GetMonoScriptID() == 0xFFFF)
                {
                    classIndex = typeTree.unity5Types.FindIndex(t => t.classId == replacer.GetClassID());
                }
                else
                {
                    classIndex = typeTree.unity5Types.FindIndex(t => t.classId == replacer.GetClassID() && t.scriptIndex == replacer.GetMonoScriptID());
                }
                newInfo = new AssetFileInfo()
                {
                    index = replacer.GetPathID(),
                    curFileOffset = currentOffset,
                    curFileSize = (uint)replacer.GetSize(),
                    curFileTypeOrIndex = classIndex,
                    inheritedUnityClass = (ushort)replacer.GetClassID(), //for older unity versions
                    scriptIndex = replacer.GetMonoScriptID(),
                    unknown1 = 0
                };
            }
            else if (replacer.GetReplacementType() == AssetsReplacementType.AssetsReplacement_Remove)
            {
                // Removed asset: emit no info entry and don't advance the offset.
                continue;
            }
        }

        currentOffset += newInfo.curFileSize;
        // Asset data is 8-byte aligned in the data section.
        uint pad = 8 - (currentOffset % 8);
        if (pad != 8)
        {
            currentOffset += pad;
        }
        assetInfos.Add(newInfo);
    }

    //-write new assets
    while (currentReplacers.Count > 0)
    {
        AssetsReplacer replacer = currentReplacers.First();
        if (replacer.GetReplacementType() == AssetsReplacementType.AssetsReplacement_AddOrModify)
        {
            int classIndex;
            if (replacer.GetMonoScriptID() == 0xFFFF)
            {
                classIndex = typeTree.unity5Types.FindIndex(t => t.classId == replacer.GetClassID());
            }
            else
            {
                classIndex = typeTree.unity5Types.FindIndex(t => t.classId == replacer.GetClassID() && t.scriptIndex == replacer.GetMonoScriptID());
            }
            AssetFileInfo info = new AssetFileInfo()
            {
                index = replacer.GetPathID(),
                curFileOffset = currentOffset,
                curFileSize = (uint)replacer.GetSize(),
                curFileTypeOrIndex = classIndex,
                inheritedUnityClass = (ushort)replacer.GetClassID(),
                scriptIndex = replacer.GetMonoScriptID(),
                unknown1 = 0
            };
            currentOffset += info.curFileSize;
            uint pad = 8 - (currentOffset % 8);
            if (pad != 8)
            {
                currentOffset += pad;
            }
            assetInfos.Add(info);
        }
        currentReplacers.Remove(replacer);
    }

    writer.Write(assetInfos.Count);
    writer.Align();
    for (int i = 0; i < assetInfos.Count; i++)
    {
        assetInfos[i].Write(header.format, writer);
    }
    preloadTable.Write(writer);
    dependencies.Write(writer);

    //temporary fix for secondarytypecount and friends
    if (header.format >= 14)
    {
        writer.Write(0); //secondaryTypeCount
        writer.Write((byte)0); //unknownString length
    }
    uint metadataSize = (uint)writer.Position - 0x14;

    //-for padding only. if all initial data before assetData is more than 0x1000, this is skipped
    while (writer.Position < 0x1000 /*header.offs_firstFile*/)
    {
        writer.Write((byte)0x00);
    }
    writer.Align16();

    uint offs_firstFile = (uint)writer.Position;
    for (int i = 0; i < assetInfos.Count; i++)
    {
        AssetFileInfo info = assetInfos[i];
        AssetsReplacer replacer = replacers.FirstOrDefault(n => n.GetPathID() == info.index);
        if (replacer != null)
        {
            if (replacer.GetReplacementType() == AssetsReplacementType.AssetsReplacement_AddOrModify)
            {
                replacer.Write(writer);
                if (i != assetInfos.Count - 1)
                {
                    writer.Align8();
                }
            }
            else if (replacer.GetReplacementType() == AssetsReplacementType.AssetsReplacement_Remove)
            {
                continue;
            }
        }
        else
        {
            // Unmodified asset: copy its bytes straight from the original file.
            // header.firstFileOffset still holds the OLD offset here; it is only
            // updated after this loop.
            AssetFileInfo originalInfo = originalAssetInfos.FirstOrDefault(n => n.index == info.index);
            if (originalInfo != null)
            {
                reader.Position = header.firstFileOffset + originalInfo.curFileOffset;
                byte[] assetData = reader.ReadBytes((int)originalInfo.curFileSize);
                writer.Write(assetData);
                if (i != assetInfos.Count - 1)
                {
                    writer.Align8();
                }
            }
        }
    }
    header.firstFileOffset = offs_firstFile;

    // Now that the final size is known, rewind and rewrite the header with the
    // correct metadataSize/fileSize.
    long fileSizeMarker = writer.Position;
    reader.Position = header.firstFileOffset;
    writer.Position = 0;
    header.metadataSize = metadataSize;
    header.fileSize = (uint)fileSizeMarker;
    header.Write(writer);
}
// Rewrites this assets file to `writer`, applying the given replacers.
// filePos == -1 means "append at the writer's current position"; otherwise the
// file is written at filePos. fileID is currently unused here.
// typeMeta, when provided, supplies real type trees for classes added by replacers.
public void Write(AssetsFileWriter writer, long filePos, List<AssetsReplacer> replacers, uint fileID = 0, ClassDatabaseFile typeMeta = null)
{
    if (filePos == -1)
    {
        filePos = writer.Position;
    }
    else
    {
        writer.Position = filePos;
    }

    // Header is rewritten at the end once sizes are known; this pass just reserves space.
    header.Write(writer);

    // Ensure there is a type-tree entry for every (classId, scriptIndex) pair used by a replacer.
    foreach (AssetsReplacer replacer in replacers)
    {
        int replacerClassId = replacer.GetClassID();
        ushort replacerScriptIndex = replacer.GetMonoScriptID();
        if (!typeTree.unity5Types.Any(t => t.classId == replacerClassId && t.scriptIndex == replacerScriptIndex))
        {
            Type_0D type = null;
            if (typeMeta != null)
            {
                ClassDatabaseType cldbType = AssetHelper.FindAssetClassByID(typeMeta, (uint)replacerClassId);
                if (cldbType != null)
                {
                    type = C2T5.Cldb2TypeTree(typeMeta, cldbType);
                    //in original AssetsTools, if you tried to use a new monoId it would just try to use
                    //the highest existing scriptIndex that existed without making a new one (unless there
                    //were no monobehavours ofc) this isn't any better as we just assign a plain monobehaviour
                    //typetree to a type that probably has more fields. I don't really know of a better way to
                    //handle this at the moment as cldbs cannot differentiate monoids.
                    type.scriptIndex = replacerScriptIndex;
                }
            }
            if (type == null)
            {
                // Fall back to a stub entry with no type fields.
                type = new Type_0D
                {
                    classId = replacerClassId,
                    unknown16_1 = 0,
                    scriptIndex = replacerScriptIndex,
                    typeHash1 = 0,
                    typeHash2 = 0,
                    typeHash3 = 0,
                    typeHash4 = 0,
                    typeFieldsExCount = 0,
                    stringTableLen = 0,
                    stringTable = ""
                };
            }
            typeTree.unity5Types.Add(type);
        }
    }
    typeTree.Write(writer, header.format);

    Dictionary<long, AssetFileInfo> oldAssetInfosByPathId = new Dictionary<long, AssetFileInfo>();
    Dictionary<long, AssetsReplacer> replacersByPathId = replacers.ToDictionary(r => r.GetPathID());
    List<AssetFileInfo> newAssetInfos = new List<AssetFileInfo>();

    // Collect unchanged assets (that aren't getting removed)
    reader.Position = assetTablePos;
    for (int i = 0; i < assetCount; i++)
    {
        AssetFileInfo oldAssetInfo = new AssetFileInfo();
        oldAssetInfo.Read(header.format, reader);
        oldAssetInfosByPathId.Add(oldAssetInfo.index, oldAssetInfo);
        // Any path id with a replacer (of any kind) is skipped here; AddOrModify
        // entries are re-added by the loop below and Remove entries vanish.
        if (replacersByPathId.ContainsKey(oldAssetInfo.index))
        {
            continue;
        }
        // Offsets/sizes are filled in during the data pass below.
        AssetFileInfo newAssetInfo = new AssetFileInfo
        {
            index = oldAssetInfo.index,
            curFileTypeOrIndex = oldAssetInfo.curFileTypeOrIndex,
            inheritedUnityClass = oldAssetInfo.inheritedUnityClass,
            scriptIndex = oldAssetInfo.scriptIndex,
            unknown1 = oldAssetInfo.unknown1
        };
        newAssetInfos.Add(newAssetInfo);
    }

    // Collect modified and new assets
    foreach (AssetsReplacer replacer in replacers.Where(r => r.GetReplacementType() == AssetsReplacementType.AddOrModify))
    {
        AssetFileInfo newAssetInfo = new AssetFileInfo
        {
            index = replacer.GetPathID(),
            inheritedUnityClass = (ushort)replacer.GetClassID(), //for older unity versions
            scriptIndex = replacer.GetMonoScriptID(),
            unknown1 = 0
        };
        // Pre-0x10 formats store the raw class id; newer formats store an index
        // into the type tree.
        if (header.format < 0x10)
        {
            newAssetInfo.curFileTypeOrIndex = replacer.GetClassID();
        }
        else
        {
            if (replacer.GetMonoScriptID() == 0xFFFF)
            {
                newAssetInfo.curFileTypeOrIndex = typeTree.unity5Types.FindIndex(t => t.classId == replacer.GetClassID());
            }
            else
            {
                newAssetInfo.curFileTypeOrIndex = typeTree.unity5Types.FindIndex(t => t.classId == replacer.GetClassID() && t.scriptIndex == replacer.GetMonoScriptID());
            }
        }
        newAssetInfos.Add(newAssetInfo);
    }
    newAssetInfos.Sort((i1, i2) => i1.index.CompareTo(i2.index));

    // Write asset infos (will write again later on to update the offsets and sizes)
    writer.Write(newAssetInfos.Count);
    writer.Align();
    long newAssetTablePos = writer.Position;
    foreach (AssetFileInfo newAssetInfo in newAssetInfos)
    {
        newAssetInfo.Write(header.format, writer);
    }

    preloadTable.Write(writer);
    dependencies.Write(writer);

    // Temporary fix for secondaryTypeCount and friends
    // NOTE(review): the sibling (older) Write uses `format >= 14` (decimal) and also
    // writes an extra length byte — confirm 0x14 and the single int are intended here.
    if (header.format >= 0x14)
    {
        writer.Write(0); //secondaryTypeCount
    }
    uint newMetadataSize = (uint)(writer.Position - filePos - 0x13); //0x13 is header - "endianness byte"? (if that's what it even is)
    if (header.format >= 0x16)
    {
        // Remove larger variation fields as well
        newMetadataSize -= 0x1c;
    }

    // For padding only. if all initial data before assetData is more than 0x1000, this is skipped
    if (writer.Position < 0x1000)
    {
        while (writer.Position < 0x1000)
        {
            writer.Write((byte)0x00);
        }
    }
    else
    {
        // Already past 0x1000: pad to a 16-byte boundary, always advancing at least once.
        if (writer.Position % 16 == 0)
        {
            writer.Position += 16;
        }
        else
        {
            writer.Align16();
        }
    }
    long newFirstFileOffset = writer.Position;

    // Write all asset data
    for (int i = 0; i < newAssetInfos.Count; i++)
    {
        AssetFileInfo newAssetInfo = newAssetInfos[i];
        newAssetInfo.curFileOffset = writer.Position - newFirstFileOffset;
        // Only AddOrModify replacers can appear here: Remove path ids were never
        // added to newAssetInfos above.
        if (replacersByPathId.TryGetValue(newAssetInfo.index, out AssetsReplacer replacer))
        {
            replacer.Write(writer);
        }
        else
        {
            // Unchanged asset: stream the bytes from the original file.
            AssetFileInfo oldAssetInfo = oldAssetInfosByPathId[newAssetInfo.index];
            reader.Position = header.firstFileOffset + oldAssetInfo.curFileOffset;
            reader.BaseStream.CopyToCompat(writer.BaseStream, oldAssetInfo.curFileSize);
        }
        newAssetInfo.curFileSize = (uint)(writer.Position - (newFirstFileOffset + newAssetInfo.curFileOffset));
        if (i != newAssetInfos.Count - 1)
        {
            writer.Align8();
        }
    }
    long newFileSize = writer.Position - filePos;

    // Write new header
    AssetsFileHeader newHeader = new AssetsFileHeader
    {
        metadataSize = newMetadataSize,
        fileSize = newFileSize,
        format = header.format,
        firstFileOffset = newFirstFileOffset,
        endianness = header.endianness,
        unknown = header.unknown,
        unknown1 = header.unknown1,
        unknown2 = header.unknown2
    };
    writer.Position = filePos;
    newHeader.Write(writer);

    // Write new asset infos again (this time with offsets and sizes filled in)
    writer.Position = newAssetTablePos;
    foreach (AssetFileInfo newAssetInfo in newAssetInfos)
    {
        newAssetInfo.Write(header.format, writer);
    }

    // Set writer position back to end of file
    writer.Position = filePos + newFileSize;
}
/// <summary>
/// Associates a class database file and type entry with this object.
/// </summary>
/// <param name="file">Class database the type belongs to.</param>
/// <param name="type">Type entry describing this object's layout.</param>
/// <param name="localCopy">NOTE(review): semantics are implementation-defined; the implementations visible here ignore it — confirm.</param>
/// <returns>true if the implementation accepted and stored the type info; false otherwise.</returns>
public abstract bool SetTypeInfo(ClassDatabaseFile file, ClassDatabaseType type, bool localCopy);
/// <summary>
/// Default implementation: storing type info is not supported here.
/// Derived classes that support it must override this method.
/// </summary>
public virtual bool SetTypeInfo(ClassDatabaseFile file, ClassDatabaseType type, bool localCopy) =>
    throw new NotImplementedException();
/// <summary>
/// No-op initializer: this entry type needs no parsing, so it always succeeds.
/// </summary>
public override bool Init(AssetsFileReader entryReader, long entryPos, long entrySize, ClassDatabaseFile typeMeta = null) => true;
public override bool Init(AssetsFileReader entryReader, long entryPos, long entrySize, ClassDatabaseFile typeMeta = null)
{
    // Only initialize once.
    if (assetsFile != null)
        return false;

    // View the entry as a window into the outer stream instead of copying it;
    // the AssetsFile keeps the reader (and thus the segment) alive.
    SegmentStream segment = new SegmentStream(entryReader.BaseStream, entryPos, entrySize);
    assetsFile = new AssetsFile(new AssetsFileReader(segment));
    return true;
}
/// <summary>
/// This entry type does not track type info; the call is always rejected.
/// </summary>
public override bool SetTypeInfo(ClassDatabaseFile file, ClassDatabaseType type, bool localCopy) => false;
/// <summary>
/// No type info is available for this entry type; both outputs are null and
/// the call reports failure.
/// </summary>
public override bool GetTypeInfo(out ClassDatabaseFile file, out ClassDatabaseType type)
{
    file = null;
    type = null;
    return false;
}
//set fileID to -1 if all replacers are for this .assets file but don't have the fileID set to the same one
//typeMeta is used to add the type information (hash and type fields) for format >= 0x10 if necessary
public ulong Write(AssetsFileWriter writer, ulong filePos, AssetsReplacer[] pReplacers, uint fileID, ClassDatabaseFile typeMeta = null)
{
    header.Write(writer.Position, writer);

    // Ensure every replacer's class id has a matching type-tree entry.
    for (int i = 0; i < pReplacers.Length; i++)
    {
        AssetsReplacer replacer = pReplacers[i];
        if (!typeTree.pTypes_Unity5.Any(t => t.classId == replacer.GetClassID()))
        {
            Type_0D type = new Type_0D()
            {
                classId = replacer.GetClassID(),
                unknown16_1 = 0,
                scriptIndex = 0xFFFF,
                unknown5 = 0,
                unknown6 = 0,
                unknown7 = 0,
                unknown8 = 0,
                typeFieldsExCount = 0,
                stringTableLen = 0,
                pStringTable = ""
            };
            // BUGFIX: Concat() returns a new sequence; the original discarded it, so
            // the stub type never landed in the array. Reassign the concatenated array.
            typeTree.pTypes_Unity5 = typeTree.pTypes_Unity5.Concat(new Type_0D[] { type }).ToArray();
        }
    }
    typeTree.Write(writer.Position, writer, header.format);

    reader.Position = AssetTablePos;
    List<AssetFileInfo> originalAssetInfos = new List<AssetFileInfo>();
    List<AssetFileInfo> assetInfos = new List<AssetFileInfo>();
    List<AssetsReplacer> currentReplacers = pReplacers.ToList();
    uint currentOffset = 0;

    //-write all original assets, modify sizes if needed and skip those to be removed
    for (int i = 0; i < AssetCount; i++)
    {
        AssetFileInfo info = new AssetFileInfo();
        info.Read(header.format, reader.Position, reader, reader.bigEndian);
        originalAssetInfos.Add(info);
        AssetsReplacer replacer = currentReplacers.FirstOrDefault(n => n.GetPathID() == info.index);
        if (replacer != null)
        {
            // BUGFIX: consume the replacer here; the original never removed it, so
            // the "new assets" loop below emitted the same replacement a second time.
            currentReplacers.Remove(replacer);
            if (replacer.GetReplacementType() == AssetsReplacementType.AssetsReplacement_AddOrModify)
            {
                int classIndex = Array.FindIndex(typeTree.pTypes_Unity5, t => t.classId == replacer.GetClassID());
                info = new AssetFileInfo()
                {
                    index = replacer.GetPathID(),
                    offs_curFile = currentOffset,
                    // BUGFIX: the original swapped these two fields (size was set to the
                    // class index and the type index to the raw class id); the "new
                    // assets" loop below shows the intended assignments.
                    curFileSize = (uint)replacer.GetSize(),
                    curFileTypeOrIndex = (uint)classIndex,
                    inheritedUnityClass = (ushort)replacer.GetClassID(), //-what is this
                    scriptIndex = replacer.GetMonoScriptID(),
                    unknown1 = 0
                };
            }
            else if (replacer.GetReplacementType() == AssetsReplacementType.AssetsReplacement_Remove)
            {
                // Removed asset: emit no entry and don't advance the offset.
                continue;
            }
        }
        currentOffset += info.curFileSize;
        // Asset data is 8-byte aligned.
        uint pad = 8 - (currentOffset % 8);
        if (pad != 8)
        {
            currentOffset += pad;
        }
        assetInfos.Add(info);
    }

    //-write new assets
    while (currentReplacers.Count > 0)
    {
        AssetsReplacer replacer = currentReplacers.First();
        if (replacer.GetReplacementType() == AssetsReplacementType.AssetsReplacement_AddOrModify)
        {
            int classIndex = Array.FindIndex(typeTree.pTypes_Unity5, t => t.classId == replacer.GetClassID());
            AssetFileInfo info = new AssetFileInfo()
            {
                index = replacer.GetPathID(),
                offs_curFile = currentOffset,
                curFileSize = (uint)replacer.GetSize(),
                curFileTypeOrIndex = (uint)classIndex,
                inheritedUnityClass = (ushort)replacer.GetClassID(),
                scriptIndex = replacer.GetMonoScriptID(),
                unknown1 = 0
            };
            currentOffset += info.curFileSize;
            uint pad = 8 - (currentOffset % 8);
            if (pad != 8)
            {
                currentOffset += pad;
            }
            assetInfos.Add(info);
        }
        currentReplacers.Remove(replacer);
    }

    writer.Write(assetInfos.Count);
    writer.Align();
    for (int i = 0; i < assetInfos.Count; i++)
    {
        assetInfos[i].Write(header.format, writer.Position, writer);
    }
    preloadTable.Write(writer.Position, writer, header.format);
    dependencies.Write(writer.Position, writer, header.format);
    uint metadataSize = (uint)writer.Position - 0x13;

    //-for padding only. if all initial data before assetData is more than 0x1000, this is skipped
    while (writer.Position < 0x1000 /*header.offs_firstFile*/)
    {
        writer.Write((byte)0x00);
    }

    // BUGFIX: keep the OLD offs_firstFile while copying original asset data below;
    // the original overwrote it first and then read originals from the wrong place
    // whenever the new data offset differed from the old one.
    uint newOffsFirstFile = (uint)writer.Position;
    for (int i = 0; i < assetInfos.Count; i++)
    {
        AssetFileInfo info = assetInfos[i];
        AssetsReplacer replacer = pReplacers.FirstOrDefault(n => n.GetPathID() == info.index);
        if (replacer != null)
        {
            if (replacer.GetReplacementType() == AssetsReplacementType.AssetsReplacement_AddOrModify)
            {
                replacer.Write(writer.Position, writer);
                writer.Align8();
            }
            else if (replacer.GetReplacementType() == AssetsReplacementType.AssetsReplacement_Remove)
            {
                continue;
            }
        }
        else
        {
            // Unmodified asset: copy its bytes from the original file.
            AssetFileInfo originalInfo = originalAssetInfos.FirstOrDefault(n => n.index == info.index);
            if (originalInfo != null)
            {
                reader.Position = header.offs_firstFile + originalInfo.offs_curFile;
                byte[] assetData = reader.ReadBytes((int)originalInfo.curFileSize);
                writer.Write(assetData);
                writer.Align8();
            }
        }
    }
    header.offs_firstFile = newOffsFirstFile;

    // Rewind and rewrite the header now that the final size is known.
    ulong fileSizeMarker = writer.Position;
    reader.Position = header.offs_firstFile;
    writer.Position = 0;
    header.metadataSize = metadataSize;
    header.fileSize = (uint)fileSizeMarker;
    header.Write(writer.Position, writer);
    return writer.Position;
}
/// <summary>
/// Remembers the given class database and type so GetTypeInfo can return them later.
/// localCopy is ignored by this implementation.
/// </summary>
public override bool SetTypeInfo(ClassDatabaseFile file, ClassDatabaseType type, bool localCopy)
{
    this.file = file;
    this.type = type;
    return true;
}
/// <summary>
/// Returns the class database and type previously stored via SetTypeInfo.
/// Always reports success; the outputs are whatever was stored (possibly null).
/// </summary>
public override bool GetTypeInfo(out ClassDatabaseFile file, out ClassDatabaseType type)
{
    file = this.file;
    type = this.type;
    return true;
}
// Serializes the package: header, per-cldb file block, then the shared string table.
// optimizeStringTable != 0 deduplicates identical strings. `compress` uses the same
// bit layout as header.compressionType (see Read): low 5 bits = method (1 lz4, 2 lzma),
// 0x20 = file block stored raw, 0x40 = string table stored raw, 0x80 = single-block
// package-level compression.
public void Write(AssetsFileWriter writer, int optimizeStringTable = 1, int compress = 1)
{
    long filePos = writer.BaseStream.Position;
    //lol don't do this if compress is 0
    // NOTE(review): with the default compress = 1 the 0x80 bit is NOT set, so this
    // throws; callers appear to be expected to pass e.g. 0x81/0x82 — confirm intended.
    if ((compress & 0x80) == 0)
    {
        throw new NotImplementedException("Compression flag 0x80 must be used");
    }
    //compress 1 for lz4 and 2 for lzma
    //this is backwards from assets files

    //build string table
    StringBuilder strTableBuilder = new StringBuilder();
    Dictionary<string, uint> strTableMap;
    if (optimizeStringTable != 0)
    {
        strTableMap = new Dictionary<string, uint>();
    }
    else
    {
        strTableMap = null;
    }
    foreach (ClassDatabaseFile cldb in files)
    {
        for (int i = 0; i < cldb.classes.Count; i++)
        {
            ClassDatabaseType type = cldb.classes[i];
            AddStringTableEntry(cldb, strTableBuilder, strTableMap, ref type.name);
            // Version-4 packages with cldb flag bit 1 also store an assembly file name per type.
            if (header.fileVersion == 4 && (cldb.header.flags & 1) != 0)
            {
                AddStringTableEntry(cldb, strTableBuilder, strTableMap, ref type.assemblyFileName);
            }
            List<ClassDatabaseTypeField> fields = type.fields;
            for (int j = 0; j < fields.Count; j++)
            {
                ClassDatabaseTypeField field = fields[j];
                AddStringTableEntry(cldb, strTableBuilder, strTableMap, ref field.fieldName);
                AddStringTableEntry(cldb, strTableBuilder, strTableMap, ref field.typeName);
                // Store the element back so the updated offsets stick.
                fields[j] = field;
            }
        }
    }
    header.fileCount = (uint)files.Count;
    // First header write reserves space; it is rewritten at the end with final offsets.
    header.Write(writer);
    using (MemoryStream cldbMs = new MemoryStream())
    using (AssetsFileWriter cldbWriter = new AssetsFileWriter(cldbMs))
    {
        //annoyingly, files and header.files are two different lists...
        for (int i = 0; i < files.Count; i++)
        {
            ClassDatabaseFile cldb = files[i];
            long cldbStartFilePos = cldbWriter.BaseStream.Position;
            //does not support 0x80 self compression rn
            cldb.Write(cldbWriter, 0, 0, false);
            long cldbEndFilePos = cldbWriter.BaseStream.Position;
            string cldbName = header.files[i].name;
            header.files[i] = new ClassDatabaseFileRef()
            {
                offset = (uint)cldbStartFilePos,
                length = (uint)(cldbEndFilePos - cldbStartFilePos),
                name = cldbName
            };
        }
        header.fileBlockSize = (uint)cldbMs.Length;
        cldbMs.Position = 0;
        if ((compress & 0x20) == 0) //compressed
        {
            if ((compress & 0x1f) == 1) //lz4
            {
                byte[] compressedBlock = LZ4Codec.Encode32HC(cldbMs.ToArray(), 0, (int)cldbMs.Length);
                writer.Write(compressedBlock);
            }
            else if ((compress & 0x1f) == 2) //lzma
            {
                byte[] compressedBlock = SevenZipHelper.Compress(cldbMs.ToArray());
                writer.Write(compressedBlock);
            }
            else
            {
                throw new ArgumentException("File marked as compressed but no valid compression option set!");
            }
        }
        else //write normally
        {
            cldbMs.CopyToCompat(writer.BaseStream);
        }
    }
    header.stringTableOffset = (uint)writer.Position;
    byte[] stringTableBytes = Encoding.ASCII.GetBytes(strTableBuilder.ToString());
    header.stringTableLenUncompressed = (uint)stringTableBytes.Length;
    if ((compress & 0x40) == 0) //string table is compressed
    {
        if ((compress & 0x1f) == 1) //lz4
        {
            stringTableBytes = LZ4Codec.Encode32HC(stringTableBytes, 0, stringTableBytes.Length);
        }
        else if ((compress & 0x1f) == 2) //lzma
        {
            stringTableBytes = SevenZipHelper.Compress(stringTableBytes);
        }
        else
        {
            throw new ArgumentException("File marked as compressed but no valid compression option set!");
        }
    }
    header.stringTableLenCompressed = (uint)stringTableBytes.Length;
    writer.Write(stringTableBytes);
    // Rewind and rewrite the header now that offsets/lengths are known.
    writer.Position = filePos;
    header.compressionType = (byte)compress;
    header.Write(writer);
}
/// <summary>
/// Default implementation: retrieving type info is not supported here.
/// Derived classes that support it must override this method.
/// </summary>
public virtual bool GetTypeInfo(out ClassDatabaseFile file, out ClassDatabaseType type) =>
    throw new NotImplementedException();
// Reads a class database package. Layout: header, file block (possibly a single
// compressed blob), then the string table (possibly compressed).
// compressionType bits: low 5 = method (1 lz4, 2 lzma), 0x20 = file block stored raw,
// 0x40 = string table stored raw, 0x80 = single-block compression (when clear,
// compression is handled per-file by the cldbs themselves).
// Returns (and stores in `valid`) whether the package could be read.
public bool Read(AssetsFileReader reader)
{
    header = new ClassDatabasePackageHeader();
    header.Read(reader);
    files = new List<ClassDatabaseFile>();
    long firstFile = reader.Position;
    AssetsFileReader newReader = reader;
    if ((header.compressionType & 0x80) == 0) //multiple compressed blocks
    {
        //untested!
        //the compression is handled by the cldbs themselves
        for (int i = 0; i < header.fileCount; i++)
        {
            newReader.Position = firstFile + header.files[i].offset;
            byte[] data = newReader.ReadBytes((int)header.files[i].length);
            using (MemoryStream ms = new MemoryStream(data))
            using (AssetsFileReader r = new AssetsFileReader(ms))
            {
                ClassDatabaseFile file = new ClassDatabaseFile();
                file.Read(r);
                files.Add(file);
            }
        }
    }
    else //one compressed block
    {
        if ((header.compressionType & 0x20) == 0) //file block compressed
        {
            // File offsets are relative to the decompressed block, so rebase to 0.
            firstFile = 0;
            int compressedSize = (int)(header.stringTableOffset - newReader.Position);
            int uncompressedSize = (int)header.fileBlockSize;
            MemoryStream ms;
            if ((header.compressionType & 0x1f) == 1) //lz4
            {
                byte[] uncompressedBytes = new byte[uncompressedSize];
                using (MemoryStream tempMs = new MemoryStream(newReader.ReadBytes(compressedSize)))
                {
                    Lz4DecoderStream decoder = new Lz4DecoderStream(tempMs);
                    decoder.Read(uncompressedBytes, 0, uncompressedSize);
                    decoder.Dispose();
                }
                ms = new MemoryStream(uncompressedBytes);
            }
            else if ((header.compressionType & 0x1f) == 2) //lzma
            {
                byte[] dbg = newReader.ReadBytes(compressedSize);
                using (MemoryStream tempMs = new MemoryStream(dbg))
                {
                    ms = SevenZipHelper.StreamDecompress(tempMs, uncompressedSize);
                }
            }
            else
            {
                // Unknown compression method: mark the package invalid.
                valid = false;
                return(valid);
            }
            // Continue reading the file entries from the decompressed block.
            newReader = new AssetsFileReader(ms);
            newReader.bigEndian = false;
        }
        for (int i = 0; i < header.fileCount; i++)
        {
            newReader.Position = firstFile + header.files[i].offset;
            byte[] data = newReader.ReadBytes((int)header.files[i].length);
            using (MemoryStream ms = new MemoryStream(data))
            using (AssetsFileReader r = new AssetsFileReader(ms))
            {
                ClassDatabaseFile file = new ClassDatabaseFile();
                file.Read(r);
                files.Add(file);
            }
        }
    }
    // Back to the outer reader for the string table.
    newReader = reader;
    newReader.Position = header.stringTableOffset;
    if ((header.compressionType & 0x40) == 0) //string table is compressed
    {
        int compressedSize = (int)header.stringTableLenCompressed;
        int uncompressedSize = (int)header.stringTableLenUncompressed;
        MemoryStream ms;
        if ((header.compressionType & 0x1f) == 1) //lz4
        {
            byte[] uncompressedBytes = new byte[uncompressedSize];
            using (MemoryStream tempMs = new MemoryStream(newReader.ReadBytes(compressedSize)))
            {
                Lz4DecoderStream decoder = new Lz4DecoderStream(tempMs);
                decoder.Read(uncompressedBytes, 0, uncompressedSize);
                decoder.Dispose();
            }
            ms = new MemoryStream(uncompressedBytes);
        }
        else if ((header.compressionType & 0x1f) == 2) //lzma
        {
            using (MemoryStream tempMs = new MemoryStream(newReader.ReadBytes(compressedSize)))
            {
                ms = SevenZipHelper.StreamDecompress(tempMs, uncompressedSize);
            }
        }
        else
        {
            valid = false;
            return(valid);
        }
        newReader = new AssetsFileReader(ms);
        newReader.bigEndian = false;
    }
    stringTable = newReader.ReadBytes((int)header.stringTableLenUncompressed);
    // Every contained cldb shares the package-level string table.
    for (int i = 0; i < header.fileCount; i++)
    {
        files[i].stringTable = stringTable;
    }
    valid = true;
    return(valid);
}
// Rewrites the bundle, applying the given replacers. Output is a single
// uncompressed block (block flags 0x40); the header and directory listing are
// written once to reserve space, then patched after the data sizes are known.
// Always returns true.
public bool Write(AssetsFileWriter writer, List<BundleReplacer> replacers, ClassDatabaseFile typeMeta = null)
{
    bundleHeader6.Write(writer);
    if (bundleHeader6.fileVersion >= 7)
    {
        writer.Align16();
    }
    AssetBundleBlockAndDirectoryList06 newBundleInf6 = new AssetBundleBlockAndDirectoryList06()
    {
        checksumLow = 0,
        checksumHigh = 0
    };
    //I could map the assets to their blocks but I don't
    //have any more-than-1-block files to test on
    //this should work just fine as far as I know
    newBundleInf6.blockInf = new AssetBundleBlockInfo06[]
    {
        new AssetBundleBlockInfo06
        {
            compressedSize = 0,
            decompressedSize = 0,
            flags = 0x40
        }
    };
    //assets that did not have their data modified but need
    //the original info to read from the original file
    var newToOriginalDirInfoLookup = new Dictionary<AssetBundleDirectoryInfo06, AssetBundleDirectoryInfo06>();
    List<AssetBundleDirectoryInfo06> originalDirInfos = new List<AssetBundleDirectoryInfo06>();
    List<AssetBundleDirectoryInfo06> dirInfos = new List<AssetBundleDirectoryInfo06>();
    List<BundleReplacer> currentReplacers = replacers.ToList();
    //this is kind of useless at the moment but leaving it here
    //because if the AssetsFile size can be precalculated in the
    //future, we can use this to skip rewriting sizes
    long currentOffset = 0;

    //write all original files, modify sizes if needed and skip those to be removed
    for (int i = 0; i < bundleInf6.directoryCount; i++)
    {
        AssetBundleDirectoryInfo06 info = bundleInf6.dirInf[i];
        originalDirInfos.Add(info);
        AssetBundleDirectoryInfo06 newInfo = new AssetBundleDirectoryInfo06()
        {
            offset = currentOffset,
            decompressedSize = info.decompressedSize,
            flags = info.flags,
            name = info.name
        };
        // Replacers are matched against the ORIGINAL entry name here.
        BundleReplacer replacer = currentReplacers.FirstOrDefault(n => n.GetOriginalEntryName() == newInfo.name);
        if (replacer != null)
        {
            currentReplacers.Remove(replacer);
            if (replacer.GetReplacementType() == BundleReplacementType.AddOrModify)
            {
                newInfo = new AssetBundleDirectoryInfo06()
                {
                    offset = currentOffset,
                    decompressedSize = replacer.GetSize(),
                    flags = info.flags,
                    name = replacer.GetEntryName()
                };
            }
            else if (replacer.GetReplacementType() == BundleReplacementType.Rename)
            {
                // Renamed but unmodified: remember the original so the data pass
                // can copy its bytes from the source bundle.
                newInfo = new AssetBundleDirectoryInfo06()
                {
                    offset = currentOffset,
                    decompressedSize = info.decompressedSize,
                    flags = info.flags,
                    name = replacer.GetEntryName()
                };
                newToOriginalDirInfoLookup[newInfo] = info;
            }
            else if (replacer.GetReplacementType() == BundleReplacementType.Remove)
            {
                continue;
            }
        }
        else
        {
            newToOriginalDirInfoLookup[newInfo] = info;
        }
        if (newInfo.decompressedSize != -1)
        {
            currentOffset += newInfo.decompressedSize;
        }
        dirInfos.Add(newInfo);
    }

    //write new files
    while (currentReplacers.Count > 0)
    {
        BundleReplacer replacer = currentReplacers[0];
        if (replacer.GetReplacementType() == BundleReplacementType.AddOrModify)
        {
            AssetBundleDirectoryInfo06 info = new AssetBundleDirectoryInfo06()
            {
                offset = currentOffset,
                decompressedSize = replacer.GetSize(),
                flags = 0x04, //idk it just works (tm)
                name = replacer.GetEntryName()
            };
            currentOffset += info.decompressedSize;
            dirInfos.Add(info);
        }
        currentReplacers.Remove(replacer);
    }

    //write the listings
    long bundleInfPos = writer.Position;
    newBundleInf6.dirInf = dirInfos.ToArray(); //this is only here to allocate enough space so it's fine if it's inaccurate
    newBundleInf6.Write(writer);

    long assetDataPos = writer.Position;

    //actually write the file data to the bundle now
    for (int i = 0; i < dirInfos.Count; i++)
    {
        AssetBundleDirectoryInfo06 info = dirInfos[i];
        // Note: matched against the NEW entry name in this pass.
        BundleReplacer replacer = replacers.FirstOrDefault(n => n.GetEntryName() == info.name);
        if (replacer != null)
        {
            if (replacer.GetReplacementType() == BundleReplacementType.AddOrModify)
            {
                long startPos = writer.Position;
                long endPos = replacer.Write(writer);
                long size = endPos - startPos;
                // Real size/offset are now known; patched into the listing later.
                dirInfos[i].decompressedSize = size;
                dirInfos[i].offset = startPos - assetDataPos;
            }
            else if (replacer.GetReplacementType() == BundleReplacementType.Remove)
            {
                continue;
            }
        }
        else
        {
            // Unmodified (or renamed) entry: stream its bytes from the source bundle.
            if (newToOriginalDirInfoLookup.TryGetValue(info, out AssetBundleDirectoryInfo06 originalInfo))
            {
                long startPos = writer.Position;
                reader.Position = bundleHeader6.GetFileDataOffset() + originalInfo.offset;
                reader.BaseStream.CopyToCompat(writer.BaseStream, originalInfo.decompressedSize);
                dirInfos[i].offset = startPos - assetDataPos;
            }
        }
    }

    //now that we know what the sizes are of the written files let's go back and fix them
    long finalSize = writer.Position;
    uint assetSize = (uint)(finalSize - assetDataPos);
    writer.Position = bundleInfPos;
    newBundleInf6.blockInf[0].decompressedSize = assetSize;
    newBundleInf6.blockInf[0].compressedSize = assetSize;
    newBundleInf6.dirInf = dirInfos.ToArray();
    newBundleInf6.Write(writer);

    uint infoSize = (uint)(assetDataPos - bundleInfPos);
    writer.Position = 0;
    AssetBundleHeader06 newBundleHeader6 = new AssetBundleHeader06()
    {
        signature = bundleHeader6.signature,
        fileVersion = bundleHeader6.fileVersion,
        minPlayerVersion = bundleHeader6.minPlayerVersion,
        fileEngineVersion = bundleHeader6.fileEngineVersion,
        totalFileSize = finalSize,
        compressedSize = infoSize,
        decompressedSize = infoSize,
        flags = bundleHeader6.flags & unchecked ((uint)~0x80) & unchecked ((uint)~0x3f) //unset info at end flag and compression value
    };
    newBundleHeader6.Write(writer);
    return(true);
}
/// <summary>
/// Retrieves the class database file and type associated with this object.
/// </summary>
/// <param name="file">Receives the class database, or null when unavailable.</param>
/// <param name="type">Receives the type entry, or null when unavailable.</param>
/// <returns>true when type info is available; false otherwise.</returns>
public abstract bool GetTypeInfo(out ClassDatabaseFile file, out ClassDatabaseType type);