/// <summary>
/// Reads a class database ("cldb") file, decompressing the class table first
/// if the header indicates compression.
/// Supported file versions: 1-4. Supported compression types: 0 (none),
/// 1 (LZ4), 2 (LZMA).
/// </summary>
/// <param name="reader">Reader positioned at the start of the cldb data.</param>
/// <returns>true on success; false if the magic, version, or compression type
/// is unsupported (also stored in <c>valid</c>).</returns>
public bool Read(AssetsFileReader reader)
{
    header = new ClassDatabaseFileHeader();
    header.Read(reader);
    if (header.header != "cldb" || header.fileVersion > 4 || header.fileVersion < 1)
    {
        valid = false;
        return valid;
    }
    classes = new List<ClassDatabaseType>();

    long classTablePos = reader.Position;
    AssetsFileReader newReader = reader;
    if (header.compressionType != 0)
    {
        // After decompression the class table starts at offset 0 of the new stream.
        classTablePos = 0;
        MemoryStream ms;
        if (header.compressionType == 1) //lz4
        {
            byte[] uncompressedBytes = new byte[header.uncompressedSize];
            using (MemoryStream tempMs = new MemoryStream(reader.ReadBytes((int)header.compressedSize)))
            using (Lz4DecoderStream decoder = new Lz4DecoderStream(tempMs))
            {
                // Stream.Read may return fewer bytes than requested, so loop
                // until the full uncompressed size has been consumed.
                int totalRead = 0;
                while (totalRead < (int)header.uncompressedSize)
                {
                    int bytesRead = decoder.Read(uncompressedBytes, totalRead, (int)header.uncompressedSize - totalRead);
                    if (bytesRead == 0)
                    {
                        // End of stream before the expected size; tolerate short
                        // data (matches the original single-call behavior).
                        break;
                    }
                    totalRead += bytesRead;
                }
            }
            ms = new MemoryStream(uncompressedBytes);
        }
        else if (header.compressionType == 2) //lzma
        {
            using (MemoryStream tempMs = new MemoryStream(reader.ReadBytes((int)header.compressedSize)))
            {
                ms = SevenZipHelper.StreamDecompress(tempMs);
            }
        }
        else
        {
            // Unknown compression type.
            valid = false;
            return valid;
        }
        newReader = new AssetsFileReader(ms);
        newReader.bigEndian = false;
    }

    // Read the string table first, then jump back to the class table.
    newReader.Position = header.stringTablePos;
    stringTable = newReader.ReadBytes((int)header.stringTableLen);
    newReader.Position = classTablePos;
    uint size = newReader.ReadUInt32();
    for (int i = 0; i < size; i++)
    {
        ClassDatabaseType cdt = new ClassDatabaseType();
        cdt.Read(newReader, header.fileVersion, header.flags);
        classes.Add(cdt);
    }
    valid = true;
    return valid;
}
/// <summary>
/// Reads an uncompressed class database ("cldb") file.
/// Only file versions 1 and 3 with compression type 0 are accepted.
/// </summary>
/// <param name="reader">Reader positioned at the start of the cldb data.</param>
/// <returns>true on success; false if the magic, version, or compression type
/// is unsupported (also stored in <c>valid</c>).</returns>
public bool Read(AssetsFileReader reader)
{
    header = new ClassDatabaseFileHeader();
    header.Read(reader, 0);
    if (header.header != "cldb" || !(header.fileVersion == 3 || header.fileVersion == 1) || header.compressionType != 0)
    {
        valid = false;
        return valid;
    }
    classes = new List<ClassDatabaseType>();

    // Remember where the class table starts, read the string table, then return.
    ulong classTablePos = reader.Position;
    // Consistency fix: use reader.Position throughout instead of mixing in a
    // direct reader.BaseStream.Position write, which bypassed the reader's
    // own position property used everywhere else in this method.
    reader.Position = header.stringTablePos;
    stringTable = reader.ReadBytes((int)header.stringTableLen);
    reader.Position = classTablePos;
    uint size = reader.ReadUInt32();
    for (int i = 0; i < size; i++)
    {
        ClassDatabaseType cdt = new ClassDatabaseType();
        cdt.Read(reader, reader.Position, header.fileVersion);
        classes.Add(cdt);
    }
    valid = true;
    return valid;
}
/// <summary>
/// Populates this template field from the class database field at
/// <paramref name="fieldIndex"/> in <paramref name="pType"/>, recursively
/// building template fields for its direct children.
/// </summary>
/// <param name="pFile">Class database providing the string table.</param>
/// <param name="pType">Type whose flattened field list is being walked.</param>
/// <param name="fieldIndex">Index of this field within <c>pType.fields</c>.</param>
/// <returns>Always true.</returns>
public bool FromClassDatabase(ClassDatabaseFile pFile, ClassDatabaseType pType, uint fieldIndex)
{
    ClassDatabaseTypeField field = pType.fields[(int)fieldIndex];
    name = field.fieldName.GetString(pFile);
    type = field.typeName.GetString(pFile);
    valueType = AssetTypeValueField.GetValueTypeByTypeName(type);
    isArray = field.isArray == 1;
    // Bit 0x4000 of flags2 marks fields that require alignment.
    align = (field.flags2 & 0x4000) != 0x00;
    hasValue = valueType != EnumValueTypes.ValueType_None;

    // Direct children are the subsequent fields exactly one depth level deeper;
    // the first field at the same depth or shallower ends this field's subtree.
    List<int> childrenIndexes = new List<int>();
    int thisDepth = field.depth;
    for (int i = (int)fieldIndex + 1; i < pType.fields.Count; i++)
    {
        if (pType.fields[i].depth == thisDepth + 1)
        {
            childrenCount++;
            childrenIndexes.Add(i);
        }
        if (pType.fields[i].depth <= thisDepth)
        {
            break;
        }
    }

    // Build child templates directly from the collected indexes. (The original
    // code redundantly re-scanned the field list a second time to find the
    // same children it had already collected above.)
    children = new AssetTypeTemplateField[childrenCount];
    for (int child = 0; child < childrenIndexes.Count; child++)
    {
        children[child] = new AssetTypeTemplateField();
        children[child].FromClassDatabase(pFile, pType, (uint)childrenIndexes[child]);
    }
    return true;
}
/// <summary>
/// Associates a class database file and type with this instance.
/// The base implementation does not support type info and always throws.
/// </summary>
/// <exception cref="NotImplementedException">Always thrown by this base implementation.</exception>
public virtual bool SetTypeInfo(ClassDatabaseFile file, ClassDatabaseType type, bool localCopy)
    => throw new NotImplementedException();
/// <summary>
/// Retrieves the class database file and type associated with this instance.
/// The base implementation does not support type info and always throws.
/// </summary>
/// <exception cref="NotImplementedException">Always thrown by this base implementation.</exception>
public virtual bool GetTypeInfo(out ClassDatabaseFile file, out ClassDatabaseType type)
    => throw new NotImplementedException();
/// <summary>
/// Returns the class database file and type previously stored on this instance.
/// </summary>
/// <returns>Always true.</returns>
public override bool GetTypeInfo(out ClassDatabaseFile file, out ClassDatabaseType type)
{
    // Hand back the stored pair via tuple deconstruction.
    (file, type) = (this.file, this.type);
    return true;
}
/// <summary>
/// Stores the given class database file and type on this instance.
/// </summary>
/// <returns>Always true.</returns>
public override bool SetTypeInfo(ClassDatabaseFile file, ClassDatabaseType type, bool localCopy)
{
    // Store both references at once; localCopy is not used by this implementation.
    (this.file, this.type) = (file, type);
    return true;
}
/// <summary>
/// Writes this assets file to <paramref name="writer"/>, applying the given
/// replacers (added/modified assets are written from the replacer, removed
/// ones are dropped, everything else is copied from the original file) and
/// rebuilding the metadata: type tree, asset info table, offsets, and sizes.
/// </summary>
/// <param name="writer">Destination writer.</param>
/// <param name="filePos">Absolute position to write at, or -1 to write at the writer's current position.</param>
/// <param name="replacers">Replacers to apply, keyed by path id.</param>
/// <param name="fileID">Not referenced by this method; kept for API compatibility.</param>
/// <param name="typeMeta">Optional class database used to synthesize type-tree
/// entries for classes not already present in this file's type tree.</param>
public void Write(AssetsFileWriter writer, long filePos, List<AssetsReplacer> replacers, uint fileID = 0, ClassDatabaseFile typeMeta = null)
{
    if (filePos == -1)
    {
        filePos = writer.Position;
    }
    else
    {
        writer.Position = filePos;
    }
    // Header is rewritten at the end once the final sizes are known; this
    // first write just reserves its space.
    header.Write(writer);

    // Ensure every replacer's (classId, scriptIndex) pair has a type-tree entry.
    foreach (AssetsReplacer replacer in replacers)
    {
        int replacerClassId = replacer.GetClassID();
        ushort replacerScriptIndex = replacer.GetMonoScriptID();
        if (!typeTree.unity5Types.Any(t => t.classId == replacerClassId && t.scriptIndex == replacerScriptIndex))
        {
            Type_0D type = null;
            if (typeMeta != null)
            {
                ClassDatabaseType cldbType = AssetHelper.FindAssetClassByID(typeMeta, (uint)replacerClassId);
                if (cldbType != null)
                {
                    type = C2T5.Cldb2TypeTree(typeMeta, cldbType);
                    //in original AssetsTools, if you tried to use a new monoId it would just try to use
                    //the highest existing scriptIndex that existed without making a new one (unless there
                    //were no monobehavours ofc) this isn't any better as we just assign a plain monobehaviour
                    //typetree to a type that probably has more fields. I don't really know of a better way to
                    //handle this at the moment as cldbs cannot differentiate monoids.
                    type.scriptIndex = replacerScriptIndex;
                }
            }
            if (type == null)
            {
                // No cldb info available: fall back to an empty type entry.
                type = new Type_0D
                {
                    classId = replacerClassId,
                    unknown16_1 = 0,
                    scriptIndex = replacerScriptIndex,
                    typeHash1 = 0,
                    typeHash2 = 0,
                    typeHash3 = 0,
                    typeHash4 = 0,
                    typeFieldsExCount = 0,
                    stringTableLen = 0,
                    stringTable = ""
                };
            }
            typeTree.unity5Types.Add(type);
        }
    }
    typeTree.Write(writer, header.format);

    Dictionary<long, AssetFileInfo> oldAssetInfosByPathId = new Dictionary<long, AssetFileInfo>();
    Dictionary<long, AssetsReplacer> replacersByPathId = replacers.ToDictionary(r => r.GetPathID());
    List<AssetFileInfo> newAssetInfos = new List<AssetFileInfo>();

    // Collect unchanged assets (that aren't getting removed)
    reader.Position = assetTablePos;
    for (int i = 0; i < assetCount; i++)
    {
        AssetFileInfo oldAssetInfo = new AssetFileInfo();
        oldAssetInfo.Read(header.format, reader);
        oldAssetInfosByPathId.Add(oldAssetInfo.index, oldAssetInfo);
        // Any path id with a replacer is either modified (re-added below)
        // or removed (simply not carried over).
        if (replacersByPathId.ContainsKey(oldAssetInfo.index))
        {
            continue;
        }
        AssetFileInfo newAssetInfo = new AssetFileInfo
        {
            index = oldAssetInfo.index,
            curFileTypeOrIndex = oldAssetInfo.curFileTypeOrIndex,
            inheritedUnityClass = oldAssetInfo.inheritedUnityClass,
            scriptIndex = oldAssetInfo.scriptIndex,
            unknown1 = oldAssetInfo.unknown1
        };
        newAssetInfos.Add(newAssetInfo);
    }

    // Collect modified and new assets
    foreach (AssetsReplacer replacer in replacers.Where(r => r.GetReplacementType() == AssetsReplacementType.AddOrModify))
    {
        AssetFileInfo newAssetInfo = new AssetFileInfo
        {
            index = replacer.GetPathID(),
            inheritedUnityClass = (ushort)replacer.GetClassID(), //for older unity versions
            scriptIndex = replacer.GetMonoScriptID(),
            unknown1 = 0
        };
        if (header.format < 0x10)
        {
            // Older formats store the class id directly.
            newAssetInfo.curFileTypeOrIndex = replacer.GetClassID();
        }
        else
        {
            // Newer formats store an index into the type tree instead.
            if (replacer.GetMonoScriptID() == 0xFFFF)
            {
                newAssetInfo.curFileTypeOrIndex = typeTree.unity5Types.FindIndex(t => t.classId == replacer.GetClassID());
            }
            else
            {
                newAssetInfo.curFileTypeOrIndex = typeTree.unity5Types.FindIndex(t => t.classId == replacer.GetClassID() && t.scriptIndex == replacer.GetMonoScriptID());
            }
        }
        newAssetInfos.Add(newAssetInfo);
    }
    newAssetInfos.Sort((i1, i2) => i1.index.CompareTo(i2.index));

    // Write asset infos (will write again later on to update the offsets and sizes)
    writer.Write(newAssetInfos.Count);
    writer.Align();
    long newAssetTablePos = writer.Position;
    foreach (AssetFileInfo newAssetInfo in newAssetInfos)
    {
        newAssetInfo.Write(header.format, writer);
    }
    preloadTable.Write(writer);
    dependencies.Write(writer);

    // Temporary fix for secondaryTypeCount and friends
    if (header.format >= 0x14)
    {
        writer.Write(0); //secondaryTypeCount
    }
    uint newMetadataSize = (uint)(writer.Position - filePos - 0x13); //0x13 is header - "endianness byte"? (if that's what it even is)
    if (header.format >= 0x16)
    {
        // Remove larger variation fields as well
        newMetadataSize -= 0x1c;
    }

    // For padding only. if all initial data before assetData is more than 0x1000, this is skipped
    if (writer.Position < 0x1000)
    {
        while (writer.Position < 0x1000)
        {
            writer.Write((byte)0x00);
        }
    }
    else
    {
        if (writer.Position % 16 == 0)
        {
            // Already 16-aligned: skip ahead a full 16 bytes instead of zero.
            writer.Position += 16;
        }
        else
        {
            writer.Align16();
        }
    }
    long newFirstFileOffset = writer.Position;

    // Write all asset data
    for (int i = 0; i < newAssetInfos.Count; i++)
    {
        AssetFileInfo newAssetInfo = newAssetInfos[i];
        newAssetInfo.curFileOffset = writer.Position - newFirstFileOffset;
        if (replacersByPathId.TryGetValue(newAssetInfo.index, out AssetsReplacer replacer))
        {
            replacer.Write(writer);
        }
        else
        {
            // Unmodified asset: stream its bytes straight from the original file.
            AssetFileInfo oldAssetInfo = oldAssetInfosByPathId[newAssetInfo.index];
            reader.Position = header.firstFileOffset + oldAssetInfo.curFileOffset;
            reader.BaseStream.CopyToCompat(writer.BaseStream, oldAssetInfo.curFileSize);
        }
        newAssetInfo.curFileSize = (uint)(writer.Position - (newFirstFileOffset + newAssetInfo.curFileOffset));
        if (i != newAssetInfos.Count - 1)
        {
            writer.Align8();
        }
    }
    long newFileSize = writer.Position - filePos;

    // Write new header
    AssetsFileHeader newHeader = new AssetsFileHeader
    {
        metadataSize = newMetadataSize,
        fileSize = newFileSize,
        format = header.format,
        firstFileOffset = newFirstFileOffset,
        endianness = header.endianness,
        unknown = header.unknown,
        unknown1 = header.unknown1,
        unknown2 = header.unknown2
    };
    writer.Position = filePos;
    newHeader.Write(writer);

    // Write new asset infos again (this time with offsets and sizes filled in)
    writer.Position = newAssetTablePos;
    foreach (AssetFileInfo newAssetInfo in newAssetInfos)
    {
        newAssetInfo.Write(header.format, writer);
    }

    // Set writer position back to end of file
    writer.Position = filePos + newFileSize;
}
/// <summary>
/// Writes this class database package: header, then the (optionally
/// compressed) block of cldb files, then the (optionally compressed) shared
/// string table, and finally rewrites the header with the real offsets/lengths.
/// </summary>
/// <param name="writer">Destination writer.</param>
/// <param name="optimizeStringTable">Non-zero to deduplicate identical strings via a lookup map.</param>
/// <param name="compress">Compression flags: low 5 bits select the algorithm
/// (1 = LZ4, 2 = LZMA); 0x20 set = file block left uncompressed; 0x40 set =
/// string table left uncompressed; 0x80 is required.
/// NOTE(review): the default value 1 does not include 0x80 and therefore
/// throws — confirm callers always pass the 0x80 flag explicitly.</param>
public void Write(AssetsFileWriter writer, int optimizeStringTable = 1, int compress = 1)
{
    long filePos = writer.BaseStream.Position;
    //lol don't do this if compress is 0
    if ((compress & 0x80) == 0)
    {
        throw new NotImplementedException("Compression flag 0x80 must be used");
    }
    //compress 1 for lz4 and 2 for lzma
    //this is backwards from assets files

    //build string table
    StringBuilder strTableBuilder = new StringBuilder();
    Dictionary<string, uint> strTableMap;
    if (optimizeStringTable != 0)
    {
        strTableMap = new Dictionary<string, uint>();
    }
    else
    {
        // A null map disables deduplication inside AddStringTableEntry.
        strTableMap = null;
    }
    foreach (ClassDatabaseFile cldb in files)
    {
        for (int i = 0; i < cldb.classes.Count; i++)
        {
            ClassDatabaseType type = cldb.classes[i];
            AddStringTableEntry(cldb, strTableBuilder, strTableMap, ref type.name);
            // Version 4 files with flag bit 0 set also carry an assembly file name.
            if (header.fileVersion == 4 && (cldb.header.flags & 1) != 0)
            {
                AddStringTableEntry(cldb, strTableBuilder, strTableMap, ref type.assemblyFileName);
            }
            List<ClassDatabaseTypeField> fields = type.fields;
            for (int j = 0; j < fields.Count; j++)
            {
                ClassDatabaseTypeField field = fields[j];
                AddStringTableEntry(cldb, strTableBuilder, strTableMap, ref field.fieldName);
                AddStringTableEntry(cldb, strTableBuilder, strTableMap, ref field.typeName);
                // Write back — presumably ClassDatabaseTypeField has value
                // semantics, so the updated copy must be stored; TODO confirm.
                fields[j] = field;
            }
        }
    }
    header.fileCount = (uint)files.Count;
    header.Write(writer);
    using (MemoryStream cldbMs = new MemoryStream())
    using (AssetsFileWriter cldbWriter = new AssetsFileWriter(cldbMs))
    {
        //annoyingly, files and header.files are two different lists...
        for (int i = 0; i < files.Count; i++)
        {
            ClassDatabaseFile cldb = files[i];
            long cldbStartFilePos = cldbWriter.BaseStream.Position;
            //does not support 0x80 self compression rn
            cldb.Write(cldbWriter, 0, 0, false);
            long cldbEndFilePos = cldbWriter.BaseStream.Position;
            string cldbName = header.files[i].name;
            header.files[i] = new ClassDatabaseFileRef()
            {
                offset = (uint)cldbStartFilePos,
                length = (uint)(cldbEndFilePos - cldbStartFilePos),
                name = cldbName
            };
        }
        header.fileBlockSize = (uint)cldbMs.Length;
        cldbMs.Position = 0;
        if ((compress & 0x20) == 0) //compressed
        {
            if ((compress & 0x1f) == 1) //lz4
            {
                byte[] compressedBlock = LZ4Codec.Encode32HC(cldbMs.ToArray(), 0, (int)cldbMs.Length);
                writer.Write(compressedBlock);
            }
            else if ((compress & 0x1f) == 2) //lzma
            {
                byte[] compressedBlock = SevenZipHelper.Compress(cldbMs.ToArray());
                writer.Write(compressedBlock);
            }
            else
            {
                throw new ArgumentException("File marked as compressed but no valid compression option set!");
            }
        }
        else //write normally
        {
            cldbMs.CopyToCompat(writer.BaseStream);
        }
    }
    header.stringTableOffset = (uint)writer.Position;
    byte[] stringTableBytes = Encoding.ASCII.GetBytes(strTableBuilder.ToString());
    header.stringTableLenUncompressed = (uint)stringTableBytes.Length;
    if ((compress & 0x40) == 0) //string table is compressed
    {
        if ((compress & 0x1f) == 1) //lz4
        {
            stringTableBytes = LZ4Codec.Encode32HC(stringTableBytes, 0, stringTableBytes.Length);
        }
        else if ((compress & 0x1f) == 2) //lzma
        {
            stringTableBytes = SevenZipHelper.Compress(stringTableBytes);
        }
        else
        {
            throw new ArgumentException("File marked as compressed but no valid compression option set!");
        }
    }
    header.stringTableLenCompressed = (uint)stringTableBytes.Length;
    writer.Write(stringTableBytes);
    // Rewrite the header now that all offsets and lengths are known.
    // NOTE(review): the writer is left positioned just after the header, not
    // at the end of the file — confirm callers expect this.
    writer.Position = filePos;
    header.compressionType = (byte)compress;
    header.Write(writer);
}
/// <summary>
/// This implementation does not carry class database type info; the call is
/// ignored and failure is reported.
/// </summary>
/// <returns>Always false.</returns>
public override bool SetTypeInfo(ClassDatabaseFile file, ClassDatabaseType type, bool localCopy) => false;
/// <summary>
/// This implementation does not carry class database type info; both out
/// parameters are set to null and failure is reported.
/// </summary>
/// <returns>Always false.</returns>
public override bool GetTypeInfo(out ClassDatabaseFile file, out ClassDatabaseType type)
{
    // Nothing to hand out — clear both results and report absence.
    type = null;
    file = null;
    return false;
}
/// <summary>
/// Writes this class database file: header, class table, then string table,
/// and finally rewrites the header with the real string-table position/length.
/// </summary>
/// <param name="writer">Destination writer.</param>
/// <param name="optimizeStringTable">1 to rebuild the string table with
/// deduplicated entries; any other value keeps the existing table bytes.</param>
/// <param name="compress">Not referenced by this method; kept for API compatibility.</param>
/// <param name="writeStringTable">Set false only for tpk packing, where the
/// string table is stored separately; don't set false anytime else!</param>
public void Write(AssetsFileWriter writer, int optimizeStringTable = 1, int compress = 1, bool writeStringTable = true)
{
    long filePos = writer.BaseStream.Position;
    byte[] newStrTable = stringTable;
    //"optimize string table (slow)" mode 2 not supported
    //ex: >AABB\0>localAABB\0 can be just >local>AABB\0
    if (optimizeStringTable == 1)
    {
        StringBuilder strTableBuilder = new StringBuilder();
        Dictionary<string, uint> strTableMap = new Dictionary<string, uint>();
        for (int i = 0; i < classes.Count; i++)
        {
            ClassDatabaseType type = classes[i];
            AddStringTableEntry(strTableBuilder, strTableMap, ref type.name);
            if (header.fileVersion == 4 && (header.flags & 1) != 0)
            {
                AddStringTableEntry(strTableBuilder, strTableMap, ref type.assemblyFileName);
            }
            List<ClassDatabaseTypeField> fields = type.fields;
            for (int j = 0; j < fields.Count; j++)
            {
                ClassDatabaseTypeField field = fields[j];
                AddStringTableEntry(strTableBuilder, strTableMap, ref field.fieldName);
                AddStringTableEntry(strTableBuilder, strTableMap, ref field.typeName);
                fields[j] = field;
            }
        }
        // Bug fix: actually use the rebuilt table. The previous code updated
        // every string reference to point into the rebuilt table but then
        // wrote the OLD table bytes, leaving the references dangling.
        newStrTable = Encoding.ASCII.GetBytes(strTableBuilder.ToString());
    }
    header.Write(writer);
    writer.Write(classes.Count);
    for (int i = 0; i < classes.Count; i++)
    {
        classes[i].Write(writer, header.fileVersion, header.flags);
    }
    long stringTablePos = writer.Position;
    //set false only for tpk packing, don't set false anytime else!
    if (writeStringTable)
    {
        writer.Write(newStrTable);
    }
    long fileEndPos = writer.Position;
    long stringTableLen = writer.Position - stringTablePos;
    // NOTE(review): fileSize is the absolute writer position, not relative to
    // filePos — confirm this is intended when writing at a nonzero offset.
    long fileSize = writer.Position;
    header.stringTablePos = (uint)stringTablePos;
    header.stringTableLen = (uint)stringTableLen;
    header.uncompressedSize = (uint)fileSize;
    // Rewrite the header with the final positions/lengths, then restore the
    // writer to the end of the file.
    writer.Position = filePos;
    header.Write(writer);
    writer.Position = fileEndPos;
}
/// <summary>
/// Associates a class database file and type with this instance.
/// </summary>
/// <param name="file">Class database file to associate.</param>
/// <param name="type">Type within <paramref name="file"/> to associate.</param>
/// <param name="localCopy">Implementation-defined; see the concrete override.</param>
/// <returns>true if the implementation stored the type info; false otherwise.</returns>
public abstract bool SetTypeInfo(ClassDatabaseFile file, ClassDatabaseType type, bool localCopy);
/// <summary>
/// Retrieves the class database file and type associated with this instance.
/// </summary>
/// <param name="file">Receives the associated class database file, or null if unavailable.</param>
/// <param name="type">Receives the associated type, or null if unavailable.</param>
/// <returns>true if type info was available; false otherwise.</returns>
public abstract bool GetTypeInfo(out ClassDatabaseFile file, out ClassDatabaseType type);