private IReadOnlyDictionary<string, GameFile> ReadIndexLegacy(bool caseInsensitive)
{
    Ar.Position = Info.IndexOffset;
    var index = new FByteArchive($"{Name} - Index", ReadAndDecrypt((int) Info.IndexSize));

    string mountPoint;
    try
    {
        mountPoint = index.ReadFString();
    }
    catch (Exception e)
    {
        // A garbage mount point string almost always means the index was decrypted with the wrong key
        throw new InvalidAesKeyException($"Given aes key '{AesKey?.KeyString}' is not working with '{Name}'", e);
    }

    ValidateMountPoint(ref mountPoint);
    MountPoint = mountPoint;

    var fileCount = index.Read<int>();
    var files = new Dictionary<string, GameFile>(fileCount);

    for (var i = 0; i < fileCount; i++)
    {
        var path = string.Concat(mountPoint, index.ReadFString());
        var entry = new FPakEntry(this, path, index);
        if (entry.IsDeleted && entry.Size == 0)
            continue;
        if (entry.IsEncrypted)
            EncryptedFileCount++;
        if (caseInsensitive)
            files[path.ToLowerInvariant()] = entry;
        else
            files[path] = entry;
    }

    return Files = files;
}
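// Aside, not part of the reader: the case-insensitive branch above keys the dictionary
// by a lowercased copy of each path. A minimal standalone sketch of the alternative
// design, a Dictionary with an OrdinalIgnoreCase comparer, which preserves the original
// casing in the keys while still matching lookups of any case. Illustrative only; this
// is not how the pak reader itself is implemented.
using System;
using System.Collections.Generic;

public static class CaseInsensitiveIndexSketch
{
    public static void Main()
    {
        // Comparer-based lookup: no second string allocation per key,
        // same observable behavior as ToLowerInvariant keying on lookup.
        var files = new Dictionary<string, long>(StringComparer.OrdinalIgnoreCase)
        {
            ["Game/Content/Maps/Lobby.umap"] = 123_456 // hypothetical path and size
        };
        Console.WriteLine(files.ContainsKey("game/content/maps/lobby.umap")); // True
    }
}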
public IoGlobalData(IoStoreReader globalReader)
{
    // The name-hashes chunk holds one 64-bit hash per name plus a leading
    // hash-algorithm id, hence the "- 1" when deriving the name count.
    var nameHashesChunk = globalReader.ChunkIndex(new FIoChunkId(0, 0, EIoChunkType.LoaderGlobalNameHashes));
    var nameCount = (int) (globalReader.TocResource.ChunkOffsetLengths[nameHashesChunk].Length / sizeof(ulong) - 1);
    var nameAr = new FByteArchive("LoaderGlobalNames", globalReader.Read(new FIoChunkId(0, 0, EIoChunkType.LoaderGlobalNames)));
    GlobalNameMap = FNameEntrySerialized.LoadNameBatch(nameAr, nameCount);

    var metaAr = new FByteArchive("LoaderInitialLoadMeta", globalReader.Read(new FIoChunkId(0, 0, EIoChunkType.LoaderInitialLoadMeta)));
    var numObjects = metaAr.Read<int>();
    var scriptObjects = metaAr.ReadArray<FScriptObjectEntry>(numObjects);

    // Build a fixed-size hash table (4096 buckets, separate chaining)
    // from global object index to script object name.
    ObjectHashStore = new ObjectIndexHashEntry[numObjects];
    ObjectHashHeads = new ObjectIndexHashEntry[4096];
    for (int i = 0; i < ObjectHashHeads.Length; i++)
    {
        ObjectHashHeads[i] = new ObjectIndexHashEntry();
    }

    for (int i = 0; i < numObjects; i++)
    {
        ref var e = ref scriptObjects[i];
        var scriptName = GlobalNameMap[(int) e.ObjectName.NameIndex];
        var entry = new ObjectIndexHashEntry
        {
            Name = scriptName.Name,
            ObjectIndex = e.GlobalIndex
        };
        ObjectHashStore[i] = entry;

        // Prepend the entry to its bucket's chain
        var hash = ObjectIndexToHash(e.GlobalIndex);
        entry.Next = ObjectHashHeads[hash];
        ObjectHashHeads[hash] = entry;
    }
}
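// A minimal standalone model of the separate-chaining pattern the loop above uses:
// a fixed bucket array, newest entry prepended to its bucket's chain. All names here
// (ChainedEntry, Hash, the 4095 mask) are illustrative stand-ins, not the library's
// actual ObjectIndexHashEntry/ObjectIndexToHash implementation.
using System;

public sealed class ChainedEntry
{
    public ulong Key;
    public string? Value;
    public ChainedEntry? Next;
}

public static class ChainedHashSketch
{
    private static readonly ChainedEntry?[] Heads = new ChainedEntry?[4096];

    // Stand-in for ObjectIndexToHash; assumes a simple mask into 4096 buckets
    private static int Hash(ulong key) => (int) (key & 4095);

    public static void Insert(ulong key, string value)
    {
        // Prepend, exactly like entry.Next = ObjectHashHeads[hash] above
        var entry = new ChainedEntry { Key = key, Value = value, Next = Heads[Hash(key)] };
        Heads[Hash(key)] = entry;
    }

    public static string? Find(ulong key)
    {
        for (var e = Heads[Hash(key)]; e != null; e = e.Next)
            if (e.Key == key)
                return e.Value;
        return null;
    }
}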
public static TypeMappings Parse(FArchive Ar)
{
    var magic = Ar.Read<ushort>();
    if (magic != FileMagic)
        throw new ParserException(".usmap file has an invalid magic constant");

    var version = Ar.Read<Version>();
    if (version < 0 || version > Version.LATEST)
        throw new ParserException($".usmap has an invalid version {(byte) version}");

    var compression = Ar.Read<ECompressionMethod>();
    var compSize = Ar.Read<uint>();
    var decompSize = Ar.Read<uint>();

    var data = new byte[decompSize];
    switch (compression)
    {
        case ECompressionMethod.None:
            if (compSize != decompSize)
                throw new ParserException("No compression: Compression size must be equal to decompression size");
            Ar.Read(data, 0, (int) compSize);
            break;
        case ECompressionMethod.Oodle:
            Oodle.Decompress(Ar.ReadBytes((int) compSize), 0, (int) compSize, data, 0, (int) decompSize);
            break;
        case ECompressionMethod.Brotli:
            throw new NotImplementedException();
        default:
            throw new ParserException($"Invalid compression method {compression}");
    }

    // From here on, parse from the decompressed payload
    Ar = new FByteArchive(Ar.Name, data);

    // Name table: every later name reference is an index into this list
    var nameSize = Ar.Read<uint>();
    var nameLut = new List<string>((int) nameSize);
    for (int i = 0; i < nameSize; i++)
    {
        var nameLength = Ar.Read<byte>();
        nameLut.Add(ReadStringUnsafe(Ar, nameLength));
    }

    // Enums: name plus a value list keyed by ordinal
    var enumCount = Ar.Read<uint>();
    var enums = new Dictionary<string, Dictionary<int, string>>((int) enumCount);
    for (int i = 0; i < enumCount; i++)
    {
        var enumName = Ar.ReadName(nameLut)!;
        var enumNamesSize = Ar.Read<byte>();
        var enumNames = new Dictionary<int, string>(enumNamesSize);
        for (int j = 0; j < enumNamesSize; j++)
        {
            var value = Ar.ReadName(nameLut)!;
            enumNames[j] = value;
        }
        enums.Add(enumName, enumNames);
    }

    var structCount = Ar.Read<uint>();
    var structs = new Dictionary<string, Struct>();
    var mappings = new TypeMappings(structs, enums);
    for (int i = 0; i < structCount; i++)
    {
        var s = ParseStruct(mappings, Ar, nameLut);
        structs[s.Name] = s;
    }
    return mappings;
}
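// A standalone sketch of the name-table layout the parser above consumes: a uint count
// followed by byte-length-prefixed strings. This models the format with BinaryReader and
// assumes UTF-8 payloads; the real parser reads through FArchive and ReadStringUnsafe.
using System;
using System.IO;
using System.Text;

public static class NameTableSketch
{
    public static string[] ReadNames(byte[] data)
    {
        using var reader = new BinaryReader(new MemoryStream(data));
        var count = reader.ReadUInt32();      // nameSize in the parser above
        var names = new string[count];
        for (var i = 0; i < count; i++)
        {
            var length = reader.ReadByte();   // one-byte length prefix per name
            names[i] = Encoding.UTF8.GetString(reader.ReadBytes(length));
        }
        return names;
    }
}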
private IReadOnlyDictionary<string, GameFile> ReadIndexUpdated(bool caseInsensitive)
{
    // Prepare the primary index and decrypt it if necessary
    Ar.Position = Info.IndexOffset;
    FArchive primaryIndex = new FByteArchive($"{Name} - Primary Index", ReadAndDecrypt((int) Info.IndexSize));

    string mountPoint;
    try
    {
        mountPoint = primaryIndex.ReadFString();
    }
    catch (Exception e)
    {
        throw new InvalidAesKeyException($"Given aes key '{AesKey?.KeyString}' is not working with '{Name}'", e);
    }

    ValidateMountPoint(ref mountPoint);
    MountPoint = mountPoint;

    var fileCount = primaryIndex.Read<int>();
    EncryptedFileCount = 0;

    primaryIndex.Position += 8; // PathHashSeed

    if (!primaryIndex.ReadBoolean())
        throw new ParserException(primaryIndex, "No path hash index");

    primaryIndex.Position += 36; // PathHashIndexOffset (long) + PathHashIndexSize (long) + PathHashIndexHash (20 bytes)

    if (!primaryIndex.ReadBoolean())
        throw new ParserException(primaryIndex, "No directory index");

    var directoryIndexOffset = primaryIndex.Read<long>();
    var directoryIndexSize = primaryIndex.Read<long>();
    primaryIndex.Position += 20; // Directory index hash

    var encodedPakEntriesSize = primaryIndex.Read<int>();
    var encodedPakEntries = primaryIndex.ReadBytes(encodedPakEntriesSize);

    if (primaryIndex.Read<int>() < 0)
        throw new ParserException("Corrupt pak PrimaryIndex detected");

    // Read FDirectoryIndex
    Ar.Position = directoryIndexOffset;
    var directoryIndex = new FByteArchive($"{Name} - Directory Index", ReadAndDecrypt((int) directoryIndexSize));

    unsafe
    {
        fixed (byte* ptr = encodedPakEntries)
        {
            var directoryIndexLength = directoryIndex.Read<int>();
            var files = new Dictionary<string, GameFile>(fileCount);

            for (var i = 0; i < directoryIndexLength; i++)
            {
                var dir = directoryIndex.ReadFString();
                var dirDictLength = directoryIndex.Read<int>();

                for (var j = 0; j < dirDictLength; j++)
                {
                    var name = directoryIndex.ReadFString();
                    var path = string.Concat(mountPoint, dir, name);
                    // Each file entry stores a byte offset into the encoded pak entries blob
                    var entry = new FPakEntry(this, path, ptr + directoryIndex.Read<int>());
                    if (entry.IsEncrypted)
                        EncryptedFileCount++;
                    if (caseInsensitive)
                        files[path.ToLowerInvariant()] = entry;
                    else
                        files[path] = entry;
                }
            }

            Files = files;
            return files;
        }
    }
}
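// A standalone model of the two-level FDirectoryIndex walk above: an outer directory
// count, then per directory an inner count of (fileName, entryOffset) pairs, with full
// paths formed as mountPoint + dir + name. BinaryReader and the int-length string format
// here are assumptions for the sketch; the real reader uses FArchive.ReadFString and
// hands the offset to FPakEntry as a pointer into the encoded entries blob.
using System;
using System.Collections.Generic;
using System.IO;

public static class DirectoryIndexSketch
{
    // Sketch-local string format (int length + chars); not UE4's serialized FString
    private static string ReadString(BinaryReader r) => new string(r.ReadChars(r.ReadInt32()));

    public static Dictionary<string, int> ReadDirectoryIndex(BinaryReader r, string mountPoint)
    {
        var offsets = new Dictionary<string, int>();
        var dirCount = r.ReadInt32();              // directoryIndexLength above
        for (var i = 0; i < dirCount; i++)
        {
            var dir = ReadString(r);
            var fileCount = r.ReadInt32();         // dirDictLength above
            for (var j = 0; j < fileCount; j++)
            {
                var name = ReadString(r);
                var entryOffset = r.ReadInt32();   // offset into encodedPakEntries
                offsets[string.Concat(mountPoint, dir, name)] = entryOffset;
            }
        }
        return offsets;
    }
}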
public FIoStoreTocResource(FArchive Ar, EIoStoreTocReadOptions readOptions = EIoStoreTocReadOptions.Default)
{
    // Buffer the whole TOC up front so the remaining reads hit memory, not the underlying stream
    var streamBuffer = new byte[Ar.Length];
    Ar.Read(streamBuffer, 0, streamBuffer.Length);
    using var archive = new FByteArchive(Ar.Name, streamBuffer);

    // Header
    Header = new FIoStoreTocHeader(archive);
    if (Header.Version < EIoStoreTocVersion.PartitionSize)
    {
        Header.PartitionCount = 1;
        Header.PartitionSize = uint.MaxValue;
    }

    // Chunk IDs
    ChunkIds = archive.ReadArray<FIoChunkId>((int) Header.TocEntryCount);

    // Chunk offsets
    ChunkOffsetLengths = new FIoOffsetAndLength[Header.TocEntryCount];
    for (int i = 0; i < Header.TocEntryCount; i++)
    {
        ChunkOffsetLengths[i] = new FIoOffsetAndLength(archive);
    }

    // Chunk perfect hash map
    uint perfectHashSeedsCount = 0;
    uint chunksWithoutPerfectHashCount = 0;
    if (Header.Version >= EIoStoreTocVersion.PerfectHashWithOverflow)
    {
        perfectHashSeedsCount = Header.TocChunkPerfectHashSeedsCount;
        chunksWithoutPerfectHashCount = Header.TocChunksWithoutPerfectHashCount;
    }
    else if (Header.Version >= EIoStoreTocVersion.PerfectHash)
    {
        perfectHashSeedsCount = Header.TocChunkPerfectHashSeedsCount;
    }
    if (perfectHashSeedsCount > 0)
    {
        ChunkPerfectHashSeeds = archive.ReadArray<int>((int) perfectHashSeedsCount);
    }
    if (chunksWithoutPerfectHashCount > 0)
    {
        ChunkIndicesWithoutPerfectHash = archive.ReadArray<int>((int) chunksWithoutPerfectHashCount);
    }

    // Compression blocks
    CompressionBlocks = new FIoStoreTocCompressedBlockEntry[Header.TocCompressedBlockEntryCount];
    for (int i = 0; i < Header.TocCompressedBlockEntryCount; i++)
    {
        CompressionBlocks[i] = new FIoStoreTocCompressedBlockEntry(archive);
    }

    // Compression methods: fixed-width, NUL-padded name slots; index 0 is reserved for None
    unsafe
    {
        var bufferSize = (int) (Header.CompressionMethodNameLength * Header.CompressionMethodNameCount);
        var buffer = stackalloc byte[bufferSize];
        archive.Serialize(buffer, bufferSize);
        CompressionMethods = new CompressionMethod[Header.CompressionMethodNameCount + 1];
        CompressionMethods[0] = CompressionMethod.None;
        for (var i = 0; i < Header.CompressionMethodNameCount; i++)
        {
            var name = new string((sbyte*) buffer + i * Header.CompressionMethodNameLength, 0, (int) Header.CompressionMethodNameLength).TrimEnd('\0');
            if (string.IsNullOrEmpty(name))
                continue;
            if (!Enum.TryParse(name, true, out CompressionMethod method))
            {
                Log.Warning($"Unknown compression method '{name}' in {Ar.Name}");
                method = CompressionMethod.Unknown;
            }
            CompressionMethods[i + 1] = method;
        }
    }

    // Chunk block signatures
    if (Header.ContainerFlags.HasFlag(EIoContainerFlags.Signed))
    {
        var hashSize = archive.Read<int>();
        // Layout: tocSignature and blockSignature, both byte[hashSize],
        // followed by ChunkBlockSignatures as FSHAHash[Header.TocCompressedBlockEntryCount].
        // The hashes could be verified here; this reader skips past them instead.
        archive.Position += hashSize + hashSize + FSHAHash.SIZE * Header.TocCompressedBlockEntryCount;
    }

    // Directory index
    if (Header.Version >= EIoStoreTocVersion.DirectoryIndex &&
        readOptions.HasFlag(EIoStoreTocReadOptions.ReadDirectoryIndex) &&
        Header.ContainerFlags.HasFlag(EIoContainerFlags.Indexed) &&
        Header.DirectoryIndexSize > 0)
    {
        DirectoryIndexBuffer = archive.ReadBytes((int) Header.DirectoryIndexSize);
    }

    // Meta
    if (readOptions.HasFlag(EIoStoreTocReadOptions.ReadTocMeta))
    {
        ChunkMetas = new FIoStoreTocEntryMeta[Header.TocEntryCount];
        for (int i = 0; i < Header.TocEntryCount; i++)
        {
            ChunkMetas[i] = new FIoStoreTocEntryMeta(archive);
        }
    }
}
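// A standalone sketch of the compression-method decoding above: names are stored in
// fixed-width slots of CompressionMethodNameLength bytes each, padded with NULs. This
// safe-code model avoids stackalloc and raw pointers; DecodeNames is a hypothetical
// helper name, and ASCII encoding is an assumption for the sketch.
using System;
using System.Text;

public static class CompressionNameSketch
{
    public static string[] DecodeNames(byte[] buffer, int slotWidth, int slotCount)
    {
        var names = new string[slotCount];
        for (var i = 0; i < slotCount; i++)
        {
            // Each slot is slotWidth bytes; trim the NUL padding, like TrimEnd('\0') above
            names[i] = Encoding.ASCII
                .GetString(buffer, i * slotWidth, slotWidth)
                .TrimEnd('\0');
        }
        return names;
    }
}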