/// <summary>
/// Reads the Oodle-compressed payload described by <paramref name="dataInfo"/> from the
/// inner archive and decompresses it into <paramref name="outData"/>.
/// </summary>
/// <param name="dataInfo">Offset/size descriptor of the compressed block inside the archive.</param>
/// <param name="outData">Receives the decompressed bytes; empty on failure.</param>
/// <returns><c>true</c> when the expected number of decompressed bytes was produced.</returns>
private bool SerializeOodleDecompressData(FOodleCompressedData dataInfo, out byte[] outData)
{
    // Array.Empty<byte>() avoids allocating a fresh zero-length array on every failure path.
    outData = Array.Empty<byte>();

    var decompressedLength = (int) dataInfo.DecompressedLength;
    var compressedLength = (int) dataInfo.CompressedLength;

    // Reject descriptors whose payload would run past the end of the archive,
    // or whose sizes exceed the sanity cap.
    if (compressedLength > _innerArchive.Length - dataInfo.Offset ||
        decompressedLength > MAX_COMPRESS_BUFFER ||
        compressedLength > MAX_COMPRESS_BUFFER)
    {
        return false;
    }

    _innerArchive.Position = dataInfo.Offset;

    if (compressedLength == decompressedLength)
    {
        // Equal sizes mean the block is stored uncompressed — read it verbatim.
        outData = _innerArchive.ReadBytes(decompressedLength);
    }
    else
    {
        outData = new byte[decompressedLength];
        var compressedData = _innerArchive.ReadBytes(compressedLength);
        Compression.Oodle.Decompress(compressedData, 0, compressedLength, outData, 0, decompressedLength);
    }

    return outData.Length == decompressedLength;
}
/// <summary>
/// Parses a Valorant manifest from the given archive (header, zlib-inflated body with
/// chunk and pak tables) and prepares the HTTP client used for downloads.
/// The archive is disposed once parsing completes.
/// </summary>
private VManifest(FArchive Ar)
{
    using (Ar)
    {
        Header = new VHeader(Ar);

        // Inflate the zlib-compressed manifest body and validate its size.
        var compressedBuffer = Ar.ReadBytes((int) Header.CompressedSize);
        var uncompressedBuffer = ZlibStream.UncompressBuffer(compressedBuffer);
        if (uncompressedBuffer.Length != Header.UncompressedSize)
        {
            throw new ParserException(Ar, $"Decompression failed, {uncompressedBuffer.Length} != {Header.UncompressedSize}");
        }

        using var manifest = new FByteArchive("UncompressedValorantManifest", uncompressedBuffer);
        Chunks = manifest.ReadArray<VChunk>((int) Header.ChunkCount);
        Paks = manifest.ReadArray((int) Header.PakCount, () => new VPak(manifest));

        // The chunk/pak tables must consume the decompressed buffer exactly.
        if (manifest.Position != manifest.Length)
        {
            throw new ParserException(manifest, $"Parsing failed, {manifest.Position} != {manifest.Length}");
        }
    }

    // Dedicated download client: no proxy/cookies/redirects, transparent decompression.
    var handler = new HttpClientHandler
    {
        UseProxy = false,
        UseCookies = false,
        AutomaticDecompression = DecompressionMethods.All,
        CheckCertificateRevocationList = false,
        PreAuthenticate = false,
        MaxConnectionsPerServer = 1337,
        UseDefaultCredentials = false,
        AllowAutoRedirect = false
    };
    _client = new HttpClient(handler);
}
// Fixed-layout header at the start of a .utoc (I/O Store table of contents) file.
// Fields are serialized sequentially; the read order below must not change.
public FIoStoreTocHeader(FArchive Ar)
{
    // 16-byte magic; anything else is not a utoc file.
    TocMagic = Ar.ReadBytes(16);
    if (!TOC_MAGIC.SequenceEqual(TocMagic))
    {
        throw new ParserException(Ar, "Invalid utoc magic");
    }
    Version = Ar.Read<EIoStoreTocVersion>();
    // _reservedN fields are padding in the on-disk layout; they are read to
    // keep the stream position aligned with the native struct.
    _reserved0 = Ar.Read<byte>();
    _reserved1 = Ar.Read<ushort>();
    TocHeaderSize = Ar.Read<uint>();
    TocEntryCount = Ar.Read<uint>();
    TocCompressedBlockEntryCount = Ar.Read<uint>();
    TocCompressedBlockEntrySize = Ar.Read<uint>();
    CompressionMethodNameCount = Ar.Read<uint>();
    CompressionMethodNameLength = Ar.Read<uint>();
    CompressionBlockSize = Ar.Read<uint>();
    DirectoryIndexSize = Ar.Read<uint>();
    PartitionCount = Ar.Read<uint>();
    ContainerId = Ar.Read<FIoContainerId>();
    EncryptionKeyGuid = Ar.Read<FGuid>();
    ContainerFlags = Ar.Read<EIoContainerFlags>();
    _reserved3 = Ar.Read<byte>();
    _reserved4 = Ar.Read<ushort>();
    _reserved5 = Ar.Read<uint>();
    PartitionSize = Ar.Read<ulong>();
    _reserved6 = Ar.ReadArray<ulong>(6);
}
/// <summary>
/// Deserializes a single pak record from a Valorant manifest: id, size,
/// a count-prefixed chunk-index list, and a byte-length-prefixed ASCII name.
/// </summary>
public VPak(FArchive Ar)
{
    Id = Ar.Read<ulong>();
    Size = Ar.Read<uint>();

    // Chunk index list is prefixed by its element count.
    var indexCount = Ar.Read<int>();
    ChunkIndices = Ar.ReadArray<uint>(indexCount);

    // Name is an ASCII string with a single-byte length prefix.
    var nameLength = Ar.ReadByte();
    Name = Encoding.ASCII.GetString(Ar.ReadBytes(nameLength));
}
// Loads a cooked shader code archive (.ushaderbytecode). The on-disk format is
// selected by a leading version number; UE5 reuses version 1 for the I/O
// Store-based layout, so the engine version disambiguates the two.
public FShaderCodeArchive(FArchive Ar)
{
    var archiveVersion = Ar.Read<uint>();
    var bIsIoStore = false;
    // version - 1 | Must be I/O Store.
    // version - 2 | Normal pak storage
    if (Ar.Game >= EGame.GAME_UE5_0)
    {
        if (archiveVersion == 1)
        {
            bIsIoStore = true;
        }
    }
    switch (archiveVersion)
    {
        case 2:
        {
            // Pak-stored archive: entry table followed by the raw shader blobs.
            var shaders = new FSerializedShaderArchive(Ar);
            ShaderCode = new byte[shaders.ShaderEntries.Length][];
            for (var i = 0; i < shaders.ShaderEntries.Length; i++)
            {
                ShaderCode[i] = Ar.ReadBytes((int) shaders.ShaderEntries[i].Size);
            }
            SerializedShaders = shaders;
            break;
        }
        case 1 when bIsIoStore: // I/O Store-based ushaderbytecode files start at version 1 now, same as old pak versions.
        {
            // NOTE(review): shader code bytes are not materialized for the I/O
            // Store layout yet — only the archive metadata is parsed.
            var shaders = new FIoStoreShaderCodeArchive(Ar);
            // ShaderCode = new byte[shaders.ShaderEntries.Length][];
            // for (var i = 0; i < shaders.ShaderEntries.Length; i++)
            // {
            //     ShaderCode[i] = Ar.ReadBytes((int) shaders.ShaderEntries[i].UncompressedSize);
            // }
            SerializedShaders = shaders;
            break;
        }
        case 1 when !bIsIoStore:
            // TODO - Need to figure out how this should work
            // https://github.com/EpicGames/UnrealEngine/blob/4.22/Engine/Source/Runtime/RenderCore/Private/ShaderCodeLibrary.cpp#L910
            // var mapVarNameNum = Ar.Read<int>();
            //
            // PrevCookedShaders = new Dictionary<FSHAHash, FShaderCodeEntry>(mapVarNameNum);
            // for (var i = 0; i < mapVarNameNum; ++i)
            // {
            //     PrevCookedShaders[Ar.Read<FSHAHash>()] = new FShaderCodeEntry(Ar);
            // }
            break;
    }
}
/// <summary>
/// Reads <paramref name="size"/> bytes from the archive and returns a reader over the
/// plaintext. Unencrypted replays are passed through untouched; encrypted ones are
/// decrypted with AES-ECB/PKCS7 using the replay's embedded key.
/// </summary>
/// <param name="archive">Source archive positioned at the chunk payload.</param>
/// <param name="size">Number of payload bytes to consume.</param>
/// <returns>A <see cref="Unreal.Core.BinaryReader"/> over the decrypted bytes.</returns>
protected override Unreal.Core.BinaryReader Decrypt(FArchive archive, int size)
{
    if (!this.Replay.Info.Encrypted)
    {
        // No encryption — wrap the raw bytes directly.
        var decryptedReader = new Unreal.Core.BinaryReader(new MemoryStream(archive.ReadBytes(size)))
        {
            EngineNetworkVersion = Replay.Header.EngineNetworkVersion,
            NetworkVersion = Replay.Header.NetworkVersion,
            ReplayHeaderFlags = Replay.Header.Flags,
            ReplayVersion = Replay.Info.FileVersion
        };
        return decryptedReader;
    }

    var encryptedBytes = archive.ReadBytes(size);
    var key = this.Replay.Info.EncryptionKey;

    // RijndaelManaged is obsolete (SYSLIB0022); Aes.Create() yields the same
    // AES transform (Rijndael with a 128-bit block). ECB/PKCS7 are kept because
    // that is the format the replay was written with — do not change them.
    using Aes aes = Aes.Create();
    aes.KeySize = key.Length * 8; // set before Key so the key length is accepted
    aes.Key = key;
    aes.Mode = CipherMode.ECB;
    aes.Padding = PaddingMode.PKCS7;

    using ICryptoTransform cTransform = aes.CreateDecryptor();
    byte[] decryptedArray = cTransform.TransformFinalBlock(encryptedBytes, 0, encryptedBytes.Length);

    var decrypted = new Unreal.Core.BinaryReader(new MemoryStream(decryptedArray))
    {
        EngineNetworkVersion = Replay.Header.EngineNetworkVersion,
        NetworkVersion = Replay.Header.NetworkVersion,
        ReplayHeaderFlags = Replay.Header.Flags,
        ReplayVersion = Replay.Info.FileVersion
    };
    return decrypted;
}
/// <summary>
/// Deserializes the Valorant manifest header: magic, sizes, table counts, and an
/// optional length-prefixed ASCII game-version string. Seeks past any header
/// padding before returning.
/// </summary>
public VHeader(FArchive Ar)
{
    Magic = Ar.Read<uint>();
    if (Magic != _MAGIC)
    {
        throw new ParserException(Ar, "Invalid manifest magic");
    }

    HeaderSize = Ar.Read<uint>();
    ManifestId = Ar.Read<ulong>();
    UncompressedSize = Ar.Read<uint>();
    CompressedSize = Ar.Read<uint>();
    ChunkCount = Ar.Read<uint>();
    PakCount = Ar.Read<uint>();

    // Game-version string: single-byte length prefix, zero means "absent".
    var gameVersionLength = Ar.ReadByte();
    if (gameVersionLength == 0)
    {
        GameVersion = null;
    }
    else
    {
        GameVersion = Encoding.ASCII.GetString(Ar.ReadBytes(gameVersionLength));
    }

    // Skip any remaining header padding so the body starts at HeaderSize.
    Ar.Position = HeaderSize;
}
/// <summary>
/// Walks every chunk in the replay archive, dispatching each one to its handler.
/// Header chunks are copied verbatim to the output writer; after each chunk the
/// stream is re-aligned to the declared chunk boundary in case a handler under-
/// or over-read.
/// </summary>
public override void ReadReplayChunks(FArchive archive)
{
    while (!archive.AtEnd())
    {
        var chunkType = archive.ReadUInt32AsEnum<ReplayChunkType>();
        _writer.Write((uint) chunkType);

        var chunkSize = archive.ReadInt32();
        var offset = archive.Position;

        switch (chunkType)
        {
            case ReplayChunkType.Checkpoint:
                ReadCheckpoint(archive);
                break;

            case ReplayChunkType.Event:
                ReadEvent(archive);
                break;

            case ReplayChunkType.ReplayData:
                ReadReplayData(archive);
                break;

            case ReplayChunkType.Header:
                // Header chunks pass through unmodified.
                _writer.Write(chunkSize);
                _writer.Write(archive.ReadBytes(chunkSize));
                _writer.Flush();
                break;
        }

        // Recover from a handler that did not consume exactly chunkSize bytes.
        if (archive.Position != offset + chunkSize)
        {
            archive.Seek(offset + chunkSize, SeekOrigin.Begin);
        }
    }
}
/// <summary>
/// Loads a serialized name batch: a count, followed by metadata (string byte
/// count, hash version, per-entry hashes) that is skipped, then the per-entry
/// headers and the string payloads (UTF-16 or UTF-8 depending on each header).
/// </summary>
/// <returns>The decoded name entries; empty when the batch has no names.</returns>
public static FNameEntrySerialized[] LoadNameBatch(FArchive Ar)
{
    var num = Ar.Read<int>();
    if (num == 0)
    {
        return Array.Empty<FNameEntrySerialized>();
    }

    // Skip numStringBytes (uint), hashVersion (ulong) and the hash table
    // (num * ulong) — none of them are needed to materialize the names.
    Ar.Position += sizeof(uint) + sizeof(ulong) + num * sizeof(ulong);

    var headers = Ar.ReadArray<FSerializedNameHeader>(num);
    var entries = new FNameEntrySerialized[num];
    for (var i = 0; i < num; i++)
    {
        var length = (int) headers[i].Length;
        // Header flag selects the payload encoding for this entry.
        var s = headers[i].IsUtf16
            ? new string(Ar.ReadArray<char>(length))
            : Encoding.UTF8.GetString(Ar.ReadBytes(length));
        entries[i] = new FNameEntrySerialized(s);
    }
    return entries;
}
// Parses a Wwise soundbank/package stream section-by-section (AKPK package
// index, BKHD bank header, DIDX/DATA embedded .wem media, HIRC hierarchy, ...).
// NOTE(review): this member appears truncated in the visible chunk — the
// closing braces of the trailing loops/constructor are outside this view.
public WwiseReader(FArchive Ar)
{
    IdToString = new Dictionary<uint, string>();
    WwiseEncodedMedias = new Dictionary<string, byte[]>();
    while (Ar.Position < Ar.Length)
    {
        // Each section: 4-byte identifier, 4-byte payload length, payload.
        var sectionIdentifier = Ar.Read<ESectionIdentifier>();
        var sectionLength = Ar.Read<int>();
        var position = Ar.Position; // start of this section's payload
        switch (sectionIdentifier)
        {
            case ESectionIdentifier.AKPK:
                // Package header; the leading boolean encodes endianness and
                // only one value is supported.
                if (!Ar.ReadBoolean())
                {
                    throw new ParserException(Ar, $"'{Ar.Name}' has unsupported endianness.");
                }
                Ar.Position += 16;
                Folders = Ar.ReadArray(() => new AkFolder(Ar));
                foreach (var folder in Folders)
                {
                    folder.PopulateName(Ar);
                }
                foreach (var folder in Folders)
                {
                    folder.Entries = new AkEntry[Ar.Read<uint>()];
                    for (var i = 0; i < folder.Entries.Length; i++)
                    {
                        var entry = new AkEntry(Ar);
                        entry.Path = Folders[entry.FolderId].Name;
                        // Jump to the entry payload, peek its magic, then restore.
                        var savePos = Ar.Position;
                        Ar.Position = entry.Offset;
                        // A BKHD magic at the entry offset marks a nested soundbank.
                        entry.IsSoundBank = Ar.Read<ESectionIdentifier>() == ESectionIdentifier.BKHD;
                        Ar.Position -= 4; // rewind the 4-byte magic peek
                        entry.Data = Ar.ReadBytes(entry.Size);
                        Ar.Position = savePos;
                        folder.Entries[i] = entry;
                    }
                }
                break;
            case ESectionIdentifier.BKHD:
                Header = Ar.Read<BankHeader>();
                break;
            case ESectionIdentifier.INIT:
                Initialization = Ar.ReadArray(() =>
                {
                    Ar.Position += 4;
                    return (Ar.ReadFString());
                });
                break;
            case ESectionIdentifier.DIDX:
                // Each DataIndex record occupies 12 bytes of the section payload.
                WemIndexes = Ar.ReadArray(sectionLength / 12, Ar.Read<DataIndex>);
                break;
            case ESectionIdentifier.DATA:
                // DATA is only meaningful after a DIDX section provided offsets.
                if (WemIndexes == null)
                {
                    break;
                }
                WemSounds = new byte[WemIndexes.Length][];
                for (var i = 0; i < WemSounds.Length; i++)
                {
                    // Offsets are relative to the start of the DATA payload.
                    Ar.Position = position + WemIndexes[i].Offset;
                    WemSounds[i] = Ar.ReadBytes(WemIndexes[i].Length);
                    WwiseEncodedMedias[WemIndexes[i].Id.ToString()] = WemSounds[i];
                }
                break;
            case ESectionIdentifier.HIRC:
                Hierarchy = Ar.ReadArray(() => new Hierarchy(Ar));
                break;
            case ESectionIdentifier.RIFF:
                // read byte[sectionLength] it's simply a wem file
                break;
            case ESectionIdentifier.STID:
                // String table mapping soundbank ids to names.
                Ar.Position += 4;
                var count = Ar.Read<int>();
                for (var i = 0; i < count; i++)
                {
                    IdToString[Ar.Read<uint>()] = Ar.ReadString();
                }
                break;
            case ESectionIdentifier.STMG:
                break;
            case ESectionIdentifier.ENVS:
                break;
            case ESectionIdentifier.FXPR:
                break;
            case ESectionIdentifier.PLAT:
                Platform = Ar.ReadFString();
                break;
            default:
#if DEBUG
                Log.Warning($"Unknown section {sectionIdentifier:X} at {position - sizeof(uint) - sizeof(uint)}");
#endif
                break;
        }
        // Re-align to the declared section boundary if the handler drifted.
        if (Ar.Position != position + sectionLength)
        {
            var shouldBe = position + sectionLength;
#if DEBUG
            Log.Warning($"Didn't read 0x{sectionIdentifier:X} correctly (at {Ar.Position}, should be {shouldBe})");
#endif
            Ar.Position = shouldBe;
        }
    }
    // Register every non-soundbank package entry under its known name, or a
    // synthesized PATH_HASH key when the STID table has no entry for it.
    if (Folders != null)
    {
        foreach (var folder in Folders)
        {
            foreach (var entry in folder.Entries)
            {
                if (entry.IsSoundBank || entry.Data == null)
                {
                    continue;
                }
                WwiseEncodedMedias[IdToString.TryGetValue(entry.NameHash, out var k) ? k : $"{entry.Path.ToUpper()}_{entry.NameHash}"] = entry.Data;
/// <summary>
/// Parses a .usmap type-mappings file: validates the magic and version, inflates the
/// payload per its compression method, then reads the name table, enum table, and
/// struct table from the decompressed buffer.
/// </summary>
/// <exception cref="ParserException">Bad magic, unsupported version, size mismatch, or unknown compression.</exception>
/// <exception cref="NotImplementedException">Brotli-compressed files.</exception>
public static TypeMappings Parse(FArchive Ar)
{
    // --- header ---
    var magic = Ar.Read<ushort>();
    if (magic != FileMagic)
    {
        throw new ParserException(".usmap file has an invalid magic constant");
    }

    var version = Ar.Read<Version>();
    if (version < 0 || version > Version.LATEST)
    {
        throw new ParserException($".usmap has an invalid version {(byte) version}");
    }

    var compression = Ar.Read<ECompressionMethod>();
    var compSize = Ar.Read<uint>();
    var decompSize = Ar.Read<uint>();

    // --- inflate the payload into `data` ---
    var data = new byte[decompSize];
    switch (compression)
    {
        case ECompressionMethod.None:
            if (compSize != decompSize)
            {
                throw new ParserException("No compression: Compression size must be equal to decompression size");
            }
            Ar.Read(data, 0, (int) compSize);
            break;
        case ECompressionMethod.Oodle:
            Oodle.Decompress(Ar.ReadBytes((int) compSize), 0, (int) compSize, data, 0, (int) decompSize);
            break;
        case ECompressionMethod.Brotli:
            throw new NotImplementedException();
        default:
            throw new ParserException($"Invalid compression method {compression}");
    }

    // All remaining reads come from the decompressed payload.
    Ar = new FByteArchive(Ar.Name, data);

    // --- name lookup table: byte-length-prefixed strings ---
    var nameSize = Ar.Read<uint>();
    var nameLut = new List<String>((int) nameSize);
    for (var i = 0; i < nameSize; i++)
    {
        var nameLength = Ar.Read<byte>();
        nameLut.Add(ReadStringUnsafe(Ar, nameLength));
    }

    // --- enums: name + member list, members keyed by their serialized index ---
    var enumCount = Ar.Read<uint>();
    var enums = new Dictionary<string, Dictionary<int, string>>((int) enumCount);
    for (var i = 0; i < enumCount; i++)
    {
        var enumName = Ar.ReadName(nameLut)!;
        var enumNamesSize = Ar.Read<byte>();
        var enumNames = new Dictionary<int, string>(enumNamesSize);
        for (var j = 0; j < enumNamesSize; j++)
        {
            enumNames[j] = Ar.ReadName(nameLut)!;
        }
        enums.Add(enumName, enumNames);
    }

    // --- structs (registered into the mappings as they are parsed) ---
    var structCount = Ar.Read<uint>();
    var structs = new Dictionary<string, Struct>();
    var mappings = new TypeMappings(structs, enums);
    for (var i = 0; i < structCount; i++)
    {
        var s = ParseStruct(mappings, Ar, nameLut);
        structs[s.Name] = s;
    }
    return mappings;
}
/// <summary>
/// Returns the bytes at the start of the pak index used to validate the mount point:
/// the 4-byte length prefix plus the longest possible UTF-16 mount-point string,
/// rounded up to the AES block alignment.
/// </summary>
public override byte[] MountPointCheckBytes()
{
    Ar.Position = Info.IndexOffset;
    var testLength = (4 + MAX_MOUNTPOINT_TEST_LENGTH * 2).Align(Aes.ALIGN);
    return Ar.ReadBytes((int) testLength);
}
/// <summary>
/// Reads <paramref name="length"/> bytes from <paramref name="reader"/> and runs them
/// through <c>DecryptIfEncrypted</c>, which decrypts only when <paramref name="isEncrypted"/> is set.
/// </summary>
protected byte[] ReadAndDecrypt(int length, FArchive reader, bool isEncrypted)
{
    var payload = reader.ReadBytes(length);
    return DecryptIfEncrypted(payload, isEncrypted);
}
// Deserializes a SHA hash by reading SIZE raw bytes from the archive.
public FSHAHash(FArchive Ar) => Hash = Ar.ReadBytes(SIZE);
// Deserializes an I/O Store chunk hash: a fixed 32-byte blob.
public FIoChunkHash(FArchive Ar) => Hash = Ar.ReadBytes(32);
/// <summary>
/// Deserializes a memory-image result (mirrors the engine's LoadFromArchive): the
/// frozen object blob followed by vtable, script-name, and minimal-name patch
/// tables. Patch offsets are consumed but not retained. The cooked-format version
/// flag controls where the layout parameters and pointer tables are serialized.
/// </summary>
public FMemoryImageResult(FArchive Ar) // LoadFromArchive
{
    var bUseNewFormat = Ar.Versions["ShaderMap.UseNewCookedFormat"];
    if (bUseNewFormat)
    {
        LayoutParameters = new FPlatformTypeLayoutParameters(Ar);
    }

    var frozenSize = Ar.Read<uint>();
    var frozenObject = Ar.ReadBytes((int) frozenSize);

    if (bUseNewFormat)
    {
        PointerTable = new FPointerTableBase(Ar);
    }

    var numVTables = Ar.Read<uint>();
    var numScriptNames = Ar.Read<uint>();
    var numMinimalNames = Ar.Read<uint>();

    // VTable patches: a type-name hash plus (vtable offset, offset) pairs per entry.
    for (var i = 0; i < numVTables; ++i)
    {
        _ = Ar.Read<ulong>(); // TypeNameHash
        var numPatches = Ar.Read<uint>();
        for (var patchIndex = 0; patchIndex < numPatches; ++patchIndex)
        {
            _ = Ar.Read<uint>(); // VTableOffset
            _ = Ar.Read<uint>(); // Offset
        }
    }

    // Script-name patches: an FName plus one offset per patch.
    for (var i = 0; i < numScriptNames; ++i)
    {
        _ = Ar.ReadFName();
        var numPatches = Ar.Read<uint>();
        for (var patchIndex = 0; patchIndex < numPatches; ++patchIndex)
        {
            _ = Ar.Read<uint>(); // Offset
        }
    }

    // Minimal-name patches: same layout as script-name patches.
    for (var i = 0; i < numMinimalNames; ++i)
    {
        _ = Ar.ReadFName();
        var numPatches = Ar.Read<uint>();
        for (var patchIndex = 0; patchIndex < numPatches; ++patchIndex)
        {
            _ = Ar.Read<uint>(); // Offset
        }
    }

    // Old cooked format serializes the pointer tables at the end instead.
    if (!bUseNewFormat)
    {
        ShaderMapPointerTable = new FShaderMapPointerTable(Ar);
        PointerTable = new FPointerTableBase(Ar);
    }
}