/// <summary>
/// Initialize the chunk reader.
/// </summary>
/// <param name="file">Path to the chunk file.</param>
/// <param name="streamFile">Path to the accompanying stream file.</param>
protected MapStreamReader(string file, string streamFile)
{
    DebugUtil.EnsureCondition(File.Exists(file), () => $"File not found: {file}");
    DebugUtil.EnsureCondition(File.Exists(streamFile), () => $"File not found: {streamFile}");

    Reader = new BinaryReader(File.OpenRead(file));
}
private void ReadChunks(long totalSize)
{
    var runTo = Reader.BaseStream.Position + totalSize;

    for (var i = 0; i < 0xFFFF && Reader.BaseStream.Position < runTo; i++)
    {
        var chunkId = Reader.ReadUInt32();
        var chunkSize = Reader.ReadUInt32();

        BinaryUtil.ApplyPadding(Reader, ref chunkSize);

        Console.WriteLine($"\tID: 0x{chunkId:X8} size: {chunkSize}");

        var chunkRunTo = Reader.BaseStream.Position + chunkSize;

        switch (chunkId)
        {
            case 0x80134001: // Root
            {
                ReadChunks(chunkSize);
                break;
            }
            case 0x80134010: // Object
            {
                new SolidObjectReader(Reader, chunkSize).Get();
                break;
            }
            case 0x00134002: // Info
            {
                var fileInfo = BinaryUtil.ReadStruct<FileInfo>(Reader);
                break;
            }
            case 0x00134003: // Hash table
            {
                DebugUtil.EnsureCondition(
                    chunkSize % 8 == 0,
                    () => $"{chunkSize} % 8 != 0");

                for (var j = 0; j < chunkSize / 8; j++)
                {
                    var hash = Reader.ReadUInt32();
                    Reader.ReadUInt32();
                }

                break;
            }
            default:
                break;
        }

        Reader.BaseStream.Seek(chunkRunTo, SeekOrigin.Begin);
    }
}
/// <summary>
/// Initialize the chunk reader.
/// </summary>
/// <param name="file">Path to the bundle file.</param>
/// <param name="options">Optional read options; defaults are used when null.</param>
protected BundleReader(string file, BundleReadOptions? options)
{
    DebugUtil.EnsureCondition(File.Exists(file), () => $"File not found: {file}");

    Reader = new BinaryReader(File.OpenRead(file));
    Options = options ?? new BundleReadOptions
    {
        StartPosition = -1,
        EndPosition = -1
    };
}
protected override void Read(long size)
{
    var curPos = Context.Reader.BaseStream.Position;
    var runTo = curPos + size;
    var header = BinaryUtil.ReadStruct<LanguageHeader>(Context.Reader);

    DebugUtil.EnsureCondition(
        header.HashTableOffset % 2 == 0,
        () => "Hash table is not aligned evenly! This is very bad.");
    DebugUtil.EnsureCondition(
        header.StringTableOffset % 2 == 0,
        () => "String table is not aligned evenly! This is very bad.");

    Context.Reader.BaseStream.Position = curPos + header.HashTableOffset;

    DebugUtil.PrintPosition(Context.Reader, GetType());
    DebugUtil.EnsureCondition(
        Context.Reader.BaseStream.Position + header.NumStrings * 8 < runTo,
        () => "Cannot read further - hash table would overflow and/or leave no room for strings. This is very bad.");

    for (var i = 0; i < header.NumStrings; i++)
    {
        var entry = new LanguageEntry
        {
            HashOne = Context.Reader.ReadUInt32(),
            HashTwo = Context.Reader.ReadUInt32(),
        };

        LanguagePack.Entries.Add(entry);
    }

    Context.Reader.BaseStream.Position = curPos + header.StringTableOffset;

    DebugUtil.PrintPosition(Context.Reader, GetType());

    for (var i = 0; i < header.NumStrings; i++)
    {
        LanguagePack.Entries[i].Text = BinaryUtil.ReadNullTerminatedString(Context.Reader);
    }
}
/// <summary>
/// Internal bin read function
/// </summary>
private void InternalBinRead()
{
    var block = ReadBlock(_binReader);

    DebugUtil.EnsureCondition(
        block.Block.Type == VltMarker.BinMagic,
        () => $"Expected BinMagic, got {block.Block.Type}"
    );

    block.Block.SeekToDataStart(_binReader.BaseStream);

    var runTo = _binReader.BaseStream.Position + block.Block.DataSize();

    while (_binReader.BaseStream.Position < runTo)
    {
        var text = BinaryUtil.ReadNullTerminatedString(_binReader);

        if (text.Length > 0)
        {
            HashManager.AddHash(text);
        }
    }
}
private CompressionType GetCompressionType()
{
    var position = Context.Reader.BaseStream.Position;
    CompressionType type;

    // Check for compression
    {
        var flag = Context.Reader.ReadBytes(4);

        if (flag[0] == 'J' && flag[1] == 'D' && flag[2] == 'L' && flag[3] == 'Z')
        {
            DebugUtil.EnsureCondition(Context.Reader.ReadInt16() == 0x1002, () => "Invalid JDLZ header!");
            type = CompressionType.JDLZ;
        }
        else
        {
            type = FindCompressionType();
        }
    }

    Context.Reader.BaseStream.Position = position;

    return type;
}
/// <summary>
/// Returns a <see cref="NFSGame"/> value by using the SymbolEntry address of the given EXE.
/// </summary>
/// <param name="exePath">Path to the game executable.</param>
/// <returns>The detected game, or <see cref="NFSGame.Unknown"/> if no known symbol address matches.</returns>
public static NFSGame GetGame(string exePath)
{
    DebugUtil.EnsureCondition(File.Exists(exePath), () => $"Can't find file: {exePath}");

    var exeBytes = File.ReadAllBytes(exePath);
    var positions = BinaryUtil.SearchBytePattern(
        Encoding.ASCII.GetBytes(EaglSymbolEntry), exeBytes);

    foreach (var item in positions)
    {
        switch (item)
        {
            case EaglSmybolEntryUG2:
                return NFSGame.Underground2;
            case EaglSymbolEntryWorld:
                return NFSGame.World;
            default:
                return NFSGame.Unknown;
        }
    }

    positions = BinaryUtil.SearchBytePattern(
        Encoding.ASCII.GetBytes(Eagl4SymbolEntry), exeBytes);

    foreach (var item in positions)
    {
        switch (item)
        {
            case Eagl4SmybolEntryMW:
                return NFSGame.MW;
            case Eagl4SmybolEntryCarbon:
                return NFSGame.Carbon;
            case Eagl4SmybolEntryProstreet:
                return NFSGame.ProStreet;
            default:
                return NFSGame.Unknown;
        }
    }

    positions = BinaryUtil.SearchBytePattern(
        Encoding.ASCII.GetBytes(NfsUndercoverString), exeBytes);

    foreach (var item in positions)
    {
        switch (item)
        {
            case NfsUndercoverStringAddress:
                return NFSGame.Undercover;
            default:
                return NFSGame.Unknown;
        }
    }

    return NFSGame.Unknown;
}
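A minimal usage sketch for GetGame follows; the containing class name (GameDetection) and the executable path are illustrative assumptions, not taken from the code above.

// Hypothetical caller: "GameDetection" and the EXE path are assumptions for illustration only.
var detectedGame = GameDetection.GetGame(@"C:\Games\NFS Most Wanted\speed.exe");

if (detectedGame == NFSGame.Unknown)
{
    Console.WriteLine("Could not identify the game executable.");
}
else
{
    Console.WriteLine($"Detected game: {detectedGame}");
}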
public static async Task Main(string[] args)
{
    DebugUtil.EnsureCondition(args.Length == 1, () => "Invalid number of arguments! Please provide the full path to the desired game directory.");

    var downloader = new DownloadManager();

    // if (!File.Exists($"{args[0]}/nfsw.exe"))
    {
        var staticCdnSource = new StaticCdnSource(new CDNDownloadOptions
        {
            Download = DownloadData.GameBase | DownloadData.Tracks | DownloadData.TracksHigh | DownloadData.Speech,
            GameDirectory = args[0],
            GameVersion = "1614b",
            GameLanguage = "en"
        });

        staticCdnSource.ProgressUpdated.Add((length, downloaded, compressedLength, file) =>
        {
            Console.WriteLine($"file: {file} - downloaded: {downloaded}/{length}");
        });

        //staticCdnSource.VerificationProgressUpdated.Add((file, displayFile, number, files) =>
        //{
        //    Console.WriteLine($"verifying #{number}/{files}: {displayFile}");
        //});

        staticCdnSource.VerificationFailed.Add((file, hash, actualHash) =>
        {
            Console.WriteLine($"failed to verify {file} - expected {hash}, got {actualHash}");
        });

        downloader.Sources.Add(staticCdnSource);
    }

    var patchCDNSource = new PatchCDNSource(new PatchDownloadOptions
    {
        GameDirectory = args[0]
    });

    patchCDNSource.VerificationProgressUpdated.Add((file, displayFile, number, files) =>
    {
        Console.WriteLine($"verifying #{number}/{files}: {displayFile}");
    });

    patchCDNSource.VerificationFailed.Add((file, hash, actualHash) =>
    {
        Console.WriteLine($"failed to verify {file} - expected {hash}, got {actualHash}");
    });

    downloader.Sources.Add(patchCDNSource);

    downloader.DownloadCompleted.Add(() =>
    {
        Console.WriteLine("Download completed!");
    });

    downloader.DownloadFailed.Add(e =>
    {
        Console.WriteLine($"Download failed: {e.Message} (in {e.TargetSite?.DeclaringType?.FullName}.{e.TargetSite?.Name})");
        Console.WriteLine(e.StackTrace);
    });

    var stopwatch = new Stopwatch();
    stopwatch.Start();

    await downloader.Download();

    stopwatch.Stop();
    Console.WriteLine($"Download ended in {stopwatch.ElapsedMilliseconds}ms");

    stopwatch.Reset();
    stopwatch.Start();

    await downloader.VerifyHashes();

    stopwatch.Stop();
    Console.WriteLine($"Verification ended in {stopwatch.ElapsedMilliseconds}ms");

    Console.ReadKey();
}
private void ReadChunks(long totalSize)
{
    var runTo = Reader.BaseStream.Position + totalSize;

    for (var i = 0; i < 0xFFFF && Reader.BaseStream.Position < runTo; i++)
    {
        var chunkId = Reader.ReadUInt32();
        var chunkSize = Reader.ReadUInt32();
        var chunkRunTo = Reader.BaseStream.Position + chunkSize;

        Console.WriteLine($"\tID: 0x{chunkId:X8} size: {chunkSize}");

        switch (chunkId)
        {
            case 0xB3310000: // TPK root
            case 0xB3320000: // TPK data root
            {
                ReadChunks(chunkSize);
                break;
            }
            case 0x33310001: // TPK header
            {
                var header = BinaryUtil.ReadStruct<Header>(Reader);

                _texturePack.Name = header.Name;
                _texturePack.Path = header.Path;
                _texturePack.Hash = header.Hash;

                break;
            }
            case 0x33310002: // TPK hash table
            {
                DebugUtil.EnsureCondition(
                    chunkSize % 8 == 0,
                    () => $"{chunkSize} % 8 != 0");

                var numEntries = chunkSize / 8;

                for (var j = 0; j < numEntries; j++)
                {
                    var hash = Reader.ReadUInt32();
                    _texturePack.Hashes.Add(hash);
                    Reader.ReadUInt32();
                }

                break;
            }
            case 0x33310004: // TPK entry table
            {
                DebugUtil.EnsureCondition(
                    chunkSize % 124 == 0,
                    () => $"{chunkSize} % 124 != 0");

                var tpkHeaders = BinaryUtil.ReadList<TpkTextureHeader>(Reader, chunkSize);

                foreach (var tpkHeader in tpkHeaders)
                {
                    Console.WriteLine($"{tpkHeader.Name} {tpkHeader.Width}x{tpkHeader.Height} @ {tpkHeader.DataOffset} (size: {tpkHeader.DataSize})");
                }

                break;
            }
        }

        Reader.BaseStream.Seek(chunkRunTo, SeekOrigin.Begin);
    }
}
public Language(ChunkID id, long size, long position) : base(id, size, position)
{
    DebugUtil.EnsureCondition(
        id == ChunkID.BCHUNK_LANGUAGE,
        () => $"Expected BCHUNK_LANGUAGE, got {id}");
}
public SolidList(ChunkID id, long size, long position) : base(id, size, position)
{
    DebugUtil.EnsureCondition(
        id == ChunkID.BCHUNK_SPEED_ESOLID_LIST_CHUNKS,
        () => $"Expected BCHUNK_SPEED_ESOLID_LIST_CHUNKS, got {id}");
}
public CarList(ChunkID id, long size, long position) : base(id, size, position)
{
    DebugUtil.EnsureCondition(
        id == ChunkID.BCHUNK_CARINFO_ARRAY,
        () => $"Expected BCHUNK_CARINFO_ARRAY, got {id}");
}
/// <summary>
/// Initialize the database reader.
/// </summary>
/// <param name="file">Path to the database file.</param>
protected DatabaseReader(string file)
{
    DebugUtil.EnsureCondition(File.Exists(file), () => $"File not found: {file}");

    Reader = new BinaryReader(File.OpenRead(file));
}
protected override void ReadChunks(long totalSize)
{
    var runTo = BinaryReader.BaseStream.Position + totalSize;

    for (var i = 0; i < 0xFFFF && BinaryReader.BaseStream.Position < runTo; i++)
    {
        var chunkId = BinaryReader.ReadUInt32();
        var chunkSize = BinaryReader.ReadUInt32();
        var normalizedId = (long)(chunkId & 0xffffffff);

        if (_paddedChunks.Contains(normalizedId))
        {
            uint pad = 0;

            while (BinaryReader.ReadByte() == 0x11)
            {
                pad++;
            }

            // This is a bad hack to get around the fact that sometimes padded chunk data actually starts with 0x11...
            // Padding is always even so if we detect uneven padding, we just jump back 2 bytes instead of 1.
            BinaryReader.BaseStream.Seek(pad % 2 == 0 ? -1 : -2, SeekOrigin.Current);
            BinaryUtil.PrintPosition(BinaryReader, GetType());

            chunkSize -= (pad % 2 == 0 ? pad : pad - 1);
        }

        var chunkRunTo = BinaryReader.BaseStream.Position + chunkSize;

        // BinaryUtil.PrintID(BinaryReader, chunkId, normalizedId, chunkSize, GetType(), _logLevel,
        //     typeof(SolidListChunks));

        switch (normalizedId)
        {
            case (long)SolidListChunks.Header:
            {
                goto case (long)SolidListChunks.Object;
            }
            case (long)SolidListChunks.Object:
            {
                _logLevel = 2;
                ReadChunks(chunkSize);
                _logLevel = 1;
                break;
            }
            case (long)SolidListChunks.MeshHeader:
            {
                _logLevel = 3;
                ReadChunks(chunkSize);
                _logLevel = 2;
                break;
            }
            case (long)SolidListChunks.FileInfo:
            {
                var fileInfo = BinaryUtil.ReadStruct<FileInfo>(BinaryReader);

                _solidList.Path = fileInfo.Path;
                _solidList.SectionId = fileInfo.Section;

                break;
            }
            case (long)SolidListChunks.HashTable:
            {
                // each hash entry is 8 bytes: 4 bytes for the hash and 4 bytes of 0x00
                var numEntries = chunkSize / 8;

                for (var j = 0; j < numEntries; j++)
                {
                    _solidList.Hashes.Add(BinaryReader.ReadUInt32());
                    BinaryReader.BaseStream.Seek(4, SeekOrigin.Current);
                }

                break;
            }
            case (long)SolidListChunks.ObjectHeader:
            {
                DebugUtil.EnsureCondition(
                    _solidList.Hashes.Count >= _solidList.Objects.Count,
                    () => $"Expected enough hashes for {_solidList.Objects.Count} object(s); we only have {_solidList.Hashes.Count}");

                var objectHeader = BinaryUtil.ReadStruct<ObjectHeader>(BinaryReader);
                var objectName = BinaryUtil.ReadNullTerminatedString(BinaryReader);
                var objectHash = _solidList.Hashes[_solidList.Objects.Count];

                _solidList.Objects.Add(new SolidObject
                {
                    Hash = objectHash,
                    Matrix = objectHeader.Matrix,
                    MaxPoint = objectHeader.MaxPoint,
                    MinPoint = objectHeader.MinPoint,
                    Name = objectName
                });

                break;
            }
            case (long)SolidListChunks.TextureRefs:
            {
                // each entry is 8 bytes.
                // first 4 bytes are the texture hash.
                // last 4 are 0x00. as usual.
                var numEntries = chunkSize / 8;

                for (var j = 0; j < numEntries; j++)
                {
                    var hash = BinaryReader.ReadUInt32();
                    BinaryReader.BaseStream.Seek(4, SeekOrigin.Current);
                }

                break;
            }
            case (long)SolidListChunks.MeshVertices:
            {
                break;
            }
            default:
            {
                break;
            }
        }

        BinaryUtil.ValidatePosition(BinaryReader, chunkRunTo, GetType());
        BinaryReader.BaseStream.Seek(chunkRunTo - BinaryReader.BaseStream.Position, SeekOrigin.Current);
    }
}
public FNGFile(ChunkID id, long size, long position) : base(id, size, position)
{
    DebugUtil.EnsureCondition(
        id == ChunkID.BCHUNK_FENG_PACKAGE,
        () => $"Expected BCHUNK_FENG_PACKAGE, got {id}");
}
protected override void ReadChunks(long totalSize)
{
    var runTo = BinaryReader.BaseStream.Position + totalSize;

    for (var i = 0; i < 0xFFFF && BinaryReader.BaseStream.Position < runTo; i++)
    {
        var chunkId = BinaryReader.ReadUInt32();
        var chunkSize = BinaryReader.ReadUInt32();
        var chunkRunTo = BinaryReader.BaseStream.Position + chunkSize;
        var normalizedId = (long)(chunkId & 0xffffffff);

#if DEBUG
        BinaryUtil.PrintID(BinaryReader, chunkId, normalizedId, chunkSize, GetType(), 1, typeof(AnimatedTPKChunks));
#endif

        switch (normalizedId)
        {
            case (long)AnimatedTPKChunks.AnimatedTPKEntries:
            {
                DebugUtil.EnsureCondition(_texturePack.Texture == null,
                    () => "There is already an animated texture in this pack!");

                var anim = BinaryUtil.ReadStruct<AnimatedTextureStruct>(BinaryReader);

                _texturePack.Texture = new AnimatedTexture
                {
                    FramesPerSecond = anim.FPS,
                    Hash = anim.Hash,
                    Name = anim.Name,
                    NumFrames = anim.NumFrames
                };

                break;
            }
            case (long)AnimatedTPKChunks.AnimatedTPKHashes:
            {
                DebugUtil.EnsureCondition(_texturePack.Texture != null,
                    () => "No texture, but there's a hash section?");

                var sizeRemaining = chunkRunTo - BinaryReader.BaseStream.Position;

                DebugUtil.EnsureCondition(
                    sizeRemaining / 16 >= _texturePack.Texture.NumFrames,
                    () => $"Not enough hashes for animation {_texturePack.Texture.Name}! Expected at least {_texturePack.Texture.NumFrames}, got {sizeRemaining / 16}"
                );

                for (var j = 0; j < _texturePack.Texture.NumFrames; j++)
                {
                    _texturePack.Texture.FrameHashes.Add(BinaryReader.ReadInt32());
                    BinaryReader.BaseStream.Seek(12, SeekOrigin.Current);
                }

                break;
            }
            default:
            {
                var data = new byte[chunkSize];
                BinaryReader.Read(data, 0, data.Length);

#if DEBUG
                Console.WriteLine(BinaryUtil.HexDump(data));
#endif
                break;
            }
        }

        BinaryUtil.ValidatePosition(BinaryReader, chunkRunTo, GetType());
        BinaryReader.BaseStream.Seek(chunkRunTo - BinaryReader.BaseStream.Position, SeekOrigin.Current);
    }
}
public void Init(VltRowRecord rowRecord, TableEndBlock block, BinaryReader vltReader, BinaryReader binReader)
{
    var info = new VltInfo(_vltClass.ClassRecord.NumFields);

    DebugUtil.EnsureCondition(
        block.InfoDictionary.ContainsKey(rowRecord.Position),
        () => "Uh oh.");

    var basePosition = block.InfoDictionary[rowRecord.Position].Address2;

    info.BlockContainer = block;
    info.Class = _vltClass;
    info.RowRecord = rowRecord;

    for (var i = 0; i < _vltClass.ClassRecord.NumFields; ++i)
    {
        var field = _vltClass.Fields[i];

        BinaryReader br;

        if (!field.IsOptional())
        {
            br = binReader;
            br.BaseStream.Seek(basePosition + field.Offset, SeekOrigin.Begin);
        }
        else
        {
            br = null;

            foreach (var row in rowRecord.Rows)
            {
                if (row.Hash == field.Hash)
                {
                    if (row.IsInVlt())
                    {
                        br = vltReader;
                        br.BaseStream.Seek(row.Position, SeekOrigin.Begin);
                    }
                    else
                    {
                        br = binReader;
                        br.BaseStream.Seek(block.InfoDictionary[row.Position].Address2, SeekOrigin.Begin);
                    }
                }
            }

            if (br == null)
            {
                continue;
            }
        }

        var type = VltTypeMap.Instance.GetTypeForKey(field.TypeHash);

        if (type == null)
        {
            type = typeof(RawType);
        }

        VltType vltType;

        if (field.IsArray())
        {
            vltType = new ArrayType(field, type);
        }
        else
        {
            vltType = VltType.Create(type);
            vltType.Size = field.Length;

            if (vltType is RawType rt)
            {
                rt.Length = field.Length;
            }
        }

        vltType.Address = (uint)br.BaseStream.Position;
        vltType.IsVlt = br == vltReader;
        vltType.TypeHash = field.TypeHash;
        vltType.Hash = field.Hash;
        vltType.Info = info;

        vltType.Read(br);

        if (vltType is ArrayType va)
        {
            Console.WriteLine($"Class: 0x{_vltClass.Hash:X8} | Field: 0x{field.Hash:X8} | Array of {va.Type} (original: 0x{field.TypeHash:X8}) with {va.Entries}/{va.MaxEntries} entries");

            foreach (var av in va.Types)
            {
                Console.WriteLine($"\tValue: {av}");
            }
        }
        else
        {
            if (!(vltType is RawType))
            {
                Console.WriteLine(
                    $"Class: 0x{_vltClass.Hash:X8} | Field: 0x{field.Hash:X8} | {vltType.GetType()} -> {vltType}");
            }
        }

        info.Set(i, vltType);
    }

    _infoList.Add(info);
}
public SectionList(ChunkID id, long size, long position) : base(id, size, position)
{
    DebugUtil.EnsureCondition(
        id == ChunkID.BCHUNK_TRACKSTREAMER_SECTIONS,
        () => $"Expected BCHUNK_TRACKSTREAMER_SECTIONS, got {id}");
}
protected override void ReadChunks(long totalSize)
{
    var runTo = BinaryReader.BaseStream.Position + totalSize;

    for (var i = 0; i < 0xFFFF && BinaryReader.BaseStream.Position < runTo; i++)
    {
        var chunkId = BinaryReader.ReadUInt32();
        var chunkSize = BinaryReader.ReadUInt32();
        var normalizedId = (long)(chunkId & 0xffffffff);

        BinaryUtil.ReadPadding(BinaryReader, ref chunkSize);

        var chunkRunTo = BinaryReader.BaseStream.Position + chunkSize;

        BinaryUtil.PrintID(BinaryReader, chunkId, normalizedId, chunkSize, GetType(), _logLevel, typeof(SolidListChunks));

        switch (normalizedId)
        {
            case (long)SolidListChunks.Header:
            {
                goto case (long)SolidListChunks.Object;
            }
            case (long)SolidListChunks.Object:
            {
                _logLevel = 2;
                ReadChunks(chunkSize);
                _logLevel = 1;
                break;
            }
            case (long)SolidListChunks.MeshHeader:
            {
                _solidList.LastObject.Mesh = new SolidMesh();

                _logLevel = 3;
                ReadChunks(chunkSize);
                _logLevel = 2;
                break;
            }
            case (long)SolidListChunks.FileInfo:
            {
                var fileInfo = BinaryUtil.ReadStruct<FileInfo>(BinaryReader);

                _solidList.Path = fileInfo.Path;
                _solidList.SectionId = fileInfo.Section;

                break;
            }
            case (long)SolidListChunks.HashTable:
            {
                // each hash entry is 8 bytes: 4 bytes for the hash and 4 bytes of 0x00
                var numEntries = chunkSize / 8;

                for (var j = 0; j < numEntries; j++)
                {
                    _solidList.Hashes.Add(BinaryReader.ReadUInt32());
                    BinaryReader.BaseStream.Seek(4, SeekOrigin.Current);
                }

                break;
            }
            case (long)SolidListChunks.TextureRefs: // Texture hashes
            {
                // Every entry is 8 bytes; a 4-byte hash and 4 bytes of 0x00.
                var numTextures = chunkSize / 8;

                for (var j = 0; j < numTextures; j++)
                {
                    var hash = BinaryReader.ReadUInt32();
                    BinaryReader.BaseStream.Seek(4, SeekOrigin.Current);

                    _solidList.LastObject.Textures.Add(hash);
                }

                break;
            }
            case (long)SolidListChunks.ObjectHeader:
            {
                DebugUtil.EnsureCondition(
                    _solidList.Hashes.Count >= _solidList.Objects.Count,
                    () => $"Expected enough hashes for {_solidList.Objects.Count} object(s); we only have {_solidList.Hashes.Count}");

                var objectHeader = BinaryUtil.ReadStruct<ObjectHeader>(BinaryReader);
                var objectName = BinaryUtil.ReadNullTerminatedString(BinaryReader);
                var objectHash = _solidList.Hashes[_solidList.Objects.Count];

                _solidList.Objects.Add(new SolidObject
                {
                    Hash = objectHash,
                    Matrix = objectHeader.Matrix,
                    MaxPoint = objectHeader.MaxPoint,
                    MinPoint = objectHeader.MinPoint,
                    Name = objectName
                });

                break;
            }
            case (long)SolidListChunks.MeshFaces:
            {
                for (var j = 0; j < BinaryUtil.ComputeEntryCount<FaceStruct>(chunkSize); j++)
                {
                    var face = BinaryUtil.ReadStruct<FaceStruct>(BinaryReader);

                    _solidList.LastObject.Mesh.Faces.Add(new Face
                    {
                        VertexA = face.VertexA,
                        VertexB = face.VertexB,
                        VertexC = face.VertexC,
                    });
                }

                break;
            }
            case (long)SolidListChunks.MeshVertices:
            {
                var startPos = BinaryReader.BaseStream.Position;

                // I spy, with my little eye, a bad hack that shouldn't exist
                if (_solidList.Path.Contains(@"CARS\") && _solidList.SectionId == "DEFAULT")
                {
                    DebugUtil.EnsureCondition(ReadVertex36(BinaryUtil.ComputeEntryCount<Vertex36>(chunkSize)),
                        () => "Failed to read vertices properly!");
                }
                else
                {
                    if (!ReadVertex24(BinaryUtil.ComputeEntryCount<Vertex24>(chunkSize)))
                    {
                        BinaryReader.BaseStream.Position = startPos;

                        DebugUtil.EnsureCondition(ReadVertex36(BinaryUtil.ComputeEntryCount<Vertex36>(chunkSize)),
                            () => "Failed to read vertices properly!");
                    }
                }

                break;
            }
        }

        BinaryUtil.ValidatePosition(BinaryReader, chunkRunTo, GetType());
        BinaryReader.BaseStream.Seek(chunkRunTo - BinaryReader.BaseStream.Position, SeekOrigin.Current);
    }
}
/**
 * Internal function to build a DownloadDatabase.
 * This lets us keep track of the list of files to download from a store.
 */
private DownloadDatabase BuildDatabase(XmlDocument document)
{
    var headerElements = document.GetElementsByTagName("header");

    DebugUtil.EnsureCondition(
        headerElements.Count == 1,
        () => "Failed to read header!");

    var headerEl = headerElements[0];

    DebugUtil.EnsureCondition(headerEl.HasChildNodes, () => "Invalid header: code 1");
    DebugUtil.EnsureCondition(headerEl["length"] != null, () => "Invalid header: no length");
    DebugUtil.EnsureCondition(headerEl["compressed"] != null, () => "Invalid header: no compressed length");
    DebugUtil.EnsureCondition(headerEl["firstcab"] != null, () => "Invalid header: no firstcab");
    DebugUtil.EnsureCondition(headerEl["lastcab"] != null, () => "Invalid header: no lastcab");

    var database = new DownloadDatabase
    {
        Header = new Header
        {
            Length = ulong.Parse(headerEl["length"]?.InnerText ?? throw new InvalidMetadataException("Missing length field in header?")),
            CompressedLength = ulong.Parse(headerEl["compressed"]?.InnerText ?? throw new InvalidMetadataException("Missing compressed length field in header?")),
            FirstCabinet = ulong.Parse(headerEl["firstcab"]?.InnerText ?? throw new InvalidMetadataException("Missing firstcab field in header?")),
            LastCabinet = ulong.Parse(headerEl["lastcab"]?.InnerText ?? throw new InvalidMetadataException("Missing lastcab field in header?")),
        }
    };

    foreach (var fileElement in document.GetElementsByTagName("fileinfo").Cast<XmlElement>())
    {
        DebugUtil.EnsureCondition(
            fileElement.SelectSingleNode("path") != null,
            () => "Invalid file info: No path key!");
        DebugUtil.EnsureCondition(
            fileElement.SelectSingleNode("file") != null,
            () => "Invalid file info: No file key!");
        DebugUtil.EnsureCondition(
            fileElement.SelectSingleNode("hash") != null,
            () => "Invalid file info: No hash key!");
        DebugUtil.EnsureCondition(
            fileElement.SelectSingleNode("revision") != null,
            () => "Invalid file info: No revision key!");
        DebugUtil.EnsureCondition(
            fileElement.SelectSingleNode("section") != null,
            () => "Invalid file info: No section key!");
        DebugUtil.EnsureCondition(
            fileElement.SelectSingleNode("offset") != null,
            () => "Invalid file info: No offset key!");
        DebugUtil.EnsureCondition(
            fileElement.SelectSingleNode("length") != null,
            () => "Invalid file info: No length key!");

        database.Files.Add(new StaticCDN.FileInfo
        {
            Path = fileElement.SelectSingleNode("path")?.InnerText
                .Replace("CDShift", _downloadOptions.GameDirectory),
            File = fileElement.SelectSingleNode("file")?.InnerText,
            Hash = fileElement.SelectSingleNode("hash")?.InnerText,
            Revision = uint.Parse(fileElement["revision"]?.InnerText ?? throw new InvalidMetadataException("Missing revision field")),
            Section = uint.Parse(fileElement["section"]?.InnerText ?? throw new InvalidMetadataException("Missing section field")),
            Offset = uint.Parse(fileElement["offset"]?.InnerText ?? throw new InvalidMetadataException("Missing offset field")),
            Length = uint.Parse(fileElement["length"]?.InnerText ?? throw new InvalidMetadataException("Missing length field")),
            CompressedLength = fileElement.SelectSingleNode("compressed") != null
                ? int.Parse(fileElement["compressed"]?.InnerText ?? throw new InvalidMetadataException("Missing compressed length field"))
                : -1,
            OriginalPath = fileElement.SelectSingleNode("path")?.InnerText
        });
    }

    return database;
}
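For context, the tag names checked by BuildDatabase imply CDN metadata shaped roughly like the sketch below. Only the element names and the "CDShift" substitution come from the parser above; the root element, nesting, and sample values are assumptions.

// Assumed shape of the metadata document consumed by BuildDatabase (illustrative only):
//
// <index>                                <!-- root element name assumed -->
//   <header>
//     <length>123456</length>            <!-- total length -->
//     <compressed>65432</compressed>     <!-- compressed length -->
//     <firstcab>1</firstcab>
//     <lastcab>4</lastcab>
//   </header>
//   <fileinfo>
//     <path>CDShift/SOUND</path>         <!-- "CDShift" is replaced with the game directory -->
//     <file>example.bin</file>           <!-- file name (illustrative) -->
//     <hash>...</hash>
//     <revision>1</revision>
//     <section>1</section>
//     <offset>0</offset>
//     <length>1024</length>
//     <compressed>512</compressed>       <!-- optional; -1 is stored when absent -->
//   </fileinfo>
// </index>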
protected override void ReadChunks(long totalSize)
{
    var runTo = BinaryReader.BaseStream.Position + totalSize;

    for (var i = 0; i < 0xFFFF && BinaryReader.BaseStream.Position < runTo; i++)
    {
        var chunkId = BinaryReader.ReadUInt32();
        var chunkSize = BinaryReader.ReadUInt32();
        var normalizedId = (long)(chunkId & 0xffffffff);

        if (_paddedChunks.Contains(normalizedId))
        {
            uint pad = 0;

            while (BinaryReader.ReadByte() == 0x11)
            {
                pad++;
            }

            // This is a bad hack to get around the fact that sometimes padded chunk data actually starts with 0x11...
            // Padding is always even so if we detect uneven padding, we just jump back 2 bytes instead of 1.
            BinaryReader.BaseStream.Seek(pad % 2 == 0 ? -1 : -2, SeekOrigin.Current);
            BinaryUtil.PrintPosition(BinaryReader, GetType());

            chunkSize -= (pad % 2 == 0 ? pad : pad - 1);
        }

        var chunkRunTo = BinaryReader.BaseStream.Position + chunkSize;

        // BinaryUtil.PrintID(BinaryReader, chunkId, normalizedId, chunkSize, GetType(), _logLevel,
        //     typeof(LightListChunks));

        switch (normalizedId)
        {
            case (long)LightListChunks.Header:
            {
                var header = BinaryUtil.ReadStruct<HeaderStruct>(BinaryReader);
                _lightList.NumLights = header.NumLightEntries;
                break;
            }
            case (long)LightListChunks.Entries:
            {
                DebugUtil.EnsureCondition(
                    _lightList.NumLights == BinaryUtil.ComputeEntryCount<LightStruct>(chunkSize),
                    () => $"Expected {_lightList.NumLights} light(s), ComputeEntryCount reported {BinaryUtil.ComputeEntryCount<LightStruct>(chunkSize)}");

                var lights = BinaryUtil.ReadList<LightStruct>(BinaryReader, chunkSize);
                break;
            }
            default:
                break;
        }

        BinaryUtil.ValidatePosition(BinaryReader, chunkRunTo, GetType());
        BinaryReader.BaseStream.Seek(chunkRunTo - BinaryReader.BaseStream.Position, SeekOrigin.Current);
    }
}
public TexturePack(ChunkID id, long size, long position) : base(id, size, position)
{
    DebugUtil.EnsureCondition(
        id == ChunkID.BCHUNK_SPEED_TEXTURE_PACK_LIST_CHUNKS,
        () => $"Expected BCHUNK_SPEED_TEXTURE_PACK_LIST_CHUNKS, got {id}");
}
public TrackList(ChunkID id, long size, long position) : base(id, size, position)
{
    DebugUtil.EnsureCondition(
        id == ChunkID.BCHUNK_TRACKINFO,
        () => $"Expected BCHUNK_TRACKINFO, got {id}");
}