/// <summary>
/// Deserializes a catalog file from <paramref name="input"/>.
/// </summary>
/// <param name="input">Stream positioned at the start of the catalog data.</param>
/// <returns>The parsed <see cref="CatalogFile"/>.</returns>
/// <exception cref="FormatException">
/// Thrown when either magic value does not match <c>Signature</c>, when any
/// field expected to be zero is non-zero, or when trailing data remains after
/// both tables have been read.
/// </exception>
public static CatalogFile Read(Stream input)
{
    const Endian endian = Endian.Little;

    // The signature is stored twice at the start of the file.
    var firstMagic = input.ReadValueU64(endian);
    var secondMagic = input.ReadValueU64(endian);
    if (firstMagic != Signature || secondMagic != Signature)
    {
        throw new FormatException();
    }

    var result = new CatalogFile();

    // Header counts, in on-disk order.
    var chunkCount = input.ReadValueU32(endian);
    var unknown2Count = input.ReadValueU32(endian);
    var unknown3Count = input.ReadValueU32(endian);
    var unknown1C = input.ReadValueU32(endian);
    var unknown20 = input.ReadValueU32(endian);
    var unknown24 = input.ReadValueU32(endian);

    // These fields are expected to be zero; reject anything else rather than
    // misinterpret the remainder of the file.
    if (unknown2Count != 0 || unknown1C != 0 || unknown20 != 0 || unknown24 != 0)
    {
        throw new FormatException();
    }

    result.ChunkEntries.Clear();
    for (uint index = 0; index < chunkCount; index++)
    {
        // Initializer members are evaluated top-to-bottom, preserving the
        // on-disk read order.
        var chunk = new ChunkEntry
        {
            SHA1 = new SHA1(input.ReadBytes(20)),
            Offset = input.ReadValueU32(endian),
            Size = input.ReadValueU32(endian),
            TailSize = input.ReadValueU32(endian),
            DataIndex = input.ReadValueU32(endian),
        };
        result.ChunkEntries.Add(chunk);
    }

    for (uint index = 0; index < unknown3Count; index++)
    {
        var unknown = new Unknown3Entry
        {
            Unknown0 = new SHA1(input.ReadBytes(20)),
            Unknown1 = input.ReadValueU32(endian),
            Unknown2 = input.ReadValueU32(endian),
            Unknown3 = input.ReadValueU32(endian),
            Unknown4 = input.ReadValueU32(endian),
            Unknown5 = input.ReadValueU32(endian),
            Unknown6 = new SHA1(input.ReadBytes(20)),
            Unknown7 = new SHA1(input.ReadBytes(20)),
        };
        result.Unknown3s.Add(unknown);
    }

    // The file must contain nothing beyond the two tables.
    if (input.Position != input.Length)
    {
        throw new FormatException();
    }

    return result;
}
// Distributes the hashes in Sha1List across the EBX, RES and chunk entry
// lists, in that order: the first Header.ebxCount hashes go to EbxList, the
// next Header.resCount to ResList, and the remainder to ChunkList. The
// entries appear to be value types, so each one is read, patched and written
// back into its list.
private void ApplySHA1s()
{
    int index = 0;
    foreach (byte[] sha1 in Sha1List)
    {
        if (index < Header.ebxCount)
        {
            EbxEntry entry = EbxList[index];
            entry._sha1 = sha1;
            EbxList[index] = entry;
        }
        else if (index < Header.ebxCount + Header.resCount)
        {
            int resIndex = index - (int)Header.ebxCount;
            ResEntry entry = ResList[resIndex];
            entry._sha1 = sha1;
            ResList[resIndex] = entry;
        }
        else
        {
            int chunkIndex = index - (int)(Header.ebxCount + Header.resCount);
            ChunkEntry entry = ChunkList[chunkIndex];
            entry._sha1 = sha1;
            ChunkList[chunkIndex] = entry;
        }
        index++;
    }
}
/// <summary>
/// Parses the superbundle layout for Frostbite 2013.2 (Battlefield 4) superbundles.
/// Reads out the bundle and chunk entries.
/// </summary>
/// <param name="p_Object">DbObject of the superbundle's table of contents (.toc)</param>
protected override void ParseLayout(DbObject p_Object)
{
    // Bundles are keyed by their lowercased ID so later lookups are
    // case-insensitive.
    if (p_Object["bundles"].Value is DbObject s_Bundles)
    {
        for (var s_Index = 0; s_Index < s_Bundles.Count; ++s_Index)
        {
            var s_Bundle = new BundleEntry(s_Bundles[s_Index].Value as DbObject, this);
            BundleEntries.TryAdd(s_Bundle.ID.ToLowerInvariant(), s_Bundle);
        }
    }

    // Chunks are keyed by their raw ID.
    if (p_Object["chunks"].Value is DbObject s_Chunks)
    {
        for (var s_Index = 0; s_Index < s_Chunks.Count; ++s_Index)
        {
            var s_Chunk = new ChunkEntry(s_Chunks[s_Index].Value as DbObject, this);
            ChunkEntries.TryAdd(s_Chunk.ID, s_Chunk);
        }
    }
}
/// <summary>
/// Reads an encrypted chunk entry from the stream: the chunk record followed
/// by its crypto info, matching the on-disk layout.
/// </summary>
/// <param name="input">Stream positioned at the start of the entry.</param>
/// <param name="endian">Byte order of the stored values.</param>
/// <returns>The populated <see cref="EncryptedChunkEntry"/>.</returns>
public static EncryptedChunkEntry Read(Stream input, Endian endian)
{
    // Initializer members evaluate in order, so the chunk is read before the
    // crypto info, exactly as stored.
    return new EncryptedChunkEntry
    {
        Chunk = ChunkEntry.Read(input, endian),
        CryptoInfo = CryptoInfo.Read(input, endian),
    };
}
// Reads the payload for every chunk descriptor in ChunkList from the data
// stream, storing it back into the entry (entries appear to be value types,
// hence the read/patch/writeback). The "fast" flag is forwarded to
// ReadPayload unchanged.
private void ReadChunkListData(Stream data, bool fast = false)
{
    for (int index = 0; index < Header.chunkCount; index++)
    {
        ChunkEntry entry = ChunkList[index];
        entry._data = ReadPayload(data, entry._originalSize, fast);
        ChunkList[index] = entry;
    }
}
// Reads Header.chunkCount chunk descriptors from the stream into a fresh
// ChunkList. Each on-disk record is: 16-byte id, little-endian ushort
// rangeStart, ushort logicalSize, int logicalOffset.
private void ReadChunkList(Stream data)
{
    ChunkList = new List <ChunkEntry>();
    for (int i = 0; i < Header.chunkCount; i++)
    {
        ChunkEntry e = new ChunkEntry();
        e.id = new byte[16];
        // BUG FIX: Stream.Read may return fewer bytes than requested; the
        // original ignored its return value and could silently leave the id
        // partially filled. Loop until all 16 bytes are read.
        int offset = 0;
        while (offset < e.id.Length)
        {
            int read = data.Read(e.id, offset, e.id.Length - offset);
            if (read <= 0)
            {
                throw new EndOfStreamException("Unexpected end of stream while reading chunk id.");
            }
            offset += read;
        }
        e.rangeStart = Helpers.ReadLEUShort(data);
        e.logicalSize = Helpers.ReadLEUShort(data);
        e.logicalOffset = Helpers.ReadLEInt(data);
        // Derived: end of the chunk's logical range (offset + size).
        e._originalSize = e.logicalOffset + e.logicalSize;
        ChunkList.Add(e);
    }
}
/// <summary>
/// Reads a single chunk entry from the stream.
/// </summary>
/// <param name="input">Stream positioned at the start of the entry.</param>
/// <param name="endian">Byte order of the stored values.</param>
/// <returns>The populated <see cref="ChunkEntry"/>.</returns>
/// <exception cref="FormatException">
/// Thrown when either of the two trailing padding bytes is non-zero.
/// </exception>
public static ChunkEntry Read(Stream input, Endian endian)
{
    // Initializer members evaluate top-to-bottom, preserving on-disk order.
    var entry = new ChunkEntry
    {
        SHA1 = new SHA1(input.ReadBytes(20)),
        Offset = input.ReadValueU32(endian),
        Size = input.ReadValueU32(endian),
        TailSize = input.ReadValueU32(endian),
        DataIndex = input.ReadValueU8(),
        IsEncrypted = input.ReadValueB8(),
    };

    // Two padding bytes follow each entry; they are expected to be zero.
    foreach (var paddingByte in input.ReadBytes(2))
    {
        if (paddingByte != 0)
        {
            throw new FormatException();
        }
    }

    return entry;
}
/// <summary>
/// Constructs a geometry resource from a stream, replacing its single RCOL
/// chunk with a parsed <c>GEOM</c> block under the same TGI key.
/// </summary>
/// <param name="s">Source stream handed to the base resource parser.</param>
/// <exception cref="InvalidDataException">
/// Thrown when the base parse yields anything other than exactly one chunk.
/// </exception>
public GeometryResource(Stream s) : base(s)
{
    // This resource type is only valid with exactly one chunk.
    if (ChunkEntries.Count != 1)
    {
        throw new InvalidDataException(String.Format("Expected one chunk, found {0}.", ChunkEntries.Count));
    }

    // Remove the raw chunk; it is re-added below holding the parsed GEOM.
    ChunkEntry chunk = ChunkEntries[0];
    ChunkEntries.Remove(chunk);

    using (MemoryStream ms = new MemoryStream())
    {
        // Round-trip the RCOL block's bytes through a memory stream so GEOM
        // can parse them from position zero.
        new BinaryWriter(ms).Write(chunk.RCOLBlock.AsBytes);
        ms.Flush();
        ms.Position = 0;
        GEOM geom = new GEOM(OnResourceChanged, ms);
        // Re-add under the original chunk's TGI block, now typed as GEOM.
        ChunkEntries.Add(new ChunkEntry(OnResourceChanged, chunk.TGIBlock, geom));
    }
}
/// <summary>
/// Deserializes a catalog file containing a table of plain chunk entries
/// followed by a table of encrypted chunk entries.
/// </summary>
/// <param name="input">Stream positioned at the start of the catalog data.</param>
/// <returns>The parsed <see cref="CatalogFile"/>.</returns>
/// <exception cref="FormatException">
/// Thrown when the magic values do not match, when fields expected to be
/// zero are non-zero, when an entry's encryption flag contradicts the table
/// it appears in, when an encrypted chunk's size disagrees with its crypto
/// info, or when trailing data remains after parsing.
/// </exception>
public static CatalogFile Read(Stream input)
{
    const Endian endian = Endian.Little;

    // The signature is stored twice at the start of the file.
    var magic1 = input.ReadValueU64(endian);
    var magic2 = input.ReadValueU64(endian);
    if (magic1 != Signature || magic2 != Signature)
    {
        throw new FormatException();
    }

    var instance = new CatalogFile();

    // Header counts, in on-disk order.
    var chunkCount = input.ReadValueU32(endian);
    var patchCount = input.ReadValueU32(endian);
    var encryptedChunkCount = input.ReadValueU32(endian);
    var unknown1C = input.ReadValueS32(endian);
    var unknown20 = input.ReadValueS32(endian);
    var unknown24 = input.ReadValueS32(endian);

    // Patch entries and the three unknown fields are expected to be zero;
    // reject anything else rather than guess at the layout.
    if (patchCount != 0 || unknown1C != 0 || unknown20 != 0 || unknown24 != 0)
    {
        throw new FormatException();
    }

    instance.ChunkEntries.Clear();
    for (int i = 0; i < chunkCount; i++)
    {
        var chunk = ChunkEntry.Read(input, endian);
        // Encrypted chunks must not appear in the plain-chunk table.
        if (chunk.IsEncrypted == true)
        {
            throw new FormatException();
        }
        instance.ChunkEntries.Add(chunk);
    }

    for (int i = 0; i < encryptedChunkCount; i++)
    {
        var encryptedChunk = EncryptedChunkEntry.Read(input, endian);
        // Entries in this table must be flagged as encrypted...
        if (encryptedChunk.Chunk.IsEncrypted == false)
        {
            throw new FormatException();
        }
        // ...and the chunk size must agree with the crypto info's size.
        if (encryptedChunk.Chunk.Size != encryptedChunk.CryptoInfo.Size)
        {
            throw new FormatException();
        }
        instance.EncryptedChunkEntries.Add(encryptedChunk);
    }

    // The file must contain nothing beyond the two tables.
    if (input.Position != input.Length)
    {
        throw new FormatException();
    }

    return (instance);
}
// Reads Header.chunkCount chunk descriptors from the stream into a fresh
// ChunkList. Each on-disk record is: 16-byte id, little-endian ushort
// rangeStart, ushort logicalSize, int logicalOffset.
private void ReadChunkList(Stream data)
{
    ChunkList = new List<ChunkEntry>();
    for (int i = 0; i < Header.chunkCount; i++)
    {
        ChunkEntry e = new ChunkEntry();
        e.id = new byte[16];
        // BUG FIX: Stream.Read may return fewer bytes than requested; the
        // original ignored its return value and could silently leave the id
        // partially filled. Loop until all 16 bytes are read.
        int offset = 0;
        while (offset < e.id.Length)
        {
            int read = data.Read(e.id, offset, e.id.Length - offset);
            if (read <= 0)
            {
                throw new EndOfStreamException("Unexpected end of stream while reading chunk id.");
            }
            offset += read;
        }
        e.rangeStart = Helpers.ReadLEUShort(data);
        e.logicalSize = Helpers.ReadLEUShort(data);
        e.logicalOffset = Helpers.ReadLEInt(data);
        // Derived: end of the chunk's logical range (offset + size).
        e._originalSize = e.logicalOffset + e.logicalSize;
        ChunkList.Add(e);
    }
}
// Worker body for one download thread. Args is an object[2]:
//   [0] List<FileEntry> — the files to validate/repair,
//   [1] int             — this worker's index (selects its base URL,
//                         decryptor and wait handle).
// Workers claim (file, chunk) pairs from the shared FileIndex/ChunkIndex
// cursors under ProgressLock, verify each on-disk chunk against its Adler
// checksum, and re-download/decrypt/decompress chunks that fail the check.
// NOTE(review): async void means exceptions cannot reach a caller; failure
// is reported via the catch-all below (DownloadFailed/CaughtException) and
// completion is signalled through WaitHandles[Offset].
private async void DownloadJob(object Args)
{
    object[] ArgsArray = (object[])Args;
    List<FileEntry> Files = (List<FileEntry>)ArgsArray[0];
    int Offset = (int)ArgsArray[1], Total = Files.Count;
    try
    {
        CurrentUICulture = Instance.OSCulture;
        for (;;)
        {
            int CurrentChunk, CurrentFile;
            // Claim the next (file, chunk) pair; cursors are shared by all
            // workers, so advancing them must happen under the lock.
            lock (ProgressLock)
            {
                if (FileIndex == Total) { return; }
                // Skip files that have no chunks at all.
                if (Files[FileIndex].Chunks.Count == 0) { ChunkIndex = 0; FileIndex++; continue; }
                CurrentChunk = ChunkIndex++;
                // Ran past the last chunk of this file: move to the next file
                // (or finish if this was the last one).
                if (ChunkIndex > Files[CurrentFile = FileIndex].Chunks.Count)
                {
                    ChunkIndex = 0;
                    if (++FileIndex == Total) { return; }
                    continue;
                }
            }
            if (DownloadFailed || Token.IsCancellationRequested) { return; }
            FileEntry File = Files[CurrentFile];
            ChunkEntry Chunk = File.Chunks[CurrentChunk];
            using (FileStream Writer = new FileStream($@"{BaseDownloadPath}\{File.Name}", FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite))
            {
                long Position;
                if (File.IsSliced)
                {
                    // Sliced files store chunks back to back: this chunk's
                    // position is the sum of the preceding chunks' sizes.
                    Position = 0L;
                    for (int Iterator = 0; Iterator < CurrentChunk; Iterator++)
                    {
                        Position += File.Chunks[Iterator].UncompressedSize;
                    }
                    Writer.Position = Position;
                }
                else
                {
                    // Non-sliced files use the chunk's own offset.
                    Writer.Position = Position = Chunk.Offset;
                }
                // Read the chunk as it currently exists on disk and verify it.
                byte[] Data = new byte[Chunk.UncompressedSize];
                Writer.Read(Data, 0, Chunk.UncompressedSize);
                bool HashMatch = ComputeAdlerHash(Data) == Chunk.Checksum;
                if (!HashMatch)
                {
                    Data = null;
                    string GID = BitConverter.ToString(Chunk.GID).Replace("-", string.Empty), Message = null;
                    // Retry loop: the first 5 attempts use this worker's own
                    // base URL; subsequent attempts walk the other base URLs
                    // starting from index 0.
                    for (int AttemptsCount = 0; AttemptsCount < 5 + ThreadsCount; AttemptsCount++)
                    {
                        try
                        {
                            Data = await DownloadSteamChunk(BaseURLs[AttemptsCount < 5 ? Offset : AttemptsCount - 5], GID, Chunk.CompressedSize, Token);
                        }
                        catch (Exception Exception)
                        {
                            // Unwrap nested AggregateExceptions to the root cause.
                            while (Exception is AggregateException) { Exception = Exception.InnerException; }
                            Message = Exception.Message;
                        }
                        if (Token.IsCancellationRequested) { return; }
                        if (Data is null) { continue; }
                        // ErrorIndex distinguishes a decrypt failure (0) from
                        // a decompress failure (1) in the localized message.
                        int ErrorIndex = 0;
                        try
                        {
                            Data = AESDecrypt(Data, DepotKey, Decryptors[Offset]);
                            ErrorIndex++;
                            // A "VZ" prefix routes the payload through
                            // Decompressor.Decompress; anything else goes
                            // through Decompress.
                            Data = Data[0] == 'V' && Data[1] == 'Z' ? Decompressor.Decompress(Data, Offset) : Decompress(Data);
                        }
                        catch
                        {
                            Data = null;
                            Message = LocString(LocCode.DecryptionFailure + ErrorIndex);
                            continue;
                        }
                        // Verify the freshly downloaded chunk before accepting it.
                        if (ComputeAdlerHash(Data) != Chunk.Checksum)
                        {
                            Data = null;
                            Message = LocString(LocCode.AdlerHashMismatch);
                            continue;
                        }
                        break;
                    }
                    // All attempts exhausted without valid data: abort the job.
                    if (Data is null)
                    {
                        Log($"({BaseURLs[Offset]}) Failed to download chunk {GID}: {Message}");
                        throw new ValidatorException(Message);
                    }
                    // Write the repaired chunk back at its original position.
                    Writer.Position = Position;
                    Writer.Write(Data, 0, Chunk.UncompressedSize);
                }
                // Report progress: chunks that were already valid do not
                // count toward the ETA calculation.
                lock (ProgressLock)
                {
                    if (HashMatch) { Progress.IncreaseNoETA(Chunk.CompressedSize); }
                    else { Progress.Increase(Chunk.CompressedSize); }
                }
            }
        }
    }
    catch (Exception Exception)
    {
        // Record the failure so other workers observe DownloadFailed and stop.
        DownloadFailed = true;
        CaughtException = Exception;
    }
    finally
    {
        // Always signal completion so the coordinator is never left waiting.
        WaitHandles[Offset].Set();
    }
}
// Parses a chunked container file. The file is a flat sequence of records:
// records starting with ChunkInfoIdenfier describe file entries (ChunkEntry),
// while everything else is a raw data record (ChunkDataEntry). Every record
// is also registered in a global index map (globalChunkList) so the two
// fix-up passes at the end can resolve parent/child references by index.
void Read(FileReader reader)
{
    // File is empty (or too small to hold anything) so return.
    if (reader.BaseStream.Length <= 4) { return; }

    Dictionary <int, Chunk> globalChunkList = new Dictionary <int, Chunk>();
    int globalIndex = 0;

    // Little-endian.
    reader.SetByteOrder(false);
    while (reader.Position <= reader.BaseStream.Length - 12)
    {
        // Read through all sections that use an identifier.
        // These sections determine when a file is used or else using raw data.
        ushort identifier = reader.ReadUInt16();
        if (identifier == ChunkInfoIdenfier)
        {
            // Skip padding.
            ushort flag = reader.ReadUInt16();

            ChunkEntry entry = new ChunkEntry();
            entry.ChunkSize = reader.ReadUInt32();
            entry.ChunkOffset = reader.ReadUInt32();
            entry.ChunkType = (ChunkFileType)reader.ReadUInt16();
            entry.Flags = reader.ReadUInt16();
            entry.Flags2 = reader.ReadUInt32(); // Child Count or File Size
            entry.Flags3 = reader.ReadUInt32(); // Child Start Index or File Offset
            Files.Add(entry);

            globalChunkList.Add(globalIndex, entry);
            // File entries shift global index by 2.
            globalIndex += 2;

            // Additional chunk entry.
            if ((int)entry.ChunkType == 0x11) // This file seems to use same hash as some of the model files.
            {
                ChunkEntry secondaryEntry = new ChunkEntry();
                secondaryEntry.Flags2 = reader.ReadUInt32(); // Child Count or File Size
                secondaryEntry.Flags3 = reader.ReadUInt32(); // Child Start Index or File Offset
                secondaryEntry.ChunkType = (ChunkFileType)reader.ReadUInt16();
                secondaryEntry.Flags = reader.ReadUInt16();
                Files.Add(secondaryEntry);

                // NOTE(review): this registers the PRIMARY entry again, not
                // secondaryEntry — possibly intentional, but looks like it
                // should be secondaryEntry; confirm against the file format.
                globalChunkList.Add(globalIndex, entry);
                // Extra entries shift global index by 1.
                globalIndex += 1;
            }

            // Extension to the existing file entry?
            // Possibly includes both sub chunks and data offset/size chunks in one.
            if ((int)entry.ChunkType == 0x20)
            {
                // Fields are read to advance the stream but are not stored.
                var Flags2 = reader.ReadUInt32(); // Child Count or File Size
                var Flags3 = reader.ReadUInt32(); // Child Start Index or File Offset
                var ChunkType = (ChunkFileType)reader.ReadUInt16();
                var Flags = reader.ReadUInt16();
                // Extra entries shift global index by 1.
                globalIndex += 1;
            }
        }
        else
        {
            // Not a file-entry identifier: rewind the 2 bytes just read and
            // parse the record as a raw data entry instead.
            reader.Seek(-2);
            ChunkDataEntry subEntry = new ChunkDataEntry();
            subEntry.ChunkType = reader.ReadEnum <ChunkDataType>(false); // The type of chunk. 0x8701B5 for example for texture info
            subEntry.Flags = reader.ReadUInt16();
            subEntry.ChunkSize = reader.ReadUInt32();
            subEntry.ChunkOffset = reader.ReadUInt32();
            // The top 4 bits of Flags select a block index when below 8.
            byte blockFlag = (byte)((subEntry.Flags >> 12));
            if (blockFlag < 8) { subEntry.BlockIndex = blockFlag; }
            DataEntries.Add(subEntry);

            globalChunkList.Add(globalIndex, subEntry);
            globalIndex += 1;
        }
    }

    // No file entries at all: wrap every data entry in a single synthetic file.
    if (Files.Count == 0)
    {
        var file = new ChunkEntry();
        file.SubData.AddRange(DataEntries);
        Files.Add(file);
    }

    // Pass 1: data entries of these types reference a run of other records
    // by global index (ChunkOffset = start index, ChunkSize = count).
    for (int i = 0; i < DataEntries.Count; i++)
    {
        if (DataEntries[i].ChunkType == ChunkDataType.BoneStart ||
            DataEntries[i].ChunkType == (ChunkDataType)0xC800 ||
            DataEntries[i].ChunkType == (ChunkDataType)0x6200 ||
            DataEntries[i].ChunkType == (ChunkDataType)0x6500)
        {
            for (int f = 0; f < DataEntries[i].ChunkSize; f++)
            {
                DataEntries[i].SubData.Add((ChunkDataEntry)globalChunkList[(int)DataEntries[i].ChunkOffset + f]);
            }
        }
    }

    // Pass 2: file entries with sub-data reference their children by global
    // index (Flags3 = start index, Flags2 = count).
    for (int i = 0; i < Files.Count; i++)
    {
        if (Files[i].HasSubData && globalChunkList.ContainsKey((int)Files[i].Flags3))
        {
            Files[i].BeginIndex = (int)Files[i].Flags3;
            for (int f = 0; f < Files[i].Flags2; f++)
            {
                Files[i].SubData.Add((ChunkDataEntry)globalChunkList[Files[i].BeginIndex + f]);
            }
        }
    }
}