/// <summary>
/// Resolves a file-name hash to its encoding entry. The root handler is consulted first;
/// when the file is not in root and install data is loaded, the install manifest is
/// searched by name hash, preferring entries tagged for the current locale.
/// </summary>
/// <param name="hash">Hash of the file name (as produced by <c>Hasher.ComputeHash</c>).</param>
/// <param name="enc">Receives the encoding entry on success; default otherwise.</param>
/// <returns>true when an encoding entry was found.</returns>
public bool GetEncodingEntry(ulong hash, out EncodingEntry enc)
{
    var rootInfos = Root.GetEntries(hash);

    if (rootInfos.Any())
        return Encoding.GetEntry(rootInfos.First().MD5, out enc);

    if ((CASCConfig.LoadFlags & LoadFlags.Install) != 0)
    {
        // Materialize the name-hash matches once: the original enumerated the lazy query
        // twice per lookup (Any() + First()) and then re-scanned Install.GetEntries() with
        // a second filter, recomputing Hasher.ComputeHash for every entry on each pass.
        var installInfos = Install.GetEntries().Where(e => Hasher.ComputeHash(e.Name) == hash).ToList();

        if (installInfos.Count > 0)
        {
            // Prefer an entry carrying a locale tag (type 1) matching the current locale;
            // hoist the locale string out of the lambda so it is built once, not per tag.
            string locale = Root.Locale.ToString();
            var localized = installInfos.Where(e => e.Tags.Any(t => t.Type == 1 && t.Name == locale)).ToList();

            if (localized.Count > 0)
                return Encoding.GetEntry(localized[0].MD5, out enc);

            // Fall back to any entry with a matching name hash, regardless of tags.
            return Encoding.GetEntry(installInfos[0].MD5, out enc);
        }
    }

    enc = default;
    return false;
}
/// <summary>
/// Parses an "encoding" file (header magic "EN", version 1) and fills EncodingData with one
/// EncodingEntry per content key (CKey), keeping only the first encoded key (EKey) of each
/// record. The ESpec string block, both page index tables, and the EKey-spec pages are read
/// past and discarded. The reader must be positioned at the start of the encoding file.
/// </summary>
public EncodingHandler(BinaryReader stream)
{
    // --- fixed-size big-endian header ---
    stream.Skip(2); // EN
    byte Version = stream.ReadByte(); // must be 1
    byte CKeyLength = stream.ReadByte();
    byte EKeyLength = stream.ReadByte();
    int CKeyPageSize = stream.ReadInt16BE() * 1024; // KB to bytes
    int EKeyPageSize = stream.ReadInt16BE() * 1024; // KB to bytes
    int CKeyPageCount = stream.ReadInt32BE();
    int EKeyPageCount = stream.ReadInt32BE();
    byte unk1 = stream.ReadByte(); // must be 0
    int ESpecBlockSize = stream.ReadInt32BE();

    // ESpec strings are not needed by this variant, so the whole block is skipped.
    stream.Skip(ESpecBlockSize);
    //string[] strings = Encoding.ASCII.GetString(stream.ReadBytes(ESpecBlockSize)).Split(new[] { '\0' }, StringSplitOptions.RemoveEmptyEntries);

    // CKey page index: 32 bytes per page (first hash + page checksum); unused, skipped.
    stream.Skip(CKeyPageCount * 32);
    //ValueTuple<byte[], byte[]>[] aEntries = new ValueTuple<byte[], byte[]>[CKeyPageCount];
    //for (int i = 0; i < CKeyPageCount; ++i)
    //{
    //    byte[] firstHash = stream.ReadBytes(16);
    //    byte[] blockHash = stream.ReadBytes(16);
    //    aEntries[i] = (firstHash, blockHash);
    //}

    // --- CKey pages: records of (keyCount, 40-bit size, CKey, keyCount * EKey) ---
    long chunkStart = stream.BaseStream.Position;

    for (int i = 0; i < CKeyPageCount; ++i)
    {
        byte keysCount;

        // A zero key count terminates the records of the current page.
        while ((keysCount = stream.ReadByte()) != 0)
        {
            long fileSize = stream.ReadInt40BE();
            MD5Hash cKey = stream.Read<MD5Hash>();

            EncodingEntry entry = new EncodingEntry()
            {
                Size = fileSize
            };

            // how do we handle multiple keys?
            for (int ki = 0; ki < keysCount; ++ki)
            {
                MD5Hash eKey = stream.Read<MD5Hash>();

                // use first key for now
                if (ki == 0)
                {
                    entry.Key = eKey;
                }
                //else
                //    Console.WriteLine("Multiple encoding keys for MD5 {0}: {1}", md5.ToHexString(), key.ToHexString());

                //Console.WriteLine("Encoding {0:D2} {1} {2} {3} {4}", keysCount, aEntries[i].Item1.ToHexString(), aEntries[i].Item2.ToHexString(), md5.ToHexString(), key.ToHexString());
            }

            //Encodings[md5] = entry;
            EncodingData.Add(cKey, entry);
        }

        // each chunk is 4096 bytes, and zero padding at the end
        // NOTE(review): if a page ended exactly on a CHUNK_SIZE boundary, remaining would be
        // CHUNK_SIZE and an entire following page would be skipped — presumably pages always
        // carry at least one padding byte; confirm against the format.
        long remaining = CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart) % CHUNK_SIZE);

        if (remaining > 0)
        {
            stream.BaseStream.Position += remaining;
        }
    }

    // EKey page index: 32 bytes per page; unused, skipped.
    stream.Skip(EKeyPageCount * 32);
    //for (int i = 0; i < EKeyPageCount; ++i)
    //{
    //    byte[] firstKey = stream.ReadBytes(16);
    //    byte[] blockHash = stream.ReadBytes(16);
    //}

    // --- EKey pages: advanced past only; the data is discarded. ---
    // NOTE(review): exactly one 25-byte record is read per page before padding to the next
    // boundary, i.e. the remaining records of each page are deliberately not parsed here.
    long chunkStart2 = stream.BaseStream.Position;

    for (int i = 0; i < EKeyPageCount; ++i)
    {
        byte[] eKey = stream.ReadBytes(16);
        int eSpecIndex = stream.ReadInt32BE();
        long fileSize = stream.ReadInt40BE();

        // each chunk is 4096 bytes, and zero padding at the end
        long remaining = CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart2) % CHUNK_SIZE);

        if (remaining > 0)
        {
            stream.BaseStream.Position += remaining;
        }
    }

    // string block till the end of file
    //EncodingData.Dump();
}
/// <summary>
/// Looks up the encoding entry for the given content key.
/// </summary>
/// <param name="md5">Content key (CKey) of the file.</param>
/// <param name="enc">Receives the matching entry when found; default value otherwise.</param>
/// <returns>true when the key is present in the encoding table.</returns>
public bool GetEntry(MD5Hash md5, out EncodingEntry enc)
{
    return EncodingData.TryGetValue(md5, out enc);
}
/// <summary>
/// Parses an "encoding" file (header magic "EN", version 1) and fills EncodingData with one
/// EncodingEntry per content key, keeping the first encoded key of each record. Unlike the
/// skip-based variant, this one reads the ESpec string block and both page index tables into
/// locals, and walks the EKey pages by absolute position until an end sentinel is found.
/// </summary>
public EncodingHandler(BinaryReader stream)
{
    // --- fixed-size big-endian header ---
    stream.Skip(2); // EN
    byte Version = stream.ReadByte(); // must be 1
    byte CKeyLength = stream.ReadByte();
    byte EKeyLength = stream.ReadByte();
    int CKeyPageSize = stream.ReadInt16BE() * 1024; // KB to bytes
    int EKeyPageSize = stream.ReadInt16BE() * 1024; // KB to bytes
    int CKeyPageCount = stream.ReadInt32BE();
    int EKeyPageCount = stream.ReadInt32BE();
    byte unk1 = stream.ReadByte(); // must be 0
    int ESpecBlockSize = stream.ReadInt32BE();

    // stream.Skip(ESpecBlockSize);
    // ESpec strings are decoded here but not used afterwards in this variant.
    string[] strings = Encoding.ASCII.GetString(stream.ReadBytes(ESpecBlockSize)).Split(new[] { '\0' }, StringSplitOptions.RemoveEmptyEntries);

    // stream.Skip(CKeyPageCount * 32);
    // CKey page index: (first hash, page checksum) pairs; read but never consulted.
    ValueTuple<byte[], byte[]>[] aEntries = new ValueTuple<byte[], byte[]>[CKeyPageCount];

    for (int i = 0; i < CKeyPageCount; ++i)
    {
        var firstHash = stream.ReadBytes(16);
        var blockHash = stream.ReadBytes(16);
        aEntries[i] = (firstHash, blockHash);
    }

    // --- CKey pages: records of (keyCount, 40-bit size, CKey, keyCount * EKey) ---
    long chunkStart = stream.BaseStream.Position;

    for (int i = 0; i < CKeyPageCount; ++i)
    {
        byte keysCount;

        // A zero key count terminates the records of the current page.
        while ((keysCount = stream.ReadByte()) != 0)
        {
            var entry = new EncodingEntry() { Size = stream.ReadInt40BE() };
            var cKey = stream.Read<MD5Hash>();

            // how do we handle multiple keys?
            for (var key = 0; key < keysCount; ++key)
            {
                // use first key for now
                if (key == 0)
                {
                    entry.Key = stream.Read<MD5Hash>();
                }
                else
                {
                    // Extra encoded keys are consumed to keep the stream aligned, then dropped.
                    stream.ReadBytes(16);
                }
            }

            EncodingData.Add(cKey, entry);
        }

        // each chunk is 4096 bytes, and zero padding at the end
        long remaining = CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart) % CHUNK_SIZE);

        if (remaining > 0)
        {
            stream.BaseStream.Position += remaining;
        }
    }

    // stream.Skip(EKeyPageCount * 32);
    // EKey page index: read pairwise but never consulted.
    for (int i = 0; i < EKeyPageCount; ++i)
    {
        var firstKey = stream.ReadBytes(16);
        var blockHash = stream.ReadBytes(16);
    }

    // --- EKey pages: 25-byte records (EKey, ESpec index, 40-bit size), data discarded. ---
    long chunkStart2 = stream.BaseStream.Position;

    while (stream.BaseStream.Position < chunkStart2 + CHUNK_SIZE * EKeyPageCount)
    {
        // NOTE(review): remaining is computed BEFORE the 25-byte record is read, so when
        // remaining < 25 the record straddles the padding and the skip below lands 25 bytes
        // past the page boundary. The sibling variant checks remaining < 25 before reading —
        // confirm which behavior is intended; harmless here only because the data is unused.
        var remaining = CHUNK_SIZE - (stream.BaseStream.Position - chunkStart2) % CHUNK_SIZE;
        var eKey = stream.ReadBytes(16);
        int eSpecIndex = stream.ReadInt32BE();
        long fileSize = stream.ReadInt40BE();

        if (remaining < 25)
        {
            stream.BaseStream.Position += remaining;
        }

        // NOTE(review): the sibling variant treats the end sentinel as -1 (big-endian
        // 0xFFFFFFFF read as a signed int); verify int.MaxValue is correct for this reader.
        if (eSpecIndex == int.MaxValue)
        {
            break;
        }
    }

    // string block till the end of file
}
/// <summary>
/// Parses a legacy "encoding" file (header magic "EN") and fills EncodingData with one
/// EncodingEntry per content MD5, keeping the first key of each record and logging any
/// extras. This older layout uses a 16-bit key count and 32-bit big-endian file sizes.
/// Progress is reported through <paramref name="worker"/> while the A-table is parsed.
/// </summary>
public EncodingHandler(BinaryReader stream, BackgroundWorkerEx worker)
{
    worker?.ReportProgress(0, "Loading \"encoding\"...");

    // --- fixed-size header ---
    stream.Skip(2); // EN
    byte b1 = stream.ReadByte();
    byte checksumSizeA = stream.ReadByte();
    byte checksumSizeB = stream.ReadByte();
    ushort flagsA = stream.ReadUInt16();
    ushort flagsB = stream.ReadUInt16();
    int numEntriesA = stream.ReadInt32BE();
    int numEntriesB = stream.ReadInt32BE();
    byte b4 = stream.ReadByte();
    int stringBlockSize = stream.ReadInt32BE();

    // ESpec-style string block: not needed here, skipped.
    stream.Skip(stringBlockSize);
    //string[] strings = Encoding.ASCII.GetString(stream.ReadBytes(stringBlockSize)).Split(new[] { '\0' }, StringSplitOptions.RemoveEmptyEntries);

    // A-table page index: 32 bytes per page (first hash + page checksum); unused, skipped.
    stream.Skip(numEntriesA * 32);
    //for (int i = 0; i < numEntriesA; ++i)
    //{
    //    byte[] firstHash = stream.ReadBytes(16);
    //    byte[] blockHash = stream.ReadBytes(16);
    //}

    // --- A-table pages: records of (keyCount, 32-bit size, content MD5, keyCount * key) ---
    long chunkStart = stream.BaseStream.Position;

    for (int i = 0; i < numEntriesA; ++i)
    {
        ushort keysCount;

        // A zero key count terminates the records of the current page.
        while ((keysCount = stream.ReadUInt16()) != 0)
        {
            int fileSize = stream.ReadInt32BE();
            MD5Hash md5 = stream.Read<MD5Hash>();

            EncodingEntry entry = new EncodingEntry()
            {
                Size = fileSize
            };

            // how do we handle multiple keys?
            for (int ki = 0; ki < keysCount; ++ki)
            {
                MD5Hash key = stream.Read<MD5Hash>();

                // use first key for now
                if (ki == 0)
                {
                    entry.Key = key;
                }
                else
                {
                    Logger.WriteLine("Multiple encoding keys for MD5 {0}: {1}", md5.ToHexString(), key.ToHexString());
                }
            }

            //Encodings[md5] = entry;
            EncodingData.Add(md5, entry);
        }

        // each chunk is 4096 bytes, and zero padding at the end
        long remaining = CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart) % CHUNK_SIZE);

        if (remaining > 0)
        {
            stream.BaseStream.Position += remaining;
        }

        worker?.ReportProgress((int)((i + 1) / (float)numEntriesA * 100));
    }

    // B-table page index: unused, skipped.
    stream.Skip(numEntriesB * 32);
    //for (int i = 0; i < numEntriesB; ++i)
    //{
    //    byte[] firstKey = stream.ReadBytes(16);
    //    byte[] blockHash = stream.ReadBytes(16);
    //}

    // --- B-table pages: advanced past only; the data is discarded. ---
    // NOTE(review): one 25-byte record (key, string index, unk byte, 32-bit size) is read per
    // page before padding to the next boundary; remaining records are deliberately skipped.
    long chunkStart2 = stream.BaseStream.Position;

    for (int i = 0; i < numEntriesB; ++i)
    {
        byte[] key = stream.ReadBytes(16);
        int stringIndex = stream.ReadInt32BE();
        byte unk1 = stream.ReadByte();
        int fileSize = stream.ReadInt32BE();

        // each chunk is 4096 bytes, and zero padding at the end
        long remaining = CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart2) % CHUNK_SIZE);

        if (remaining > 0)
        {
            stream.BaseStream.Position += remaining;
        }
    }

    // string block till the end of file
}
/// <summary>
/// Parses an "encoding" file (header magic "EN", version 1) and fills three tables:
/// EncodingData (CKey -> entry with ALL encoded keys), EKeyToCKey (reverse EKey lookup),
/// and EncryptionData (EKey -> encryption key names extracted from each record's ESpec
/// string via regex). Progress is reported through <paramref name="worker"/>.
/// </summary>
public EncodingHandler(BinaryReader stream, BackgroundWorkerEx worker)
{
    worker?.ReportProgress(0, "Loading \"encoding\"...");

    // --- fixed-size big-endian header ---
    stream.Skip(2); // EN
    byte Version = stream.ReadByte(); // must be 1
    byte CKeyLength = stream.ReadByte();
    byte EKeyLength = stream.ReadByte();
    int CKeyPageSize = stream.ReadInt16BE() * 1024; // KB to bytes
    int EKeyPageSize = stream.ReadInt16BE() * 1024; // KB to bytes
    int CKeyPageCount = stream.ReadInt32BE();
    int EKeyPageCount = stream.ReadInt32BE();
    byte unk1 = stream.ReadByte(); // must be 0
    int ESpecBlockSize = stream.ReadInt32BE();

    //stream.Skip(ESpecBlockSize);
    // ESpec strings are kept: the EKey page loop below indexes into them.
    // StringSplitOptions.None preserves empty entries so ESpec indices stay aligned.
    string[] strings = Encoding.ASCII.GetString(stream.ReadBytes(ESpecBlockSize)).Split(new[] { '\0' }, StringSplitOptions.None);
    //for (int i = 0; i < strings.Length; i++)
    //{
    //    Logger.WriteLine($"ESpec {i:D6} {strings[i]}");
    //}

    // CKey page index: 32 bytes per page (first hash + page checksum); unused, skipped.
    stream.Skip(CKeyPageCount * 32);
    //ValueTuple<MD5Hash, MD5Hash>[] cKeyPageData = new ValueTuple<MD5Hash, MD5Hash>[CKeyPageCount];
    //for (int i = 0; i < CKeyPageCount; i++)
    //{
    //    MD5Hash firstHash = stream.Read<MD5Hash>();
    //    MD5Hash blockHash = stream.Read<MD5Hash>();
    //    cKeyPageData[i] = (firstHash, blockHash);
    //}

    // --- CKey pages: records of (keyCount, 40-bit size, CKey, keyCount * EKey) ---
    long chunkStart = stream.BaseStream.Position;

    for (int i = 0; i < CKeyPageCount; i++)
    {
        byte keysCount;

        // A zero key count terminates the records of the current page.
        while ((keysCount = stream.ReadByte()) != 0)
        {
            long fileSize = stream.ReadInt40BE();
            MD5Hash cKey = stream.Read<MD5Hash>();

            EncodingEntry entry = new EncodingEntry()
            {
                Size = fileSize,
                Keys = new List<MD5Hash>(keysCount)
            };

            // how do we handle multiple keys?
            for (int ki = 0; ki < keysCount; ++ki)
            {
                MD5Hash eKey = stream.Read<MD5Hash>();

                // Unlike older variants, every EKey is retained and reverse-mapped.
                entry.Keys.Add(eKey);
                EKeyToCKey.Add(eKey, cKey);
                //Logger.WriteLine($"Encoding {i:D7} {ki:D2} {cKey.ToHexString()} {eKey.ToHexString()} {fileSize}");
            }

            EncodingData.Add(cKey, entry);
        }

        // each chunk is 4096 bytes, and zero padding at the end
        long remaining = CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart) % CHUNK_SIZE);

        if (remaining > 0)
        {
            stream.BaseStream.Position += remaining;
        }

        worker?.ReportProgress((int)((i + 1) / (float)CKeyPageCount * 100));
    }

    // EKey page index: 32 bytes per page; unused, skipped.
    stream.Skip(EKeyPageCount * 32);
    //ValueTuple<MD5Hash, MD5Hash>[] eKeyPageData = new ValueTuple<MD5Hash, MD5Hash>[EKeyPageCount];
    //for (int i = 0; i < EKeyPageCount; i++)
    //{
    //    MD5Hash firstKey = stream.Read<MD5Hash>();
    //    MD5Hash blockHash = stream.Read<MD5Hash>();
    //    eKeyPageData[i] = (firstKey, blockHash);
    //}

    // --- EKey pages: 25-byte records (EKey, ESpec index, 40-bit size) ---
    long chunkStart2 = stream.BaseStream.Position;

    // Extracts 16-hex-digit encryption key names from ESpec strings, e.g. the "XXXXXXXXXXXXXXXX"
    // in "...e:{XXXXXXXXXXXXXXXX,...". Built once per parse; Compiled since it runs per record.
    Regex regex = new Regex(@"(?<=e:\{)([0-9A-F]{16})(?=,)", RegexOptions.Compiled);

    for (int i = 0; i < EKeyPageCount; i++)
    {
        while (true)
        {
            // each chunk is 4096 bytes, and zero padding at the end
            long remaining = CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart2) % CHUNK_SIZE);

            // Fewer than 25 bytes left in this chunk: the rest is padding; move to next page.
            if (remaining < 25)
            {
                stream.BaseStream.Position += remaining;
                break;
            }

            MD5Hash eKey = stream.Read<MD5Hash>();
            int eSpecIndex = stream.ReadInt32BE();
            long fileSize = stream.ReadInt40BE();

            // ESpec index -1 marks the end of the records in this page.
            // NOTE(review): remaining was computed before this 25-byte record was read, so
            // this skip lands 25 bytes past the chunk boundary and would misalign subsequent
            // pages — confirm whether the sentinel only ever appears in the final page.
            if (eSpecIndex == -1)
            {
                stream.BaseStream.Position += remaining;
                break;
            }

            string eSpec = strings[eSpecIndex];

            var matches = regex.Matches(eSpec);

            if (matches.Count != 0)
            {
                // Each match is a 16-hex-char key name, decoded to a ulong for EncryptionData.
                var keys = matches.Cast<Match>().Select(m => BitConverter.ToUInt64(m.Value.FromHexString(), 0)).ToList();
                EncryptionData.Add(eKey, keys);
                //Logger.WriteLine($"Encoding {i:D7} {eKey.ToHexString()} {eSpecIndex} {fileSize} {eSpec} {string.Join(",", keys.Select(x => $"{x:X16}"))}");
            }
            else
            {
                //Logger.WriteLine($"Encoding {i:D7} {eKey.ToHexString()} {eSpecIndex} {fileSize} {eSpec}");
            }
        }
    }

    // string block till the end of file
}