/// <summary>
/// Resolves a file-name hash to its encoding entry. Tries the root handler
/// first; when install loading is enabled, falls back to install entries
/// tagged with the current locale, then to any install entry by name hash.
/// </summary>
public bool GetEncodingEntry(ulong hash, out EncodingEntry enc)
{
    var rootMatches = RootHandler.GetEntries(hash);

    if (rootMatches.Any())
        return EncodingHandler.GetEntry(rootMatches.First().MD5, out enc);

    if ((CASCConfig.LoadFlags & LoadFlags.Install) != 0)
    {
        // Prefer an install entry whose tag matches the active locale.
        var localeMatches = Install.GetEntries().Where(e => Hasher.ComputeHash(e.Name) == hash && e.Tags.Any(t => t.Type == 1 && t.Name == RootHandler.Locale.ToString()));

        if (localeMatches.Any())
            return EncodingHandler.GetEntry(localeMatches.First().MD5, out enc);

        // Otherwise accept any install entry with a matching name hash.
        var anyMatches = Install.GetEntries().Where(e => Hasher.ComputeHash(e.Name) == hash);

        if (anyMatches.Any())
            return EncodingHandler.GetEntry(anyMatches.First().MD5, out enc);
    }

    enc = default(EncodingEntry);
    return false;
}
/// <summary>
/// Extracts the file identified by <paramref name="hash"/> into
/// <paramref name="extractPath"/> under <paramref name="fullName"/>.
/// When the hash is unknown, throws only if the config demands it.
/// </summary>
public void SaveFileTo(ulong hash, string extractPath, string fullName)
{
    EncodingEntry encoding = GetEncodingEntry(hash);

    if (encoding == null)
    {
        if (CASCConfig.ThrowOnFileNotFound)
            throw new FileNotFoundException(fullName);
        return;
    }

    ExtractFile(encoding.Key, extractPath, fullName);
}
/// <summary>
/// Opens the file identified by <paramref name="hash"/> as a stream.
/// Returns null for an unknown hash unless the config requests a throw.
/// </summary>
public override Stream OpenFile(ulong hash)
{
    EncodingEntry encoding = GetEncodingEntry(hash);

    if (encoding == null)
    {
        if (CASCConfig.ThrowOnFileNotFound)
            throw new FileNotFoundException(string.Format("{0:X16}", hash));
        return null;
    }

    return OpenFile(encoding.Key);
}
/// <summary>
/// Parses the "encoding" file: the header, table A (content MD5 -> file size
/// and encoding key) and table B (read but not stored).
/// </summary>
public EncodingHandler(BinaryReader stream, BackgroundWorkerEx worker)
{
    worker?.ReportProgress(0, "Loading \"encoding\"...");

    stream.Skip(2); // EN
    byte b1 = stream.ReadByte();
    byte checksumSizeA = stream.ReadByte();
    byte checksumSizeB = stream.ReadByte();
    ushort flagsA = stream.ReadUInt16();
    ushort flagsB = stream.ReadUInt16();
    int numEntriesA = stream.ReadInt32BE();
    int numEntriesB = stream.ReadInt32BE();
    byte b4 = stream.ReadByte();
    int stringBlockSize = stream.ReadInt32BE();

    stream.Skip(stringBlockSize); // string block (contents unused here)

    stream.Skip(numEntriesA * 32); // table A index: first hash + block hash per chunk

    long chunkStart = stream.BaseStream.Position;

    for (int i = 0; i < numEntriesA; ++i)
    {
        ushort keysCount;

        while ((keysCount = stream.ReadUInt16()) != 0)
        {
            int fileSize = stream.ReadInt32BE();
            byte[] md5 = stream.ReadBytes(16);

            EncodingEntry entry = new EncodingEntry();
            entry.Size = fileSize;

            // how do we handle multiple keys?
            for (int ki = 0; ki < keysCount; ++ki)
            {
                byte[] key = stream.ReadBytes(16);

                // use first key for now
                if (ki == 0)
                    entry.Key = key;
                else
                    Logger.WriteLine("Multiple encoding keys for MD5 {0}: {1}", md5.ToHexString(), key.ToHexString());
            }

            EncodingData.Add(md5, entry);
        }

        // Each chunk is CHUNK_SIZE (4096) bytes, zero padded at the end.
        // FIX: take the padding modulo CHUNK_SIZE — the old expression yielded
        // remaining == CHUNK_SIZE when the reader was already exactly on a
        // chunk boundary, silently skipping an entire extra chunk of entries.
        long remaining = (CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart) % CHUNK_SIZE)) % CHUNK_SIZE;

        if (remaining > 0)
            stream.BaseStream.Position += remaining;

        worker?.ReportProgress((int)((i + 1) / (float)numEntriesA * 100));
    }

    stream.Skip(numEntriesB * 32); // table B index: first key + block hash per chunk

    long chunkStart2 = stream.BaseStream.Position;

    for (int i = 0; i < numEntriesB; ++i)
    {
        byte[] key = stream.ReadBytes(16);
        int stringIndex = stream.ReadInt32BE();
        byte unk1 = stream.ReadByte();
        int fileSize = stream.ReadInt32BE();

        // Same chunk-boundary fix as above; table B values are discarded, this
        // loop only advances the stream position.
        long remaining = (CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart2) % CHUNK_SIZE)) % CHUNK_SIZE;

        if (remaining > 0)
            stream.BaseStream.Position += remaining;
    }

    // string block till the end of file
}
/// <summary>
/// Resolves a file-name hash to its encoding entry via the root handler,
/// falling back to the install manifest when install loading is enabled.
/// </summary>
public bool GetEncodingEntry(ulong hash, out EncodingEntry enc)
{
    var rootMatches = RootHandler.GetEntries(hash);

    if (rootMatches.Any())
        return EncodingHandler.GetEntry(rootMatches.First().MD5, out enc);

    if ((CASCConfig.LoadFlags & LoadFlags.Install) != 0)
    {
        var installMatches = Install.GetEntries().Where(e => Hasher.ComputeHash(e.Name) == hash);

        if (installMatches.Any())
            return EncodingHandler.GetEntry(installMatches.First().MD5, out enc);
    }

    enc = default(EncodingEntry);
    return false;
}
/// <summary>
/// Parses the Diablo 3 "root" file: per-package entry lists, then the
/// CoreTOC.dat and Packages.dat files from the "Base" package.
/// </summary>
public D3RootHandler(BinaryReader stream, BackgroundWorkerEx worker, CASCHandler casc)
{
    worker?.ReportProgress(0, "Loading \"root\"...");

    byte b1 = stream.ReadByte();
    byte b2 = stream.ReadByte();
    byte b3 = stream.ReadByte();
    byte b4 = stream.ReadByte();

    int count = stream.ReadInt32();

    for (int j = 0; j < count; j++)
    {
        byte[] md5 = stream.ReadBytes(16);
        string name = stream.ReadCString();

        var entries = new List<D3RootEntry>();
        D3RootData[name] = entries;

        EncodingEntry enc = casc.Encoding.GetEntry(md5);

        // FIX: removed the old "if (s != null)" inside the using block — the
        // result of a "new BinaryReader(...)" expression can never be null,
        // so the check was dead code.
        using (BinaryReader s = new BinaryReader(casc.OpenFile(enc.Key)))
        {
            uint magic = s.ReadUInt32();

            int nEntries0 = s.ReadInt32();
            for (int i = 0; i < nEntries0; i++)
                entries.Add(D3RootEntry.Read(0, s));

            int nEntries1 = s.ReadInt32();
            for (int i = 0; i < nEntries1; i++)
                entries.Add(D3RootEntry.Read(1, s));

            int nNamedEntries = s.ReadInt32();
            for (int i = 0; i < nNamedEntries; i++)
                entries.Add(D3RootEntry.Read(2, s));
        }

        worker?.ReportProgress((int)((j + 1) / (float)(count + 2) * 100));
    }

    // Parse CoreTOC.dat
    var coreTocEntry = D3RootData["Base"].Find(e => e.Name == "CoreTOC.dat");
    EncodingEntry enc1 = casc.Encoding.GetEntry(coreTocEntry.MD5);

    using (var file = casc.OpenFile(enc1.Key))
        tocParser = new CoreTOCParser(file);

    worker?.ReportProgress((int)((count + 1) / (float)(count + 2) * 100));

    // Parse Packages.dat
    var pkgEntry = D3RootData["Base"].Find(e => e.Name == "Data_D3\\PC\\Misc\\Packages.dat");
    EncodingEntry enc2 = casc.Encoding.GetEntry(pkgEntry.MD5);

    using (var file = casc.OpenFile(enc2.Key))
        pkgParser = new PackagesParser(file);

    worker?.ReportProgress(100);
}
/// <summary>
/// Builds a CASC handler from a configuration: in local mode loads the .idx
/// index files, then parses the "encoding" and "root" files.
/// </summary>
private CASCHandler(CASCConfig config, CDNHandler cdn, BackgroundWorker worker)
{
    this.config = config;
    this.cdn = cdn;

    if (!config.OnlineMode)
    {
        var idxFiles = GetIdxFiles(this.config.BasePath);

        if (idxFiles.Count == 0)
            throw new FileNotFoundException("idx files missing!");

        if (worker != null)
            worker.ReportProgress(0);

        int idxIndex = 0;

        foreach (var idx in idxFiles)
        {
            using (var fs = new FileStream(idx, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
            using (var br = new BinaryReader(fs))
            {
                int h2Len = br.ReadInt32();
                int h2Check = br.ReadInt32();
                byte[] h2 = br.ReadBytes(h2Len);

                // header is padded to the next 16-byte boundary
                long padPos = (8 + h2Len + 0x0F) & 0xFFFFFFF0;
                fs.Position = padPos;

                int dataLen = br.ReadInt32();
                int dataCheck = br.ReadInt32();

                int numBlocks = dataLen / 18;

                for (int i = 0; i < numBlocks; i++)
                {
                    IndexEntry info = new IndexEntry();
                    byte[] key = br.ReadBytes(9);
                    int indexHigh = br.ReadByte();
                    int indexLow = br.ReadInt32BE();

                    info.Index = (int)((byte)(indexHigh << 2) | ((indexLow & 0xC0000000) >> 30));
                    info.Offset = (indexLow & 0x3FFFFFFF);
                    info.Size = br.ReadInt32();

                    // duplicate keys occur; keep the first occurrence
                    // (assumes LocalIndexData compares byte[] by content — TODO confirm)
                    if (!LocalIndexData.ContainsKey(key))
                        LocalIndexData.Add(key, info);
                }

                // data block is padded to a 4096-byte boundary, then followed
                // by numBlocks * 18 bytes of unknown data
                padPos = (dataLen + 0x0FFF) & 0xFFFFF000;
                fs.Position = padPos;
                fs.Position += numBlocks * 18;

                // FIX: the old check compared fs.Position with itself (always
                // false), so a truncated idx file was never detected; compare
                // against the file length instead.
                if (fs.Position != fs.Length)
                    throw new Exception("idx file under read");
            }

            if (worker != null)
                worker.ReportProgress((int)((float)++idxIndex / (float)idxFiles.Count * 100));
        }

        Logger.WriteLine("CASCHandler: loaded {0} indexes", LocalIndexData.Count);
    }

    if (worker != null)
        worker.ReportProgress(0);

    using (var fs = OpenEncodingFile())
    using (var br = new BinaryReader(fs))
    {
        br.ReadBytes(2); // EN
        byte b1 = br.ReadByte();
        byte b2 = br.ReadByte();
        byte b3 = br.ReadByte();
        ushort s1 = br.ReadUInt16();
        ushort s2 = br.ReadUInt16();
        int numEntries = br.ReadInt32BE();
        int i1 = br.ReadInt32BE();
        byte b4 = br.ReadByte();
        int entriesOfs = br.ReadInt32BE();

        fs.Position += entriesOfs; // skip strings
        fs.Position += numEntries * 32; // skip table A index (two 16-byte hashes per entry)

        for (int i = 0; i < numEntries; ++i)
        {
            ushort keysCount;

            while ((keysCount = br.ReadUInt16()) != 0)
            {
                int fileSize = br.ReadInt32BE();
                byte[] md5 = br.ReadBytes(16);

                var entry = new EncodingEntry();
                entry.Size = fileSize;

                for (int ki = 0; ki < keysCount; ++ki)
                {
                    byte[] key = br.ReadBytes(16);
                    entry.Keys.Add(key);
                }

                EncodingData.Add(md5, entry);
            }

            // skip zero padding up to the next chunk
            while (br.PeekChar() == 0)
                fs.Position++;

            if (worker != null)
                worker.ReportProgress((int)((float)fs.Position / (float)fs.Length * 100));
        }

        Logger.WriteLine("CASCHandler: loaded {0} encoding data", EncodingData.Count);
    }

    if (worker != null)
        worker.ReportProgress(0);

    using (var fs = OpenRootFile())
    using (var br = new BinaryReader(fs))
    {
        while (fs.Position < fs.Length)
        {
            int count = br.ReadInt32();

            RootBlock block = new RootBlock();
            block.Unk1 = br.ReadUInt32();
            block.Flags = (LocaleFlags)br.ReadUInt32();

            if (block.Flags == LocaleFlags.None)
                throw new Exception("block.Flags == LocaleFlags.None");

            RootEntry[] entries = new RootEntry[count];

            for (var i = 0; i < count; ++i)
            {
                entries[i] = new RootEntry();
                entries[i].Block = block;
                entries[i].Unk1 = br.ReadInt32();
            }

            for (var i = 0; i < count; ++i)
            {
                entries[i].MD5 = br.ReadBytes(16);

                ulong hash = br.ReadUInt64();
                entries[i].Hash = hash;

                if (!RootData.ContainsKey(hash))
                    RootData[hash] = new List<RootEntry>();

                RootData[hash].Add(entries[i]);
            }

            if (worker != null)
                worker.ReportProgress((int)((float)fs.Position / (float)fs.Length * 100));
        }

        Logger.WriteLine("CASCHandler: loaded {0} root data", RootData.Count);
    }

    if (worker != null)
        worker.ReportProgress(0);
}
/// <summary>
/// Parses the Overwatch "root" file: registers each .apm manifest and the
/// files it lists under synthetic names, then adds "unknown/" entries for
/// every encoding entry that has download-tag data.
/// </summary>
public OWRootHandler(BinaryReader stream, BackgroundWorkerEx worker, CASCHandler casc)
{
    worker?.ReportProgress(0, "Loading \"root\"...");

    string str = Encoding.ASCII.GetString(stream.ReadBytes((int)stream.BaseStream.Length));

    string[] array = str.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries);

    // need to figure out what to do with those apm files
    for (int i = 1; i < array.Length; i++)
    {
        string[] filedata = array[i].Split('|');

        if (Path.GetExtension(filedata[2]) != ".apm")
            continue;

        // add apm file for dev purposes
        ulong fileHash1 = Hasher.ComputeHash(filedata[2]);
        RootData[fileHash1] = new RootEntry() { MD5 = filedata[0].ToByteArray(), Block = RootBlock.Empty };
        CASCFile.FileNames[fileHash1] = filedata[2];

        // add files listed in apm file
        byte[] md5 = filedata[0].ToByteArray();

        EncodingEntry enc = casc.Encoding.GetEntry(md5);

        // FIX: removed the old "if (s != null)" inside the using block — a
        // "new BinaryReader(...)" expression can never produce null, so the
        // check was dead code.
        using (BinaryReader s = new BinaryReader(casc.OpenFile(enc.Key)))
        {
            // still need to figure out complete apm structure
            // at start of file there's a lot of data that is same in all apm files
            s.BaseStream.Position = 0xC;
            uint count = s.ReadUInt32();
            s.BaseStream.Position = 0x894;

            // size of each entry seems to be 0x48 bytes (0x2C bytes unk data; int size; ulong unk; byte[16] md5)
            for (int j = 0; j < count; j++)
            {
                s.BaseStream.Position += 0x2C; // skip unknown
                int size = s.ReadInt32(); // size (matches size in encoding file)
                s.BaseStream.Position += 8; // skip unknown
                byte[] md5_2 = s.ReadBytes(16);

                EncodingEntry enc2 = casc.Encoding.GetEntry(md5_2);

                if (enc2 == null)
                    throw new Exception("enc2 == null");

                string fakeName = Path.GetFileNameWithoutExtension(filedata[2]) + "/" + md5_2.ToHexString();

                ulong fileHash = Hasher.ComputeHash(fakeName);
                RootData[fileHash] = new RootEntry() { MD5 = md5_2, Block = RootBlock.Empty };
                CASCFile.FileNames[fileHash] = fakeName;
            }
        }
    }

    int current = 0;

    // Maps a download-tag name to a LocaleFlags value; unparseable tags map to All.
    Func<string, LocaleFlags> tag2locale = (s) =>
    {
        LocaleFlags locale;

        if (Enum.TryParse(s, out locale))
            return locale;

        return LocaleFlags.All;
    };

    foreach (var entry in casc.Encoding.Entries)
    {
        DownloadEntry dl = casc.Download.GetEntry(entry.Value.Key);

        if (dl != null)
        {
            string fakeName = "unknown" + "/" + entry.Key[0].ToString("X2") + "/" + entry.Key.ToHexString();

            // OR together the locales of all type-4 tags on this entry
            var locales = dl.Tags.Where(tag => tag.Value.Type == 4).Select(tag => tag2locale(tag.Key));

            LocaleFlags locale = LocaleFlags.None;

            foreach (var loc in locales)
                locale |= loc;

            ulong fileHash = Hasher.ComputeHash(fakeName);
            RootData.Add(fileHash, new RootEntry() { MD5 = entry.Key, Block = new RootBlock() { LocaleFlags = locale } });

            CASCFile.FileNames[fileHash] = fakeName;
        }

        worker?.ReportProgress((int)(++current / (float)casc.Encoding.Count * 100));
    }
}
/// <summary>
/// Builds a CASC handler rooted at <paramref name="root"/>: loads local .idx
/// files (offline mode), parses "encoding" and "root", then builds the
/// folder tree from the list file.
/// </summary>
public CASCHandler(CASCFolder root, BackgroundWorker worker)
{
    if (!OnlineMode)
    {
        var idxFiles = GetIdxFiles(Properties.Settings.Default.WowPath);

        if (idxFiles.Count == 0)
        {
            throw new FileNotFoundException("idx files missing!");
        }

        worker.ReportProgress(0);

        int idxIndex = 0;

        foreach (var idx in idxFiles)
        {
            using (var fs = new FileStream(idx, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
            using (var br = new BinaryReader(fs))
            {
                int h2Len = br.ReadInt32();
                int h2Check = br.ReadInt32();
                byte[] h2 = br.ReadBytes(h2Len);

                // header is padded to the next 16-byte boundary
                long padPos = (8 + h2Len + 0x0F) & 0xFFFFFFF0;
                fs.Position = padPos;

                int dataLen = br.ReadInt32();
                int dataCheck = br.ReadInt32();

                int numBlocks = dataLen / 18;

                for (int i = 0; i < numBlocks; i++)
                {
                    IndexEntry info = new IndexEntry();
                    byte[] key = br.ReadBytes(9);
                    int indexHigh = br.ReadByte();
                    int indexLow = br.ReadInt32BE();

                    info.Index = (int)((byte)(indexHigh << 2) | ((indexLow & 0xC0000000) >> 30));
                    info.Offset = (indexLow & 0x3FFFFFFF);
                    info.Size = br.ReadInt32();

                    // duplicate keys occur; keep the first occurrence
                    if (!LocalIndexData.ContainsKey(key))
                    {
                        LocalIndexData.Add(key, info);
                    }
                }

                // data block is padded to a 4096-byte boundary, then followed
                // by numBlocks * 18 bytes of unknown data
                padPos = (dataLen + 0x0FFF) & 0xFFFFF000;
                fs.Position = padPos;
                fs.Position += numBlocks * 18;

                // FIX: the old check compared fs.Position with itself (always
                // false), so a truncated idx file was never detected; compare
                // against the file length instead.
                if (fs.Position != fs.Length)
                {
                    throw new Exception("idx file under read");
                }
            }

            worker.ReportProgress((int)((float)++idxIndex / (float)idxFiles.Count * 100));
        }

        Logger.WriteLine("CASCHandler: loaded {0} indexes", LocalIndexData.Count);
    }

    worker.ReportProgress(0);

    using (var fs = OpenEncodingFile())
    using (var br = new BinaryReader(fs))
    {
        br.ReadBytes(2); // EN
        byte b1 = br.ReadByte();
        byte b2 = br.ReadByte();
        byte b3 = br.ReadByte();
        ushort s1 = br.ReadUInt16();
        ushort s2 = br.ReadUInt16();
        int numEntries = br.ReadInt32BE();
        int i1 = br.ReadInt32BE();
        byte b4 = br.ReadByte();
        int entriesOfs = br.ReadInt32BE();

        fs.Position += entriesOfs; // skip strings
        fs.Position += numEntries * 32; // skip table A index (two 16-byte hashes per entry)

        for (int i = 0; i < numEntries; ++i)
        {
            ushort keysCount;

            while ((keysCount = br.ReadUInt16()) != 0)
            {
                int fileSize = br.ReadInt32BE();
                byte[] md5 = br.ReadBytes(16);

                var entry = new EncodingEntry();
                entry.Size = fileSize;

                for (int ki = 0; ki < keysCount; ++ki)
                {
                    byte[] key = br.ReadBytes(16);
                    entry.Keys.Add(key);
                }

                EncodingData.Add(md5, entry);
            }

            // skip zero padding up to the next chunk
            while (br.PeekChar() == 0)
            {
                fs.Position++;
            }

            worker.ReportProgress((int)((float)fs.Position / (float)fs.Length * 100));
        }

        Logger.WriteLine("CASCHandler: loaded {0} encoding data", EncodingData.Count);
    }

    worker.ReportProgress(0);

    using (var fs = OpenRootFile())
    using (var br = new BinaryReader(fs))
    {
        while (fs.Position < fs.Length)
        {
            int count = br.ReadInt32();

            RootBlock block = new RootBlock();
            block.Unk1 = br.ReadUInt32();
            block.Flags = (LocaleFlags)br.ReadUInt32();

            if (block.Flags == LocaleFlags.None)
            {
                throw new Exception("block.Flags == LocaleFlags.None");
            }

            RootEntry[] entries = new RootEntry[count];

            for (var i = 0; i < count; ++i)
            {
                entries[i] = new RootEntry();
                entries[i].Block = block;
                entries[i].Unk1 = br.ReadInt32();
            }

            for (var i = 0; i < count; ++i)
            {
                entries[i].MD5 = br.ReadBytes(16);

                ulong hash = br.ReadUInt64();
                entries[i].Hash = hash;

                if (!RootData.ContainsKey(hash))
                {
                    RootData[hash] = new List<RootEntry>();
                }

                RootData[hash].Add(entries[i]);
            }

            worker.ReportProgress((int)((float)fs.Position / (float)fs.Length * 100));
        }

        Logger.WriteLine("CASCHandler: loaded {0} root data", RootData.Count);
    }

    worker.ReportProgress(0);

    if (File.Exists(listFile))
    {
        FolderNames[Hasher.ComputeHash("root")] = "root";

        using (StreamReader sr = new StreamReader(listFile))
        {
            string file;
            int filesCount = 0;

            CASCFolder folder = root;

            while ((file = sr.ReadLine()) != null)
            {
                ulong fileHash = Hasher.ComputeHash(file);

                // skip invalid names
                if (!RootData.ContainsKey(fileHash))
                {
                    Logger.WriteLine("Invalid file name: {0}", file);
                    continue;
                }

                filesCount++;

                string[] parts = file.Split('\\');

                for (int i = 0; i < parts.Length; ++i)
                {
                    bool isFile = (i == parts.Length - 1);

                    ulong hash = isFile ? fileHash : Hasher.ComputeHash(parts[i]);

                    ICASCEntry entry = folder.GetEntry(hash);

                    if (entry == null)
                    {
                        if (isFile)
                        {
                            entry = new CASCFile(hash);
                            FileNames[hash] = file;
                        }
                        else
                        {
                            entry = new CASCFolder(hash);
                            FolderNames[hash] = parts[i];
                        }

                        folder.SubEntries[hash] = entry;

                        if (isFile)
                        {
                            folder = root;
                            break;
                        }
                    }

                    // NOTE(review): if a file entry already exists (duplicate
                    // list-file line), this leaves folder == null for the next
                    // line — presumed not to happen in practice; confirm.
                    folder = entry as CASCFolder;
                }

                if ((filesCount % 1000) == 0)
                {
                    worker.ReportProgress((int)((float)sr.BaseStream.Position / (float)sr.BaseStream.Length * 100));
                }
            }

            Logger.WriteLine("CASCHandler: loaded {0} file names", FileNames.Count);
        }
    }
    else
    {
        throw new FileNotFoundException("list file missing!");
    }
}
/// <summary>
/// Looks up the encoding entry for a content MD5; returns false when absent.
/// </summary>
public bool GetEntry(MD5Hash md5, out EncodingEntry enc)
{
    return EncodingData.TryGetValue(md5, out enc);
}
/// <summary>
/// Looks up the encoding entry for a content MD5; returns false when absent.
/// </summary>
public bool GetEntry(MD5Hash md5, out EncodingEntry enc) => EncodingData.TryGetValue(md5, out enc);
/// <summary>
/// Builds a CASC handler from a configuration: in local mode loads the .idx
/// index files, then parses the "encoding" and "root" files.
/// </summary>
private CASCHandler(CASCConfig config, CDNHandler cdn, BackgroundWorker worker)
{
    this.config = config;
    this.cdn = cdn;

    if (!config.OnlineMode)
    {
        var idxFiles = GetIdxFiles(this.config.BasePath);

        if (idxFiles.Count == 0)
        {
            throw new FileNotFoundException("idx files missing!");
        }

        if (worker != null)
        {
            worker.ReportProgress(0);
        }

        int idxIndex = 0;

        foreach (var idx in idxFiles)
        {
            using (var fs = new FileStream(idx, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
            using (var br = new BinaryReader(fs))
            {
                int h2Len = br.ReadInt32();
                int h2Check = br.ReadInt32();
                byte[] h2 = br.ReadBytes(h2Len);

                // header is padded to the next 16-byte boundary
                long padPos = (8 + h2Len + 0x0F) & 0xFFFFFFF0;
                fs.Position = padPos;

                int dataLen = br.ReadInt32();
                int dataCheck = br.ReadInt32();

                int numBlocks = dataLen / 18;

                for (int i = 0; i < numBlocks; i++)
                {
                    IndexEntry info = new IndexEntry();
                    byte[] key = br.ReadBytes(9);
                    int indexHigh = br.ReadByte();
                    int indexLow = br.ReadInt32BE();

                    info.Index = (int)((byte)(indexHigh << 2) | ((indexLow & 0xC0000000) >> 30));
                    info.Offset = (indexLow & 0x3FFFFFFF);
                    info.Size = br.ReadInt32();

                    // duplicate keys occur; keep the first occurrence
                    if (!LocalIndexData.ContainsKey(key))
                    {
                        LocalIndexData.Add(key, info);
                    }
                }

                // data block is padded to a 4096-byte boundary, then followed
                // by numBlocks * 18 bytes of unknown data
                padPos = (dataLen + 0x0FFF) & 0xFFFFF000;
                fs.Position = padPos;
                fs.Position += numBlocks * 18;

                // FIX: the old check compared fs.Position with itself (always
                // false), so a truncated idx file was never detected; compare
                // against the file length instead.
                if (fs.Position != fs.Length)
                {
                    throw new Exception("idx file under read");
                }
            }

            if (worker != null)
            {
                worker.ReportProgress((int)((float)++idxIndex / (float)idxFiles.Count * 100));
            }
        }

        Logger.WriteLine("CASCHandler: loaded {0} indexes", LocalIndexData.Count);
    }

    if (worker != null)
    {
        worker.ReportProgress(0);
    }

    using (var fs = OpenEncodingFile())
    using (var br = new BinaryReader(fs))
    {
        br.ReadBytes(2); // EN
        byte b1 = br.ReadByte();
        byte b2 = br.ReadByte();
        byte b3 = br.ReadByte();
        ushort s1 = br.ReadUInt16();
        ushort s2 = br.ReadUInt16();
        int numEntries = br.ReadInt32BE();
        int i1 = br.ReadInt32BE();
        byte b4 = br.ReadByte();
        int entriesOfs = br.ReadInt32BE();

        fs.Position += entriesOfs; // skip strings
        fs.Position += numEntries * 32; // skip table A index (two 16-byte hashes per entry)

        for (int i = 0; i < numEntries; ++i)
        {
            ushort keysCount;

            while ((keysCount = br.ReadUInt16()) != 0)
            {
                int fileSize = br.ReadInt32BE();
                byte[] md5 = br.ReadBytes(16);

                var entry = new EncodingEntry();
                entry.Size = fileSize;

                for (int ki = 0; ki < keysCount; ++ki)
                {
                    byte[] key = br.ReadBytes(16);
                    entry.Keys.Add(key);
                }

                EncodingData.Add(md5, entry);
            }

            // skip zero padding up to the next chunk
            while (br.PeekChar() == 0)
            {
                fs.Position++;
            }

            if (worker != null)
            {
                worker.ReportProgress((int)((float)fs.Position / (float)fs.Length * 100));
            }
        }

        Logger.WriteLine("CASCHandler: loaded {0} encoding data", EncodingData.Count);
    }

    if (worker != null)
    {
        worker.ReportProgress(0);
    }

    using (var fs = OpenRootFile())
    using (var br = new BinaryReader(fs))
    {
        while (fs.Position < fs.Length)
        {
            int count = br.ReadInt32();

            RootBlock block = new RootBlock();
            block.Unk1 = br.ReadUInt32();
            block.Flags = (LocaleFlags)br.ReadUInt32();

            if (block.Flags == LocaleFlags.None)
            {
                throw new Exception("block.Flags == LocaleFlags.None");
            }

            RootEntry[] entries = new RootEntry[count];

            for (var i = 0; i < count; ++i)
            {
                entries[i] = new RootEntry();
                entries[i].Block = block;
                entries[i].Unk1 = br.ReadInt32();
            }

            for (var i = 0; i < count; ++i)
            {
                entries[i].MD5 = br.ReadBytes(16);

                ulong hash = br.ReadUInt64();
                entries[i].Hash = hash;

                if (!RootData.ContainsKey(hash))
                {
                    RootData[hash] = new List<RootEntry>();
                }

                RootData[hash].Add(entries[i]);
            }

            if (worker != null)
            {
                worker.ReportProgress((int)((float)fs.Position / (float)fs.Length * 100));
            }
        }

        Logger.WriteLine("CASCHandler: loaded {0} root data", RootData.Count);
    }

    if (worker != null)
    {
        worker.ReportProgress(0);
    }
}
/// <summary>
/// Builds a CASC handler rooted at <paramref name="root"/>: loads local .idx
/// files (offline mode), parses "encoding" and "root", then builds the
/// folder tree from the list file.
/// </summary>
public CASCHandler(CASCFolder root, BackgroundWorker worker)
{
    if (!OnlineMode)
    {
        var idxFiles = GetIdxFiles(Properties.Settings.Default.WowPath);

        if (idxFiles.Count == 0)
            throw new FileNotFoundException("idx files missing!");

        worker.ReportProgress(0);

        int idxIndex = 0;

        foreach (var idx in idxFiles)
        {
            using (var fs = new FileStream(idx, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
            using (var br = new BinaryReader(fs))
            {
                int h2Len = br.ReadInt32();
                int h2Check = br.ReadInt32();
                byte[] h2 = br.ReadBytes(h2Len);

                // header is padded to the next 16-byte boundary
                long padPos = (8 + h2Len + 0x0F) & 0xFFFFFFF0;
                fs.Position = padPos;

                int dataLen = br.ReadInt32();
                int dataCheck = br.ReadInt32();

                int numBlocks = dataLen / 18;

                for (int i = 0; i < numBlocks; i++)
                {
                    IndexEntry info = new IndexEntry();
                    byte[] key = br.ReadBytes(9);
                    int indexHigh = br.ReadByte();
                    int indexLow = br.ReadInt32BE();

                    info.Index = (int)((byte)(indexHigh << 2) | ((indexLow & 0xC0000000) >> 30));
                    info.Offset = (indexLow & 0x3FFFFFFF);
                    info.Size = br.ReadInt32();

                    // duplicate keys occur; keep the first occurrence
                    if (!LocalIndexData.ContainsKey(key))
                        LocalIndexData.Add(key, info);
                }

                // data block is padded to a 4096-byte boundary, then followed
                // by numBlocks * 18 bytes of unknown data
                padPos = (dataLen + 0x0FFF) & 0xFFFFF000;
                fs.Position = padPos;
                fs.Position += numBlocks * 18;

                // FIX: the old check compared fs.Position with itself (always
                // false), so a truncated idx file was never detected; compare
                // against the file length instead.
                if (fs.Position != fs.Length)
                    throw new Exception("idx file under read");
            }

            worker.ReportProgress((int)((float)++idxIndex / (float)idxFiles.Count * 100));
        }

        Logger.WriteLine("CASCHandler: loaded {0} indexes", LocalIndexData.Count);
    }

    worker.ReportProgress(0);

    using (var fs = OpenEncodingFile())
    using (var br = new BinaryReader(fs))
    {
        br.ReadBytes(2); // EN
        byte b1 = br.ReadByte();
        byte b2 = br.ReadByte();
        byte b3 = br.ReadByte();
        ushort s1 = br.ReadUInt16();
        ushort s2 = br.ReadUInt16();
        int numEntries = br.ReadInt32BE();
        int i1 = br.ReadInt32BE();
        byte b4 = br.ReadByte();
        int entriesOfs = br.ReadInt32BE();

        fs.Position += entriesOfs; // skip strings
        fs.Position += numEntries * 32; // skip table A index (two 16-byte hashes per entry)

        for (int i = 0; i < numEntries; ++i)
        {
            ushort keysCount;

            while ((keysCount = br.ReadUInt16()) != 0)
            {
                int fileSize = br.ReadInt32BE();
                byte[] md5 = br.ReadBytes(16);

                var entry = new EncodingEntry();
                entry.Size = fileSize;

                for (int ki = 0; ki < keysCount; ++ki)
                {
                    byte[] key = br.ReadBytes(16);
                    entry.Keys.Add(key);
                }

                EncodingData.Add(md5, entry);
            }

            // skip zero padding up to the next chunk
            while (br.PeekChar() == 0)
                fs.Position++;

            worker.ReportProgress((int)((float)fs.Position / (float)fs.Length * 100));
        }

        Logger.WriteLine("CASCHandler: loaded {0} encoding data", EncodingData.Count);
    }

    worker.ReportProgress(0);

    using (var fs = OpenRootFile())
    using (var br = new BinaryReader(fs))
    {
        while (fs.Position < fs.Length)
        {
            int count = br.ReadInt32();

            RootBlock block = new RootBlock();
            block.Unk1 = br.ReadUInt32();
            block.Flags = (LocaleFlags)br.ReadUInt32();

            if (block.Flags == LocaleFlags.None)
                throw new Exception("block.Flags == LocaleFlags.None");

            RootEntry[] entries = new RootEntry[count];

            for (var i = 0; i < count; ++i)
            {
                entries[i] = new RootEntry();
                entries[i].Block = block;
                entries[i].Unk1 = br.ReadInt32();
            }

            for (var i = 0; i < count; ++i)
            {
                entries[i].MD5 = br.ReadBytes(16);

                ulong hash = br.ReadUInt64();
                entries[i].Hash = hash;

                if (!RootData.ContainsKey(hash))
                    RootData[hash] = new List<RootEntry>();

                RootData[hash].Add(entries[i]);
            }

            worker.ReportProgress((int)((float)fs.Position / (float)fs.Length * 100));
        }

        Logger.WriteLine("CASCHandler: loaded {0} root data", RootData.Count);
    }

    worker.ReportProgress(0);

    if (File.Exists(listFile))
    {
        FolderNames[Hasher.ComputeHash("root")] = "root";

        using (StreamReader sr = new StreamReader(listFile))
        {
            string file;
            int filesCount = 0;

            CASCFolder folder = root;

            while ((file = sr.ReadLine()) != null)
            {
                ulong fileHash = Hasher.ComputeHash(file);

                // skip invalid names
                if (!RootData.ContainsKey(fileHash))
                {
                    Logger.WriteLine("Invalid file name: {0}", file);
                    continue;
                }

                filesCount++;

                string[] parts = file.Split('\\');

                for (int i = 0; i < parts.Length; ++i)
                {
                    bool isFile = (i == parts.Length - 1);

                    ulong hash = isFile ? fileHash : Hasher.ComputeHash(parts[i]);

                    ICASCEntry entry = folder.GetEntry(hash);

                    if (entry == null)
                    {
                        if (isFile)
                        {
                            entry = new CASCFile(hash);
                            FileNames[hash] = file;
                        }
                        else
                        {
                            entry = new CASCFolder(hash);
                            FolderNames[hash] = parts[i];
                        }

                        folder.SubEntries[hash] = entry;

                        if (isFile)
                        {
                            folder = root;
                            break;
                        }
                    }

                    // NOTE(review): if a file entry already exists (duplicate
                    // list-file line), this leaves folder == null for the next
                    // line — presumed not to happen in practice; confirm.
                    folder = entry as CASCFolder;
                }

                if ((filesCount % 1000) == 0)
                    worker.ReportProgress((int)((float)sr.BaseStream.Position / (float)sr.BaseStream.Length * 100));
            }

            Logger.WriteLine("CASCHandler: loaded {0} file names", FileNames.Count);
        }
    }
    else
    {
        throw new FileNotFoundException("list file missing!");
    }
}
/// <summary>
/// Parses the "encoding" file: the header, table A (content MD5 -> file size
/// and encoding key) and table B (read but not stored).
/// </summary>
public EncodingHandler(BinaryReader stream, BackgroundWorkerEx worker)
{
    worker?.ReportProgress(0, "Loading \"encoding\"...");

    stream.Skip(2); // EN
    byte b1 = stream.ReadByte();
    byte checksumSizeA = stream.ReadByte();
    byte checksumSizeB = stream.ReadByte();
    ushort flagsA = stream.ReadUInt16();
    ushort flagsB = stream.ReadUInt16();
    int numEntriesA = stream.ReadInt32BE();
    int numEntriesB = stream.ReadInt32BE();
    byte b4 = stream.ReadByte();
    int stringBlockSize = stream.ReadInt32BE();

    stream.Skip(stringBlockSize); // string block (contents unused here)

    stream.Skip(numEntriesA * 32); // table A index: first hash + block hash per chunk

    long chunkStart = stream.BaseStream.Position;

    for (int i = 0; i < numEntriesA; ++i)
    {
        ushort keysCount;

        while ((keysCount = stream.ReadUInt16()) != 0)
        {
            int fileSize = stream.ReadInt32BE();
            byte[] md5 = stream.ReadBytes(16);

            EncodingEntry entry = new EncodingEntry();
            entry.Size = fileSize;

            // how do we handle multiple keys?
            for (int ki = 0; ki < keysCount; ++ki)
            {
                byte[] key = stream.ReadBytes(16);

                // use first key for now
                if (ki == 0)
                    entry.Key = key;
                else
                    Logger.WriteLine("Multiple encoding keys for MD5 {0}: {1}", md5.ToHexString(), key.ToHexString());
            }

            EncodingData.Add(md5, entry);
        }

        // Each chunk is CHUNK_SIZE (4096) bytes, zero padded at the end.
        // FIX: take the padding modulo CHUNK_SIZE — the old expression yielded
        // remaining == CHUNK_SIZE when the reader was already exactly on a
        // chunk boundary, silently skipping an entire extra chunk of entries.
        long remaining = (CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart) % CHUNK_SIZE)) % CHUNK_SIZE;

        if (remaining > 0)
            stream.BaseStream.Position += remaining;

        worker?.ReportProgress((int)((i + 1) / (float)numEntriesA * 100));
    }

    stream.Skip(numEntriesB * 32); // table B index: first key + block hash per chunk

    long chunkStart2 = stream.BaseStream.Position;

    for (int i = 0; i < numEntriesB; ++i)
    {
        byte[] key = stream.ReadBytes(16);
        int stringIndex = stream.ReadInt32BE();
        byte unk1 = stream.ReadByte();
        int fileSize = stream.ReadInt32BE();

        // Same chunk-boundary fix as above; table B values are discarded, this
        // loop only advances the stream position.
        long remaining = (CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart2) % CHUNK_SIZE)) % CHUNK_SIZE;

        if (remaining > 0)
            stream.BaseStream.Position += remaining;
    }

    // string block till the end of file
}
/// <summary>
/// Looks up the encoding entry for a content MD5; returns false when absent.
/// </summary>
public bool GetEntry(MD5Hash md5, out EncodingEntry enc) => EncodingData.TryGetValue(md5, out enc);