public Destiny2RootHandler(BinaryReader stream, BackgroundWorkerEx worker)
{
    // Destiny 2 does not ship a parsable root manifest — the "root" entry
    // points at the game executable — so there is nothing to read here.
    worker?.ReportProgress(0, "Loading \"root\"...");
    worker?.ReportProgress(100);
}
public Wc3RootHandler(BinaryReader stream, BackgroundWorkerEx worker)
{
    worker?.ReportProgress(0, "Loading \"root\"...");

    // The WC3 root is a plain-text listing, one "name|cKey|locale[|extra]" record per line.
    using (StreamReader reader = new StreamReader(stream.BaseStream))
    {
        for (string row = reader.ReadLine(); row != null; row = reader.ReadLine())
        {
            string[] fields = row.Split('|');

            if (fields.Length != 3 && fields.Length != 4)
            {
                throw new InvalidDataException("tokens.Length != 3 && tokens.Length != 4");
            }

            string name;

            if (fields[0].IndexOf(':') == -1)
            {
                name = fields[0];
            }
            else
            {
                // Colon-separated segments are directory components of the file path.
                string[] parts = fields[0].Split(':');

                if (parts.Length < 2 || parts.Length > 4)
                {
                    throw new InvalidDataException("tokens2.Length");
                }

                name = Path.Combine(parts);
            }

            // An unrecognized locale token falls back to "all locales".
            if (!Enum.TryParse(fields[2], out LocaleFlags locale))
            {
                locale = LocaleFlags.All;
            }

            ulong nameHash = Hasher.ComputeHash(name);

            RootData[nameHash] = new RootEntry()
            {
                LocaleFlags = locale,
                ContentFlags = ContentFlags.None,
                cKey = fields[1].FromHexString().ToMD5()
            };

            CASCFile.Files[nameHash] = new CASCFile(nameHash, name);
        }
    }

    worker?.ReportProgress(100);
}
/// <summary>
/// Parses the CASC "download" manifest: a fixed header, one entry per file
/// (eKey + 10 opaque bytes), then a set of named tags, each carrying a bit
/// mask with one bit per file.
/// </summary>
public DownloadHandler(BinaryReader stream, BackgroundWorkerEx worker)
{
    worker?.ReportProgress(0, "Loading \"download\"...");

    stream.Skip(2); // DL

    // Header bytes; meaning not established by this code (version/key sizes?) — TODO confirm.
    byte b1 = stream.ReadByte();
    byte b2 = stream.ReadByte();
    byte b3 = stream.ReadByte();

    int numFiles = stream.ReadInt32BE();

    short numTags = stream.ReadInt16BE();

    // One mask bit per file, rounded up to whole bytes.
    int numMaskBytes = (numFiles + 7) / 8;

    for (int i = 0; i < numFiles; i++)
    {
        MD5Hash key = stream.Read <MD5Hash>();

        // 10 unknown bytes per entry are skipped (kept below as commented-out code).
        //byte[] unk = stream.ReadBytes(0xA);
        stream.Skip(0xA);

        //var entry = new DownloadEntry() { Index = i, Unk = unk };
        var entry = new DownloadEntry() { Index = i };

        DownloadData.Add(key, entry);
        worker?.ReportProgress((int)((i + 1) / (float)numFiles * 100));
    }

    for (int i = 0; i < numTags; i++)
    {
        DownloadTag tag = new DownloadTag();
        string name = stream.ReadCString();
        tag.Type = stream.ReadInt16BE();

        byte[] bits = stream.ReadBytes(numMaskBytes);

        // Reverse the bit order within each mask byte (the manifest stores
        // MSB-first, BitArray reads LSB-first). This is the classic
        // multiply/mask/mod bit-reversal trick from Bit Twiddling Hacks.
        for (int j = 0; j < numMaskBytes; j++)
            bits[j] = (byte)((bits[j] * 0x0202020202 & 0x010884422010) % 1023);

        tag.Bits = new BitArray(bits);

        Tags.Add(name, tag);
    }
}
/// <summary>
/// Parses the Overwatch root file: an ASCII table of '|'-separated rows
/// (row 0 is a header and is skipped). Each "RDEV" .apm entry for the
/// scanned language is registered and its APM package file is loaded.
/// </summary>
public OwRootHandler(BinaryReader stream, BackgroundWorkerEx worker, CASCHandler casc)
{
    worker?.ReportProgress(0, "Loading \"root\"...");

    // The whole root is text; read it in one go and split into rows.
    string str = Encoding.ASCII.GetString(stream.ReadBytes((int)stream.BaseStream.Length));

    string[] array = str.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries);

    List <string> APMNames = new List <string>();

    // i = 1: skip the header row.
    for (int i = 1; i < array.Length; i++)
    {
        string[] filedata = array[i].Split('|');

        // Column 4 is the file name; column 0 its content hash — layout assumed from usage, TODO confirm.
        string name = filedata[4];

        if (Path.GetExtension(name) == ".apm" && name.Contains("RDEV"))
        {
            APMNames.Add(Path.GetFileNameWithoutExtension(name));

            // Only load the APM matching the configured language (e.g. "LenUS").
            if (!name.Contains("L" + LanguageScan))
            {
                continue;
            }

            // add apm file for dev purposes
            ulong apmNameHash = Hasher.ComputeHash(name);
            MD5Hash apmMD5 = filedata[0].ToByteArray().ToMD5();

            _rootData[apmNameHash] = new OWRootEntry()
            {
                baseEntry = new RootEntry() { MD5 = apmMD5, LocaleFlags = LocaleFlags.All, ContentFlags = ContentFlags.None }
            };

            CASCFile.Files[apmNameHash] = new CASCFile(apmNameHash, name);

            // Skip APMs whose content is not present in the encoding table.
            if (!casc.Encoding.GetEntry(apmMD5, out EncodingEntry apmEnc))
            {
                continue;
            }

            using (Stream apmStream = casc.OpenFile(apmEnc.Key))
            {
                apmFiles.Add(new APMFile(name, apmStream, casc));
            }
        }

        worker?.ReportProgress((int)(i / (array.Length / 100f)));
    }

    APMList = APMNames.ToArray();
    APMNames.Clear();
}
/// <summary>
/// Parses the CASC "install" manifest: tags (each with a per-file bit mask)
/// followed by the file entries themselves.
/// </summary>
public InstallHandler(BinaryReader stream, BackgroundWorkerEx worker)
{
    worker?.ReportProgress(0, "Loading \"install\"...");

    stream.ReadBytes(2); // IN

    byte b1 = stream.ReadByte();
    byte b2 = stream.ReadByte();

    short tagCount = stream.ReadInt16BE();
    int fileCount = stream.ReadInt32BE();

    // One mask bit per file, rounded up to whole bytes.
    int maskLength = (fileCount + 7) / 8;

    List <InstallTag> tagList = new List <InstallTag>(tagCount);

    for (int tagIndex = 0; tagIndex < tagCount; tagIndex++)
    {
        InstallTag tag = new InstallTag()
        {
            Name = stream.ReadCString(),
            Type = stream.ReadInt16BE()
        };

        byte[] mask = stream.ReadBytes(maskLength);

        // Reverse the bit order of every mask byte (manifest is MSB-first,
        // BitArray is LSB-first) via the multiply/mask/mod trick.
        for (int byteIndex = 0; byteIndex < maskLength; byteIndex++)
            mask[byteIndex] = (byte)((mask[byteIndex] * 0x0202020202 & 0x010884422010) % 1023);

        tag.Bits = new BitArray(mask);

        tagList.Add(tag);
    }

    for (int fileIndex = 0; fileIndex < fileCount; fileIndex++)
    {
        InstallEntry entry = new InstallEntry()
        {
            Name = stream.ReadCString(),
            MD5 = stream.Read <MD5Hash>(),
            Size = stream.ReadInt32BE()
        };

        InstallData.Add(entry);

        // Attach every tag whose mask has this file's bit set.
        entry.Tags = tagList.FindAll(t => t.Bits[fileIndex]);

        worker?.ReportProgress((int)((fileIndex + 1) / (float)fileCount * 100));
    }
}
/// <summary>
/// Copies <paramref name="src"/> to <paramref name="dst"/>, reporting percent
/// progress against the expected length <paramref name="len"/> and honoring
/// cancellation via <paramref name="progressReporter"/>.
/// </summary>
/// <param name="src">Stream to read from (read until it returns 0 bytes).</param>
/// <param name="dst">Stream to write to.</param>
/// <param name="len">Expected total byte count; used only for progress math.</param>
/// <param name="progressReporter">Optional progress/cancellation sink.</param>
public static void CopyToStream(this Stream src, Stream dst, long len, BackgroundWorkerEx progressReporter = null)
{
    long done = 0;
#if NET5_0_OR_GREATER
    Span<byte> buf = stackalloc byte[0x1000];
#else
    byte[] buf = new byte[0x1000];
#endif
    int count;
    do
    {
        if (progressReporter != null && progressReporter.CancellationPending)
            return;
#if NET5_0_OR_GREATER
        count = src.Read(buf);
        dst.Write(buf.Slice(0, count));
#else
        count = src.Read(buf, 0, buf.Length);
        dst.Write(buf, 0, count);
#endif
        done += count;

        // Fix: len <= 0 used to divide by zero (float Infinity cast to int),
        // feeding garbage to ReportProgress; also clamp to 100 in case the
        // source yields more bytes than advertised.
        if (len > 0)
            progressReporter?.ReportProgress((int)Math.Min(done / (float)len * 100, 100));
    } while (count > 0);
}
/// <summary>
/// Parses the StarCraft root file: one "name[:locale]|md5|..." text record per
/// line. Names may carry a locale suffix after ':'.
/// </summary>
public S1RootHandler(BinaryReader stream, BackgroundWorkerEx worker)
{
    worker?.ReportProgress(0, "Loading \"root\"...");

    using (StreamReader sr = new StreamReader(stream.BaseStream))
    {
        string line;

        while ((line = sr.ReadLine()) != null)
        {
            string[] tokens = line.Split('|');

            string file;
            LocaleFlags locale = LocaleFlags.All;

            if (tokens[0].IndexOf(':') != -1)
            {
                string[] tokens2 = tokens[0].Split(':');
                file = tokens2[0];

                // Fix: use TryParse instead of Enum.Parse so an unrecognized
                // locale token degrades to LocaleFlags.All instead of throwing
                // ArgumentException (matches the Wc3 root handler's behavior).
                if (!Enum.TryParse(tokens2[1], out locale))
                    locale = LocaleFlags.All;
            }
            else
            {
                file = tokens[0];
            }

            ulong fileHash = Hasher.ComputeHash(file);

            RootData[fileHash] = new RootEntry()
            {
                LocaleFlags = locale,
                ContentFlags = ContentFlags.None,
                MD5 = tokens[1].ToByteArray().ToMD5()
            };

            CASCFile.Files[fileHash] = new CASCFile(fileHash, file);
        }
    }

    worker?.ReportProgress(100);
}
/// <summary>
/// Registers every entry already parsed into D3RootData as a named file,
/// reporting percent progress across the total entry count.
/// </summary>
public override void LoadListFile(string path, BackgroundWorkerEx worker = null)
{
    worker?.ReportProgress(0, "Loading \"listfile\"...");

    Logger.WriteLine("D3RootHandler: loading file names...");

    int total = D3RootData.Sum(p => p.Value.Count);
    int loaded = 0;

    foreach (var pair in D3RootData)
    {
        foreach (var entry in pair.Value)
        {
            AddFile(pair.Key, entry);

            loaded++;
            worker?.ReportProgress((int)(loaded / (float)total * 100));
        }
    }

    Logger.WriteLine("D3RootHandler: loaded {0} file names", loaded);
}
/// <summary>
/// Creates a CDNIndexHandler and loads every archive index listed in the
/// config, downloading in online mode or opening from disk otherwise.
/// </summary>
public static CDNIndexHandler Initialize(CASCConfig config, BackgroundWorkerEx worker)
{
    var handler = new CDNIndexHandler(config, worker);

    worker?.ReportProgress(0, "Loading \"CDN indexes\"...");

    for (int idx = 0; idx < config.Archives.Count; idx++)
    {
        string archiveName = config.Archives[idx];

        if (config.OnlineMode)
        {
            handler.DownloadIndexFile(archiveName, idx);
        }
        else
        {
            handler.OpenIndexFile(archiveName, idx);
        }

        worker?.ReportProgress((int)((idx + 1) / (float)config.Archives.Count * 100));
    }

    return handler;
}
/// <summary>
/// Opens a data file by its key, preferring the local cache and falling back
/// to a direct CDN download.
/// </summary>
public Stream OpenDataFileDirect(MD5Hash key)
{
    var keyStr = key.ToHexString().ToLower();

    worker?.ReportProgress(0, string.Format("Downloading \"{0}\" file...", keyStr));

    // CDN layout: <cdnPath>/data/<hex[0..2]>/<hex[2..4]>/<full key>
    string file = config.CDNPath + "/data/" + keyStr.Substring(0, 2) + "/" + keyStr.Substring(2, 2) + "/" + keyStr;
    string url = "http://" + config.CDNHost + "/" + file;

    Stream cached = Cache.OpenFile(file, url, false);

    // Cache miss (null) falls through to a live download.
    return cached ?? downloader.OpenFile(url);
}
/// <summary>
/// Copies <paramref name="src"/> to <paramref name="dst"/>, reporting percent
/// progress against <paramref name="len"/> via the instance's progressReporter
/// and stopping early if cancellation is requested.
/// </summary>
private void CopyToStream(Stream src, Stream dst, long len)
{
    long done = 0;

    byte[] buf = new byte[0x1000];

    int count;

    do
    {
        if (progressReporter != null && progressReporter.CancellationPending)
            return;

        count = src.Read(buf, 0, buf.Length);
        dst.Write(buf, 0, count);

        done += count;

        // Fix: len <= 0 used to divide by zero (float Infinity cast to int),
        // feeding garbage to ReportProgress; also clamp to 100 in case the
        // source yields more bytes than advertised.
        if (len > 0)
            progressReporter?.ReportProgress((int)Math.Min(done / (float)len * 100, 100));
    } while (count > 0);
}
/// <summary>
/// Opens a data file by its key. A cache hit is copied into an in-memory
/// stream so the caller gets an independent, seekable stream; a miss is
/// fetched directly from the CDN.
/// </summary>
public Stream OpenDataFileDirect(MD5Hash key)
{
    var keyStr = key.ToHexString().ToLower();

    worker?.ReportProgress(0, string.Format("Downloading \"{0}\" file...", keyStr));

    // CDN layout: <cdnPath>/data/<hex[0..2]>/<hex[2..4]>/<full key>
    string file = config.CDNPath + "/data/" + keyStr.Substring(0, 2) + "/" + keyStr.Substring(2, 2) + "/" + keyStr;

    Stream cached = CDNCache.Instance.OpenFile(file, false);

    if (cached != null)
    {
        cached.Position = 0;

        MemoryStream copy = new MemoryStream();
        cached.CopyTo(copy);
        copy.Position = 0;

        return copy;
    }

    return OpenFile("http://" + config.CDNHost + "/" + file);
}
/// <summary>
/// Copies <paramref name="src"/> to <paramref name="dst"/> in 16 KiB chunks,
/// reporting percent progress against the expected length
/// <paramref name="len"/> and honoring cancellation.
/// </summary>
/// <param name="src">Stream to read from (read until it returns 0 bytes).</param>
/// <param name="dst">Stream to write to.</param>
/// <param name="len">Expected total byte count; used only for progress math.</param>
/// <param name="progressReporter">Optional progress/cancellation sink.</param>
public static void CopyToStream(this Stream src, Stream dst, long len, BackgroundWorkerEx progressReporter = null)
{
    long done = 0;

    // TODO: Span<byte>+stackalloc
    byte[] buf = new byte[0x4000];

    int count;

    do
    {
        if (progressReporter != null && progressReporter.CancellationPending)
            return;

        count = src.Read(buf, 0, buf.Length);
        dst.Write(buf, 0, count);

        done += count;

        // Fix: len <= 0 used to divide by zero (float Infinity cast to int),
        // feeding garbage to ReportProgress; also clamp to 100 in case the
        // source yields more bytes than advertised.
        if (len > 0)
            progressReporter?.ReportProgress((int)Math.Min(done / (float)len * 100, 100));
    } while (count > 0);
}
/// <summary>
/// Parses the Diablo 3 root file: a list of named sub-roots (md5 + name),
/// each expanded from the encoding table into typed D3RootEntry lists, then
/// loads the CoreTOC.dat and Packages.dat metadata files from the "Base" root.
/// </summary>
public D3RootHandler(BinaryReader stream, BackgroundWorkerEx worker, CASCHandler casc)
{
    worker?.ReportProgress(0, "Loading \"root\"...");

    // Four header bytes; meaning not established by this code — TODO confirm (signature/version?).
    byte b1 = stream.ReadByte();
    byte b2 = stream.ReadByte();
    byte b3 = stream.ReadByte();
    byte b4 = stream.ReadByte();

    int count = stream.ReadInt32();

    for (int j = 0; j < count; j++)
    {
        MD5Hash md5 = stream.Read <MD5Hash>();
        string name = stream.ReadCString();

        var entries = new List <D3RootEntry>();
        D3RootData[name] = entries;

        // Sub-roots missing from the encoding table stay registered but empty.
        if (!casc.Encoding.GetEntry(md5, out EncodingEntry enc))
        {
            continue;
        }

        // Each sub-root file holds three consecutive entry lists (types 0, 1, 2).
        using (BinaryReader s = new BinaryReader(casc.OpenFile(enc.Key)))
        {
            uint magic = s.ReadUInt32();

            int nEntries0 = s.ReadInt32();

            for (int i = 0; i < nEntries0; i++)
            {
                entries.Add(D3RootEntry.Read(0, s));
            }

            int nEntries1 = s.ReadInt32();

            for (int i = 0; i < nEntries1; i++)
            {
                entries.Add(D3RootEntry.Read(1, s));
            }

            int nNamedEntries = s.ReadInt32();

            for (int i = 0; i < nNamedEntries; i++)
            {
                entries.Add(D3RootEntry.Read(2, s));
            }
        }

        // count + 2: the two extra steps are CoreTOC.dat and Packages.dat below.
        worker?.ReportProgress((int)((j + 1) / (float)(count + 2) * 100));
    }

    // Parse CoreTOC.dat
    // NOTE(review): Find returns null if the entry is absent and GetEntry's
    // result is unchecked — both would NRE on malformed data; TODO confirm
    // "Base"/"CoreTOC.dat" are guaranteed present.
    var coreTocEntry = D3RootData["Base"].Find(e => e.Name == "CoreTOC.dat");

    casc.Encoding.GetEntry(coreTocEntry.MD5, out EncodingEntry enc1);

    using (var file = casc.OpenFile(enc1.Key))
        tocParser = new CoreTOCParser(file);

    worker?.ReportProgress((int)((count + 1) / (float)(count + 2) * 100));

    // Parse Packages.dat
    var pkgEntry = D3RootData["Base"].Find(e => e.Name == "Data_D3\\PC\\Misc\\Packages.dat");

    casc.Encoding.GetEntry(pkgEntry.MD5, out EncodingEntry enc2);

    using (var file = casc.OpenFile(enc2.Key))
        pkgParser = new PackagesParser(file);

    worker?.ReportProgress(100);
}
/// <summary>
/// Parses the legacy CASC "encoding" file: header, skipped string block,
/// skipped A-page index, then the A-pages mapping content MD5 -> encoding
/// key(s) and file size. B-pages (key -> ESpec) are read but discarded.
/// </summary>
public EncodingHandler(BinaryReader stream, BackgroundWorkerEx worker)
{
    worker?.ReportProgress(0, "Loading \"encoding\"...");

    stream.Skip(2); // EN

    byte b1 = stream.ReadByte();
    byte checksumSizeA = stream.ReadByte();
    byte checksumSizeB = stream.ReadByte();
    ushort flagsA = stream.ReadUInt16();
    ushort flagsB = stream.ReadUInt16();
    int numEntriesA = stream.ReadInt32BE();
    int numEntriesB = stream.ReadInt32BE();
    byte b4 = stream.ReadByte();
    int stringBlockSize = stream.ReadInt32BE();

    // ESpec string block is not needed here; skip it.
    stream.Skip(stringBlockSize);
    //string[] strings = Encoding.ASCII.GetString(stream.ReadBytes(stringBlockSize)).Split(new[] { '\0' }, StringSplitOptions.RemoveEmptyEntries);

    // Skip the A-page lookup index (first-hash + block-hash, 32 bytes per page).
    stream.Skip(numEntriesA * 32);
    //for (int i = 0; i < numEntriesA; ++i)
    //{
    //    byte[] firstHash = stream.ReadBytes(16);
    //    byte[] blockHash = stream.ReadBytes(16);
    //}

    long chunkStart = stream.BaseStream.Position;

    for (int i = 0; i < numEntriesA; ++i)
    {
        ushort keysCount;

        // Entries repeat until a zero key count terminates the page.
        while ((keysCount = stream.ReadUInt16()) != 0)
        {
            int fileSize = stream.ReadInt32BE();
            MD5Hash md5 = stream.Read <MD5Hash>();

            EncodingEntry entry = new EncodingEntry() { Size = fileSize };

            // how do we handle multiple keys?
            for (int ki = 0; ki < keysCount; ++ki)
            {
                MD5Hash key = stream.Read <MD5Hash>();

                // use first key for now
                if (ki == 0)
                {
                    entry.Key = key;
                }
                else
                {
                    Logger.WriteLine("Multiple encoding keys for MD5 {0}: {1}", md5.ToHexString(), key.ToHexString());
                }
            }

            //Encodings[md5] = entry;
            EncodingData.Add(md5, entry);
        }

        // each chunk is 4096 bytes, and zero padding at the end
        // NOTE(review): if a page ends exactly on a chunk boundary, remaining
        // equals CHUNK_SIZE and a full chunk is skipped — presumably never
        // happens with real data; TODO confirm.
        long remaining = CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart) % CHUNK_SIZE);

        if (remaining > 0)
        {
            stream.BaseStream.Position += remaining;
        }

        worker?.ReportProgress((int)((i + 1) / (float)numEntriesA * 100));
    }

    // Skip the B-page lookup index.
    stream.Skip(numEntriesB * 32);
    //for (int i = 0; i < numEntriesB; ++i)
    //{
    //    byte[] firstKey = stream.ReadBytes(16);
    //    byte[] blockHash = stream.ReadBytes(16);
    //}

    long chunkStart2 = stream.BaseStream.Position;

    // B-pages: only the first entry of each page is read, then the rest of
    // the chunk is skipped; the data is currently unused.
    for (int i = 0; i < numEntriesB; ++i)
    {
        byte[] key = stream.ReadBytes(16);
        int stringIndex = stream.ReadInt32BE();
        byte unk1 = stream.ReadByte();
        int fileSize = stream.ReadInt32BE();

        // each chunk is 4096 bytes, and zero padding at the end
        long remaining = CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart2) % CHUNK_SIZE);

        if (remaining > 0)
        {
            stream.BaseStream.Position += remaining;
        }
    }

    // string block till the end of file
}
/// <summary>
/// Parses the WoW root file in both formats: the modern "TSFM"-tagged layout
/// (with optional per-block name hashes, absent when ContentFlags.NoNameHash
/// is set) and the legacy layout (cKey + name hash interleaved). Populates
/// RootData, FileDataStore and FileDataStoreReverse.
/// </summary>
public WowRootHandler(BinaryReader stream, BackgroundWorkerEx worker)
{
    worker?.ReportProgress(0, "Loading \"root\"...");

    int magic = stream.ReadInt32();

    int numFilesTotal = 0, numFilesWithNameHash = 0, numFilesRead = 0;

    const int TSFMMagic = 0x4D465354; // "TSFM"

    if (magic == TSFMMagic)
    {
        numFilesTotal = stream.ReadInt32();
        numFilesWithNameHash = stream.ReadInt32();
    }
    else
    {
        // Legacy format has no magic: rewind the 4 bytes we just consumed.
        stream.BaseStream.Position -= 4;
    }

    while (stream.BaseStream.Position < stream.BaseStream.Length)
    {
        int count = stream.ReadInt32();

        numFilesRead += count;

        ContentFlags contentFlags = (ContentFlags)stream.ReadUInt32();
        LocaleFlags localeFlags = (LocaleFlags)stream.ReadUInt32();

        if (localeFlags == LocaleFlags.None)
        {
            throw new InvalidDataException("block.LocaleFlags == LocaleFlags.None");
        }

        // Sanity check: any non-empty flag set must contain at least one known flag.
        if (contentFlags != ContentFlags.None && (contentFlags & (ContentFlags.F00000001 | ContentFlags.Windows | ContentFlags.MacOS | ContentFlags.Alternate | ContentFlags.F00020000 | ContentFlags.F00080000 | ContentFlags.F00100000 | ContentFlags.F00200000 | ContentFlags.F00400000 | ContentFlags.F02000000 | ContentFlags.NotCompressed | ContentFlags.NoNameHash | ContentFlags.F20000000)) == 0)
        {
            throw new InvalidDataException("block.ContentFlags != ContentFlags.None");
        }

        RootEntry[] entries = new RootEntry[count];
        int[] filedataIds = new int[count];

        // FileDataIds are delta-encoded against the previous id + 1.
        int fileDataIndex = 0;

        for (var i = 0; i < count; ++i)
        {
            entries[i].LocaleFlags = localeFlags;
            entries[i].ContentFlags = contentFlags;

            filedataIds[i] = fileDataIndex + stream.ReadInt32();
            fileDataIndex = filedataIds[i] + 1;
        }

        //Console.WriteLine("Block: {0} {1} (size {2})", block.ContentFlags, block.LocaleFlags, count);

        ulong[] nameHashes = null;

        if (magic == TSFMMagic)
        {
            // TSFM: all cKeys first, then (optionally) all name hashes.
            for (var i = 0; i < count; ++i)
            {
                entries[i].cKey = stream.Read <MD5Hash>();
            }

            if ((contentFlags & ContentFlags.NoNameHash) == 0)
            {
                nameHashes = new ulong[count];

                for (var i = 0; i < count; ++i)
                {
                    nameHashes[i] = stream.ReadUInt64();
                }
            }
        }
        else
        {
            // Legacy: cKey and name hash interleaved per entry.
            nameHashes = new ulong[count];

            for (var i = 0; i < count; ++i)
            {
                entries[i].cKey = stream.Read <MD5Hash>();
                nameHashes[i] = stream.ReadUInt64();
            }
        }

        for (var i = 0; i < count; ++i)
        {
            int fileDataId = filedataIds[i];

            //Logger.WriteLine("filedataid {0}", fileDataId);

            ulong hash;

            // Blocks without name hashes get a synthetic hash derived from the id.
            if (nameHashes == null)
            {
                hash = FileDataHash.ComputeHash(fileDataId);
            }
            else
            {
                hash = nameHashes[i];
            }

            RootData.Add(fileDataId, entries[i]);

            //Console.WriteLine("File: {0:X8} {1:X16} {2}", entries[i].FileDataId, hash, entries[i].MD5.ToHexString());

            if (FileDataStore.TryGetValue(fileDataId, out ulong hash2))
            {
                if (hash2 == hash)
                {
                    // duplicate, skipping
                }
                else
                {
                    // Fix: corrected typo "miltiple" in the log message.
                    Logger.WriteLine("ERROR: got multiple hashes for filedataid {0}", fileDataId);
                }
                continue;
            }

            FileDataStore.Add(fileDataId, hash);
            FileDataStoreReverse.Add(hash, fileDataId);

            if (nameHashes != null)
            {
                // generate our custom hash as well so we can still find file without calling GetHashByFileDataId in some weird cases
                ulong fileDataHash = FileDataHash.ComputeHash(fileDataId);
                FileDataStoreReverse.Add(fileDataHash, fileDataId);
            }
        }

        worker?.ReportProgress((int)(stream.BaseStream.Position / (float)stream.BaseStream.Length * 100));
    }
}
/// <summary>
/// Parses the modern CASC "encoding" file: header, ESpec string block,
/// CKey pages mapping content key -> encoding key(s) + size, then EKey pages
/// whose ESpec strings are scanned for "e:{...}" encryption key names.
/// </summary>
public EncodingHandler(BinaryReader stream, BackgroundWorkerEx worker)
{
    worker?.ReportProgress(0, "Loading \"encoding\"...");

    stream.Skip(2); // EN

    byte Version = stream.ReadByte(); // must be 1
    byte CKeyLength = stream.ReadByte();
    byte EKeyLength = stream.ReadByte();
    int CKeyPageSize = stream.ReadInt16BE() * 1024; // KB to bytes
    int EKeyPageSize = stream.ReadInt16BE() * 1024; // KB to bytes
    int CKeyPageCount = stream.ReadInt32BE();
    int EKeyPageCount = stream.ReadInt32BE();
    byte unk1 = stream.ReadByte(); // must be 0
    int ESpecBlockSize = stream.ReadInt32BE();

    //stream.Skip(ESpecBlockSize);

    // Keep the ESpec strings: the EKey pages below index into them.
    string[] strings = Encoding.ASCII.GetString(stream.ReadBytes(ESpecBlockSize)).Split(new[] { '\0' }, StringSplitOptions.None);

    //for (int i = 0; i < strings.Length; i++)
    //{
    //    Logger.WriteLine($"ESpec {i:D6} {strings[i]}");
    //}

    // Skip the CKey page lookup index (first-hash + block-hash per page).
    stream.Skip(CKeyPageCount * 32);
    //ValueTuple<MD5Hash, MD5Hash>[] cKeyPageData = new ValueTuple<MD5Hash, MD5Hash>[CKeyPageCount];
    //for (int i = 0; i < CKeyPageCount; i++)
    //{
    //    MD5Hash firstHash = stream.Read<MD5Hash>();
    //    MD5Hash blockHash = stream.Read<MD5Hash>();
    //    cKeyPageData[i] = (firstHash, blockHash);
    //}

    long chunkStart = stream.BaseStream.Position;

    for (int i = 0; i < CKeyPageCount; i++)
    {
        byte keysCount;

        // Entries repeat until a zero key count terminates the page.
        while ((keysCount = stream.ReadByte()) != 0)
        {
            long fileSize = stream.ReadInt40BE();
            MD5Hash cKey = stream.Read <MD5Hash>();

            EncodingEntry entry = new EncodingEntry() { Size = fileSize, Keys = new List <MD5Hash>(keysCount) };

            // how do we handle multiple keys?
            for (int ki = 0; ki < keysCount; ++ki)
            {
                MD5Hash eKey = stream.Read <MD5Hash>();

                entry.Keys.Add(eKey);
                EKeyToCKey.Add(eKey, cKey);
                //Logger.WriteLine($"Encoding {i:D7} {ki:D2} {cKey.ToHexString()} {eKey.ToHexString()} {fileSize}");
            }

            EncodingData.Add(cKey, entry);
        }

        // each chunk is 4096 bytes, and zero padding at the end
        long remaining = CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart) % CHUNK_SIZE);

        if (remaining > 0)
        {
            stream.BaseStream.Position += remaining;
        }

        worker?.ReportProgress((int)((i + 1) / (float)CKeyPageCount * 100));
    }

    // Skip the EKey page lookup index.
    stream.Skip(EKeyPageCount * 32);
    //ValueTuple<MD5Hash, MD5Hash>[] eKeyPageData = new ValueTuple<MD5Hash, MD5Hash>[EKeyPageCount];
    //for (int i = 0; i < EKeyPageCount; i++)
    //{
    //    MD5Hash firstKey = stream.Read<MD5Hash>();
    //    MD5Hash blockHash = stream.Read<MD5Hash>();
    //    eKeyPageData[i] = (firstKey, blockHash);
    //}

    long chunkStart2 = stream.BaseStream.Position;

    // Extracts 16-hex-digit key names from ESpec strings like "...e:{KEYNAME,...}".
    Regex regex = new Regex(@"(?<=e:\{)([0-9A-F]{16})(?=,)", RegexOptions.Compiled);

    for (int i = 0; i < EKeyPageCount; i++)
    {
        while (true)
        {
            // each chunk is 4096 bytes, and zero padding at the end
            // An EKey record is 25 bytes (16 + 4 + 5); less than that left
            // in the chunk means only padding remains.
            long remaining = CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart2) % CHUNK_SIZE);

            if (remaining < 25)
            {
                stream.BaseStream.Position += remaining;
                break;
            }

            MD5Hash eKey = stream.Read <MD5Hash>();

            int eSpecIndex = stream.ReadInt32BE();
            long fileSize = stream.ReadInt40BE();

            // eSpecIndex == -1 marks the end of valid records in this chunk.
            // NOTE(review): `remaining` was computed before the 25-byte read,
            // so this skip lands 25 bytes past the chunk boundary — looks like
            // an off-by-25; TODO confirm against the file format.
            if (eSpecIndex == -1)
            {
                stream.BaseStream.Position += remaining;
                break;
            }

            string eSpec = strings[eSpecIndex];

            var matches = regex.Matches(eSpec);

            if (matches.Count != 0)
            {
                var keys = matches.Cast <Match>().Select(m => BitConverter.ToUInt64(m.Value.FromHexString(), 0)).ToList();
                EncryptionData.Add(eKey, keys);
                //Logger.WriteLine($"Encoding {i:D7} {eKey.ToHexString()} {eSpecIndex} {fileSize} {eSpec} {string.Join(",", keys.Select(x => $"{x:X16}"))}");
            }
            else
            {
                //Logger.WriteLine($"Encoding {i:D7} {eKey.ToHexString()} {eSpecIndex} {fileSize} {eSpec}");
            }
        }
    }

    // string block till the end of file
}
/// <summary>
/// Parses the legacy WoW root file: blocks of (count, contentFlags,
/// localeFlags) followed by delta-encoded fileDataIds, then interleaved
/// cKey + name-hash pairs. Populates RootData, FileDataStore and
/// FileDataStoreReverse.
/// </summary>
public WowRootHandler(BinaryReader stream, BackgroundWorkerEx worker)
{
    worker?.ReportProgress(0, "Loading \"root\"...");

    while (stream.BaseStream.Position < stream.BaseStream.Length)
    {
        int count = stream.ReadInt32();

        ContentFlags contentFlags = (ContentFlags)stream.ReadUInt32();
        LocaleFlags localeFlags = (LocaleFlags)stream.ReadUInt32();

        // Fix: throw the specific InvalidDataException instead of bare
        // Exception (still caught by existing catch (Exception) handlers,
        // consistent with the newer root handler).
        if (localeFlags == LocaleFlags.None)
        {
            throw new InvalidDataException("block.LocaleFlags == LocaleFlags.None");
        }

        // Sanity check: any non-empty flag set must contain at least one known flag.
        if (contentFlags != ContentFlags.None && (contentFlags & (ContentFlags.F00000008 | ContentFlags.F00000010 | ContentFlags.LowViolence | ContentFlags.NoCompression | ContentFlags.F20000000)) == 0)
        {
            throw new InvalidDataException("block.ContentFlags != ContentFlags.None");
        }

        RootEntry[] entries = new RootEntry[count];
        int[] filedataIds = new int[count];

        // FileDataIds are delta-encoded against the previous id + 1.
        int fileDataIndex = 0;

        for (var i = 0; i < count; ++i)
        {
            entries[i].LocaleFlags = localeFlags;
            entries[i].ContentFlags = contentFlags;

            filedataIds[i] = fileDataIndex + stream.ReadInt32();
            fileDataIndex = filedataIds[i] + 1;
        }

        //Console.WriteLine("Block: {0} {1} (size {2})", block.ContentFlags, block.LocaleFlags, count);

        for (var i = 0; i < count; ++i)
        {
            entries[i].MD5 = stream.Read <MD5Hash>();

            ulong hash = stream.ReadUInt64();

            RootData.Add(hash, entries[i]);

            //Console.WriteLine("File: {0:X8} {1:X16} {2}", entries[i].FileDataId, hash, entries[i].MD5.ToHexString());

            int fileDataId = filedataIds[i];

            if (FileDataStore.TryGetValue(fileDataId, out ulong hash2))
            {
                if (hash2 == hash)
                {
                    // duplicate, skipping
                    continue;
                }
                else
                {
                    // Fix: corrected typo "miltiple" in the log message.
                    Logger.WriteLine("ERROR: got multiple hashes for filedataid {0}", fileDataId);
                    continue;
                }
            }

            FileDataStore.Add(fileDataId, hash);
            FileDataStoreReverse.Add(hash, fileDataId);
        }

        worker?.ReportProgress((int)(stream.BaseStream.Position / (float)stream.BaseStream.Length * 100));
    }
}
/// <summary>
/// Loads file names from a plain-text listfile, keeping only names whose hash
/// exists in RootData, and simultaneously writes a pre-hashed binary cache
/// ("listfile.bin", grouped by directory) for faster subsequent loads.
/// </summary>
public override void LoadListFile(string path, BackgroundWorkerEx worker = null)
{
    CASCFile.Files.Clear();

    // Fast path: an up-to-date binary cache makes text parsing unnecessary.
    if (LoadPreHashedListFile("listfile.bin", path, worker))
    {
        return;
    }

    using (var _ = new PerfCounter("WowRootHandler::LoadListFile()"))
    {
        worker?.ReportProgress(0, "Loading \"listfile\"...");

        if (!File.Exists(path))
        {
            Logger.WriteLine("WowRootHandler: list file missing!");
            return;
        }

        Logger.WriteLine("WowRootHandler: loading file names...");

        // dir path -> (name hash -> file name without the dir part);
        // "" holds root-level files.
        Dictionary <string, Dictionary <ulong, string> > dirData = new Dictionary <string, Dictionary <ulong, string> >(StringComparer.OrdinalIgnoreCase)
        {
            [""] = new Dictionary <ulong, string>()
        };

        using (var fs = new FileStream("listfile.bin", FileMode.Create))
        using (var bw = new BinaryWriter(fs))
        using (var fs2 = File.Open(path, FileMode.Open))
        using (var sr = new StreamReader(fs2))
        {
            string file;

            while ((file = sr.ReadLine()) != null)
            {
                ulong fileHash = Hasher.ComputeHash(file);

                // skip invalid names
                if (!RootData.ContainsKey(fileHash))
                {
                    Logger.WriteLine("Invalid file name: {0}", file);
                    continue;
                }

                CASCFile.Files[fileHash] = new CASCFile(fileHash, file);

                // Bucket the name by its directory for the binary cache layout.
                int dirSepIndex = file.LastIndexOf('\\');

                if (dirSepIndex >= 0)
                {
                    string key = file.Substring(0, dirSepIndex);

                    if (!dirData.ContainsKey(key))
                    {
                        dirData[key] = new Dictionary <ulong, string>();
                    }

                    dirData[key][fileHash] = file.Substring(dirSepIndex + 1);
                }
                else
                {
                    dirData[""][fileHash] = file;
                }

                worker?.ReportProgress((int)(sr.BaseStream.Position / (float)sr.BaseStream.Length * 100));
            }

            // Serialize the cache: dir count, then per dir its name, file
            // count, and (hash, relative name) pairs.
            bw.Write(dirData.Count); // count of dirs

            foreach (var dir in dirData)
            {
                bw.Write(dir.Key); // dir name
                Logger.WriteLine(dir.Key);
                bw.Write(dirData[dir.Key].Count); // count of files in dir

                foreach (var fh in dirData[dir.Key])
                {
                    bw.Write(fh.Key); // file name hash
                    bw.Write(fh.Value); // file name (without dir name)
                }
            }

            Logger.WriteLine("WowRootHandler: loaded {0} valid file names", CASCFile.Files.Count);
        }

        // Stamp the cache with the source's mtime so staleness can be detected.
        File.SetLastWriteTime("listfile.bin", File.GetLastWriteTime(path));
    }
}
/// <summary>
/// Builds synthetic file names for everything reachable through the loaded
/// APM files: one "_index" entry per package, one "_bundle_..." entry per
/// package index, and one "files/TYPE/KEY.TYPE" entry per package record.
/// Duplicate hashes are logged and skipped.
/// </summary>
public override void LoadListFile(string path, BackgroundWorkerEx worker = null)
{
    worker?.ReportProgress(0, "Loading \"listfile\"...");

    Logger.WriteLine("OWRootHandler: loading file names...");

    // Progress denominator: total packages across all APMs, as "1% units".
    float pkgOnePct = apmFiles.Sum(a => a.Packages.Length) / 100f;

    int pkgCount = 0;

    foreach (var apm in apmFiles)
    {
        for (int i = 0; i < apm.Packages.Length; i++)
        {
            APMPackage package = apm.Packages[i];

            MD5Hash pkgIndexMD5 = package.indexContentKey;

            // Synthetic name: "<apm>/package_<index>_<packageKey>".
            string apmName = Path.GetFileNameWithoutExtension(apm.Name);
            string pkgName = string.Format("{0}/package_{1:X4}_{2:X16}", apmName, i, package.packageKey);
            string fakeName = string.Format("{0}_index", pkgName);

            ulong fileHash = Hasher.ComputeHash(fakeName);
            Logger.WriteLine("Adding package: {0:X16} {1}", fileHash, package.indexContentKey.ToHexString());

            // Same hash with a different MD5 indicates a real collision ("weird").
            if (_rootData.ContainsKey(fileHash))
            {
                if (!_rootData[fileHash].baseEntry.MD5.EqualsTo(package.indexContentKey))
                {
                    Logger.WriteLine("Weird duplicate package: {0:X16} {1}", fileHash, package.indexContentKey.ToHexString());
                }
                else
                {
                    Logger.WriteLine("Duplicate package: {0:X16} {1}", fileHash, package.indexContentKey.ToHexString());
                }
                continue;
            }

            _rootData[fileHash] = new OWRootEntry()
            {
                baseEntry = new RootEntry() { MD5 = pkgIndexMD5, LocaleFlags = LocaleFlags.All, ContentFlags = ContentFlags.None }
            };

            CASCFile.Files[fileHash] = new CASCFile(fileHash, fakeName);

            // Bundle entry for this package's index.
            PackageIndex pkgIndex = apm.Indexes[i];

            fakeName = string.Format("{0}_bundle_{1:X16}", pkgName, pkgIndex.bundleKey);

            fileHash = Hasher.ComputeHash(fakeName);
            Logger.WriteLine("Adding bundle: {0:X16} {1}", fileHash, pkgIndex.bundleContentKey.ToHexString());

            if (_rootData.ContainsKey(fileHash))
            {
                if (!_rootData[fileHash].baseEntry.MD5.EqualsTo(pkgIndex.bundleContentKey))
                {
                    Logger.WriteLine("Weird duplicate bundle: {0:X16} {1}", fileHash, pkgIndex.bundleContentKey.ToHexString());
                }
                else
                {
                    Logger.WriteLine("Duplicate bundle: {0:X16} {1}", fileHash, pkgIndex.bundleContentKey.ToHexString());
                }
                continue;
            }

            _rootData[fileHash] = new OWRootEntry()
            {
                baseEntry = new RootEntry() { MD5 = pkgIndex.bundleContentKey, LocaleFlags = LocaleFlags.All, ContentFlags = ContentFlags.None },
                pkgIndex = pkgIndex
            };

            CASCFile.Files[fileHash] = new CASCFile(fileHash, fakeName);

            // One entry per record: "files/<type>/<low 48 bits of key>.<type>".
            PackageIndexRecord[] records = apm.Records[i];

            for (int k = 0; k < records.Length; k++)
            {
                fakeName = string.Format("files/{0:X3}/{1:X12}.{0:X3}", KeyToTypeID(records[k].Key), records[k].Key & 0xFFFFFFFFFFFF);

                fileHash = Hasher.ComputeHash(fakeName);

                //Logger.WriteLine("Adding package record: key {0:X16} hash {1} flags {2:X8}", fileHash, records[k].contentKey.ToHexString(), records[k].flags);

                if (_rootData.ContainsKey(fileHash))
                {
                    if (!_rootData[fileHash].baseEntry.MD5.EqualsTo(records[k].ContentKey))
                    {
                        Logger.WriteLine("Weird duplicate package record: {0:X16} {1}", fileHash, records[k].ContentKey.ToHexString());
                    }
                    //else
                    //    Logger.WriteLine("Duplicate package record: {0:X16} {1}", fileHash, records[k].contentKey.ToHexString());
                    continue;
                }

                _rootData[fileHash] = new OWRootEntry()
                {
                    baseEntry = new RootEntry() { MD5 = records[k].ContentKey, LocaleFlags = LocaleFlags.All, ContentFlags = (ContentFlags)records[k].Flags },
                    pkgIndex = pkgIndex,
                    pkgIndexRec = records[k]
                };

                CASCFile.Files[fileHash] = new CASCFile(fileHash, fakeName);
            }

            worker?.ReportProgress((int)(++pkgCount / pkgOnePct));
        }
    }

    Logger.WriteLine("OWRootHandler: loaded {0} file names", _rootData.Count);
}
/// <summary>
/// Loads file names from a fileDataId-based listfile. Each line is
/// "fileDataId;name" (.csv) or "fileDataId name" (other extensions). Only ids
/// present in RootData are kept; names map to hashes via FileDataStore.
/// </summary>
public override void LoadListFile(string path, BackgroundWorkerEx worker = null)
{
    //CASCFile.Files.Clear();

    using (var _ = new PerfCounter("WowRootHandler::LoadListFile()"))
    {
        worker?.ReportProgress(0, "Loading \"listfile\"...");

        if (!File.Exists(path))
        {
            Logger.WriteLine("WowRootHandler: list file missing!");
            return;
        }

        bool isCsv = Path.GetExtension(path) == ".csv";

        Logger.WriteLine($"WowRootHandler: loading listfile {path}...");

        // Fix: File.OpenRead (read access, read share) instead of
        // File.Open(path, FileMode.Open), which requested ReadWrite access
        // with no sharing and failed on read-only or concurrently open files.
        using (var fs2 = File.OpenRead(path))
        using (var sr = new StreamReader(fs2))
        {
            string line;

            char[] splitChar = isCsv ? new char[] { ';' } : new char[] { ' ' };

            while ((line = sr.ReadLine()) != null)
            {
                // Split into at most two parts so names containing the
                // separator stay intact.
                string[] tokens = line.Split(splitChar, 2);

                if (tokens.Length != 2)
                {
                    Logger.WriteLine($"Invalid line in listfile: {line}");
                    continue;
                }

                if (!int.TryParse(tokens[0], out int fileDataId))
                {
                    Logger.WriteLine($"Invalid line in listfile: {line}");
                    continue;
                }

                // skip invalid names
                if (!RootData.ContainsKey(fileDataId))
                {
                    Logger.WriteLine($"Invalid fileDataId in listfile: {line}");
                    continue;
                }

                string file = tokens[1];

                ulong fileHash = FileDataStore[fileDataId];

                if (!CASCFile.Files.ContainsKey(fileHash))
                {
                    CASCFile.Files.Add(fileHash, new CASCFile(fileHash, file));
                }
                else
                {
                    Logger.WriteLine($"Duplicate fileDataId {fileDataId} detected: {line}");
                }

                worker?.ReportProgress((int)(sr.BaseStream.Position / (float)sr.BaseStream.Length * 100));
            }
        }

        Logger.WriteLine($"WowRootHandler: loaded {CASCFile.Files.Count} valid file names");
    }
}
/// <summary>
/// Parses the CASC "encoding" file (40-bit-size variant): header, skipped
/// ESpec block, then CKey pages mapping content key -> first encoding key and
/// file size. EKey pages are read but discarded.
/// </summary>
public EncodingHandler(BinaryReader stream, BackgroundWorkerEx worker)
{
    worker?.ReportProgress(0, "Loading \"encoding\"...");

    stream.Skip(2); // EN

    byte Version = stream.ReadByte(); // must be 1
    byte CKeyLength = stream.ReadByte();
    byte EKeyLength = stream.ReadByte();
    int CKeyPageSize = stream.ReadInt16BE() * 1024; // KB to bytes
    int EKeyPageSize = stream.ReadInt16BE() * 1024; // KB to bytes
    int CKeyPageCount = stream.ReadInt32BE();
    int EKeyPageCount = stream.ReadInt32BE();
    byte unk1 = stream.ReadByte(); // must be 0
    int ESpecBlockSize = stream.ReadInt32BE();

    // ESpec strings are not needed by this variant; skip them.
    stream.Skip(ESpecBlockSize);
    //string[] strings = Encoding.ASCII.GetString(stream.ReadBytes(ESpecBlockSize)).Split(new[] { '\0' }, StringSplitOptions.RemoveEmptyEntries);

    // Skip the CKey page lookup index (first-hash + block-hash per page).
    stream.Skip(CKeyPageCount * 32);
    //ValueTuple<byte[], byte[]>[] aEntries = new ValueTuple<byte[], byte[]>[CKeyPageCount];
    //for (int i = 0; i < CKeyPageCount; ++i)
    //{
    //    byte[] firstHash = stream.ReadBytes(16);
    //    byte[] blockHash = stream.ReadBytes(16);
    //    aEntries[i] = (firstHash, blockHash);
    //}

    long chunkStart = stream.BaseStream.Position;

    for (int i = 0; i < CKeyPageCount; ++i)
    {
        byte keysCount;

        // Entries repeat until a zero key count terminates the page.
        while ((keysCount = stream.ReadByte()) != 0)
        {
            long fileSize = stream.ReadInt40BE();
            MD5Hash cKey = stream.Read <MD5Hash>();

            EncodingEntry entry = new EncodingEntry() { Size = fileSize };

            // how do we handle multiple keys?
            for (int ki = 0; ki < keysCount; ++ki)
            {
                MD5Hash eKey = stream.Read <MD5Hash>();

                // use first key for now
                if (ki == 0)
                {
                    entry.Key = eKey;
                }
                //else
                //    Logger.WriteLine("Multiple encoding keys for MD5 {0}: {1}", md5.ToHexString(), key.ToHexString());

                //Logger.WriteLine("Encoding {0:D2} {1} {2} {3} {4}", keysCount, aEntries[i].Item1.ToHexString(), aEntries[i].Item2.ToHexString(), md5.ToHexString(), key.ToHexString());
            }

            //Encodings[md5] = entry;
            EncodingData.Add(cKey, entry);
        }

        // each chunk is 4096 bytes, and zero padding at the end
        long remaining = CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart) % CHUNK_SIZE);

        if (remaining > 0)
        {
            stream.BaseStream.Position += remaining;
        }

        worker?.ReportProgress((int)((i + 1) / (float)CKeyPageCount * 100));
    }

    // Skip the EKey page lookup index.
    stream.Skip(EKeyPageCount * 32);
    //for (int i = 0; i < EKeyPageCount; ++i)
    //{
    //    byte[] firstKey = stream.ReadBytes(16);
    //    byte[] blockHash = stream.ReadBytes(16);
    //}

    long chunkStart2 = stream.BaseStream.Position;

    // EKey pages: only the first record of each page is read, then the rest
    // of the chunk is skipped; the data is currently unused.
    for (int i = 0; i < EKeyPageCount; ++i)
    {
        byte[] eKey = stream.ReadBytes(16);

        int eSpecIndex = stream.ReadInt32BE();
        long fileSize = stream.ReadInt40BE();

        // each chunk is 4096 bytes, and zero padding at the end
        long remaining = CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart2) % CHUNK_SIZE);

        if (remaining > 0)
        {
            stream.BaseStream.Position += remaining;
        }
    }

    // string block till the end of file

    //EncodingData.Dump();
}