Signature summary: public void ReportProgress(int percentProgress) — parameter: percentProgress (int); returns: void.
public OwRootHandler(BinaryReader stream, BackgroundWorkerEx worker, CASCHandler casc)
{
    // Parses the Overwatch "root" file: a pipe-delimited ASCII listing whose
    // entries reference .apm package manifests. Only RDEV manifests matching the
    // current LanguageScan are opened and loaded into apmFiles.
    worker?.ReportProgress(0, "Loading \"root\"...");

    string str = Encoding.ASCII.GetString(stream.ReadBytes((int)stream.BaseStream.Length));

    string[] array = str.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries);

    List <string> APMNames = new List <string>();

    // Start at 1: the first line is a header, not an entry.
    for (int i = 1; i < array.Length; i++)
    {
        string[] filedata = array[i].Split('|');

        // Column 4 holds the file name — assumes this column layout; TODO confirm against format docs.
        string name = filedata[4];

        if (Path.GetExtension(name) == ".apm" && name.Contains("RDEV"))
        {
            APMNames.Add(Path.GetFileNameWithoutExtension(name));

            // Skip manifests for languages other than the scanned one.
            if (!name.Contains("L" + LanguageScan))
            {
                continue;
            }

            // add apm file for dev purposes
            ulong apmNameHash = Hasher.ComputeHash(name);
            MD5Hash apmMD5 = filedata[0].ToByteArray().ToMD5();

            _rootData[apmNameHash] = new OWRootEntry()
            {
                baseEntry = new RootEntry() { MD5 = apmMD5, LocaleFlags = LocaleFlags.All, ContentFlags = ContentFlags.None }
            };

            CASCFile.FileNames[apmNameHash] = name;

            EncodingEntry apmEnc;

            // Manifest not present in encoding: nothing to open for it.
            if (!casc.Encoding.GetEntry(apmMD5, out apmEnc))
            {
                continue;
            }

            using (Stream apmStream = casc.OpenFile(apmEnc.Key))
            {
                apmFiles.Add(new APMFile(name, apmStream, casc));
            }
        }

        worker?.ReportProgress((int)(i / (array.Length / 100f)));
    }

    // Expose the names of all RDEV manifests seen (loaded or not).
    APMList = APMNames.ToArray();
    APMNames.Clear();
}
public DownloadHandler(BinaryReader stream, BackgroundWorkerEx worker)
{
    // Parses the "download" manifest: header, one MD5-keyed entry per file,
    // then named tags each carrying a bit mask that selects files by index.
    worker?.ReportProgress(0, "Loading \"download\"...");

    stream.Skip(2); // DL

    // Three header bytes — meaning not established here.
    byte b1 = stream.ReadByte();
    byte b2 = stream.ReadByte();
    byte b3 = stream.ReadByte();

    int numFiles = stream.ReadInt32BE();

    short numTags = stream.ReadInt16BE();

    // One mask bit per file, rounded up to whole bytes.
    int numMaskBytes = (numFiles + 7) / 8;

    for (int i = 0; i < numFiles; i++)
    {
        MD5Hash key = stream.Read <MD5Hash>();

        //byte[] unk = stream.ReadBytes(0xA);
        stream.Skip(0xA);

        //var entry = new DownloadEntry() { Index = i, Unk = unk };
        var entry = new DownloadEntry() { Index = i };

        DownloadData.Add(key, entry);
        worker?.ReportProgress((int)((i + 1) / (float)numFiles * 100));
    }

    for (int i = 0; i < numTags; i++)
    {
        DownloadTag tag = new DownloadTag();
        string name = stream.ReadCString();
        tag.Type = stream.ReadInt16BE();

        byte[] bits = stream.ReadBytes(numMaskBytes);

        // Reverse the bit order inside each byte (multiply/mask/mod trick) so the
        // mask lines up with file indices when wrapped in a BitArray.
        for (int j = 0; j < numMaskBytes; j++)
        {
            bits[j] = (byte)((bits[j] * 0x0202020202 & 0x010884422010) % 1023);
        }

        tag.Bits = new BitArray(bits);

        Tags.Add(name, tag);
    }
}
public InstallHandler(BinaryReader stream, BackgroundWorkerEx worker)
{
    // Parses the "install" manifest: tags (with per-file bit masks) first, then
    // one entry per file (name, MD5, size); each entry gets the tags whose mask
    // bit is set at its index.
    worker?.ReportProgress(0, "Loading \"install\"...");

    stream.ReadBytes(2); // IN

    // Two header bytes — meaning not established here.
    byte b1 = stream.ReadByte();
    byte b2 = stream.ReadByte();

    short numTags = stream.ReadInt16BE();
    int numFiles = stream.ReadInt32BE();

    // One mask bit per file, rounded up to whole bytes.
    int numMaskBytes = (numFiles + 7) / 8;

    List <InstallTag> Tags = new List <InstallTag>();

    for (int i = 0; i < numTags; i++)
    {
        InstallTag tag = new InstallTag()
        {
            Name = stream.ReadCString(),
            Type = stream.ReadInt16BE()
        };

        byte[] bits = stream.ReadBytes(numMaskBytes);

        // Reverse the bit order inside each byte (multiply/mask/mod trick) so the
        // mask lines up with file indices when wrapped in a BitArray.
        for (int j = 0; j < numMaskBytes; j++)
        {
            bits[j] = (byte)((bits[j] * 0x0202020202 & 0x010884422010) % 1023);
        }

        tag.Bits = new BitArray(bits);

        Tags.Add(tag);
    }

    for (int i = 0; i < numFiles; i++)
    {
        InstallEntry entry = new InstallEntry()
        {
            Name = stream.ReadCString(),
            MD5 = stream.Read <MD5Hash>(),
            Size = stream.ReadInt32BE()
        };

        InstallData.Add(entry);

        // Attach every tag whose mask bit is set for this file index.
        entry.Tags = Tags.FindAll(tag => tag.Bits[i]);

        worker?.ReportProgress((int)((i + 1) / (float)numFiles * 100));
    }
}
public S1RootHandler(BinaryReader stream, BackgroundWorkerEx worker)
{
    // Parses the plain-text root listing; each line is "<name>[:<locale>]|<md5>".
    worker?.ReportProgress(0, "Loading \"root\"...");

    using (StreamReader reader = new StreamReader(stream.BaseStream))
    {
        string currentLine;

        while ((currentLine = reader.ReadLine()) != null)
        {
            string[] fields = currentLine.Split('|');

            string fileName = fields[0];
            LocaleFlags localeFlags = LocaleFlags.All;

            // An optional ":<locale>" suffix on the name narrows the locale.
            if (fields[0].IndexOf(':') != -1)
            {
                string[] nameAndLocale = fields[0].Split(':');
                fileName = nameAndLocale[0];
                localeFlags = (LocaleFlags)Enum.Parse(typeof(LocaleFlags), nameAndLocale[1]);
            }

            ulong nameHash = Hasher.ComputeHash(fileName);

            RootData[nameHash] = new RootEntry()
            {
                LocaleFlags = localeFlags,
                ContentFlags = ContentFlags.None,
                MD5 = fields[1].ToByteArray().ToMD5()
            };

            CASCFile.FileNames[nameHash] = fileName;
        }
    }

    worker?.ReportProgress(100);
}
public override void LoadListFile(string path, BackgroundWorkerEx worker = null)
{
    // D3 derives file names from its own root data; `path` is not consulted here.
    worker?.ReportProgress(0, "Loading \"listfile\"...");
    Logger.WriteLine("D3RootHandler: loading file names...");

    int totalEntries = D3RootData.Sum(p => p.Value.Count);
    int processed = 0;

    foreach (var package in D3RootData)
    {
        foreach (var rootEntry in package.Value)
        {
            AddFile(package.Key, rootEntry);

            processed++;
            worker?.ReportProgress((int)(processed / (float)totalEntries * 100));
        }
    }

    Logger.WriteLine("D3RootHandler: loaded {0} file names", processed);
}
public static CDNIndexHandler Initialize(CASCConfig config, BackgroundWorkerEx worker)
{
    // Builds a handler and loads every archive index, from the CDN when online
    // or from local storage otherwise.
    var handler = new CDNIndexHandler(config, worker);

    worker?.ReportProgress(0, "Loading \"CDN indexes\"...");

    int archiveCount = config.Archives.Count;

    for (int index = 0; index < archiveCount; index++)
    {
        string archiveName = config.Archives[index];

        if (config.OnlineMode)
            handler.DownloadIndexFile(archiveName, index);
        else
            handler.OpenIndexFile(archiveName, index);

        worker?.ReportProgress((int)((index + 1) / (float)archiveCount * 100));
    }

    return handler;
}
public Stream OpenDataFileDirect(byte[] key)
{
    // CDN data files live under data/<first-two-hex>/<next-two-hex>/<full-hex-key>.
    var hexKey = key.ToHexString().ToLower();

    worker?.ReportProgress(0, string.Format("Downloading \"{0}\" file...", hexKey));

    string relativePath = string.Format("{0}/data/{1}/{2}/{3}",
        CASCConfig.CDNPath, hexKey.Substring(0, 2), hexKey.Substring(2, 2), hexKey);
    string url = "http://" + CASCConfig.CDNHost + "/" + relativePath;

    // Prefer the local cache; fall back to a direct download on a miss.
    Stream cached = Cache.OpenFile(relativePath, url, false);

    return cached != null ? cached : downloader.OpenFile(url);
}
private void CopyToStream(Stream src, Stream dst, long len)
{
    // Copies `src` to `dst` in 4 KiB chunks, reporting percentage progress
    // against the expected total `len` and honoring a pending cancellation
    // request (the copy is simply abandoned, leaving `dst` incomplete).
    long done = 0;

    byte[] buf = new byte[0x1000];

    int count;

    do
    {
        // Bail out promptly if the caller asked for cancellation.
        if (progressReporter != null && progressReporter.CancellationPending)
            return;

        count = src.Read(buf, 0, buf.Length);
        dst.Write(buf, 0, count);
        done += count;

        // FIX: guard against len <= 0 (unknown length). The original division
        // produced Infinity/NaN and a garbage int after the cast.
        if (len > 0)
            progressReporter?.ReportProgress((int)(done / (float)len * 100));
    } while (count > 0);
}
public override void LoadListFile(string path, BackgroundWorkerEx worker = null)
{
    // Synthesizes file names for Overwatch content (no real list file exists):
    // one "_index" name per package, one "_bundle_*" name per package index,
    // and one "files/<type>/<key>" name per package record. Duplicates with a
    // differing MD5 are logged as "Weird duplicate" and skipped.
    worker?.ReportProgress(0, "Loading \"listfile\"...");

    Logger.WriteLine("OWRootHandler: loading file names...");

    // Progress granularity: one tick per package across all APM files.
    float pkgOnePct = apmFiles.Sum(a => a.Packages.Length) / 100f;

    int pkgCount = 0;

    foreach (var apm in apmFiles)
    {
        for (int i = 0; i < apm.Packages.Length; i++)
        {
            APMPackage package = apm.Packages[i];

            MD5Hash pkgIndexMD5 = package.indexContentKey;

            string apmName = Path.GetFileNameWithoutExtension(apm.Name);
            string pkgName = string.Format("{0}/package_{1:X4}_{2:X16}", apmName, i, package.packageKey);
            string fakeName = string.Format("{0}_index", pkgName);

            // --- package index entry ---
            ulong fileHash = Hasher.ComputeHash(fakeName);
            Logger.WriteLine("Adding package: {0:X16} {1}", fileHash, package.indexContentKey.ToHexString());
            if (_rootData.ContainsKey(fileHash))
            {
                if (!_rootData[fileHash].baseEntry.MD5.EqualsTo(package.indexContentKey))
                {
                    Logger.WriteLine("Weird duplicate package: {0:X16} {1}", fileHash, package.indexContentKey.ToHexString());
                }
                else
                {
                    Logger.WriteLine("Duplicate package: {0:X16} {1}", fileHash, package.indexContentKey.ToHexString());
                }
                continue;
            }

            _rootData[fileHash] = new OWRootEntry()
            {
                baseEntry = new RootEntry() { MD5 = pkgIndexMD5, LocaleFlags = LocaleFlags.All, ContentFlags = ContentFlags.None }
            };

            CASCFile.Files[fileHash] = new CASCFile(fileHash, fakeName);

            // --- bundle entry for this package ---
            PackageIndex pkgIndex = apm.Indexes[i];

            fakeName = string.Format("{0}_bundle_{1:X16}", pkgName, pkgIndex.bundleKey);

            fileHash = Hasher.ComputeHash(fakeName);
            Logger.WriteLine("Adding bundle: {0:X16} {1}", fileHash, pkgIndex.bundleContentKey.ToHexString());
            if (_rootData.ContainsKey(fileHash))
            {
                if (!_rootData[fileHash].baseEntry.MD5.EqualsTo(pkgIndex.bundleContentKey))
                {
                    Logger.WriteLine("Weird duplicate bundle: {0:X16} {1}", fileHash, pkgIndex.bundleContentKey.ToHexString());
                }
                else
                {
                    Logger.WriteLine("Duplicate bundle: {0:X16} {1}", fileHash, pkgIndex.bundleContentKey.ToHexString());
                }
                continue;
            }

            _rootData[fileHash] = new OWRootEntry()
            {
                baseEntry = new RootEntry() { MD5 = pkgIndex.bundleContentKey, LocaleFlags = LocaleFlags.All, ContentFlags = ContentFlags.None },
                pkgIndex = pkgIndex
            };

            CASCFile.Files[fileHash] = new CASCFile(fileHash, fakeName);

            // --- individual package records ---
            PackageIndexRecord[] records = apm.Records[i];

            for (int k = 0; k < records.Length; k++)
            {
                // Name combines a type id (from KeyToTypeID) with the key's low 48 bits.
                fakeName = string.Format("files/{0:X3}/{1:X12}.{0:X3}", KeyToTypeID(records[k].Key), records[k].Key & 0xFFFFFFFFFFFF);

                fileHash = Hasher.ComputeHash(fakeName);
                //Logger.WriteLine("Adding package record: key {0:X16} hash {1} flags {2:X8}", fileHash, records[k].contentKey.ToHexString(), records[k].flags);
                if (_rootData.ContainsKey(fileHash))
                {
                    if (!_rootData[fileHash].baseEntry.MD5.EqualsTo(records[k].ContentKey))
                    {
                        Logger.WriteLine("Weird duplicate package record: {0:X16} {1}", fileHash, records[k].ContentKey.ToHexString());
                    }
                    //else
                    //    Logger.WriteLine("Duplicate package record: {0:X16} {1}", fileHash, records[k].contentKey.ToHexString());
                    continue;
                }

                _rootData[fileHash] = new OWRootEntry()
                {
                    baseEntry = new RootEntry() { MD5 = records[k].ContentKey, LocaleFlags = LocaleFlags.All, ContentFlags = (ContentFlags)records[k].Flags },
                    pkgIndex = pkgIndex,
                    pkgIndexRec = records[k]
                };

                CASCFile.Files[fileHash] = new CASCFile(fileHash, fakeName);
            }

            worker?.ReportProgress((int)(++pkgCount / pkgOnePct));
        }
    }

    Logger.WriteLine("OWRootHandler: loaded {0} file names", _rootData.Count);
}
public EncodingHandler(BinaryReader stream, BackgroundWorkerEx worker)
{
    // Parses the "encoding" file: header, string block, table-A index, table-A
    // chunks mapping content MD5 -> (size, storage key), then table B, whose
    // values are read but unused.
    worker?.ReportProgress(0, "Loading \"encoding\"...");

    stream.Skip(2); // EN

    // Header fields; only the entry counts and string block size are used below.
    byte b1 = stream.ReadByte();
    byte checksumSizeA = stream.ReadByte();
    byte checksumSizeB = stream.ReadByte();
    ushort flagsA = stream.ReadUInt16();
    ushort flagsB = stream.ReadUInt16();
    int numEntriesA = stream.ReadInt32BE();
    int numEntriesB = stream.ReadInt32BE();
    byte b4 = stream.ReadByte();
    int stringBlockSize = stream.ReadInt32BE();

    stream.Skip(stringBlockSize);
    //string[] strings = Encoding.ASCII.GetString(stream.ReadBytes(stringBlockSize)).Split(new[] { '\0' }, StringSplitOptions.RemoveEmptyEntries);

    // Table-A index: 32 bytes per chunk (first hash + block hash) — not needed, skipped.
    stream.Skip(numEntriesA * 32);
    //for (int i = 0; i < numEntriesA; ++i)
    //{
    //    byte[] firstHash = stream.ReadBytes(16);
    //    byte[] blockHash = stream.ReadBytes(16);
    //}

    long chunkStart = stream.BaseStream.Position;

    for (int i = 0; i < numEntriesA; ++i)
    {
        ushort keysCount;

        // Entries repeat until a zero keysCount terminates the chunk.
        while ((keysCount = stream.ReadUInt16()) != 0)
        {
            int fileSize = stream.ReadInt32BE();
            byte[] md5 = stream.ReadBytes(16);

            EncodingEntry entry = new EncodingEntry();
            entry.Size = fileSize;

            // how do we handle multiple keys? 
            for (int ki = 0; ki < keysCount; ++ki)
            {
                byte[] key = stream.ReadBytes(16);

                // use first key for now
                if (ki == 0)
                {
                    entry.Key = key;
                }
                else
                {
                    Logger.WriteLine("Multiple encoding keys for MD5 {0}: {1}", md5.ToHexString(), key.ToHexString());
                }
            }

            //Encodings[md5] = entry;
            EncodingData.Add(md5, entry);
        }

        // each chunk is 4096 bytes, and zero padding at the end
        // NOTE(review): if the position lands exactly on a chunk boundary,
        // `remaining` evaluates to CHUNK_SIZE and a whole chunk gets skipped —
        // confirm the format guarantees at least one padding byte per chunk.
        long remaining = CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart) % CHUNK_SIZE);

        if (remaining > 0)
        {
            stream.BaseStream.Position += remaining;
        }

        worker?.ReportProgress((int)((i + 1) / (float)numEntriesA * 100));
    }

    // Table-B index: 32 bytes per chunk — skipped like table A's.
    stream.Skip(numEntriesB * 32);
    //for (int i = 0; i < numEntriesB; ++i)
    //{
    //    byte[] firstKey = stream.ReadBytes(16);
    //    byte[] blockHash = stream.ReadBytes(16);
    //}

    long chunkStart2 = stream.BaseStream.Position;

    // Table-B entries are read to advance the stream; their values are unused.
    for (int i = 0; i < numEntriesB; ++i)
    {
        byte[] key = stream.ReadBytes(16);
        int stringIndex = stream.ReadInt32BE();
        byte unk1 = stream.ReadByte();
        int fileSize = stream.ReadInt32BE();

        // each chunk is 4096 bytes, and zero padding at the end
        long remaining = CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart2) % CHUNK_SIZE);

        if (remaining > 0)
        {
            stream.BaseStream.Position += remaining;
        }
    }

    // string block till the end of file
}
public WowRootHandler(BinaryReader stream, BackgroundWorkerEx worker)
{
    // Parses the WoW "root" file: a sequence of blocks, each holding `count`
    // entries sharing one (ContentFlags, LocaleFlags) pair. FileDataIds are
    // delta-encoded in a first array; (MD5, name hash) pairs follow in a second.
    worker?.ReportProgress(0, "Loading \"root\"...");

    while (stream.BaseStream.Position < stream.BaseStream.Length)
    {
        int count = stream.ReadInt32();

        ContentFlags contentFlags = (ContentFlags)stream.ReadUInt32();
        LocaleFlags localeFlags = (LocaleFlags)stream.ReadUInt32();

        if (localeFlags == LocaleFlags.None)
            throw new Exception("block.LocaleFlags == LocaleFlags.None");

        // Sanity check: non-empty content flags must contain at least one known bit.
        if (contentFlags != ContentFlags.None && (contentFlags & (ContentFlags.F00000008 | ContentFlags.F00000010 | ContentFlags.LowViolence | ContentFlags.NoCompression | ContentFlags.F20000000)) == 0)
            throw new Exception("block.ContentFlags != ContentFlags.None");

        RootEntry[] entries = new RootEntry[count];
        int[] filedataIds = new int[count];

        // First array: delta-encoded file data ids (each delta relative to prev id + 1).
        int fileDataIndex = 0;

        for (var i = 0; i < count; ++i)
        {
            entries[i].LocaleFlags = localeFlags;
            entries[i].ContentFlags = contentFlags;

            filedataIds[i] = fileDataIndex + stream.ReadInt32();
            fileDataIndex = filedataIds[i] + 1;
        }

        //Console.WriteLine("Block: {0} {1} (size {2})", block.ContentFlags, block.LocaleFlags, count);

        // Second array: (MD5, name hash) pairs, one per entry.
        for (var i = 0; i < count; ++i)
        {
            entries[i].MD5 = stream.Read <MD5Hash>();

            ulong hash = stream.ReadUInt64();
            RootData.Add(hash, entries[i]);

            //Console.WriteLine("File: {0:X8} {1:X16} {2}", entries[i].FileDataId, hash, entries[i].MD5.ToHexString());

            int fileDataId = filedataIds[i];

            if (FileDataStore.TryGetValue(fileDataId, out ulong hash2))
            {
                if (hash2 == hash)
                {
                    // duplicate, skipping
                    continue;
                }
                else
                {
                    // FIX: corrected typo "miltiple" -> "multiple" in the log message.
                    Logger.WriteLine("ERROR: got multiple hashes for filedataid {0}", fileDataId);
                    continue;
                }
            }

            FileDataStore.Add(fileDataId, hash);
            FileDataStoreReverse.Add(hash, fileDataId);
        }

        worker?.ReportProgress((int)(stream.BaseStream.Position / (float)stream.BaseStream.Length * 100));
    }
}
public override void LoadListFile(string path, BackgroundWorkerEx worker = null)
{
    // Loads the text list file, registers every name whose hash exists in
    // RootData, and writes a pre-hashed binary cache ("listfile.bin"), grouped
    // by directory, for faster subsequent loads.
    if (LoadPreHashedListFile("listfile.bin", path, worker))
    {
        return;
    }

    using (var _ = new PerfCounter("WowRootHandler::LoadListFile()"))
    {
        worker?.ReportProgress(0, "Loading \"listfile\"...");

        if (!File.Exists(path))
        {
            Logger.WriteLine("WowRootHandler: list file missing!");
            return;
        }

        Logger.WriteLine("WowRootHandler: loading file names...");

        // dir name -> (name hash -> bare file name); the "" bucket holds root-level files.
        Dictionary <string, Dictionary <ulong, string> > dirData = new Dictionary <string, Dictionary <ulong, string> >(StringComparer.OrdinalIgnoreCase)
        {
            [""] = new Dictionary <ulong, string>()
        };

        using (var fs = new FileStream("listfile.bin", FileMode.Create))
        using (var bw = new BinaryWriter(fs))
        using (var fs2 = File.Open(path, FileMode.Open))
        using (var sr = new StreamReader(fs2))
        {
            string file;

            while ((file = sr.ReadLine()) != null)
            {
                ulong fileHash = Hasher.ComputeHash(file);

                // skip invalid names
                if (!RootData.ContainsKey(fileHash))
                {
                    Logger.WriteLine("Invalid file name: {0}", file);
                    continue;
                }

                CASCFile.Files[fileHash] = new CASCFile(fileHash, file);

                // Split into (dir, bare name) on the last backslash for grouping.
                int dirSepIndex = file.LastIndexOf('\\');

                if (dirSepIndex >= 0)
                {
                    string key = file.Substring(0, dirSepIndex);

                    if (!dirData.ContainsKey(key))
                    {
                        dirData[key] = new Dictionary <ulong, string>();
                    }

                    dirData[key][fileHash] = file.Substring(dirSepIndex + 1);
                }
                else
                {
                    dirData[""][fileHash] = file;
                }

                worker?.ReportProgress((int)(sr.BaseStream.Position / (float)sr.BaseStream.Length * 100));
            }

            // Cache layout: dir count, then per dir: name, file count, (hash, bare name) pairs.
            bw.Write(dirData.Count); // count of dirs

            foreach (var dir in dirData)
            {
                bw.Write(dir.Key); // dir name

                Logger.WriteLine(dir.Key);

                bw.Write(dirData[dir.Key].Count); // count of files in dir

                foreach (var fh in dirData[dir.Key])
                {
                    bw.Write(fh.Key); // file name hash
                    bw.Write(fh.Value); // file name (without dir name)
                }
            }

            Logger.WriteLine("WowRootHandler: loaded {0} valid file names", CASCFile.Files.Count);
        }

        // Mirror the source file's timestamp so a stale cache can be detected later.
        File.SetLastWriteTime("listfile.bin", File.GetLastWriteTime(path));
    }
}
public D3RootHandler(BinaryReader stream, BackgroundWorkerEx worker, CASCHandler casc)
{
    // Parses the D3 "root" file: a 4-byte header and a count of named sub-root
    // packages, each referencing (by MD5) a nested file holding three typed
    // entry lists. Afterwards loads CoreTOC.dat and Packages.dat parsers.
    worker?.ReportProgress(0, "Loading \"root\"...");

    // Header: four bytes whose meaning is not established here.
    byte b1 = stream.ReadByte();
    byte b2 = stream.ReadByte();
    byte b3 = stream.ReadByte();
    byte b4 = stream.ReadByte();

    int count = stream.ReadInt32();

    for (int j = 0; j < count; j++)
    {
        byte[] md5 = stream.ReadBytes(16);
        string name = stream.ReadCString();

        var entries = new List <D3RootEntry>();
        D3RootData[name] = entries;

        EncodingEntry enc = casc.Encoding.GetEntry(md5);

        // FIX: removed the dead `if (s != null)` guard that wrapped this body —
        // `new BinaryReader(...)` can never return null, so the check was unreachable-false.
        using (BinaryReader s = new BinaryReader(casc.OpenFile(enc.Key)))
        {
            uint magic = s.ReadUInt32();

            // Three consecutive typed entry lists: type 0, type 1, then named (type 2).
            int nEntries0 = s.ReadInt32();

            for (int i = 0; i < nEntries0; i++)
            {
                entries.Add(D3RootEntry.Read(0, s));
            }

            int nEntries1 = s.ReadInt32();

            for (int i = 0; i < nEntries1; i++)
            {
                entries.Add(D3RootEntry.Read(1, s));
            }

            int nNamedEntries = s.ReadInt32();

            for (int i = 0; i < nNamedEntries; i++)
            {
                entries.Add(D3RootEntry.Read(2, s));
            }
        }

        worker?.ReportProgress((int)((j + 1) / (float)(count + 2) * 100));
    }

    // Parse CoreTOC.dat
    var coreTocEntry = D3RootData["Base"].Find(e => e.Name == "CoreTOC.dat");

    EncodingEntry enc1 = casc.Encoding.GetEntry(coreTocEntry.MD5);

    using (var file = casc.OpenFile(enc1.Key))
        tocParser = new CoreTOCParser(file);

    worker?.ReportProgress((int)((count + 1) / (float)(count + 2) * 100));

    // Parse Packages.dat
    var pkgEntry = D3RootData["Base"].Find(e => e.Name == "Data_D3\\PC\\Misc\\Packages.dat");

    EncodingEntry enc2 = casc.Encoding.GetEntry(pkgEntry.MD5);

    using (var file = casc.OpenFile(enc2.Key))
        pkgParser = new PackagesParser(file);

    worker?.ReportProgress(100);
}
public OWRootHandler(BinaryReader stream, BackgroundWorkerEx worker, CASCHandler casc)
{
    // Parses the Overwatch "root" text file, registers each referenced .apm
    // manifest and the files it lists, then adds every remaining encoding entry
    // under a synthetic "unknown/..." name with a locale derived from download tags.
    worker?.ReportProgress(0, "Loading \"root\"...");

    string str = Encoding.ASCII.GetString(stream.ReadBytes((int)stream.BaseStream.Length));

    string[] array = str.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries);

    // need to figure out what to do with those apm files
    // Start at 1: the first line is a header, not an entry.
    for (int i = 1; i < array.Length; i++)
    {
        string[] filedata = array[i].Split('|');

        if (Path.GetExtension(filedata[2]) == ".apm")
        {
            // add apm file for dev purposes
            ulong fileHash1 = Hasher.ComputeHash(filedata[2]);
            RootData[fileHash1] = new RootEntry() { MD5 = filedata[0].ToByteArray(), Block = RootBlock.Empty };
            CASCFile.FileNames[fileHash1] = filedata[2];

            // add files listed in apm file
            byte[] md5 = filedata[0].ToByteArray();

            EncodingEntry enc = casc.Encoding.GetEntry(md5);

            using (BinaryReader s = new BinaryReader(casc.OpenFile(enc.Key)))
            {
                // NOTE(review): `s` comes from `new`, so this null check can never fail — dead guard.
                if (s != null)
                {
                    // still need to figure out complete apm structure
                    // at start of file there's a lot of data that is same in all apm files
                    s.BaseStream.Position = 0xC;

                    uint count = s.ReadUInt32();

                    s.BaseStream.Position = 0x894;

                    // size of each entry seems to be 0x48 bytes (0x2C bytes unk data; int size; ulong unk; byte[16] md5)
                    for (int j = 0; j < count; j++)
                    {
                        s.BaseStream.Position += 0x2C; // skip unknown
                        int size = s.ReadInt32(); // size (matches size in encoding file)
                        s.BaseStream.Position += 8; // skip unknown
                        byte[] md5_2 = s.ReadBytes(16);

                        EncodingEntry enc2 = casc.Encoding.GetEntry(md5_2);

                        if (enc2 == null)
                        {
                            throw new Exception("enc2 == null");
                        }

                        // Synthetic name: "<apm base name>/<hex md5>".
                        string fakeName = Path.GetFileNameWithoutExtension(filedata[2]) + "/" + md5_2.ToHexString();

                        ulong fileHash = Hasher.ComputeHash(fakeName);
                        RootData[fileHash] = new RootEntry() { MD5 = md5_2, Block = RootBlock.Empty };
                        CASCFile.FileNames[fileHash] = fakeName;
                    }
                }
            }
        }
    }

    int current = 0;

    // Maps a download-tag name to LocaleFlags; unrecognized names mean "all locales".
    Func <string, LocaleFlags> tag2locale = (s) =>
    {
        LocaleFlags locale;

        if (Enum.TryParse(s, out locale))
        {
            return(locale);
        }

        return(LocaleFlags.All);
    };

    foreach (var entry in casc.Encoding.Entries)
    {
        DownloadEntry dl = casc.Download.GetEntry(entry.Value.Key);

        if (dl != null)
        {
            // Bucket by the first key byte: "unknown/<XX>/<full hex key>".
            string fakeName = "unknown" + "/" + entry.Key[0].ToString("X2") + "/" + entry.Key.ToHexString();

            // Tag type 4 carries locale information; OR all matching locales together.
            var locales = dl.Tags.Where(tag => tag.Value.Type == 4).Select(tag => tag2locale(tag.Key));

            LocaleFlags locale = LocaleFlags.None;

            foreach (var loc in locales)
            {
                locale |= loc;
            }

            ulong fileHash = Hasher.ComputeHash(fakeName);
            RootData.Add(fileHash, new RootEntry() { MD5 = entry.Key, Block = new RootBlock() { LocaleFlags = locale } });
            CASCFile.FileNames[fileHash] = fakeName;
        }

        worker?.ReportProgress((int)(++current / (float)casc.Encoding.Count * 100));
    }
}