/// <summary>Create a CDN index handler bound to the given config, progress reporter and cache.</summary>
/// <param name="cascConfig">Storage configuration the indexes belong to</param>
/// <param name="worker">Optional progress reporter</param>
/// <param name="cache">Cache used when fetching index files</param>
private CDNIndexHandler(CASCConfig cascConfig, ProgressReportSlave worker, Cache cache)
{
    _config = cascConfig;
    _cache = cache;
    _worker = worker;
    _downloader = new SyncDownloader();
}
/// <summary>Parse a CMF from <paramref name="reader"/>, decrypting it first when the header magic says so.</summary>
/// <param name="reader">Reader over the raw CMF; it is rewound before use</param>
/// <param name="name">CMF name, used for progress text and decryption</param>
/// <param name="worker">Optional progress reporter</param>
protected void Read(BinaryReader reader, string name, ProgressReportSlave worker = null)
{
    // Peek the version field, then rewind so the header struct re-reads it.
    reader.BaseStream.Position = 0;
    uint version = reader.ReadUInt32();
    reader.BaseStream.Position = 0;

    // todo: the int of Header21 is converted to uint without checking
    if (CMFHeaderCommon.IsV22(version))
    {
        Header = reader.Read<CMFHeader22>().Upgrade();
    }
    else
    {
        Header = version >= 39028
            ? reader.Read<CMFHeader21>().Upgrade()
            : reader.Read<CMFHeader20>().Upgrade();
    }

    worker?.ReportProgress(0, $"Loading CMF {name}...");

    // Plain manifests parse straight from the source reader.
    if (Header.Magic >> 8 != ENCRYPTED_MAGIC)
    {
        ParseCMF(reader);
        return;
    }

    // Encrypted manifests are decrypted into a secondary reader first.
    using (BinaryReader decrypted = DecryptCMF(reader, name))
    {
        ParseCMF(decrypted);
    }
}
/// <summary>Build a <see cref="CDNIndexHandler"/> and load every archive index listed in the config.</summary>
/// <param name="config">Storage configuration with the archive list</param>
/// <param name="worker">Optional progress reporter</param>
/// <param name="cache">Cache used for downloaded index files</param>
public static CDNIndexHandler Initialize(CASCConfig config, ProgressReportSlave worker, Cache cache)
{
    CDNIndexHandler handler = new CDNIndexHandler(config, worker, cache);
    worker?.ReportProgress(0, "Loading \"CDN indexes\"...");

    int count = config.Archives.Count;
    for (int i = 0; i < count; i++)
    {
        string archive = config.Archives[i];
        if (config.OnlineMode)
        {
            handler.DownloadIndexFile(archive, i);
        }
        else
        {
            // Prefer the copy on disk; fall back to a download when it is missing or unreadable.
            try
            {
                handler.OpenIndexFile(archive, i);
            }
            catch
            {
                handler.DownloadIndexFile(archive, i);
            }
        }
        worker?.ReportProgress((int)((i + 1) / (float)count * 100));
    }
    return handler;
}
/// <summary>
/// Populate <see cref="FirstOccurence"/> directly from the CMF map for GUIDs that were not
/// discovered during package parsing. Offset/flags are unknown here, so they are zeroed.
/// </summary>
/// <param name="casc">Handler used to validate encoding entries when <see cref="SaneChecking"/> is on</param>
/// <param name="worker">Optional progress reporter</param>
private void GatherFirstCMF(CASCHandler casc, ProgressReportSlave worker = null)
{
    worker?.ReportProgress(0, "Rebuilding occurence list...");
    int c = 0;
    ContentManifestFile.HashData[] data = CMF.Map.Values.ToArray();
    Parallel.For(0, data.Length, new ParallelOptions { MaxDegreeOfParallelism = CASCConfig.MaxThreads }, i => {
        // fix: the shared counter was incremented with a plain c++ from parallel workers,
        // which races (lost updates) and makes the reported progress unreliable.
        int done = System.Threading.Interlocked.Increment(ref c);
        if (worker != null && done % 500 == 0)
        {
            worker.ReportProgress((int)((float)done / CMF.Map.Count * 100));
        }
        if (FirstOccurence.ContainsKey(data[i].GUID))
        {
            return;
        }
        // Skip entries whose content hash is unknown to the encoding table.
        if (SaneChecking && !casc.EncodingHandler.HasEntry(data[i].HashKey))
        {
            return;
        }
        FirstOccurence.TryAdd(data[i].GUID, new Types.PackageRecord
        {
            GUID = data[i].GUID,
            LoadHash = data[i].HashKey,
            Size = data[i].Size,
            Offset = 0,
            Flags = 0
        });
    });
}
/// <summary>Drop all loaded index data and release the references held by this handler.</summary>
public void Clear()
{
    _cdnIndexData.Clear();

    _cdnIndexData = null;
    _downloader = null;
    _worker = null;
    _config = null;
}
/// <summary>
/// Build the file tree from the CASC root: one entry per loose root file plus one entry per
/// first-occurrence package record of every loaded APM, then sort the tree and raise
/// change notifications.
/// </summary>
/// <param name="Config">CASC configuration (stored on the instance)</param>
/// <param name="CASC">Open CASC handler providing root files and APM data</param>
/// <param name="Slave">Optional progress reporter</param>
public GUIDCollection(CASCConfig Config, CASCHandler CASC, ProgressReportSlave Slave) {
    this.Config = Config;
    this.CASC = CASC;
    this.Slave = Slave;

    // Total entry count, used only to scale progress percentages.
    long total = CASC.RootHandler.RootFiles.Count + CASC.RootHandler.APMFiles.SelectMany(x => x.FirstOccurence).LongCount();

    Slave?.ReportProgress(0, "Building file tree...");
    long c = 0;

    // Loose root files, ordered by name for a deterministic tree layout.
    foreach (KeyValuePair<string, MD5Hash> entry in this.CASC.RootHandler.RootFiles.OrderBy(x => x.Key).ToArray()) {
        c++;
        Slave?.ReportProgress((int)(((float)c / (float)total) * 100));
        AddEntry(entry.Key, 0, null, entry.Value, 0, 0, ContentFlags.None, LocaleFlags.None);
    }

    // Package records, grouped under "files/<apm name>/<type>" folders.
    foreach (ApplicationPackageManifest apm in this.CASC.RootHandler.APMFiles.OrderBy(x => x.Name).ToArray()) {
        foreach (KeyValuePair<ulong, PackageRecord> record in apm.FirstOccurence.OrderBy(x => x.Key).ToArray()) {
            c++;
            // Progress is throttled; reporting every record would be far too chatty.
            if (c % 10000 == 0) {
                Slave?.ReportProgress((int)(((float)c / (float)total) * 100));
            }
            ushort typeVal = teResourceGUID.Type(record.Key);
            string typeStr = typeVal.ToString("X3");
            // Append a friendly type name when the type id is recognised.
            DataHelper.DataType typeData = DataHelper.GetDataType(typeVal);
            if (typeData != DataHelper.DataType.Unknown) {
                typeStr = $"{typeStr} ({typeData.ToString()})";
            }
            AddEntry($"files/{Path.GetFileNameWithoutExtension(apm.Name)}/{typeStr}", record.Key, apm, record.Value.LoadHash, (int)record.Value.Size, (int)record.Value.Offset, record.Value.Flags, apm.Locale);
        }
    }

    Slave?.ReportProgress(0, "Sorting tree...");
    long t = GetSize(Root);
    Sort(Root, 0, t);

    NotifyPropertyChanged(nameof(Data));
    NotifyPropertyChanged(nameof(Root));

    // Pre-select the retail client folder when present; a missing key is expected for some builds.
    try {
        SelectedEntries = Data["RetailClient"]?.Files;
    } catch (KeyNotFoundException) {
        // no RetailClient folder in this build - leave selection empty
    }
}
/// <summary>Construct a content manifest by parsing <paramref name="stream"/>.</summary>
/// <param name="name">APM name this CMF belongs to</param>
/// <param name="stream">Stream containing the raw CMF data</param>
/// <param name="worker">Optional progress reporter</param>
public ContentManifestFile(string name, Stream stream, ProgressReportSlave worker)
{
    using (BinaryReader reader = new BinaryReader(stream))
    {
        Read(reader, name, worker);
    }
}
/// <summary>Create a <see cref="LocalIndexHandler"/> and parse every .idx file found for the config.</summary>
/// <param name="config">Storage configuration pointing at the local install</param>
/// <param name="worker">Optional progress reporter</param>
/// <exception cref="FileNotFoundException">No .idx files exist for this install</exception>
public static LocalIndexHandler Initialize(CASCConfig config, ProgressReportSlave worker)
{
    LocalIndexHandler handler = new LocalIndexHandler(config);

    List<string> idxFiles = GetIdxFiles(config);
    if (idxFiles.Count == 0)
    {
        throw new FileNotFoundException("idx files missing!");
    }

    worker?.ReportProgress(0, "Loading \"local indexes\"...");
    for (int i = 0; i < idxFiles.Count; i++)
    {
        handler.ParseIndex(idxFiles[i]);
        worker?.ReportProgress((int)((i + 1) / (float)idxFiles.Count * 100));
    }
    return handler;
}
/// <summary>
/// Parse the CASC "encoding" file: a fixed header, table A mapping a content MD5 to an
/// encoding key and size (stored into <c>_encodingData</c>), and table B (encoding-spec
/// entries), which is read but discarded.
/// </summary>
/// <param name="stream">Reader over the decoded encoding file</param>
/// <param name="worker">Optional progress reporter</param>
public EncodingHandler(BinaryReader stream, ProgressReportSlave worker) {
    worker?.ReportProgress(0, "Loading \"encoding\"...");

    stream.Skip(2); // EN (magic)
    byte b1 = stream.ReadByte();              // unused header byte
    byte checksumSizeA = stream.ReadByte();
    byte checksumSizeB = stream.ReadByte();
    ushort flagsA = stream.ReadUInt16();
    ushort flagsB = stream.ReadUInt16();
    int numEntriesA = stream.ReadInt32BE();   // header counters are big-endian
    int numEntriesB = stream.ReadInt32BE();
    byte b4 = stream.ReadByte();              // unused header byte
    int stringBlockSize = stream.ReadInt32BE();

    // ESpec string block - not needed for lookups, skip it wholesale.
    stream.Skip(stringBlockSize);
    //string[] strings = Encoding.ASCII.GetString(stream.ReadBytes(stringBlockSize)).Split(new[] { '\0' }, StringSplitOptions.RemoveEmptyEntries);

    // Table A page index: (first hash, page MD5) pairs, 32 bytes each - skipped.
    stream.Skip(numEntriesA * 32);
    //for (int i = 0; i < numEntriesA; ++i)
    //{
    //    byte[] firstHash = stream.ReadBytes(16);
    //    byte[] blockHash = stream.ReadBytes(16);
    //}

    long chunkStart = stream.BaseStream.Position;
    for (int i = 0; i < numEntriesA; ++i) {
        // Entries within a page run until a zero key count terminator.
        ushort keysCount;
        while ((keysCount = stream.ReadUInt16()) != 0) {
            int fileSize = stream.ReadInt32BE();
            MD5Hash md5 = stream.Read<MD5Hash>();

            EncodingEntry entry = new EncodingEntry {
                Size = fileSize
            };

            // how do we handle multiple keys?
            for (int ki = 0; ki < keysCount; ++ki) {
                MD5Hash key = stream.Read<MD5Hash>();
                // use first key for now
                if (ki == 0) {
                    entry.Key = key;
                } else {
                    // todo: log spam
                    //Debugger.Log(0, "CASC", $"Multiple encoding keys for MD5 {md5.ToHexString()}: {key.ToHexString()}\r\n");
                }
            }
            _encodingData.Add(md5, entry);
        }

        // each chunk is 4096 bytes, and zero padding at the end
        // NOTE(review): if a page ever ended exactly on a chunk boundary, remaining would be
        // ChunkSize and a full extra chunk would be skipped - presumably the terminator
        // guarantees pages never end flush; confirm against the format spec.
        long remaining = ChunkSize - (stream.BaseStream.Position - chunkStart) % ChunkSize;
        if (remaining > 0) {
            stream.BaseStream.Position += remaining;
        }
        worker?.ReportProgress((int)((i + 1) / (float)numEntriesA * 100));
    }

    // Table B page index - skipped just like table A's.
    stream.Skip(numEntriesB * 32);
    long chunkStart2 = stream.BaseStream.Position;
    for (int i = 0; i < numEntriesB; ++i) {
        // Encoding-spec entries are read into locals and discarded.
        byte[] key = stream.ReadBytes(16);
        int stringIndex = stream.ReadInt32BE();
        byte unk1 = stream.ReadByte();
        int fileSize = stream.ReadInt32BE();

        // each chunk is 4096 bytes, and zero padding at the end
        long remaining = ChunkSize - (stream.BaseStream.Position - chunkStart2) % ChunkSize;
        if (remaining > 0) {
            stream.BaseStream.Position += remaining;
        }
    }
    // string block till the end of file
}
/// <summary>Open a CASC storage described by <paramref name="config"/>.</summary>
/// <param name="config">Storage configuration</param>
/// <param name="worker">Optional progress reporter forwarded to the handler</param>
/// <returns>A fully initialised <see cref="CASCHandler"/></returns>
public static CASCHandler Open(CASCConfig config, ProgressReportSlave worker = null)
{
    CASCHandler handler = new CASCHandler(config, worker);
    return handler;
}
/// <summary>
/// Open the storage: load local or CDN indices (depending on <c>OnlineMode</c>), then the
/// encoding table, then the root data. Download/install manifests are currently disabled.
/// </summary>
/// <param name="config">Storage configuration</param>
/// <param name="worker">Optional progress reporter passed to each sub-handler</param>
private CASCHandler(CASCConfig config, ProgressReportSlave worker) {
    Config = config;

    if (!config.OnlineMode) {
        // Local install: parse the .idx files from disk.
        Debugger.Log(0, "CASC", "CASCHandler: loading local indices\r\n");
        using (PerfCounter _ = new PerfCounter("LocalIndexHandler.Initialize()")) {
            LocalIndex = LocalIndexHandler.Initialize(config, worker);
        }
        Debugger.Log(0, "CASC", $"CASCHandler: loaded {LocalIndex.Count} local indices\r\n");
    } else // todo: supposed to do this?
    {
        // Online: fetch index files from the CDN instead.
        Debugger.Log(0, "CASC", "CASCHandler: loading CDN indices\r\n");
        using (PerfCounter _ = new PerfCounter("CDNIndexHandler.Initialize()")) {
            CDNIndex = CDNIndexHandler.Initialize(config, worker, Cache);
        }
        Debugger.Log(0, "CASC", $"CASCHandler: loaded {CDNIndex.Count} CDN indexes\r\n");
    }

    // Encoding table must be loaded before root parsing - RootHandler looks entries up in it.
    Debugger.Log(0, "CASC", "CASCHandler: loading encoding entries\r\n");
    using (PerfCounter _ = new PerfCounter("new EncodingHandler()")) {
        using (BinaryReader encodingReader = OpenEncodingKeyFile()) {
            EncodingHandler = new EncodingHandler(encodingReader, worker);
        }
    }
    Debugger.Log(0, "CASC", $"CASCHandler: loaded {EncodingHandler.Count} encoding entries\r\n");

    Debugger.Log(0, "CASC", "CASCHandler: loading root data\r\n");
    using (PerfCounter _ = new PerfCounter("new RootHandler()")) {
        using (BinaryReader rootReader = OpenRootKeyFile()) {
            RootHandler = new RootHandler(rootReader, worker, this);
        }
    }

    // Download and install manifest loading is kept for reference but not enabled.
    //if ((CASCConfig.LoadFlags & LoadFlags.Download) != 0)
    //{
    //    Debugger.Log(0, "CASC", "CASCHandler: loading download data\r\n");
    //    using (var _ = new PerfCounter("new DownloadHandler()"))
    //    {
    //        using (BinaryReader fs = OpenDownloadFile(EncodingHandler))
    //            DownloadHandler = new DownloadHandler(fs, worker);
    //    }
    //    Debugger.Log(0, "CASC", $"CASCHandler: loaded {EncodingHandler.Count} download data\r\n");
    //}

    //if ((CASCConfig.LoadFlags & LoadFlags.Install) != 0) {
    //    Debugger.Log(0, "CASC", "CASCHandler: loading install data\r\n");
    //    using (var _ = new PerfCounter("new InstallHandler()"))
    //    {
    //        using (var fs = OpenInstallFile(EncodingHandler))
    //            InstallHandler = new InstallHandler(fs, worker);
    //        InstallHandler.Print();
    //    }
    //    Debugger.Log(0, "CASC", $"CASCHandler: loaded {InstallHandler.Count} install data\r\n");
    //}
}
/// <summary>
/// Parse every package referenced by the APM in parallel: read the package struct, its
/// sibling list and its gzip-compressed record table, resolving each record's load hash
/// and size through the CMF map and recording first occurrences.
/// </summary>
/// <param name="casc">Handler used for encoding lookups and opening package streams</param>
/// <param name="worker">Optional progress reporter</param>
private void LoadPackages(CASCHandler casc, ProgressReportSlave worker)
{
    int c = 0;
    Parallel.For(0, Header.PackageCount, new ParallelOptions { MaxDegreeOfParallelism = CASCConfig.MaxThreads }, i => {
        // fix: the shared counter was incremented with a plain c++ from parallel workers,
        // which races (lost updates) and made the console/progress numbers unreliable.
        int done = System.Threading.Interlocked.Increment(ref c);
        if (done % 1000 == 0)
        {
            if (!Console.IsOutputRedirected)
            {
                Console.Out.Write($"Loading packages: {System.Math.Floor(done / (float)Header.PackageCount * 10000) / 100:F0}% ({done}/{Header.PackageCount})\r");
            }
            worker?.ReportProgress((int)((float)done / Header.PackageCount * 100));
        }

        Types.PackageEntry entry = PackageEntries[i];
        // fix: ContainsKey followed by the indexer was a double dictionary lookup.
        if (!CMF.Map.TryGetValue(entry.PackageGUID, out ContentManifestFile.HashData packageCMF))
        {
            return; // lol?
        }
        if (!casc.EncodingHandler.GetEntry(packageCMF.HashKey, out EncodingEntry packageEncoding))
        {
            return;
        }

        using (Stream packageStream = casc.OpenFile(packageEncoding.Key))
        using (BinaryReader packageReader = new BinaryReader(packageStream))
        {
            Packages[i] = packageReader.Read<Types.Package>();

            if (CMFHeaderCommon.IsV22((uint)Header.Build)) // todo: hack
            {
                //Packages[i].SiblingCount *= 2;
                Packages[i].SiblingCount = 0;
            }
            if (Packages[i].SiblingCount > 0)
            {
                packageStream.Position = Packages[i].OffsetSiblings;
                PackageSiblings[i] = packageReader.ReadArray<ulong>((int)Packages[i].SiblingCount);
            }
            else
            {
                PackageSiblings[i] = new ulong[0];
            }

            // Record table is stored gzip-compressed at OffsetRecords.
            packageStream.Position = Packages[i].OffsetRecords;
            Types.PackageRecordRaw[] recordsRaw;
            using (GZipStream recordGunzipped = new GZipStream(packageStream, CompressionMode.Decompress))
            using (BinaryReader recordReader = new BinaryReader(recordGunzipped))
            {
                recordsRaw = recordReader.ReadArray<Types.PackageRecordRaw>((int)Packages[i].RecordCount);
                Records[i] = new Types.PackageRecord[Packages[i].RecordCount];
            }

            for (uint j = 0; j < Packages[i].RecordCount; ++j)
            {
                Types.PackageRecordRaw rawRecord = recordsRaw[j];
                // NOTE(review): throws KeyNotFoundException when a record GUID is missing
                // from the CMF (surfaced to the caller via AggregateException) - the later
                // ContainsKey guard is therefore always true; kept as-is to preserve behavior.
                ContentManifestFile.HashData recordCMF = CMF.Map[rawRecord.GUID];
                Types.PackageRecord record = new Types.PackageRecord
                {
                    GUID = rawRecord.GUID,
                    Flags = rawRecord.Flags,
                    Offset = rawRecord.Offset
                };
                if (record.Flags.HasFlag(ContentFlags.Bundle))
                {
                    // Bundled records load through the bundle's hash, not their own.
                    record.LoadHash = CMF.Map[Packages[i].BundleGUID].HashKey;
                }
                else
                {
                    if (CMF.Map.ContainsKey(record.GUID))
                    {
                        record.LoadHash = recordCMF.HashKey;
                    }
                }
                record.Size = recordCMF.Size;
                if (!FirstOccurence.ContainsKey(record.GUID))
                {
                    FirstOccurence[record.GUID] = record;
                }
                Records[i][j] = record;
            }
        }
    });
}
/// <summary>
/// Load this APM: read its companion CMF, parse the APM header and entry tables, then
/// either restore package records from the on-disk cache or parse every package from CASC.
/// </summary>
/// <param name="name">APM file name</param>
/// <param name="cmfhash">Content hash of the companion CMF</param>
/// <param name="stream">Raw APM stream</param>
/// <param name="casc">Handler used to open the CMF and package files</param>
/// <param name="cmfname">Path of the companion CMF</param>
/// <param name="locale">Locale this APM belongs to</param>
/// <param name="worker">Optional progress reporter</param>
public void Load(string name, MD5Hash cmfhash, Stream stream, CASCHandler casc, string cmfname, LocaleFlags locale, ProgressReportSlave worker = null) {
    Locale = locale;
    Name = name;
    CMFHash = cmfhash;
    CMFName = Path.GetFileName(cmfname);

    //using (Stream file = File.OpenWrite(Path.GetFileName(name))) {
    //    stream.CopyTo(file);
    //    stream.Position = 0;
    //}

    // Without an encoding entry for the CMF there is nothing meaningful to load.
    if (!casc.EncodingHandler.GetEntry(cmfhash, out EncodingEntry cmfEncoding)) {
        return;
    }
    if (!casc.Config.LoadContentManifest) {
        return;
    }
    using (Stream cmfStream = casc.OpenFile(cmfEncoding.Key)) {
        CMF = new ContentManifestFile(CMFName, cmfStream, worker);
    }

    // Pre-sized concurrent map; filled from parallel workers in LoadPackages/GatherFirstCMF.
    FirstOccurence = new ConcurrentDictionary<ulong, Types.PackageRecord>(Environment.ProcessorCount + 2, CMF.Map.Count);

    using (BinaryReader reader = new BinaryReader(stream)) {
        // Peek the build number to pick the header layout, then rewind.
        ulong build = reader.ReadUInt64();
        reader.BaseStream.Position = 0;
        if (CMFHeaderCommon.IsV22((uint)build)) {
            Header = reader.Read<Types.Header22>().Upgrade();
        } else {
            Header = reader.Read<Types.Header>();
        }

        if (CMFHeaderCommon.IsV22((uint)Header.Build)) {
            Entries = reader.ReadArray<Types.Entry>((int)Header.EntryCount);
            PackageEntries = reader.ReadArray<Types.PackageEntry>((int)Header.PackageCount);
        } else {
            // Older layouts are converted up to the current entry/package structs.
            Entries = reader.ReadArray<Types.Entry21>((int)Header.EntryCount).Select(x => x.GetEntry()).ToArray();
            PackageEntries = reader.ReadArray<Types.PackageEntry21>((int)Header.PackageCount).Select(x => x.GetPackage()).ToArray();
        }

        if (CASCHandler.Cache.CacheAPM && CacheFileExists(Header.Build)) {
            worker?.ReportProgress(0, "Loading cached data...");
            try {
                if (LoadCache(Header.Build)) // if cache is invalid, we'll regenerate
                {
                    GatherFirstCMF(casc, worker);
                    return;
                }
            } catch {
                // Corrupt cache: log it, delete it, and fall through to a full parse.
                TankLib.Helpers.Logger.Error("CASC", $"Failed to load APM Cache {Path.GetFileName(CacheFile(Header.Build))}");
                File.Delete(CacheFile(Header.Build));
            }
        }

        Packages = new Types.Package[Header.PackageCount];
        Records = new Types.PackageRecord[Header.PackageCount][];
        PackageSiblings = new ulong[Header.PackageCount][];

        worker?.ReportProgress(0, $"Loading {Name} packages");
        try {
            LoadPackages(casc, worker);
        } catch (AggregateException e) {
            // return nice exception to RootHandler
            if (e.InnerException != null) {
                throw e.InnerException;
            }
        }
    }

    // Clear the single-line package progress output written by LoadPackages.
    if (!Console.IsOutputRedirected) {
        Console.Write(new string(' ', Console.WindowWidth - 1) + "\r");
    }

    if (CASCHandler.Cache.CacheAPM) {
        TankLib.Helpers.Logger.Debug("CASC", $"Caching APM {name}");
        worker?.ReportProgress(0, "Caching data...");
        SaveCache(Header.Build);
    }
    GatherFirstCMF(casc, worker);
}
/// <summary>
/// Parse the root manifest (a pipe-separated listing): record every file's MD5 in
/// <see cref="RootFiles"/>, collect loadable RDEV CMF hashes, then load each matching
/// RDEV APM together with its companion CMF.
/// </summary>
/// <param name="stream">Reader over the full root file</param>
/// <param name="worker">Optional progress reporter</param>
/// <param name="casc">Owning handler, used for encoding lookups and opening APM streams</param>
public RootHandler(BinaryReader stream, ProgressReportSlave worker, CASCHandler casc) {
    worker?.ReportProgress(0, "Loading APM data...");
    string str = Encoding.ASCII.GetString(stream.ReadBytes((int)stream.BaseStream.Length));
    string[] array = str.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries);

    // First line describes the columns ("#NAME!TYPE|..."); locate the name and MD5 columns,
    // falling back to the conventional positions 0 and 1.
    List<string> components = array[0].Substring(1).ToUpper().Split('|').ToList();
    components = components.Select(c => c.Split('!')[0]).ToList();
    int nameComponentIdx = components.IndexOf("FILENAME");
    if (nameComponentIdx == -1) {
        nameComponentIdx = 0;
    }
    int md5ComponentIdx = components.IndexOf("MD5");
    if (md5ComponentIdx == -1) {
        md5ComponentIdx = 1;
    }
    components.Clear();

    // First pass: record every file and remember loadable RDEV CMF hashes.
    Dictionary<string, MD5Hash> cmfHashes = new Dictionary<string, MD5Hash>();
    for (int i = 1; i < array.Length; i++) {
        string[] filedata = array[i].Split('|');
        string name = filedata[nameComponentIdx];
        MD5Hash md5 = filedata[md5ComponentIdx].ToByteArray().ToMD5();
        RootFiles[name] = md5;
        if (Path.GetExtension(name) != ".cmf" || !name.Contains("RDEV")) {
            continue;
        }
        if (!IsValidLanguage(casc.Config, name)) {
            continue;
        }
        if (!casc.EncodingHandler.GetEntry(md5, out _)) {
            continue;
        }
        // fix: was cmfHashes.Add(name, md5), which throws ArgumentException if the manifest
        // lists the same CMF twice; use last-wins assignment, matching RootFiles above.
        cmfHashes[name] = md5;
    }

    LoadedAPMWithoutErrors = true;
    // Second pass: load each RDEV APM.
    for (int i = 1; i < array.Length; i++) {
        string[] filedata = array[i].Split('|');
        string name = filedata[nameComponentIdx];
        if (Path.GetExtension(name) == ".apm") {
            MD5Hash apmMD5 = filedata[md5ComponentIdx].ToByteArray().ToMD5();
            LocaleFlags apmLang = HeurFigureLangFromName(Path.GetFileNameWithoutExtension(name));
            if (!name.Contains("RDEV")) {
                continue;
            }
            if (!IsValidLanguage(casc.Config, name)) {
                continue;
            }
            if (!casc.EncodingHandler.GetEntry(apmMD5, out EncodingEntry apmEnc)) {
                continue;
            }
            // fix: ContainsKey + TryGetValue was a double lookup and left cmf conditionally
            // unassigned; a single TryGetValue assigns default(MD5Hash) when missing.
            string cmfname = $"{Path.GetDirectoryName(name)}/{Path.GetFileNameWithoutExtension(name)}.cmf";
            cmfHashes.TryGetValue(cmfname, out MD5Hash cmf);
            if (casc.Config.LoadPackageManifest) {
                using (Stream apmStream = casc.OpenFile(apmEnc.Key)) {
                    ApplicationPackageManifest apm = new ApplicationPackageManifest();
                    try {
                        TankLib.Helpers.Logger.Info("CASC", $"Loading APM {Path.GetFileNameWithoutExtension(name)}");
                        worker?.ReportProgress(0, $"Loading APM {name}...");
                        apm.Load(name, cmf, apmStream, casc, cmfname, apmLang, worker);
                    } catch (CryptographicException) {
                        // CMF key derivation failed - the decryption procedure is outdated.
                        LoadedAPMWithoutErrors = false;
                        if (!casc.Config.APMFailSilent) {
                            worker?.ReportProgress(0, "CMF decryption failed");
                            // fix: typo "deryption" -> "decryption" in the fatal log message
                            TankLib.Helpers.Logger.Error("CASC", "Fatal - CMF decryption failed. Please update DataTool.");
                            Debugger.Log(0, "CASC", $"RootHandler: CMF decryption procedure outdated, unable to parse {name}\r\n");
                            if (Debugger.IsAttached) {
                                Debugger.Break();
                            }
                            Environment.Exit(0x636D6614);
                            //Logger.GracefulExit(0x636D6614);
                        }
                    } catch (LocalIndexMissingException) {
                        // something doesn't exist for this language, we can't load
                        continue;
                    }
                    APMFiles.Add(apm);
                }
            }
        }
        worker?.ReportProgress((int)(i / (array.Length / 100f)));
    }
}