/// <summary>
/// Retrieves the raw file identified by <paramref name="target"/> (a content/encoding key)
/// and returns its BLTE-decoded bytes. Lookup order: loose cached file on disk,
/// then the owning archive on disk, then a ranged download from the CDN.
/// </summary>
/// <param name="target">MD5-style key of the file to retrieve.</param>
/// <returns>Decoded file bytes, or an empty array if archive/CDN retrieval failed (best-effort, kept from original).</returns>
/// <exception cref="Exception">Thrown when the key is not present in any loaded archive index.</exception>
public async static Task<byte[]> RetrieveFileBytes(MD5Hash target)
{
    var targetString = target.ToHexString().ToLower();

    // Loose files are cached as data/ab/cd/abcdef... under the CDN cache root.
    var unarchivedName = Path.Combine(CDNCache.cacheDir, "tpr/wow", "data",
        targetString[0] + "" + targetString[1], targetString[2] + "" + targetString[3], targetString);

    if (File.Exists(unarchivedName))
    {
        return BLTE.Parse(await File.ReadAllBytesAsync(unarchivedName));
    }

    if (!indexDictionary.TryGetValue(target, out IndexEntry entry))
    {
        throw new Exception("Unable to find file in archives. File is not available!?");
    }

    var index = indexNames[(int)entry.indexID].ToHexString().ToLower();
    var archiveName = Path.Combine(CDNCache.cacheDir, "tpr/wow", "data",
        index[0] + "" + index[1], index[2] + "" + index[3], index);

    if (!File.Exists(archiveName))
    {
        Logger.WriteLine("Unable to find archive " + index + " on disk, attempting to stream from CDN instead");
        try
        {
            // Ranged download of just this entry from the archive on the CDN.
            return BLTE.Parse(await CDNCache.Get("data", index, true, false, entry.size, entry.offset));
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message);
        }
    }
    else
    {
        using (var stream = new FileStream(archiveName, FileMode.Open, FileAccess.Read, FileShare.Read))
        {
            try
            {
                // Validate BEFORE seeking/reading: a truncated archive should fail
                // loudly here instead of producing a short buffer for BLTE.Parse.
                if (entry.offset > stream.Length || entry.offset + entry.size > stream.Length)
                {
                    throw new Exception("File is beyond archive length, incomplete archive!");
                }

                stream.Seek(entry.offset, SeekOrigin.Begin);

                var archiveBytes = new byte[entry.size];

                // FIX: Stream.ReadAsync is not guaranteed to fill the buffer in a
                // single call; the original ignored its return value and could hand
                // a partially-filled buffer to BLTE.Parse. Loop until complete.
                var total = 0;
                while (total < archiveBytes.Length)
                {
                    var read = await stream.ReadAsync(archiveBytes, total, archiveBytes.Length - total);
                    if (read == 0)
                    {
                        throw new Exception("Unexpected end of archive while reading entry!");
                    }
                    total += read;
                }

                return BLTE.Parse(archiveBytes);
            }
            catch (Exception e)
            {
                Console.WriteLine(e.Message);
            }
        }
    }

    // Best-effort fallback kept from the original: callers receive an empty
    // payload when both disk and CDN retrieval fail.
    return new byte[0];
}
/// <summary>
/// Synchronous overload: retrieves the file identified by the hex-string key
/// <paramref name="target"/> from the local cache or an on-disk archive.
/// </summary>
/// <param name="target">Lowercase hex key of the file.</param>
/// <param name="raw">When true, returns the bytes without BLTE decoding.</param>
/// <param name="cdndir">CDN product directory under the cache root.</param>
/// <returns>File bytes (decoded unless <paramref name="raw"/>), or an empty array on read failure.</returns>
/// <exception cref="Exception">Key not present in any loaded archive index, or archive truncated.</exception>
/// <exception cref="FileNotFoundException">Owning archive is not on disk (no CDN fallback in this overload).</exception>
public static byte[] RetrieveFileBytes(string target, bool raw = false, string cdndir = "tpr/wow")
{
    var unarchivedName = Path.Combine(CDN.cacheDir, cdndir, "data",
        target[0] + "" + target[1], target[2] + "" + target[3], target);

    if (File.Exists(unarchivedName))
    {
        // Read once instead of once per branch (original called ReadAllBytes twice).
        var fileBytes = File.ReadAllBytes(unarchivedName);
        return raw ? fileBytes : BLTE.Parse(fileBytes);
    }

    if (!indexDictionary.TryGetValue(target.ToByteArray().ToMD5(), out IndexEntry entry))
    {
        throw new Exception("Unable to find file in archives. File is not available!?");
    }

    var index = indexNames[(int)entry.indexID].ToHexString().ToLower();
    var archiveName = Path.Combine(CDN.cacheDir, cdndir, "data",
        index[0] + "" + index[1], index[2] + "" + index[3], index);

    if (!File.Exists(archiveName))
    {
        throw new FileNotFoundException("Unable to find archive " + index + " on disk!");
    }

    using (var stream = new FileStream(archiveName, FileMode.Open, FileAccess.Read, FileShare.Read))
    using (var bin = new BinaryReader(stream))
    {
        try
        {
            // FIX: validate the entry fits inside the archive, mirroring the async
            // overload. Previously a truncated archive silently produced a short
            // buffer from ReadBytes that only failed later inside BLTE.Parse.
            if (entry.offset + entry.size > stream.Length)
            {
                throw new Exception("File is beyond archive length, incomplete archive!");
            }

            bin.BaseStream.Position = entry.offset;
            var archiveBytes = bin.ReadBytes((int)entry.size);
            return raw ? archiveBytes : BLTE.Parse(archiveBytes);
        }
        catch (Exception e)
        {
            Console.WriteLine(e.Message);
        }
    }

    // Best-effort fallback kept from the original.
    return new byte[0];
}
/// <summary>
/// Downloads the install manifest identified by <paramref name="hash"/> via the CDN
/// cache and, when <paramref name="parseIt"/> is set, parses its tags and file entries.
/// </summary>
/// <param name="hash">Hex key of the install file on the CDN.</param>
/// <param name="parseIt">When false, only downloads/caches; the returned object is empty.</param>
/// <returns>Parsed (or empty) <c>InstallFile</c>.</returns>
/// <exception cref="Exception">Bad magic or unsupported hash size.</exception>
public static async Task<InstallFile> GetInstall(string hash, bool parseIt = false)
{
    var install = new InstallFile();

    byte[] content = await CDNCache.Get("data", hash);

    if (!parseIt)
    {
        return (install);
    }

    using (BinaryReader bin = new BinaryReader(new MemoryStream(BLTE.Parse(content))))
    {
        // Install manifests start with the two-byte magic "IN".
        if (Encoding.UTF8.GetString(bin.ReadBytes(2)) != "IN")
        {
            throw new Exception("Error while parsing install file. Did BLTE header size change?");
        }

        bin.ReadByte(); // skipped byte (version, presumably — not used here)
        install.hashSize = bin.ReadByte();
        if (install.hashSize != 16)
        {
            throw new Exception("Unsupported install hash size!");
        }

        // NOTE(review): the (true) argument to these reads comes from a project
        // extension — presumably selects inverted (big-endian) byte order; confirm.
        install.numTags = bin.ReadUInt16(true);
        install.numEntries = bin.ReadUInt32(true);

        // One bit per entry, rounded up to whole bytes.
        int bytesPerTag = ((int)install.numEntries + 7) / 8;

        install.tags = new InstallTagEntry[install.numTags];

        for (var i = 0; i < install.numTags; i++)
        {
            install.tags[i].name = bin.ReadCString();
            install.tags[i].type = bin.ReadUInt16(true);

            var filebits = bin.ReadBytes(bytesPerTag);

            // Reverse the bit order of each byte (classic multiply/mask/mod
            // bit-reversal trick) so BitArray's LSB-first indexing matches the
            // manifest's MSB-first bit layout.
            for (int j = 0; j < bytesPerTag; j++)
            {
                filebits[j] = (byte)((filebits[j] * 0x0202020202 & 0x010884422010) % 1023);
            }

            install.tags[i].files = new BitArray(filebits);
        }

        install.entries = new InstallFileEntry[install.numEntries];

        for (var i = 0; i < install.numEntries; i++)
        {
            install.entries[i].name = bin.ReadCString();
            // Content hash stored as lowercase hex string.
            install.entries[i].contentHash = BitConverter.ToString(bin.ReadBytes(install.hashSize)).Replace("-", "").ToLower();
            install.entries[i].size = bin.ReadUInt32(true);
            install.entries[i].tags = new List<string>();

            // Attach every tag whose membership bit is set for this entry.
            for (var j = 0; j < install.numTags; j++)
            {
                if (install.tags[j].files[i] == true)
                {
                    install.entries[i].tags.Add(install.tags[j].type + "=" + install.tags[j].name);
                }
            }
        }
    }

    return (install);
}
/// <summary>
/// Downloads the root manifest identified by <paramref name="hash"/> via the CDN cache
/// and, when <paramref name="parseIt"/> is set, parses its entries into lookup-hash
/// and fileDataID multi-dictionaries.
/// </summary>
/// <param name="hash">Hex key of the root file on the CDN.</param>
/// <param name="parseIt">When false, only downloads/caches; the returned object has empty dictionaries.</param>
/// <returns>Parsed (or empty) <c>RootFile</c>.</returns>
public static async Task<RootFile> GetRoot(string hash, bool parseIt = false)
{
    var root = new RootFile
    {
        entriesLookup = new MultiDictionary<ulong, RootEntry>(),
        entriesFDID = new MultiDictionary<uint, RootEntry>(),
    };

    byte[] content = await CDNCache.Get("data", hash);

    if (!parseIt)
    {
        return (root);
    }

    // Counters are maintained for both formats but not otherwise consumed here.
    var namedCount = 0;
    var unnamedCount = 0;
    var newRoot = false;

    using (MemoryStream ms = new MemoryStream(BLTE.Parse(content)))
    using (BinaryReader bin = new BinaryReader(ms))
    {
        var header = bin.ReadUInt32();
        // 1296454484 == 0x4D465354, the bytes "TSFM" (little-endian "MFST"):
        // marks the newer root format with a header; otherwise rewind and
        // parse as the legacy headerless format.
        if (header == 1296454484)
        {
            uint totalFiles = bin.ReadUInt32();
            uint namedFiles = bin.ReadUInt32();
            newRoot = true;
        }
        else
        {
            bin.BaseStream.Position = 0;
        }

        // Root files are a sequence of blocks, each covering `count` entries
        // that share the same content/locale flags.
        while (bin.BaseStream.Position < bin.BaseStream.Length)
        {
            var count = bin.ReadUInt32();
            var contentFlags = (ContentFlags)bin.ReadUInt32();
            var localeFlags = (LocaleFlags)bin.ReadUInt32();

            var entries = new RootEntry[count];
            var filedataIds = new int[count];

            // fileDataIDs are delta-encoded: each stored value is an offset from
            // (previous id + 1).
            var fileDataIndex = 0;
            for (var i = 0; i < count; ++i)
            {
                entries[i].localeFlags = localeFlags;
                entries[i].contentFlags = contentFlags;

                filedataIds[i] = fileDataIndex + bin.ReadInt32();
                entries[i].fileDataID = (uint)filedataIds[i];
                fileDataIndex = filedataIds[i] + 1;
            }

            if (!newRoot)
            {
                // Legacy layout: md5 + lookup hash interleaved per entry.
                for (var i = 0; i < count; ++i)
                {
                    entries[i].md5 = bin.Read<MD5Hash>();
                    entries[i].lookup = bin.ReadUInt64();
                    root.entriesLookup.Add(entries[i].lookup, entries[i]);
                    root.entriesFDID.Add(entries[i].fileDataID, entries[i]);
                }
            }
            else
            {
                // New layout: all md5s first, then lookup hashes — which are
                // omitted entirely for blocks flagged NoNames.
                for (var i = 0; i < count; ++i)
                {
                    entries[i].md5 = bin.Read<MD5Hash>();
                }

                for (var i = 0; i < count; ++i)
                {
                    if (contentFlags.HasFlag(ContentFlags.NoNames))
                    {
                        entries[i].lookup = 0;
                        unnamedCount++;
                    }
                    else
                    {
                        entries[i].lookup = bin.ReadUInt64();
                        namedCount++;
                        root.entriesLookup.Add(entries[i].lookup, entries[i]);
                    }
                    root.entriesFDID.Add(entries[i].fileDataID, entries[i]);
                }
            }
        }
    }

    return (root);
}
/// <summary>
/// Downloads and parses the encoding file identified by <paramref name="hash"/>.
/// Table A (cKey -> eKey + size) is always parsed; the string block and Table B
/// (eKey spec entries) only when <paramref name="parseTableB"/> is set. Parsed
/// cKey->eKey pairs are also added to the shared <c>encodingDictionary</c> cache.
/// </summary>
/// <param name="hash">Hex key of the encoding file on the CDN.</param>
/// <param name="encodingSize">Expected on-disk size; mismatch triggers one forced re-download.</param>
/// <param name="parseTableB">Whether to parse the string block and Table B.</param>
/// <param name="checkStuff">Whether to retain per-page first-hash/checksum headers.</param>
/// <returns>Parsed <c>EncodingFile</c>.</returns>
/// <exception cref="Exception">Corrupt download or bad magic.</exception>
public static async Task<EncodingFile> GetEncoding(string hash, int encodingSize = 0, bool parseTableB = false, bool checkStuff = false)
{
    var encoding = new EncodingFile();

    hash = hash.ToLower();

    byte[] content = await CDNCache.Get("data", hash);

    if (encodingSize != 0 && encodingSize != content.Length)
    {
        // Re-download file, not expected size.
        content = await CDNCache.Get("data", hash, true, true);

        if (encodingSize != content.Length && encodingSize != 0)
        {
            throw new Exception("File corrupt/not fully downloaded! Remove " + "data / " + hash[0] + hash[1] + " / " + hash[2] + hash[3] + " / " + hash + " from cache.");
        }
    }

    using (BinaryReader bin = new BinaryReader(new MemoryStream(BLTE.Parse(content))))
    {
        // Encoding files start with the two-byte magic "EN".
        if (Encoding.UTF8.GetString(bin.ReadBytes(2)) != "EN")
        {
            throw new Exception("Error while parsing encoding file. Did BLTE header size change?");
        }

        // Fixed header. NOTE(review): (true) on these reads comes from a project
        // extension — presumably inverted (big-endian) byte order; confirm.
        encoding.version = bin.ReadByte();
        encoding.cKeyLength = bin.ReadByte();
        encoding.eKeyLength = bin.ReadByte();
        encoding.cKeyPageSize = bin.ReadUInt16(true);
        encoding.eKeyPageSize = bin.ReadUInt16(true);
        encoding.cKeyPageCount = bin.ReadUInt32(true);
        encoding.eKeyPageCount = bin.ReadUInt32(true);
        encoding.stringBlockSize = bin.ReadUInt40(true);

        var headerLength = bin.BaseStream.Position;

        if (parseTableB)
        {
            // String block: NUL-terminated spec strings, read until its declared size.
            var stringBlockEntries = new List<string>();

            while ((bin.BaseStream.Position - headerLength) != (long)encoding.stringBlockSize)
            {
                stringBlockEntries.Add(bin.ReadCString());
            }

            encoding.stringBlockEntries = stringBlockEntries.ToArray();
        }
        else
        {
            bin.BaseStream.Position += (long)encoding.stringBlockSize;
        }

        /* Table A */
        if (checkStuff)
        {
            // Per-page header: first hash on page + page checksum (16 bytes each).
            encoding.aHeaders = new EncodingHeaderEntry[encoding.cKeyPageCount];

            for (int i = 0; i < encoding.cKeyPageCount; i++)
            {
                encoding.aHeaders[i].firstHash = bin.Read<MD5Hash>();
                encoding.aHeaders[i].checksum = bin.Read<MD5Hash>();
            }
        }
        else
        {
            bin.BaseStream.Position += encoding.cKeyPageCount * 32;
        }

        var tableAstart = bin.BaseStream.Position;

        Dictionary<MD5Hash, EncodingFileEntry> entries = new Dictionary<MD5Hash, EncodingFileEntry>(new MD5HashComparer());

        // Table A pages: runs of entries terminated by a zero key-count byte,
        // each page padded to a 4096-byte boundary.
        for (int i = 0; i < encoding.cKeyPageCount; i++)
        {
            byte keysCount;

            while ((keysCount = bin.ReadByte()) != 0)
            {
                EncodingFileEntry entry = new EncodingFileEntry()
                {
                    size = bin.ReadInt40BE()
                };

                var cKey = bin.Read<MD5Hash>();

                // @TODO add support for multiple encoding keys
                // Only the first eKey is kept; the rest are skipped (16 bytes each).
                for (int key = 0; key < keysCount; key++)
                {
                    if (key == 0)
                    {
                        entry.eKey = bin.Read<MD5Hash>();
                    }
                    else
                    {
                        bin.ReadBytes(16);
                    }
                }

                entries.Add(cKey, entry);

                // Publish into the shared cKey -> eKey cache under the rw-lock.
                // NOTE(review): EnterWriteLock sits inside its own try — if Enter
                // itself threw, ExitWriteLock in finally would also throw; the
                // conventional pattern enters before the try. Confirm intent.
                try
                {
                    encodingCacheLock.EnterUpgradeableReadLock();
                    if (!encodingDictionary.ContainsKey(cKey))
                    {
                        try
                        {
                            encodingCacheLock.EnterWriteLock();
                            encodingDictionary.Add(cKey, entry.eKey);
                        }
                        finally
                        {
                            encodingCacheLock.ExitWriteLock();
                        }
                    }
                }
                finally
                {
                    encodingCacheLock.ExitUpgradeableReadLock();
                }
            }

            // Skip padding to the next 4096-byte page boundary.
            // NOTE(review): when a page's entries end exactly on a boundary,
            // remaining == 4096 and a full page is skipped — confirm the zero
            // terminator guarantees this cannot happen in practice.
            var remaining = 4096 - ((bin.BaseStream.Position - tableAstart) % 4096);
            if (remaining > 0)
            {
                bin.BaseStream.Position += remaining;
            }
        }

        encoding.aEntries = entries;

        if (!parseTableB)
        {
            return (encoding);
        }

        /* Table B */
        if (checkStuff)
        {
            encoding.bHeaders = new EncodingHeaderEntry[encoding.eKeyPageCount];

            for (int i = 0; i < encoding.eKeyPageCount; i++)
            {
                encoding.bHeaders[i].firstHash = bin.Read<MD5Hash>();
                encoding.bHeaders[i].checksum = bin.Read<MD5Hash>();
            }
        }
        else
        {
            bin.BaseStream.Position += encoding.eKeyPageCount * 32;
        }

        var tableBstart = bin.BaseStream.Position;

        List<EncodingFileDescEntry> b_entries = new List<EncodingFileDescEntry>();

        // Table B entries are 25 bytes; skip any end-of-page slack too small to
        // hold another entry.
        while (bin.BaseStream.Position < tableBstart + 4096 * encoding.eKeyPageCount)
        {
            var remaining = 4096 - (bin.BaseStream.Position - tableBstart) % 4096;

            if (remaining < 25)
            {
                bin.BaseStream.Position += remaining;
                continue;
            }

            EncodingFileDescEntry entry = new EncodingFileDescEntry()
            {
                key = bin.Read<MD5Hash>(),
                stringIndex = bin.ReadUInt32(true),
                compressedSize = bin.ReadUInt40(true)
            };

            // stringIndex of uint.MaxValue marks the end of real entries.
            if (entry.stringIndex == uint.MaxValue)
            {
                break;
            }

            b_entries.Add(entry);
        }

        encoding.bEntries = b_entries.ToArray();
    }

    return (encoding);
}
/// <summary>
/// Gets a file from disk, if not available on disk it downloads it first.
/// </summary>
/// <param name="subFolder">"config", "data" or "patch"</param>
/// <param name="file">File</param>
/// <param name="returnstream">Whether or not to return byte[] array</param>
/// <param name="redownload">Whether or not to redownload the file</param>
/// <param name="size">Size (for partial downloads)</param>
/// <param name="offset">Offset (for partial downloads)</param>
/// <param name="cdn">CDN host to download from; falls back to backupCDN on HTTP errors.</param>
/// <returns>File bytes (partial downloads are returned directly and never cached).</returns>
public static async Task<byte[]> Get(string subFolder, string file, bool returnstream = true, bool redownload = false, uint size = 0, uint offset = 0, string cdn = bestCDNEU)
{
    file = file.ToLower();

    // FIX: build the URL from the cdn parameter. It was hard-coded to bestCDNEU,
    // which made the backup-CDN retry below silently hit the same host again.
    var target = cdn + "/tpr/wow/" + subFolder + "/" + file[0] + file[1] + "/" + file[2] + file[3] + "/" + file;
    var uri = new Uri(target);
    var cleanname = uri.AbsolutePath;

    if (redownload || !File.Exists(cacheDir + cleanname))
    {
        try
        {
            // CreateDirectory is a no-op when the directory already exists, so
            // the previous Directory.Exists check (on the file path!) is dropped.
            Directory.CreateDirectory(Path.GetDirectoryName(cacheDir + cleanname));

            Logger.WriteLine("WARNING! Downloading " + cleanname);

            var request = new HttpRequestMessage(HttpMethod.Get, uri);
            if (size > 0)
            {
                // FIX: HTTP ranges are inclusive (RFC 7233); the previous
                // (offset, offset + size) requested size + 1 bytes.
                request.Headers.Range = new System.Net.Http.Headers.RangeHeaderValue(offset, offset + size - 1);
            }

            using (HttpResponseMessage response = await client.SendAsync(request))
            {
                if (response.IsSuccessStatusCode)
                {
                    using (MemoryStream mstream = new MemoryStream())
                    using (HttpContent res = response.Content)
                    {
                        await res.CopyToAsync(mstream);

                        if (isEncrypted)
                        {
                            var cleaned = Path.GetFileNameWithoutExtension(cleanname);
                            var decrypted = BLTE.DecryptFile(cleaned, mstream.ToArray(), decryptionKeyName);

                            // Only write out if this is a full DL
                            if (size == 0)
                            {
                                await File.WriteAllBytesAsync(cacheDir + cleanname, decrypted);
                            }

                            return decrypted;
                        }
                        else
                        {
                            if (size == 0)
                            {
                                // Full download: cache on disk, then fall through
                                // to the disk read below (original behavior).
                                await File.WriteAllBytesAsync(cacheDir + cleanname, mstream.ToArray());
                            }
                            else
                            {
                                // Partial downloads are never cached.
                                return mstream.ToArray();
                            }
                        }
                    }
                }
                else
                {
                    if (cdn != backupCDN)
                    {
                        Logger.WriteLine("Error retrieving file: HTTP status code " + response.StatusCode + " on URL " + target + ", trying on backup CDN..");
                        return await Get(subFolder, file, returnstream, redownload, size, offset, backupCDN);
                    }
                    else
                    {
                        throw new FileNotFoundException("Error retrieving file: HTTP status code " + response.StatusCode + " on URL " + target + ", exhausted all CDNs.");
                    }
                }
            }
        }
        catch (Exception e)
        {
            // Best-effort (kept from original): log and fall through; if the file
            // never reached disk, ReadAllBytesAsync below surfaces the failure.
            Logger.WriteLine(e.Message);
        }
    }

    if (returnstream)
    {
        return await File.ReadAllBytesAsync(cacheDir + cleanname);
    }
    else
    {
        return new byte[0];
    }
}
/// <summary>
/// Synchronously gets a file from the cache directory, downloading it from
/// <paramref name="url"/> first when missing (or when <paramref name="redownload"/> is set).
/// Falls back to the client04 mirror on a 404 from the primary host.
/// </summary>
/// <param name="url">Full URL of the file (lowercased before use).</param>
/// <param name="returnstream">Whether to return the file bytes (false returns an empty array).</param>
/// <param name="redownload">Force a fresh download even when cached.</param>
/// <returns>File bytes from the cache, or an empty array.</returns>
public static byte[] Get(string url, bool returnstream = true, bool redownload = false)
{
    var uri = new Uri(url.ToLower());
    string cleanname = uri.AbsolutePath;

    if (redownload || !File.Exists(cacheDir + cleanname))
    {
        try
        {
            // CreateDirectory is idempotent; the previous Directory.Exists check
            // (against the file path itself) was redundant.
            Directory.CreateDirectory(Path.GetDirectoryName(cacheDir + cleanname));

            Logger.WriteLine("Downloading " + cleanname);

            using (HttpResponseMessage response = client.GetAsync(uri).Result)
            {
                if (response.IsSuccessStatusCode)
                {
                    using (MemoryStream mstream = new MemoryStream())
                    using (HttpContent res = response.Content)
                    {
                        // FIX: the copy was fire-and-forget (returned Task was
                        // discarded), so mstream.ToArray() below could run before
                        // the body finished copying, caching a truncated/empty
                        // file. Block until the copy completes.
                        res.CopyToAsync(mstream).GetAwaiter().GetResult();

                        if (isEncrypted)
                        {
                            var cleaned = Path.GetFileNameWithoutExtension(cleanname);
                            var decrypted = BLTE.DecryptFile(cleaned, mstream.ToArray(), decryptionKeyName);

                            File.WriteAllBytes(cacheDir + cleanname, decrypted);
                            return decrypted;
                        }
                        else
                        {
                            // Cache on disk, then fall through to the disk read
                            // below (original behavior).
                            File.WriteAllBytes(cacheDir + cleanname, mstream.ToArray());
                        }
                    }
                }
                else if (response.StatusCode == System.Net.HttpStatusCode.NotFound && !url.StartsWith("http://client04"))
                {
                    Logger.WriteLine("Not found on primary mirror, retrying on secondary mirror...");
                    return Get("http://client04.pdl.wow.battlenet.com.cn/" + cleanname, returnstream, redownload);
                }
                else
                {
                    throw new FileNotFoundException("Error retrieving file: HTTP status code " + response.StatusCode + " on URL " + url);
                }
            }
        }
        catch (Exception e)
        {
            // Best-effort (kept from original): log and fall through to the
            // disk read, which surfaces a missing file to the caller.
            Logger.WriteLine(e.Message);
        }
    }

    if (returnstream)
    {
        return File.ReadAllBytes(cacheDir + cleanname);
    }
    else
    {
        return new byte[0];
    }
}
/// <summary>
/// Synchronous root-manifest loader: reads the root file from an HTTP CDN (when
/// <paramref name="url"/> starts with "http:") or a local directory, and optionally
/// parses its entries into lookup-hash and fileDataID multi-dictionaries.
/// </summary>
/// <param name="url">CDN base URL or local build directory.</param>
/// <param name="hash">Hex key of the root file.</param>
/// <param name="parseIt">When false, only fetches; the returned object has empty dictionaries.</param>
/// <returns>Parsed (or empty) <c>RootFile</c>.</returns>
public static RootFile GetRoot(string url, string hash, bool parseIt = false)
{
    var root = new RootFile
    {
        entriesLookup = new MultiDictionary<ulong, RootEntry>(),
        entriesFDID = new MultiDictionary<uint, RootEntry>(),
    };

    byte[] content;

    if (url.StartsWith("http:"))
    {
        content = CDN.Get(url + "data/" + hash[0] + hash[1] + "/" + hash[2] + hash[3] + "/" + hash);
    }
    else
    {
        content = File.ReadAllBytes(Path.Combine(url, "data", "" + hash[0] + hash[1], "" + hash[2] + hash[3], hash));
    }

    if (!parseIt)
    {
        return (root);
    }

    // Kept for the commented-out BY_FDID hashing path below.
    var hasher = new Jenkins96();

    // Counters/header fields are tracked but not otherwise consumed here.
    var namedCount = 0;
    var unnamedCount = 0;
    uint totalFiles = 0;
    uint namedFiles = 0;
    var newRoot = false;

    using (BinaryReader bin = new BinaryReader(new MemoryStream(BLTE.Parse(content))))
    {
        var header = bin.ReadUInt32();
        // 1296454484 == 0x4D465354, the bytes "TSFM" (little-endian "MFST"):
        // marks the newer root format; otherwise rewind and parse as legacy.
        if (header == 1296454484)
        {
            totalFiles = bin.ReadUInt32();
            namedFiles = bin.ReadUInt32();
            newRoot = true;
        }
        else
        {
            bin.BaseStream.Position = 0;
        }

        // Root files are a sequence of blocks, each covering `count` entries
        // that share the same content/locale flags.
        while (bin.BaseStream.Position < bin.BaseStream.Length)
        {
            var count = bin.ReadUInt32();
            var contentFlags = (ContentFlags)bin.ReadUInt32();
            var localeFlags = (LocaleFlags)bin.ReadUInt32();

            var entries = new RootEntry[count];
            var filedataIds = new int[count];

            // fileDataIDs are delta-encoded: each stored value is an offset from
            // (previous id + 1).
            var fileDataIndex = 0;
            for (var i = 0; i < count; ++i)
            {
                entries[i].localeFlags = localeFlags;
                entries[i].contentFlags = contentFlags;

                filedataIds[i] = fileDataIndex + bin.ReadInt32();
                entries[i].fileDataID = (uint)filedataIds[i];
                fileDataIndex = filedataIds[i] + 1;
            }

            if (!newRoot)
            {
                // Legacy layout: md5 + lookup hash interleaved per entry.
                for (var i = 0; i < count; ++i)
                {
                    entries[i].md5 = bin.Read<MD5Hash>();
                    entries[i].lookup = bin.ReadUInt64();
                    root.entriesLookup.Add(entries[i].lookup, entries[i]);
                    root.entriesFDID.Add(entries[i].fileDataID, entries[i]);
                }
            }
            else
            {
                // New layout: all md5s first, then lookup hashes — omitted
                // entirely for blocks flagged NoNames.
                for (var i = 0; i < count; ++i)
                {
                    entries[i].md5 = bin.Read<MD5Hash>();
                }

                for (var i = 0; i < count; ++i)
                {
                    if (contentFlags.HasFlag(ContentFlags.NoNames))
                    {
                        //entries[i].lookup = hasher.ComputeHash("BY_FDID_" + entries[i].fileDataID);
                        entries[i].lookup = 0;
                        unnamedCount++;
                    }
                    else
                    {
                        entries[i].lookup = bin.ReadUInt64();
                        namedCount++;
                        root.entriesLookup.Add(entries[i].lookup, entries[i]);
                    }
                    root.entriesFDID.Add(entries[i].fileDataID, entries[i]);
                }
            }
        }
    }

    return (root);
}