Example 1
        public EncodingHandler(BinaryReader stream, BackgroundWorkerEx worker)
        {
            worker?.ReportProgress(0, "Loading \"encoding\"...");

            stream.Skip(2);                          // EN
            byte Version        = stream.ReadByte(); // must be 1
            byte CKeyLength     = stream.ReadByte();
            byte EKeyLength     = stream.ReadByte();
            int  CKeyPageSize   = stream.ReadInt16BE() * 1024; // KB to bytes
            int  EKeyPageSize   = stream.ReadInt16BE() * 1024; // KB to bytes
            int  CKeyPageCount  = stream.ReadInt32BE();
            int  EKeyPageCount  = stream.ReadInt32BE();
            byte unk1           = stream.ReadByte(); // must be 0
            int  ESpecBlockSize = stream.ReadInt32BE();

            stream.Skip(ESpecBlockSize);
            //string[] strings = Encoding.ASCII.GetString(stream.ReadBytes(ESpecBlockSize)).Split(new[] { '\0' }, StringSplitOptions.RemoveEmptyEntries);

            stream.Skip(CKeyPageCount * 32);
            //ValueTuple<byte[], byte[]>[] aEntries = new ValueTuple<byte[], byte[]>[CKeyPageCount];

            //for (int i = 0; i < CKeyPageCount; ++i)
            //{
            //    byte[] firstHash = stream.ReadBytes(16);
            //    byte[] blockHash = stream.ReadBytes(16);
            //    aEntries[i] = (firstHash, blockHash);
            //}

            long chunkStart = stream.BaseStream.Position;

            for (int i = 0; i < CKeyPageCount; ++i)
            {
                byte keysCount;

                while ((keysCount = stream.ReadByte()) != 0)
                {
                    long    fileSize = stream.ReadInt40BE();
                    MD5Hash cKey     = stream.Read<MD5Hash>();

                    EncodingEntry entry = new EncodingEntry()
                    {
                        Size = fileSize
                    };

                    // how do we handle multiple keys?
                    for (int ki = 0; ki < keysCount; ++ki)
                    {
                        MD5Hash eKey = stream.Read<MD5Hash>();

                        // use first key for now
                        if (ki == 0)
                        {
                            entry.Key = eKey;
                        }
                        //else
                        //    Logger.WriteLine("Multiple encoding keys for MD5 {0}: {1}", md5.ToHexString(), key.ToHexString());

                        //Logger.WriteLine("Encoding {0:D2} {1} {2} {3} {4}", keysCount, aEntries[i].Item1.ToHexString(), aEntries[i].Item2.ToHexString(), md5.ToHexString(), key.ToHexString());
                    }

                    //Encodings[md5] = entry;
                    EncodingData.Add(cKey, entry);
                }

                // each chunk is 4096 bytes, zero-padded at the end
                long remaining = CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart) % CHUNK_SIZE);

                if (remaining > 0)
                {
                    stream.BaseStream.Position += remaining;
                }

                worker?.ReportProgress((int)((i + 1) / (float)CKeyPageCount * 100));
            }

            stream.Skip(EKeyPageCount * 32);
            //for (int i = 0; i < EKeyPageCount; ++i)
            //{
            //    byte[] firstKey = stream.ReadBytes(16);
            //    byte[] blockHash = stream.ReadBytes(16);
            //}

            long chunkStart2 = stream.BaseStream.Position;

            for (int i = 0; i < EKeyPageCount; ++i)
            {
                byte[] eKey       = stream.ReadBytes(16);
                int    eSpecIndex = stream.ReadInt32BE();
                long   fileSize   = stream.ReadInt40BE();

                // each chunk is 4096 bytes, zero-padded at the end
                long remaining = CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart2) % CHUNK_SIZE);

                if (remaining > 0)
                {
                    stream.BaseStream.Position += remaining;
                }
            }

            // string block till the end of file

            //EncodingData.Dump();
        }
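
A note on the big-endian helpers used above: ReadInt16BE, ReadInt32BE and ReadInt40BE are extension methods on BinaryReader. As a hedged sketch only (the real CASCLib helper may differ), a 40-bit big-endian read that folds five bytes most-significant byte first could look like this:

    using System.IO;

    // Sketch of a 40-bit big-endian read; assumes at least five bytes are
    // available and that the value fits in a signed long.
    public static class BinaryReaderBigEndianSketch
    {
        public static long ReadInt40BE(this BinaryReader reader)
        {
            byte[] b = reader.ReadBytes(5);
            return ((long)b[0] << 32) | ((long)b[1] << 24) |
                   ((long)b[2] << 16) | ((long)b[3] << 8) | b[4];
        }
    }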
Example 2
        private void Parse(MD5Hash md5)
        {
            int size = (int)_reader.BaseStream.Length;

            if (size < 8)
            {
                throw new BLTEDecoderException(0, "not enough data: {0}", 8);
            }

            int magic = _reader.ReadInt32();

            if (magic != BLTE_MAGIC)
            {
                throw new BLTEDecoderException(0, "frame header mismatch (bad BLTE file)");
            }

            int headerSize = _reader.ReadInt32BE();

            if (CASCConfig.ValidateData)
            {
                long oldPos = _reader.BaseStream.Position;

                _reader.BaseStream.Position = 0;

                byte[] newHash = _md5.ComputeHash(_reader.ReadBytes(headerSize > 0 ? headerSize : size));

                if (!md5.EqualsTo(newHash))
                {
                    throw new BLTEDecoderException(0, "data corrupted");
                }

                _reader.BaseStream.Position = oldPos;
            }

            int numBlocks = 1;

            if (headerSize > 0)
            {
                if (size < 12)
                {
                    throw new BLTEDecoderException(0, "not enough data: {0}", 12);
                }

                byte[] fcbytes = _reader.ReadBytes(4);

                numBlocks = fcbytes[1] << 16 | fcbytes[2] << 8 | fcbytes[3] << 0;

                if (fcbytes[0] != 0x0F || numBlocks == 0)
                {
                    throw new BLTEDecoderException(0, "bad table format 0x{0:x2}, numBlocks {1}", fcbytes[0], numBlocks);
                }

                int frameHeaderSize = 24 * numBlocks + 12;

                if (headerSize != frameHeaderSize)
                {
                    throw new BLTEDecoderException(0, "header size mismatch");
                }

                if (size < frameHeaderSize)
                {
                    throw new BLTEDecoderException(0, "not enough data: {0}", frameHeaderSize);
                }
            }

            _dataBlocks = new DataBlock[numBlocks];

            for (int i = 0; i < numBlocks; i++)
            {
                DataBlock block = new DataBlock();

                if (headerSize != 0)
                {
                    block.CompSize   = _reader.ReadInt32BE();
                    block.DecompSize = _reader.ReadInt32BE();
                    block.Hash       = _reader.Read<MD5Hash>();
                }
                else
                {
                    block.CompSize   = size - 8;
                    block.DecompSize = size - 8 - 1;
                    block.Hash       = default(MD5Hash);
                }

                _dataBlocks[i] = block;
            }

            _memStream = new MemoryStream(_dataBlocks.Sum(b => b.DecompSize));

            ProcessNextBlock();

            _length = headerSize == 0 ? _memStream.Length : _memStream.Capacity;

            //for (int i = 0; i < _dataBlocks.Length; i++)
            //{
            //    ProcessNextBlock();
            //}
        }
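
For orientation, the block descriptor filled in by Parse carries a compressed size, a decompressed size and a per-block hash. A minimal sketch of the shape implied by the code above (field names taken from the example; the actual CASCLib declaration may differ):

    // Block descriptor implied by the Parse method above.
    class DataBlock
    {
        public int     CompSize;   // bytes of this block inside the BLTE stream
        public int     DecompSize; // bytes after decompression
        public MD5Hash Hash;       // per-block checksum from the frame header
    }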
Example 3
        protected override void ExtractFileOnline(MD5Hash key, string path, string name)
        {
            IndexEntry idxInfo = CDNIndex.GetIndexInfo(key);

            ExtractFileOnlineInternal(idxInfo, key, path, name);
        }
Example 4
 public bool GetEntry(MD5Hash md5, out EncodingEntry enc) => EncodingData.TryGetValue(md5, out enc);
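
A typical call site, mirroring the pattern used in the D3RootHandler example further below, first resolves a content hash to its EncodingEntry and then opens the file by the encoding key. In this sketch, casc (a CASCHandler) and contentMD5 are placeholder names:

    // Usage sketch; `casc` and `contentMD5` are hypothetical names.
    if (casc.Encoding.GetEntry(contentMD5, out EncodingEntry enc))
    {
        using (Stream file = casc.OpenFile(enc.Key))
        {
            // decoded content can be read from `file` here
        }
    }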
Example 5
        public override void LoadListFile(string path, BackgroundWorkerEx worker = null)
        {
            worker?.ReportProgress(0, "Loading \"listfile\"...");

            Logger.WriteLine("OWRootHandler: loading file names...");

            float pkgOnePct = apmFiles.Sum(a => a.Packages.Length) / 100f;

            int pkgCount = 0;

            foreach (var apm in apmFiles)
            {
                for (int i = 0; i < apm.Packages.Length; i++)
                {
                    APMPackage package = apm.Packages[i];

                    MD5Hash pkgIndexMD5 = package.indexContentKey;

                    string apmName  = Path.GetFileNameWithoutExtension(apm.Name);
                    string pkgName  = string.Format("{0}/package_{1:X4}_{2:X16}", apmName, i, package.packageKey);
                    string fakeName = string.Format("{0}_index", pkgName);

                    ulong fileHash = Hasher.ComputeHash(fakeName);
                    Logger.WriteLine("Adding package: {0:X16} {1}", fileHash, package.indexContentKey.ToHexString());
                    if (_rootData.ContainsKey(fileHash))
                    {
                        if (!_rootData[fileHash].baseEntry.MD5.EqualsTo(package.indexContentKey))
                        {
                            Logger.WriteLine("Weird duplicate package: {0:X16} {1}", fileHash, package.indexContentKey.ToHexString());
                        }
                        else
                        {
                            Logger.WriteLine("Duplicate package: {0:X16} {1}", fileHash, package.indexContentKey.ToHexString());
                        }
                        continue;
                    }
                    _rootData[fileHash] = new OWRootEntry()
                    {
                        baseEntry = new RootEntry()
                        {
                            MD5 = pkgIndexMD5, LocaleFlags = LocaleFlags.All, ContentFlags = ContentFlags.None
                        }
                    };

                    CASCFile.Files[fileHash] = new CASCFile(fileHash, fakeName);

                    PackageIndex pkgIndex = apm.Indexes[i];

                    fakeName = string.Format("{0}_bundle_{1:X16}", pkgName, pkgIndex.bundleKey);

                    fileHash = Hasher.ComputeHash(fakeName);
                    Logger.WriteLine("Adding bundle: {0:X16} {1}", fileHash, pkgIndex.bundleContentKey.ToHexString());
                    if (_rootData.ContainsKey(fileHash))
                    {
                        if (!_rootData[fileHash].baseEntry.MD5.EqualsTo(pkgIndex.bundleContentKey))
                        {
                            Logger.WriteLine("Weird duplicate bundle: {0:X16} {1}", fileHash, pkgIndex.bundleContentKey.ToHexString());
                        }
                        else
                        {
                            Logger.WriteLine("Duplicate bundle: {0:X16} {1}", fileHash, pkgIndex.bundleContentKey.ToHexString());
                        }
                        continue;
                    }
                    _rootData[fileHash] = new OWRootEntry()
                    {
                        baseEntry = new RootEntry()
                        {
                            MD5 = pkgIndex.bundleContentKey, LocaleFlags = LocaleFlags.All, ContentFlags = ContentFlags.None
                        },
                        pkgIndex = pkgIndex
                    };

                    CASCFile.Files[fileHash] = new CASCFile(fileHash, fakeName);

                    PackageIndexRecord[] records = apm.Records[i];

                    for (int k = 0; k < records.Length; k++)
                    {
                        fakeName = string.Format("files/{0:X3}/{1:X12}.{0:X3}", KeyToTypeID(records[k].Key), records[k].Key & 0xFFFFFFFFFFFF);

                        fileHash = Hasher.ComputeHash(fakeName);
                        //Logger.WriteLine("Adding package record: key {0:X16} hash {1} flags {2:X8}", fileHash, records[k].contentKey.ToHexString(), records[k].flags);
                        if (_rootData.ContainsKey(fileHash))
                        {
                            if (!_rootData[fileHash].baseEntry.MD5.EqualsTo(records[k].ContentKey))
                            {
                                Logger.WriteLine("Weird duplicate package record: {0:X16} {1}", fileHash, records[k].ContentKey.ToHexString());
                            }
                            //else
                            //    Logger.WriteLine("Duplicate package record: {0:X16} {1}", fileHash, records[k].contentKey.ToHexString());
                            continue;
                        }
                        _rootData[fileHash] = new OWRootEntry()
                        {
                            baseEntry = new RootEntry()
                            {
                                MD5 = records[k].ContentKey, LocaleFlags = LocaleFlags.All, ContentFlags = (ContentFlags)records[k].Flags
                            },
                            pkgIndex    = pkgIndex,
                            pkgIndexRec = records[k]
                        };

                        CASCFile.Files[fileHash] = new CASCFile(fileHash, fakeName);
                    }

                    worker?.ReportProgress((int)(++pkgCount / pkgOnePct));
                }
            }

            Logger.WriteLine("OWRootHandler: loaded {0} file names", _rootData.Count);
        }
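
Since every synthetic name above is registered under Hasher.ComputeHash(name), a later lookup goes through the same hash. A hedged sketch, assuming _rootData is a Dictionary<ulong, OWRootEntry> and TryGetEntryByName is a hypothetical helper:

    // Lookup sketch; uses the same name hasher as the registration code above.
    public bool TryGetEntryByName(string name, out OWRootEntry entry)
    {
        ulong fileHash = Hasher.ComputeHash(name);
        return _rootData.TryGetValue(fileHash, out entry);
    }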
Example 6
        protected override Stream OpenFileOnline(MD5Hash key)
        {
            IndexEntry idxInfo = CDNIndex.GetIndexInfo(key);

            return OpenFileOnlineInternal(idxInfo, key);
        }
Example 7
 protected abstract Stream GetLocalDataStream(MD5Hash key);
Example 8
        private void ParseIndex(Stream stream, int dataIndex)
        {
            using (var br = new BinaryReader(stream))
            {
                stream.Seek(-20, SeekOrigin.End);

                byte version = br.ReadByte();

                if (version != 1)
                {
                    throw new InvalidDataException("ParseIndex -> version");
                }

                byte unk1 = br.ReadByte();

                if (unk1 != 0)
                {
                    throw new InvalidDataException("ParseIndex -> unk1");
                }

                byte unk2 = br.ReadByte();

                if (unk2 != 0)
                {
                    throw new InvalidDataException("ParseIndex -> unk2");
                }

                byte blockSizeKb = br.ReadByte();

                if (blockSizeKb != 4)
                {
                    throw new InvalidDataException("ParseIndex -> blockSizeKb");
                }

                byte offsetBytes = br.ReadByte();

                if (offsetBytes != 4)
                {
                    throw new InvalidDataException("ParseIndex -> offsetBytes");
                }

                byte sizeBytes = br.ReadByte();

                if (sizeBytes != 4)
                {
                    throw new InvalidDataException("ParseIndex -> sizeBytes");
                }

                byte keySizeBytes = br.ReadByte();

                if (keySizeBytes != 16)
                {
                    throw new InvalidDataException("ParseIndex -> keySizeBytes");
                }

                byte checksumSize = br.ReadByte();

                if (checksumSize != 8)
                {
                    throw new InvalidDataException("ParseIndex -> checksumSize");
                }

                int numElements = br.ReadInt32();

                if (numElements * (keySizeBytes + sizeBytes + offsetBytes) > stream.Length)
                {
                    throw new Exception("ParseIndex failed");
                }

                stream.Seek(0, SeekOrigin.Begin);

                for (int i = 0; i < numElements; i++)
                {
                    MD5Hash key = br.Read<MD5Hash>();

                    IndexEntry entry = new IndexEntry
                    {
                        Index  = dataIndex,
                        Size   = br.ReadInt32BE(),
                        Offset = br.ReadInt32BE()
                    };
                    CDNIndexData.Add(key, entry);

                    // each chunk is 4096 bytes, zero-padded at the end
                    long remaining = CHUNK_SIZE - (stream.Position % CHUNK_SIZE);

                    // skip padding
                    if (remaining < 16 + 4 + 4)
                    {
                        stream.Position += remaining;
                    }
                }
            }
        }
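
The dictionary populated here is what the CDNIndex.GetIndexInfo calls in the online examples consult. A lookup sketch, assuming CDNIndexData is a Dictionary<MD5Hash, IndexEntry> (this is not the library's actual implementation):

    // Lookup sketch only.
    public IndexEntry GetIndexInfo(MD5Hash key)
    {
        CDNIndexData.TryGetValue(key, out IndexEntry entry);
        return entry; // null/default when the key is absent from every parsed index
    }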
Example 9
        private Stream OpenFileLocal(MD5Hash key)
        {
            Stream stream = GetLocalDataStream(key);

            return new BLTEStream(stream, key);
        }
Example 10
 protected abstract Stream OpenFileOnline(MD5Hash key);
Example 11
 protected abstract void ExtractFileOnline(MD5Hash key, string path, string name);
Example 12
        public EncodingHandler(BinaryReader stream, BackgroundWorkerEx worker)
        {
            worker?.ReportProgress(0, "Loading \"encoding\"...");

            stream.Skip(2);                          // EN
            byte Version        = stream.ReadByte(); // must be 1
            byte CKeyLength     = stream.ReadByte();
            byte EKeyLength     = stream.ReadByte();
            int  CKeyPageSize   = stream.ReadInt16BE() * 1024; // KB to bytes
            int  EKeyPageSize   = stream.ReadInt16BE() * 1024; // KB to bytes
            int  CKeyPageCount  = stream.ReadInt32BE();
            int  EKeyPageCount  = stream.ReadInt32BE();
            byte unk1           = stream.ReadByte(); // must be 0
            int  ESpecBlockSize = stream.ReadInt32BE();

            //stream.Skip(ESpecBlockSize);
            string[] strings = Encoding.ASCII.GetString(stream.ReadBytes(ESpecBlockSize)).Split(new[] { '\0' }, StringSplitOptions.None);

            //for (int i = 0; i < strings.Length; i++)
            //{
            //    Logger.WriteLine($"ESpec {i:D6} {strings[i]}");
            //}

            stream.Skip(CKeyPageCount * 32);
            //ValueTuple<MD5Hash, MD5Hash>[] cKeyPageData = new ValueTuple<MD5Hash, MD5Hash>[CKeyPageCount];

            //for (int i = 0; i < CKeyPageCount; i++)
            //{
            //    MD5Hash firstHash = stream.Read<MD5Hash>();
            //    MD5Hash blockHash = stream.Read<MD5Hash>();
            //    cKeyPageData[i] = (firstHash, blockHash);
            //}

            long chunkStart = stream.BaseStream.Position;

            for (int i = 0; i < CKeyPageCount; i++)
            {
                byte keysCount;

                while ((keysCount = stream.ReadByte()) != 0)
                {
                    long    fileSize = stream.ReadInt40BE();
                    MD5Hash cKey     = stream.Read<MD5Hash>();

                    EncodingEntry entry = new EncodingEntry()
                    {
                        Size = fileSize,
                        Keys = new List<MD5Hash>(keysCount)
                    };

                    // how do we handle multiple keys?
                    for (int ki = 0; ki < keysCount; ++ki)
                    {
                        MD5Hash eKey = stream.Read<MD5Hash>();
                        entry.Keys.Add(eKey);
                        EKeyToCKey.Add(eKey, cKey);
                        //Logger.WriteLine($"Encoding {i:D7} {ki:D2} {cKey.ToHexString()} {eKey.ToHexString()} {fileSize}");
                    }

                    EncodingData.Add(cKey, entry);
                }

                // each chunk is 4096 bytes, zero-padded at the end
                long remaining = CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart) % CHUNK_SIZE);

                if (remaining > 0)
                {
                    stream.BaseStream.Position += remaining;
                }

                worker?.ReportProgress((int)((i + 1) / (float)CKeyPageCount * 100));
            }

            stream.Skip(EKeyPageCount * 32);
            //ValueTuple<MD5Hash, MD5Hash>[] eKeyPageData = new ValueTuple<MD5Hash, MD5Hash>[EKeyPageCount];

            //for (int i = 0; i < EKeyPageCount; i++)
            //{
            //    MD5Hash firstKey = stream.Read<MD5Hash>();
            //    MD5Hash blockHash = stream.Read<MD5Hash>();
            //    eKeyPageData[i] = (firstKey, blockHash);
            //}

            long chunkStart2 = stream.BaseStream.Position;

            Regex regex = new Regex(@"(?<=e:\{)([0-9A-F]{16})(?=,)", RegexOptions.Compiled);

            for (int i = 0; i < EKeyPageCount; i++)
            {
                while (true)
                {
                    // each chunk is 4096 bytes, zero-padded at the end
                    long remaining = CHUNK_SIZE - ((stream.BaseStream.Position - chunkStart2) % CHUNK_SIZE);

                    if (remaining < 25)
                    {
                        stream.BaseStream.Position += remaining;
                        break;
                    }

                    MD5Hash eKey       = stream.Read<MD5Hash>();
                    int     eSpecIndex = stream.ReadInt32BE();
                    long    fileSize   = stream.ReadInt40BE();

                    if (eSpecIndex == -1)
                    {
                        stream.BaseStream.Position += remaining;
                        break;
                    }

                    string eSpec = strings[eSpecIndex];

                    var matches = regex.Matches(eSpec);
                    if (matches.Count != 0)
                    {
                        var keys = matches.Cast<Match>().Select(m => BitConverter.ToUInt64(m.Value.FromHexString(), 0)).ToList();
                        EncryptionData.Add(eKey, keys);
                        //Logger.WriteLine($"Encoding {i:D7} {eKey.ToHexString()} {eSpecIndex} {fileSize} {eSpec} {string.Join(",", keys.Select(x => $"{x:X16}"))}");
                    }
                    else
                    {
                        //Logger.WriteLine($"Encoding {i:D7} {eKey.ToHexString()} {eSpecIndex} {fileSize} {eSpec}");
                    }
                }
            }
            // string block till the end of file
        }
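
The regex in the second loop extracts 16-hex-digit encryption key names that follow an e:{ marker in an ESpec string. A standalone check with a fabricated ESpec value (the string below is made up purely to exercise the pattern):

    using System;
    using System.Text.RegularExpressions;

    class ESpecRegexDemo
    {
        static void Main()
        {
            var regex = new Regex(@"(?<=e:\{)([0-9A-F]{16})(?=,)", RegexOptions.Compiled);
            string espec = "b:{164=z},e:{0123456789ABCDEF,...}"; // made-up value

            foreach (Match m in regex.Matches(espec))
                Console.WriteLine(m.Value); // prints 0123456789ABCDEF
        }
    }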
Example 13
        public D3RootHandler(BinaryReader stream, BackgroundWorkerEx worker, CASCHandler casc)
        {
            worker?.ReportProgress(0, "Loading \"root\"...");

            byte b1 = stream.ReadByte();
            byte b2 = stream.ReadByte();
            byte b3 = stream.ReadByte();
            byte b4 = stream.ReadByte();

            int count = stream.ReadInt32();

            for (int j = 0; j < count; j++)
            {
                MD5Hash md5  = stream.Read<MD5Hash>();
                string  name = stream.ReadCString();

                var entries = new List<D3RootEntry>();
                D3RootData[name] = entries;

                if (!casc.Encoding.GetEntry(md5, out EncodingEntry enc))
                {
                    continue;
                }

                using (BinaryReader s = new BinaryReader(casc.OpenFile(enc.Key)))
                {
                    uint magic = s.ReadUInt32();

                    int nEntries0 = s.ReadInt32();

                    for (int i = 0; i < nEntries0; i++)
                    {
                        entries.Add(D3RootEntry.Read(0, s));
                    }

                    int nEntries1 = s.ReadInt32();

                    for (int i = 0; i < nEntries1; i++)
                    {
                        entries.Add(D3RootEntry.Read(1, s));
                    }

                    int nNamedEntries = s.ReadInt32();

                    for (int i = 0; i < nNamedEntries; i++)
                    {
                        entries.Add(D3RootEntry.Read(2, s));
                    }
                }

                worker?.ReportProgress((int)((j + 1) / (float)(count + 2) * 100));
            }

            // Parse CoreTOC.dat
            var coreTocEntry = D3RootData["Base"].Find(e => e.Name == "CoreTOC.dat");

            casc.Encoding.GetEntry(coreTocEntry.MD5, out EncodingEntry enc1);

            using (var file = casc.OpenFile(enc1.Key))
                tocParser = new CoreTOCParser(file);

            worker?.ReportProgress((int)((count + 1) / (float)(count + 2) * 100));

            // Parse Packages.dat
            var pkgEntry = D3RootData["Base"].Find(e => e.Name == "Data_D3\\PC\\Misc\\Packages.dat");

            casc.Encoding.GetEntry(pkgEntry.MD5, out EncodingEntry enc2);

            using (var file = casc.OpenFile(enc2.Key))
                pkgParser = new PackagesParser(file);

            worker?.ReportProgress(100);
        }