/// <summary>
/// Dumps information from one .archive file or from every *.archive file under a directory:
/// per-chunk class info (<paramref name="classinfo"/>), CR2W import lists (<paramref name="imports"/>),
/// and texture header info for .xbm entries (<paramref name="texinfo"/>).
/// NOTE(review): this definition is truncated in the visible region — the archive loop and the
/// method's final return (and presumably the writing of the collected dictionaries and the
/// missing-hashes output) continue beyond what is shown here.
/// </summary>
/// <param name="path">Path to a single .archive file, or a directory searched recursively for *.archive files.</param>
/// <param name="imports">When true, collect CR2W import info per file into <c>fileDictionary</c>.</param>
/// <param name="missinghashes">Not referenced in the visible portion — presumably consumed past the truncation point; TODO confirm.</param>
/// <param name="texinfo">When true, collect texture header info for entries whose name contains ".xbm".</param>
/// <param name="classinfo">When true, dump a <c>GetDumpObject</c> result for every chunk of every file.</param>
/// <returns>0 when <paramref name="path"/> exists as neither file nor directory; later returns are outside the visible region.</returns>
public static int DumpTask(string path, bool imports, bool missinghashes, bool texinfo, bool classinfo)
{
    #region checks
    // Resolve whether the input is a file or a directory; bail out early if neither exists.
    var isDirectory = false;
    var inputFileInfo = new FileInfo(path);
    var inputDirInfo = new DirectoryInfo(path);
    if (!inputFileInfo.Exists)
    {
        if (!inputDirInfo.Exists)
        {
            return(0);
        }
        else
        {
            isDirectory = true;
        }
    }

    // Collect the archives to process: every *.archive under the directory, or the single input file.
    var archives = new List <Archive>();
    if (isDirectory)
    {
        archives.AddRange(inputDirInfo
            .GetFiles("*.archive", SearchOption.AllDirectories)
            .Select(_ => new Archive(_.FullName)));
    }
    else
    {
        archives.Add(new Archive(inputFileInfo.FullName));
    }
    #endregion

    var mainController = ServiceLocator.Default.ResolveType <IMainController>(); // NOTE(review): resolved but never used in the visible portion.
    var logger = ServiceLocator.Default.ResolveType <ILoggerService>();

    // Output file for unresolved name hashes: inside the directory, or next to the single archive.
    var missinghashtxt = isDirectory
        ? Path.Combine(inputDirInfo.FullName, "missinghashes.txt")
        : $"{inputFileInfo.FullName}.missinghashes.txt";
    using var mwriter = File.CreateText(missinghashtxt);

    var typedict = new ConcurrentDictionary <string, IEnumerable <string> >(); // NOTE(review): never used in the visible portion.

    // Parallel
    foreach (var ar in archives)
    {
        if (classinfo)
        {
            // Memory-map the archive once so the parallel workers can read file data without reopening it.
            using var mmf = MemoryMappedFile.CreateFromFile(ar.Filepath, FileMode.Open, ar.Filepath.GetHashMD5(), 0, MemoryMappedFileAccess.Read);
            var fileinfo = ar.Files.Values;

            // Group the archive's entries by file extension.
            // NOTE(review): the group key selector is used, but the grouped elements (`finfo`) are
            // discarded — `File` re-filters `fileinfo` per extension, which is O(n) per group.
            var query = fileinfo.GroupBy(
                ext => Path.GetExtension(ext.NameStr),
                file => file,
                (ext, finfo) => new
                {
                    Key = ext,
                    File = fileinfo.Where(_ => Path.GetExtension(_.NameStr) == ext)
                }).ToList();

            int progress = 0;
            var total = query.Count;
            Console.Write($"Exporting {total} bundle entries ");

            // foreach extension
            Parallel.ForEach(query, new ParallelOptions { MaxDegreeOfParallelism = 16 }, result =>
            {
                if (!string.IsNullOrEmpty(result.Key))
                {
                    Parallel.ForEach(result.File, new ParallelOptions { MaxDegreeOfParallelism = 16 }, fi =>
                    {
                        var(f, b) = ar.GetFileData(fi.NameHash64, mmf);
                        using var ms = new MemoryStream(f);
                        using var br = new BinaryReader(ms);
                        var cr2w = new CR2WFile();
                        try
                        {
                            cr2w.ReadImportsAndBuffers(br);
                        }
                        catch (Exception e)
                        {
                            // NOTE(review): exception swallowed silently and `e` is unused — files
                            // that fail to parse are skipped with no log entry; consider logging.
                            return;
                        }
                        // Register a dump object for every chunk that yields one.
                        foreach (var chunk in cr2w.Chunks)
                        {
                            var o = chunk.GetDumpObject(br);
                            if (o != null)
                            {
                                Register(o);
                            }
                        }
                    });
                }
                // Progress is per extension group, not per file.
                // NOTE(review): the read of `progress` after the increment is not atomic with it,
                // so the reported fraction may be slightly stale under contention (cosmetic only).
                Interlocked.Increment(ref progress);
                logger.LogProgress(progress / (float)total);
                Console.WriteLine($"Dumped extension {result.Key}");
            });
        }

        if (imports || texinfo)
        {
            using var mmf = MemoryMappedFile.CreateFromFile(ar.Filepath, FileMode.Open, ar.Filepath.GetHashMD5(), 0, MemoryMappedFileAccess.Read);
            int progress = 0;
            var fileDictionary = new ConcurrentDictionary <ulong, Cr2wChunkInfo>();
            var texDictionary = new ConcurrentDictionary <ulong, Cr2wTextureInfo>();

            // get info
            var count = ar.FileCount;
            Console.Write($"Exporting {count} bundle entries ");
            Parallel.For(0, count, new ParallelOptions { MaxDegreeOfParallelism = 8 }, i =>
            {
                // NOTE(review): ar.Files.ToList() materializes the whole dictionary on EVERY
                // iteration — O(n^2) overall; the list should be hoisted out of the loop.
                var entry = ar.Files.ToList()[i];
                var hash = entry.Key;
                // Fall back to the numeric hash when the entry has no known name.
                var filename = string.IsNullOrEmpty(entry.Value.NameStr) ? hash.ToString() : entry.Value.NameStr;

                if (imports)
                {
                    var(f, buffers) = ar.GetFileData(hash, mmf);
                    // check if cr2w file
                    if (f.Length < 4)
                    {
                        return;
                    }
                    var id = f.Take(4);
                    if (!id.SequenceEqual(MAGIC))
                    {
                        return;
                    }
                    var cr2w = new CR2WFile();
                    using var ms = new MemoryStream(f);
                    using var br = new BinaryReader(ms);
                    cr2w.ReadImportsAndBuffers(br);
                    var obj = new Cr2wChunkInfo
                    {
                        Filename = filename,
                        Imports = cr2w.Imports
                    };
                    fileDictionary.AddOrUpdate(hash, obj, (arg1, o) => obj);
                }

                if (texinfo)
                {
                    // Only entries whose name contains ".xbm" are treated as textures.
                    if (!string.IsNullOrEmpty(entry.Value.NameStr) && entry.Value.NameStr.Contains(".xbm"))
                    {
                        var(f, buffers) = ar.GetFileData(hash, mmf);
                        // check if cr2w file
                        if (f.Length < 4)
                        {
                            return;
                        }
                        var id = f.Take(4);
                        if (!id.SequenceEqual(MAGIC))
                        {
                            return;
                        }
                        var cr2w = new CR2WFile();
                        using var ms = new MemoryStream(f);
                        using var br = new BinaryReader(ms);
                        var result = cr2w.Read(br);
                        if (result != EFileReadErrorCodes.NoError)
                        {
                            return;
                        }
                        // Expect chunk 0 to be a CBitmapTexture and chunk 1 its render blob; skip otherwise.
                        if (!(cr2w.Chunks.FirstOrDefault()?.data is CBitmapTexture xbm)
                            || !(cr2w.Chunks[1]?.data is rendRenderTextureBlobPC blob))
                        {
                            return;
                        }

                        // create dds header
                        // NOTE(review): this local `texinfo` conflicts with the captured `bool texinfo`
                        // parameter — CS0136, this will not compile. The sibling overload avoids this
                        // by naming the parameter `info`; rename one of the two.
                        var texinfo = new Cr2wTextureInfo()
                        {
                            Filename = filename,
                            width = blob.Header.SizeInfo.Width.val,
                            height = blob.Header.SizeInfo.Height.val,
                            mips = blob.Header.TextureInfo.MipCount.val,
                            slicecount = blob.Header.TextureInfo.SliceCount.val,
                            alignment = blob.Header.TextureInfo.DataAlignment.val,
                            compression = xbm.Setup.Compression,
                            Group = xbm.Setup.Group,
                            rawFormat = xbm.Setup.RawFormat,
                        };
                        texDictionary.AddOrUpdate(hash, texinfo, (arg1, o) => texinfo);
                    }
                }

                Interlocked.Increment(ref progress);
                logger.LogProgress(progress / (float)count);
            });
/// <summary>
/// Overload variant: dumps CR2W import lists (<paramref name="imports"/>) and .xbm texture header
/// info (<paramref name="info"/>) from one .archive file or every *.archive under a directory,
/// reporting progress through a <c>ProgressBar</c>.
/// NOTE(review): this definition is truncated in the visible region — the archive loop and the
/// method's final return (and presumably the writing of <c>fileDictionary</c>/<c>texDictionary</c>
/// and the missing-hashes output) continue beyond what is shown here.
/// </summary>
/// <param name="path">Path to a single .archive file, or a directory searched recursively for *.archive files.</param>
/// <param name="imports">When true, collect CR2W import info per file into <c>fileDictionary</c>.</param>
/// <param name="missinghashes">Not referenced in the visible portion — presumably consumed past the truncation point; TODO confirm.</param>
/// <param name="info">When true, collect texture header info for entries whose name contains ".xbm".</param>
/// <returns>0 when <paramref name="path"/> exists as neither file nor directory; later returns are outside the visible region.</returns>
public static int DumpTask(string path, bool imports, bool missinghashes, bool info)
{
    #region checks
    // Resolve whether the input is a file or a directory; bail out early if neither exists.
    var isDirectory = false;
    var inputFileInfo = new FileInfo(path);
    var inputDirInfo = new DirectoryInfo(path);
    if (!inputFileInfo.Exists)
    {
        if (!inputDirInfo.Exists)
        {
            return(0);
        }
        else
        {
            isDirectory = true;
        }
    }

    // Collect the archives to process: every *.archive under the directory, or the single input file.
    var archives = new List <Archive>();
    if (isDirectory)
    {
        archives.AddRange(inputDirInfo
            .GetFiles("*.archive", SearchOption.AllDirectories)
            .Select(_ => new Archive(_.FullName)));
    }
    else
    {
        archives.Add(new Archive(inputFileInfo.FullName));
    }
    #endregion

    // Output file for unresolved name hashes: inside the directory, or next to the single archive.
    var missinghashtxt = isDirectory
        ? Path.Combine(inputDirInfo.FullName, "missinghashes.txt")
        : $"{inputFileInfo.FullName}.missinghashes.txt";
    using var mwriter = File.CreateText(missinghashtxt);

    foreach (var ar in archives)
    {
        // One progress bar per archive; p1 below is a forked sub-progress channel.
        using var pb = new ProgressBar();
        if (imports || info)
        {
            // Memory-map the archive once so the parallel workers can read file data without reopening it.
            using var mmf = MemoryMappedFile.CreateFromFile(ar.Filepath, FileMode.Open, ar.Filepath.GetHashMD5(), 0, MemoryMappedFileAccess.Read);
            using var p1 = pb.Progress.Fork();
            int progress = 0;
            var fileDictionary = new ConcurrentDictionary <ulong, Cr2wChunkInfo>();
            var texDictionary = new ConcurrentDictionary <ulong, Cr2wTextureInfo>();

            //foreach (var (hash, value) in ar.Files)
            //    fileDictionary[hash] = new Cr2wChunkInfo();

            // get info
            var count = ar.FileCount;
            Parallel.For(0, count, new ParallelOptions { MaxDegreeOfParallelism = 8 }, i =>
            {
                // NOTE(review): ar.Files.ToList() materializes the whole dictionary on EVERY
                // iteration — O(n^2) overall; the list should be hoisted out of the loop.
                var entry = ar.Files.ToList()[i];
                var hash = entry.Key;
                // Fall back to the numeric hash when the entry has no known name.
                var filename = string.IsNullOrEmpty(entry.Value.NameStr) ? hash.ToString() : entry.Value.NameStr;

                if (imports)
                {
                    var(f, buffers) = ar.GetFileData(hash, mmf);
                    // check if cr2w file
                    if (f.Length < 4)
                    {
                        return;
                    }
                    var id = f.Take(4);
                    if (!id.SequenceEqual(MAGIC))
                    {
                        return;
                    }
                    var cr2w = new CR2WFile();
                    using var ms = new MemoryStream(f);
                    using var br = new BinaryReader(ms);
                    cr2w.ReadImportsAndBuffers(br);
                    var obj = new Cr2wChunkInfo
                    {
                        Filename = filename,
                        Imports = cr2w.Imports
                    };
                    fileDictionary.AddOrUpdate(hash, obj, (arg1, o) => obj);
                }

                if (info)
                {
                    // Only entries whose name contains ".xbm" are treated as textures.
                    if (!string.IsNullOrEmpty(entry.Value.NameStr) && entry.Value.NameStr.Contains(".xbm"))
                    {
                        var(f, buffers) = ar.GetFileData(hash, mmf);
                        // check if cr2w file
                        if (f.Length < 4)
                        {
                            return;
                        }
                        var id = f.Take(4);
                        if (!id.SequenceEqual(MAGIC))
                        {
                            return;
                        }
                        var cr2w = new CR2WFile();
                        using var ms = new MemoryStream(f);
                        using var br = new BinaryReader(ms);
                        var result = cr2w.Read(br);
                        if (result != EFileReadErrorCodes.NoError)
                        {
                            return;
                        }
                        // Expect chunk 0 to be a CBitmapTexture and chunk 1 its render blob; skip otherwise.
                        if (!(cr2w.Chunks.FirstOrDefault()?.data is CBitmapTexture xbm)
                            || !(cr2w.Chunks[1]?.data is rendRenderTextureBlobPC blob))
                        {
                            return;
                        }

                        // create dds header
                        var texinfo = new Cr2wTextureInfo()
                        {
                            Filename = filename,
                            width = blob.Header.SizeInfo.Width.val,
                            height = blob.Header.SizeInfo.Height.val,
                            mips = blob.Header.TextureInfo.MipCount.val,
                            slicecount = blob.Header.TextureInfo.SliceCount.val,
                            alignment = blob.Header.TextureInfo.DataAlignment.val,
                            compression = xbm.Setup.Compression,
                            Group = xbm.Setup.Group,
                            rawFormat = xbm.Setup.RawFormat,
                        };
                        texDictionary.AddOrUpdate(hash, texinfo, (arg1, o) => texinfo);
                    }
                }

                // NOTE(review): `progress += 1` is a plain, non-atomic increment executed from
                // multiple Parallel.For workers — increments can be lost under contention. The
                // sibling overload correctly uses Interlocked.Increment(ref progress); do the same here.
                progress += 1;
                var perc = progress / (double)count;
                p1.Report(perc, $"Loading bundle entries: {progress}/{count}");
            });