/// <summary>
/// Dumps archive metadata (per-extension class/chunk info, CR2W import tables,
/// .xbm texture headers) for a single .archive file or for every .archive file
/// under a directory.
/// NOTE(review): this chunk is truncated — the method continues past the visible
/// source (the missinghashes handling and the final return are not shown), and it
/// appears to be an older variant of the overload defined below.
/// </summary>
/// <param name="path">Input .archive file or a directory containing .archive files.</param>
/// <param name="imports">Collect CR2W import tables per file.</param>
/// <param name="missinghashes">Not used in the visible portion of this chunk.</param>
/// <param name="texinfo">Collect texture (.xbm) header info.</param>
/// <param name="classinfo">Dump per-extension chunk objects via Register().</param>
public static int DumpTaskInner(string path, bool imports, bool missinghashes, bool texinfo, bool classinfo)
{
    #region checks
    if (string.IsNullOrEmpty(path))
    {
        ConsoleFunctions.logger.LogString("Please fill in an input path", Logtype.Error);
        return (0);
    }
    var isDirectory = false;
    var inputFileInfo = new FileInfo(path);
    var inputDirInfo = new DirectoryInfo(path);
    if (!inputFileInfo.Exists)
    {
        if (!inputDirInfo.Exists)
        {
            // neither a file nor a directory: nothing to do
            return (0);
        }
        else
        {
            isDirectory = true;
        }
    }
    #endregion

    // Collect the archive(s) to process: every *.archive below the directory,
    // or just the single input file.
    var archives = new List<Archive>();
    if (isDirectory)
    {
        archives.AddRange(inputDirInfo
            .GetFiles("*.archive", SearchOption.AllDirectories)
            .Select(_ => new Archive(_.FullName)));
    }
    else
    {
        archives.Add(new Archive(inputFileInfo.FullName));
    }

    var mainController = ServiceLocator.Default.ResolveType<IHashService>(); // NOTE(review): resolved but unused in the visible code
    var logger = ServiceLocator.Default.ResolveType<ILoggerService>();
    var typedict = new ConcurrentDictionary<string, IEnumerable<string>>(); // NOTE(review): unused in the visible code

    // Parallel
    foreach (var ar in archives)
    {
        if (classinfo)
        {
            // Map the archive once per branch; the MD5 of the path serves as the map name.
            using var mmf = MemoryMappedFile.CreateFromFile(ar.Filepath, FileMode.Open, ar.Filepath.GetHashMD5(), 0, MemoryMappedFileAccess.Read);
            var fileinfo = ar.Files.Values;
            // Group entries by file extension; File re-filters fileinfo per group.
            var query = fileinfo.GroupBy(
                ext => Path.GetExtension(ext.FileName),
                file => file,
                (ext, finfo) => new { Key = ext, File = fileinfo.Where(_ => Path.GetExtension(_.FileName) == ext) }).ToList();
            var total = query.Count;
            logger.LogString($"Exporting {total} bundle entries ");
            Thread.Sleep(1000); // presumably gives the logger time to flush before progress starts — TODO confirm
            int progress = 0;
            logger.LogProgress(0);
            // foreach extension
            Parallel.ForEach(query, result =>
            {
                if (!string.IsNullOrEmpty(result.Key))
                {
                    // foreach file with this extension
                    Parallel.ForEach(result.File, fi =>
                    {
                        var (f, b) = ar.GetFileData(fi.NameHash64, mmf);
                        using var ms = new MemoryStream(f);
                        using var br = new BinaryReader(ms);
                        var cr2w = new CR2WFile();
                        try
                        {
                            cr2w.ReadImportsAndBuffers(br);
                        }
                        catch (Exception e) // NOTE(review): swallows all parse errors; 'e' is unused
                        {
                            return;
                        }
                        foreach (var chunk in cr2w.Chunks)
                        {
                            var o = chunk.GetDumpObject(br);
                            if (o != null)
                            {
                                Register(o);
                            }
                        }
                    });
                }
                // Progress is counted per extension group, not per file.
                Interlocked.Increment(ref progress);
                logger.LogProgress(progress / (float)total);
                logger.LogString($"Dumped extension {result.Key}", Logtype.Normal);
            });
        }
        if (imports || texinfo)
        {
            using var mmf = MemoryMappedFile.CreateFromFile(ar.Filepath, FileMode.Open, ar.Filepath.GetHashMD5(), 0, MemoryMappedFileAccess.Read);
            var fileDictionary = new ConcurrentDictionary<ulong, Cr2wChunkInfo>();
            var texDictionary = new ConcurrentDictionary<ulong, Cr2wTextureInfo>();
            // get info
            var count = ar.FileCount;
            logger.LogString($"Exporting {count} bundle entries ");
            Thread.Sleep(1000);
            int progress = 0;
            logger.LogProgress(0);
            Parallel.For(0, count, i =>
            {
                // NOTE(review): ToList() materializes the whole file table on every
                // iteration — O(n^2) over the archive; consider hoisting the list.
                var entry = ar.Files.ToList()[i];
                var hash = entry.Key;
                // Fall back to the numeric hash when the entry has no file name.
                var filename = string.IsNullOrEmpty(entry.Value.FileName) ? hash.ToString() : entry.Value.FileName;
                if (imports)
                {
                    var (f, buffers) = ar.GetFileData(hash, mmf);
                    // check if cr2w file
                    if (f.Length < 4)
                    {
                        return;
                    }
                    var id = f.Take(4);
                    if (!id.SequenceEqual(MAGIC))
                    {
                        return;
                    }
                    var cr2w = new CR2WFile();
                    using var ms = new MemoryStream(f);
                    using var br = new BinaryReader(ms);
                    // NOTE(review): unlike the classinfo branch this call is not wrapped
                    // in try/catch — a malformed file will fault this parallel iteration.
                    cr2w.ReadImportsAndBuffers(br);
                    var obj = new Cr2wChunkInfo { Filename = filename, Imports = cr2w.Imports };
                    fileDictionary.AddOrUpdate(hash, obj, (arg1, o) => obj);
                }
                if (texinfo)
                {
                    if (!string.IsNullOrEmpty(entry.Value.FileName) && entry.Value.FileName.Contains(".xbm"))
                    {
                        var (f, buffers) = ar.GetFileData(hash, mmf);
                        // check if cr2w file
                        if (f.Length < 4)
                        {
                            return;
                        }
                        var id = f.Take(4);
                        if (!id.SequenceEqual(MAGIC))
                        {
                            return;
                        }
                        var cr2w = new CR2WFile();
                        using var ms = new MemoryStream(f);
                        using var br = new BinaryReader(ms);
                        var result = cr2w.Read(br);
                        if (result != EFileReadErrorCodes.NoError)
                        {
                            return;
                        }
                        // Expects chunk 0 = CBitmapTexture and chunk 1 = rendRenderTextureBlobPC.
                        if (!(cr2w.Chunks.FirstOrDefault()?.data is CBitmapTexture xbm) || !(cr2w.Chunks[1]?.data is rendRenderTextureBlobPC blob))
                        {
                            return;
                        }
                        // create dds header
                        // NOTE(review): local 'texinfo' shadows the bool parameter 'texinfo'
                        // (CS0136 — will not compile; the overload below renames it 'texinfoObj').
                        var texinfo = new Cr2wTextureInfo()
                        {
                            Filename = filename,
                            width = blob.Header.SizeInfo.Width.val,
                            height = blob.Header.SizeInfo.Height.val,
                            mips = blob.Header.TextureInfo.MipCount.val,
                            slicecount = blob.Header.TextureInfo.SliceCount.val,
                            alignment = blob.Header.TextureInfo.DataAlignment.val,
                            compression = xbm.Setup.Compression,
                            Group = xbm.Setup.Group,
                            rawFormat = xbm.Setup.RawFormat,
                        };
                        texDictionary.AddOrUpdate(hash, texinfo, (arg1, o) => texinfo);
                    }
                }
                Interlocked.Increment(ref progress);
                logger.LogProgress(progress / (float)count);
            });
            // NOTE(review): chunk truncated here — writing the dictionaries out,
            // missinghashes handling and the method's return are not visible.
/// <summary>
/// Dumps archive metadata (per-extension class/chunk info, CR2W import tables,
/// .xbm texture headers) for a single .archive file or for every .archive file
/// under a directory. Newer variant: reads entries via Archive.CopyFileToStream
/// and the ModTools.TryReadCr2WFile* helpers instead of a memory-mapped file.
/// NOTE(review): this chunk is truncated — the method continues past the visible
/// source (dump/list/missinghashes handling and the final return are not shown).
/// </summary>
/// <param name="path">Input .archive file or a directory containing .archive files.</param>
/// <param name="imports">Collect CR2W import tables per file.</param>
/// <param name="missinghashes">Not used in the visible portion of this chunk.</param>
/// <param name="texinfo">Collect texture (.xbm) header info.</param>
/// <param name="classinfo">Dump per-extension chunk objects via Register().</param>
/// <param name="dump">Not used in the visible portion of this chunk.</param>
/// <param name="list">Not used in the visible portion of this chunk.</param>
public static int DumpTaskInner(string path, bool imports, bool missinghashes, bool texinfo, bool classinfo, bool dump, bool list)
{
    #region checks
    if (string.IsNullOrEmpty(path))
    {
        ConsoleFunctions.logger.LogString("Please fill in an input path.", Logtype.Error);
        return (0);
    }
    var isDirectory = false;
    var inputFileInfo = new FileInfo(path);
    var inputDirInfo = new DirectoryInfo(path);
    if (!inputFileInfo.Exists)
    {
        if (!inputDirInfo.Exists)
        {
            // neither a file nor a directory: nothing to do
            return (0);
        }
        else
        {
            isDirectory = true;
        }
    }
    #endregion checks

    // Collect the archive(s) to process: every *.archive below the directory,
    // or just the single input file.
    var archives = new List<Archive>();
    if (isDirectory)
    {
        archives.AddRange(inputDirInfo
            .GetFiles("*.archive", SearchOption.AllDirectories)
            .Select(_ => new Archive(_.FullName)));
    }
    else
    {
        archives.Add(new Archive(inputFileInfo.FullName));
    }

    var mainController = ServiceLocator.Default.ResolveType<IHashService>(); // NOTE(review): resolved but unused in the visible code
    var logger = ServiceLocator.Default.ResolveType<ILoggerService>();
    var typedict = new ConcurrentDictionary<string, IEnumerable<string>>(); // NOTE(review): unused in the visible code

    // Parallel
    foreach (var ar in archives)
    {
        if (classinfo)
        {
            // Replaced by Archive.CopyFileToStream below:
            // using var mmf = MemoryMappedFile.CreateFromFile(ar.Filepath, FileMode.Open,
            //     ar.Filepath.GetHashMD5(), 0,
            //     MemoryMappedFileAccess.Read);
            var fileinfo = ar.Files.Values;
            // Group entries by file extension; File re-filters fileinfo per group.
            var query = fileinfo.GroupBy(
                ext => Path.GetExtension(ext.FileName),
                file => file,
                (ext, finfo) => new { Key = ext, File = fileinfo.Where(_ => Path.GetExtension(_.FileName) == ext) }).ToList();
            var total = query.Count;
            logger.LogString($"Exporting {total} bundle entries ");
            Thread.Sleep(1000); // presumably gives the logger time to flush before progress starts — TODO confirm
            int progress = 0;
            logger.LogProgress(0);
            // foreach extension
            Parallel.ForEach(query, result =>
            {
                if (!string.IsNullOrEmpty(result.Key))
                {
                    // foreach file with this extension
                    Parallel.ForEach(result.File, fi =>
                    {
                        using var ms = new MemoryStream();
                        ar.CopyFileToStream(ms, fi.NameHash64, false);
                        var cr2w = ModTools.TryReadCr2WFile(ms);
                        if (cr2w == null)
                        {
                            // not a parseable CR2W file — skip
                            return;
                        }
                        foreach (var o in cr2w.Chunks.Select(chunk => (chunk as CR2WExportWrapper).GetDumpObject(ms))
                            .Where(o => o != null))
                        {
                            Register(o);
                        }
                    });
                }
                // Progress is counted per extension group, not per file.
                Interlocked.Increment(ref progress);
                logger.LogProgress(progress / (float)total);
                logger.LogString($"Dumped extension {result.Key}", Logtype.Normal);
            });
        }
        if (imports || texinfo)
        {
            // Replaced by Archive.CopyFileToStream below:
            // using var mmf = MemoryMappedFile.CreateFromFile(ar.Filepath, FileMode.Open,
            //     ar.Filepath.GetHashMD5(), 0,
            //     MemoryMappedFileAccess.Read);
            var fileDictionary = new ConcurrentDictionary<ulong, Cr2wChunkInfo>();
            var texDictionary = new ConcurrentDictionary<ulong, Cr2wTextureInfo>();
            // get info
            var count = ar.FileCount;
            logger.LogString($"Exporting {count} bundle entries ");
            Thread.Sleep(1000);
            int progress = 0;
            logger.LogProgress(0);
            Parallel.For(0, count, i =>
            {
                // NOTE(review): ToList() materializes the whole file table on every
                // iteration — O(n^2) over the archive; consider hoisting the list.
                var (hash, fileEntry) = ar.Files.ToList()[i];
                // Fall back to the numeric hash when the entry has no file name.
                var filename = string.IsNullOrEmpty(fileEntry.FileName) ? hash.ToString() : fileEntry.FileName;
                if (imports)
                {
                    using var ms = new MemoryStream();
                    ar.CopyFileToStream(ms, fileEntry.NameHash64, false);
                    // Header-only parse: enough to expose the import table.
                    var cr2w = ModTools.TryReadCr2WFileHeaders(ms);
                    if (cr2w == null)
                    {
                        return;
                    }
                    var obj = new Cr2wChunkInfo { Filename = filename, Imports = cr2w.Imports };
                    fileDictionary.AddOrUpdate(hash, obj, (arg1, o) => obj);
                }
                if (texinfo)
                {
                    if (!string.IsNullOrEmpty(fileEntry.FileName) && fileEntry.FileName.Contains(".xbm"))
                    {
                        using var ms = new MemoryStream();
                        ar.CopyFileToStream(ms, fileEntry.NameHash64, false);
                        var cr2w = ModTools.TryReadCr2WFile(ms);
                        // Expects chunk 0 = CBitmapTexture and chunk 1 = rendRenderTextureBlobPC.
                        if (cr2w?.Chunks.FirstOrDefault()?.data is not CBitmapTexture xbm || !(cr2w.Chunks[1]?.data is rendRenderTextureBlobPC blob))
                        {
                            return;
                        }
                        // create dds header
                        var texinfoObj = new Cr2wTextureInfo()
                        {
                            Filename = filename,
                            width = blob.Header.SizeInfo.Width.Value,
                            height = blob.Header.SizeInfo.Height.Value,
                            mips = blob.Header.TextureInfo.MipCount.Value,
                            slicecount = blob.Header.TextureInfo.SliceCount.Value,
                            alignment = blob.Header.TextureInfo.DataAlignment.Value,
                            compression = xbm.Setup.Compression,
                            Group = xbm.Setup.Group,
                            rawFormat = xbm.Setup.RawFormat,
                        };
                        texDictionary.AddOrUpdate(hash, texinfoObj, (arg1, o) => texinfoObj);
                    }
                }
                Interlocked.Increment(ref progress);
                logger.LogProgress(progress / (float)count);
            });
            // NOTE(review): chunk truncated here — writing the dictionaries out,
            // dump/list/missinghashes handling and the method's return are not visible.
/// <summary>
/// Dumps a single CR2W file to JSON in the output directory (defaults to the
/// input file's directory).
/// </summary>
/// <param name="path">Path of the CR2W input file. Required.</param>
/// <param name="outpath">Output directory; when empty, the input file's directory is used.</param>
/// <param name="all">Write "{name}.info.json" with headers only (string table, imports, buffers, chunk dump).</param>
/// <param name="chunks">Write "{name}.json" with the fully parsed file.</param>
/// <returns>1 on success; 0 when the input path, file, or output directory is invalid.</returns>
private static int Cr2wTaskInner(string path, string outpath, bool all, bool chunks)
{
    // initial checks
    if (string.IsNullOrEmpty(path))
    {
        logger.LogString("Please fill in an input path.", Logtype.Error);
        return 0;
    }
    var inputFileInfo = new FileInfo(path);
    if (!inputFileInfo.Exists)
    {
        logger.LogString("Input file does not exist.", Logtype.Error);
        return 0;
    }
    var outputDirInfo = string.IsNullOrEmpty(outpath)
        ? inputFileInfo.Directory
        : new DirectoryInfo(outpath);
    if (outputDirInfo == null || !outputDirInfo.Exists)
    {
        logger.LogString("Output directory is not valid.", Logtype.Error);
        return 0;
    }

    // Both dumps share identical serializer settings except for TypeNameHandling.
    static JsonSerializerSettings MakeSettings(TypeNameHandling typeNameHandling) => new JsonSerializerSettings()
    {
        ReferenceLoopHandling = ReferenceLoopHandling.Ignore,
        PreserveReferencesHandling = PreserveReferencesHandling.None,
        TypeNameHandling = typeNameHandling
    };

    var f = File.ReadAllBytes(inputFileInfo.FullName);
    using var ms = new MemoryStream(f);
    using var br = new BinaryReader(ms);
    var cr2w = new CR2WFile();

    if (all)
    {
        // Header-only parse: string table, imports, buffers and the chunk table.
        cr2w.ReadImportsAndBuffers(br);
        var obj = new Cr2wChunkInfo
        {
            Filename = inputFileInfo.FullName,
            Stringdict = cr2w.StringDictionary,
            Imports = cr2w.Imports,
            Buffers = cr2w.Buffers,
            Chunks = cr2w.Chunks
        };
        // The reader is positioned after the headers; GetDumpObject reads chunk data from it.
        foreach (var chunk in cr2w.Chunks)
        {
            obj.ChunkData.Add(chunk.GetDumpObject(br));
        }

        // write
        var infoPath = Path.Combine(outputDirInfo.FullName, $"{inputFileInfo.Name}.info.json");
        File.WriteAllText(infoPath,
            JsonConvert.SerializeObject(obj, Formatting.Indented, MakeSettings(TypeNameHandling.Auto)));
        logger.LogString($"Finished. Dump file written to {infoPath}", Logtype.Success);
    }

    if (chunks)
    {
        // Full parse from the start of the same stream (reuses the reader and CR2WFile).
        br.BaseStream.Seek(0, SeekOrigin.Begin);
        cr2w.Read(br);

        // write
        var dumpPath = Path.Combine(outputDirInfo.FullName, $"{inputFileInfo.Name}.json");
        File.WriteAllText(dumpPath,
            JsonConvert.SerializeObject(cr2w, Formatting.Indented, MakeSettings(TypeNameHandling.None)));
        logger.LogString($"Finished. Dump file written to {dumpPath}", Logtype.Success);
    }

    return 1;
}