/// <summary>
/// Dumps CR2W files found at <paramref name="path"/> (a single file or a directory
/// searched recursively) to indented JSON, one .json file per input file.
/// </summary>
/// <param name="path">Input file or directory. Required; logs an error and returns if empty/missing.</param>
/// <param name="outpath">Output directory. If empty, each JSON is written next to its source file.
/// Must already exist — it is validated, not created.</param>
/// <param name="chunks">When true, parses each file as CR2W and serializes it; when false, files are
/// only counted toward progress (no output is produced).</param>
/// <param name="pattern">Optional wildcard filter applied to full file names.</param>
/// <param name="regex">Optional regex filter applied to full file names (after the wildcard filter).</param>
private static void Cr2wTaskInner(string path, string outpath, bool chunks, string pattern = "", string regex = "")
{
    #region checks

    if (string.IsNullOrEmpty(path))
    {
        logger.LogString("Please fill in an input path.", Logtype.Error);
        return;
    }

    var inFileInfo = new FileInfo(path);
    var inDirInfo = new DirectoryInfo(path);
    var isDirectory = !inFileInfo.Exists && inDirInfo.Exists;
    var isFile = inFileInfo.Exists && !inDirInfo.Exists;
    if (!isDirectory && !isFile)
    {
        logger.LogString("Input file does not exist.", Logtype.Error);
        return;
    }

    #endregion

    Stopwatch watch = new();
    watch.Restart();

    // get all files
    var fileInfos = isDirectory
        ? inDirInfo.GetFiles("*", SearchOption.AllDirectories).ToList()
        : new List<FileInfo> { inFileInfo };

    // check search pattern then regex
    IEnumerable<FileInfo> finalmatches = fileInfos;
    if (!string.IsNullOrEmpty(pattern))
    {
        finalmatches = fileInfos.MatchesWildcard(item => item.FullName, pattern);
    }

    if (!string.IsNullOrEmpty(regex))
    {
        var searchTerm = new System.Text.RegularExpressions.Regex($@"{regex}");
        var queryMatchingFiles =
            from file in finalmatches
            let matches = searchTerm.Matches(file.FullName)
            where matches.Count > 0
            select file;
        finalmatches = queryMatchingFiles;
    }

    var finalMatchesList = finalmatches.ToList();
    logger.LogString($"Found {finalMatchesList.Count} files to dump.", Logtype.Important);
    // NOTE(review): presumably gives the UI/console a moment to display the message — confirm before removing.
    Thread.Sleep(1000);

    int progress = 0;
    logger.LogProgress(0);

    Parallel.ForEach(finalMatchesList, fileInfo =>
    {
        var outputDirInfo = string.IsNullOrEmpty(outpath)
            ? fileInfo.Directory
            : new DirectoryInfo(outpath);
        if (outputDirInfo == null || !outputDirInfo.Exists)
        {
            logger.LogString("Output directory is not valid.", Logtype.Error);
            return; // skips this file only; Parallel.ForEach continues with the rest
        }

        if (chunks)
        {
            // FIX: removed `var f = File.ReadAllBytes(fileInfo.FullName);` — the result was never
            // used, so every file was read fully into memory and then opened a second time below.
            using var fs = new FileStream(fileInfo.FullName, FileMode.Open, FileAccess.Read);
            var cr2w = ModTools.TryReadCr2WFile(fs);
            if (cr2w == null)
            {
                return; // not a parseable CR2W file — skip silently, matching prior behavior
            }

            // write
            File.WriteAllText(
                Path.Combine(outputDirInfo.FullName, $"{fileInfo.Name}.json"),
                JsonConvert.SerializeObject(cr2w, Formatting.Indented, new JsonSerializerSettings()
                {
                    ReferenceLoopHandling = ReferenceLoopHandling.Ignore,
                    PreserveReferencesHandling = PreserveReferencesHandling.None,
                    TypeNameHandling = TypeNameHandling.None
                }));
        }

        // progress counts every matched file, including ones skipped above before this point is reached
        Interlocked.Increment(ref progress);
        logger.LogProgress(progress / (float)finalMatchesList.Count);
    });

    watch.Stop();
    logger.LogString(
        $"Finished. Dumped {finalMatchesList.Count} files to json in {watch.ElapsedMilliseconds}ms.",
        Logtype.Success);
}
public static int DumpTaskInner(string path, bool imports, bool missinghashes, bool texinfo, bool classinfo, bool dump, bool list) { #region checks if (string.IsNullOrEmpty(path)) { ConsoleFunctions.logger.LogString("Please fill in an input path.", Logtype.Error); return(0); } var isDirectory = false; var inputFileInfo = new FileInfo(path); var inputDirInfo = new DirectoryInfo(path); if (!inputFileInfo.Exists) { if (!inputDirInfo.Exists) { return(0); } else { isDirectory = true; } } #endregion checks var archives = new List <Archive>(); if (isDirectory) { archives.AddRange(inputDirInfo .GetFiles("*.archive", SearchOption.AllDirectories) .Select(_ => new Archive(_.FullName))); } else { archives.Add(new Archive(inputFileInfo.FullName)); } var mainController = ServiceLocator.Default.ResolveType <IHashService>(); var logger = ServiceLocator.Default.ResolveType <ILoggerService>(); var typedict = new ConcurrentDictionary <string, IEnumerable <string> >(); // Parallel foreach (var ar in archives) { if (classinfo) { // using var mmf = MemoryMappedFile.CreateFromFile(ar.Filepath, FileMode.Open, // ar.Filepath.GetHashMD5(), 0, // MemoryMappedFileAccess.Read); var fileinfo = ar.Files.Values; var query = fileinfo.GroupBy( ext => Path.GetExtension(ext.FileName), file => file, (ext, finfo) => new { Key = ext, File = fileinfo.Where(_ => Path.GetExtension(_.FileName) == ext) }).ToList(); var total = query.Count; logger.LogString($"Exporting {total} bundle entries "); Thread.Sleep(1000); int progress = 0; logger.LogProgress(0); // foreach extension Parallel.ForEach(query, result => { if (!string.IsNullOrEmpty(result.Key)) { Parallel.ForEach(result.File, fi => { using var ms = new MemoryStream(); ar.CopyFileToStream(ms, fi.NameHash64, false); var cr2w = ModTools.TryReadCr2WFile(ms); if (cr2w == null) { return; } foreach (var o in cr2w.Chunks.Select(chunk => (chunk as CR2WExportWrapper).GetDumpObject(ms)) .Where(o => o != null)) { Register(o); } }); } Interlocked.Increment(ref 
progress); logger.LogProgress(progress / (float)total); logger.LogString($"Dumped extension {result.Key}", Logtype.Normal); }); } if (imports || texinfo) { // using var mmf = MemoryMappedFile.CreateFromFile(ar.Filepath, FileMode.Open, // ar.Filepath.GetHashMD5(), 0, // MemoryMappedFileAccess.Read); var fileDictionary = new ConcurrentDictionary <ulong, Cr2wChunkInfo>(); var texDictionary = new ConcurrentDictionary <ulong, Cr2wTextureInfo>(); // get info var count = ar.FileCount; logger.LogString($"Exporting {count} bundle entries "); Thread.Sleep(1000); int progress = 0; logger.LogProgress(0); Parallel.For(0, count, i => { var(hash, fileEntry) = ar.Files.ToList()[i]; var filename = string.IsNullOrEmpty(fileEntry.FileName) ? hash.ToString() : fileEntry.FileName; if (imports) { using var ms = new MemoryStream(); ar.CopyFileToStream(ms, fileEntry.NameHash64, false); var cr2w = ModTools.TryReadCr2WFileHeaders(ms); if (cr2w == null) { return; } var obj = new Cr2wChunkInfo { Filename = filename, Imports = cr2w.Imports }; fileDictionary.AddOrUpdate(hash, obj, (arg1, o) => obj); } if (texinfo) { if (!string.IsNullOrEmpty(fileEntry.FileName) && fileEntry.FileName.Contains(".xbm")) { using var ms = new MemoryStream(); ar.CopyFileToStream(ms, fileEntry.NameHash64, false); var cr2w = ModTools.TryReadCr2WFile(ms); if (cr2w?.Chunks.FirstOrDefault()?.data is not CBitmapTexture xbm || !(cr2w.Chunks[1]?.data is rendRenderTextureBlobPC blob)) { return; } // create dds header var texinfoObj = new Cr2wTextureInfo() { Filename = filename, width = blob.Header.SizeInfo.Width.Value, height = blob.Header.SizeInfo.Height.Value, mips = blob.Header.TextureInfo.MipCount.Value, slicecount = blob.Header.TextureInfo.SliceCount.Value, alignment = blob.Header.TextureInfo.DataAlignment.Value, compression = xbm.Setup.Compression, Group = xbm.Setup.Group, rawFormat = xbm.Setup.RawFormat, }; texDictionary.AddOrUpdate(hash, texinfoObj, (arg1, o) => texinfoObj); } } Interlocked.Increment(ref progress); 
logger.LogProgress(progress / (float)count); });