public static int Cr2wTask(string path, bool all, bool chunks)
{
    // initial checks
    var inputFileInfo = new FileInfo(path);
    if (!inputFileInfo.Exists)
    {
        return 0;
    }

    var f = File.ReadAllBytes(inputFileInfo.FullName);
    var cr2w = new CR2WFile();
    try
    {
        using var ms = new MemoryStream(f);
        using var br = new BinaryReader(ms);

        cr2w.ReadImportsAndBuffers(br);

        var obj = new Cr2wDumpObject
        {
            Filename = inputFileInfo.FullName
        };

        if (all)
        {
            obj.Stringdict = cr2w.StringDictionary;
            obj.Imports = cr2w.Imports;
            obj.Buffers = cr2w.Buffers;
        }

        if (chunks || all)
        {
            obj.Chunks = cr2w.Chunks;
            foreach (var chunk in cr2w.Chunks)
            {
                obj.ChunkData.Add(chunk.GetDumpObject(br));
            }
        }

        // serialize the dump object to json next to the input file
        var joptions = new JsonSerializerOptions { WriteIndented = true };
        var jsonstring = JsonSerializer.Serialize(obj, joptions);
        File.WriteAllText($"{inputFileInfo.FullName}.dump.json", jsonstring);

        Console.WriteLine($"Finished. Dump file written to {inputFileInfo.FullName}.dump.json.");
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
        throw;
    }

    return 1;
}
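// NOTE: Cr2wDumpObject is defined elsewhere in the project. A minimal sketch of its
// shape, inferred only from the property accesses in the tasks here (member types are
// placeholders and may not match the real declarations):
//
//     public class Cr2wDumpObject
//     {
//         public string Filename { get; set; }
//         public object Stringdict { get; set; }   // assigned from cr2w.StringDictionary
//         public object Imports { get; set; }      // assigned from cr2w.Imports
//         public object Buffers { get; set; }      // assigned from cr2w.Buffers
//         public object Chunks { get; set; }       // assigned from cr2w.Chunks
//         public List<object> ChunkData { get; set; } = new List<object>();
//     }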
public static int DumpTask(string path /*, bool all, bool strings*/, bool imports /*, bool buffers, bool chunks*/)
{
    // initial checks
    var isDirectory = false;
    var inputFileInfo = new FileInfo(path);
    var inputDirInfo = new DirectoryInfo(path);
    if (!inputFileInfo.Exists)
    {
        if (!inputDirInfo.Exists)
        {
            return 0;
        }
        else
        {
            isDirectory = true;
        }
    }

    // collect all .archive files (recursively for a directory input)
    var archives = new List<Archive>();
    if (isDirectory)
    {
        archives.AddRange(inputDirInfo
            .GetFiles("*.archive", SearchOption.AllDirectories)
            .Select(_ => new Archive(_.FullName)));
    }
    else
    {
        archives.Add(new Archive(inputFileInfo.FullName));
    }

    if (imports /*|| strings || buffers || chunks || all*/)
    {
        foreach (var ar in archives)
        {
            using var mmf = MemoryMappedFile.CreateFromFile(ar._filepath, FileMode.Open,
                ar._filepath.GetHashMD5(), 0, MemoryMappedFileAccess.Read);

            var fileDictionary = new ConcurrentDictionary<ulong, Cr2wDumpObject>();
            foreach (var (key, value) in ar.HashDictionary)
            {
                fileDictionary[key] = new Cr2wDumpObject();
            }

            using var pb = new ProgressBar();
            using var p1 = pb.Progress.Fork();
            int progress = 0;
            var count = ar.HashDictionary.Count;

            Parallel.For(0, count, new ParallelOptions { MaxDegreeOfParallelism = 8 }, i =>
            {
                var key = ar.GetHashFromIndex(i);
                var f = ar.GetFileData(i, mmf);

                // check if cr2w file
                if (f.Length < 4)
                {
                    return;
                }
                var id = f.Take(4);
                if (!id.SequenceEqual(MAGIC))
                {
                    return;
                }

                try
                {
                    var cr2w = new CR2WFile();
                    using var ms = new MemoryStream(f);
                    using var br = new BinaryReader(ms);

                    cr2w.ReadImportsAndBuffers(br);

                    var obj = new Cr2wDumpObject
                    {
                        Filename = key.ToString()
                    };
                    //if (strings || all)
                    //    obj.Stringdict = cr2w.StringDictionary;
                    if (imports /*|| all*/)
                    {
                        obj.Imports = cr2w.Imports;
                    }
                    //if (buffers || all)
                    //    obj.Buffers = cr2w.Buffers;
                    //if (chunks || all)
                    //    obj.Chunks = cr2w.Chunks;

                    fileDictionary.AddOrUpdate(key, obj, (arg1, o) => obj);
                }
                catch (Exception)
                {
                    Console.WriteLine($"Could not read file {key}.");
                    //throw;
                }
                finally
                {
                    // Interlocked avoids lost updates when multiple threads report progress
                    var done = Interlocked.Increment(ref progress);
                    var perc = done / (double)count;
                    p1.Report(perc, $"Loading bundle entries: {done}/{count}");
                }
            });

            var arobj = new ArchiveDumpObject()
            {
                Filename = ar._filepath,
                FileDictionary = fileDictionary
            };

            // write distinct import paths and their FNV1A64 hashes
            using var writer = File.CreateText($"{ar._filepath}.imports.txt");
            using var hwriter = File.CreateText($"{ar._filepath}.hashes.csv");
            hwriter.WriteLine("String,Hash");

            var allimports = new List<string>();
            foreach (var (key, value) in arobj.FileDictionary)
            {
                if (value.Imports == null)
                {
                    continue;
                }
                foreach (var import in value.Imports)
                {
                    allimports.Add(import.DepotPathStr);
                }
            }
            foreach (var str in allimports.Distinct())
            {
                writer.WriteLine(str);
                var hash = FNV1A64HashAlgorithm.HashString(str);
                hwriter.WriteLine($"{str},{hash}");
            }

            Console.WriteLine($"Finished. Dump file written to {ar._filepath}.");
        }
    }

    return 1;
}
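// NOTE (assumption): MAGIC, used above and below to identify cr2w files, is declared
// elsewhere in this class. It is expected to hold the four-byte cr2w file identifier;
// a plausible declaration would be:
//
//     private static readonly byte[] MAGIC = { 0x43, 0x52, 0x32, 0x57 }; // ASCII "CR2W"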
public static int DumpTask(string path, bool imports, bool missinghashes)
{
    #region checks

    var isDirectory = false;
    var inputFileInfo = new FileInfo(path);
    var inputDirInfo = new DirectoryInfo(path);
    if (!inputFileInfo.Exists)
    {
        if (!inputDirInfo.Exists)
        {
            return 0;
        }
        else
        {
            isDirectory = true;
        }
    }

    var archives = new List<Archive>();
    if (isDirectory)
    {
        archives.AddRange(inputDirInfo
            .GetFiles("*.archive", SearchOption.AllDirectories)
            .Select(_ => new Archive(_.FullName)));
    }
    else
    {
        archives.Add(new Archive(inputFileInfo.FullName));
    }

    #endregion

    var missinghashtxt = isDirectory
        ? Path.Combine(inputDirInfo.FullName, "missinghashes.txt")
        : $"{inputFileInfo.FullName}.missinghashes.txt";
    using var mwriter = File.CreateText(missinghashtxt);

    foreach (var ar in archives)
    {
        using var pb = new ProgressBar();

        if (imports)
        {
            using var mmf = MemoryMappedFile.CreateFromFile(ar.Filepath, FileMode.Open,
                ar.Filepath.GetHashMD5(), 0, MemoryMappedFileAccess.Read);
            using var p1 = pb.Progress.Fork();
            int progress = 0;

            var fileDictionary = new ConcurrentDictionary<ulong, Cr2wDumpObject>();
            foreach (var (hash, value) in ar.Files)
            {
                fileDictionary[hash] = new Cr2wDumpObject();
            }

            // get info
            var count = ar.FileCount;
            // materialize the entry list once instead of calling ToList() on every iteration
            var entries = ar.Files.ToList();
            Parallel.For(0, count, new ParallelOptions { MaxDegreeOfParallelism = 8 }, i =>
            {
                var hash = entries[i].Key;
                var (f, buffers) = ar.GetFileData(hash, mmf);

                // check if cr2w file
                if (f.Length < 4)
                {
                    return;
                }
                var id = f.Take(4);
                if (!id.SequenceEqual(MAGIC))
                {
                    return;
                }

                var cr2w = new CR2WFile();
                using var ms = new MemoryStream(f);
                using var br = new BinaryReader(ms);

                cr2w.ReadImportsAndBuffers(br);

                var obj = new Cr2wDumpObject
                {
                    Filename = hash.ToString(),
                    Imports = cr2w.Imports
                };
                fileDictionary.AddOrUpdate(hash, obj, (arg1, o) => obj);

                // Interlocked avoids lost updates when multiple threads report progress
                var done = Interlocked.Increment(ref progress);
                var perc = done / (double)count;
                p1.Report(perc, $"Loading bundle entries: {done}/{count}");
            });

            // write
            var arobj = new ArchiveDumpObject()
            {
                Filename = ar.Filepath,
                FileDictionary = fileDictionary
            };

            using var writer = File.CreateText($"{ar.Filepath}.imports.txt");
            using var hwriter = File.CreateText($"{ar.Filepath}.hashes.csv");
            hwriter.WriteLine("String,Hash");

            var allimports = new List<string>();
            foreach (var (key, value) in arobj.FileDictionary)
            {
                if (value.Imports == null)
                {
                    continue;
                }
                allimports.AddRange(value.Imports.Select(import => import.DepotPathStr));
            }
            foreach (var str in allimports.Distinct())
            {
                writer.WriteLine(str);
                var hash = FNV1A64HashAlgorithm.HashString(str);
                hwriter.WriteLine($"{str},{hash}");
            }

            Console.WriteLine($"Finished. Dump file written to {ar.Filepath}.");
        }

        if (missinghashes)
        {
            // entries whose stored name is just the numeric hash have no known string name
            foreach (var (hash, fileInfoEntry) in ar.Files)
            {
                if (fileInfoEntry.NameStr == hash.ToString())
                {
                    mwriter.WriteLine(hash);
                }
            }
        }
    }

    return 1;
}
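// Example usage (assumption: the tasks above are invoked from a console entry point or
// CLI command handler; the paths below are hypothetical and the actual wiring in the
// project may differ):
//
//     // dump a single cr2w file, including strings, imports, buffers and chunks
//     Cr2wTask(@"C:\mods\example.mesh", all: true, chunks: false);
//
//     // dump import strings and missing hashes for every .archive under a folder
//     DumpTask(@"C:\game\archive\pc\content", imports: true, missinghashes: true);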