/// <summary> /// Uncooks a single file by hash. This will both extract and uncook the redengine file /// </summary> /// <param name="ar"></param> /// <param name="hash"></param> /// <param name="outDir"></param> /// <param name="uncookext"></param> /// <param name="flip"></param> /// <returns></returns> public static bool UncookSingle(this Archive.Archive ar, ulong hash, DirectoryInfo outDir, EUncookExtension uncookext = EUncookExtension.dds, bool flip = false) { // checks if (!ar.Files.ContainsKey(hash)) { return(false); } // extract the main file with uncompressed buffers #region unbundle main file using var ms = new MemoryStream(); ar.CopyFileToStream(ms, hash, false); var name = ar.Files[hash].FileName; var outfile = new FileInfo(Path.Combine(outDir.FullName, $"{name}")); if (outfile.Directory == null) { return(false); } Directory.CreateDirectory(outfile.Directory.FullName); using var fs = new FileStream(outfile.FullName, FileMode.Create, FileAccess.Write); ms.Seek(0, SeekOrigin.Begin); ms.CopyTo(fs); #endregion var ext = Path.GetExtension(name)[1..];
/// <summary>
/// Extracts a single file + buffers.
/// </summary>
/// <param name="ar">The archive to read from.</param>
/// <param name="hash">Hash of the file's path inside the archive (key into <c>ar.Files</c>).</param>
/// <param name="outDir">Destination root; the file's relative path is recreated beneath it.</param>
/// <returns>1 on success, -1 if the hash is not in the archive or the output path is invalid.</returns>
public static int ExtractSingle(this Archive.Archive ar, ulong hash, DirectoryInfo outDir)
{
    // Single dictionary lookup instead of ContainsKey + indexer (double lookup).
    if (!ar.Files.TryGetValue(hash, out var entry))
    {
        return -1;
    }

    using var ms = new MemoryStream();
    // NOTE(review): third argument presumably selects compressed/decompressed copy — confirm against CopyFileToStream.
    ar.CopyFileToStream(ms, hash, false);

    // Extension-less entries get a .bin suffix so the extracted file is usable on disk.
    var name = entry.FileName;
    if (string.IsNullOrEmpty(Path.GetExtension(name)))
    {
        name += ".bin";
    }

    var outfile = new FileInfo(Path.Combine(outDir.FullName, name));
    if (outfile.Directory == null)
    {
        return -1;
    }

    Directory.CreateDirectory(outfile.Directory.FullName);
    using var fs = new FileStream(outfile.FullName, FileMode.Create, FileAccess.Write);
    ms.Seek(0, SeekOrigin.Begin);
    ms.CopyTo(fs);
    return 1;
}
private static int UncookSingleInner(this Archive.Archive ar, ulong hash, DirectoryInfo outDir, EUncookExtension uncookext = EUncookExtension.tga, bool flip = false) { // checks if (!ar.Files.ContainsKey(hash)) { return(-1); } var name = ar.Files[hash].FileName; var outfile = new FileInfo(Path.Combine(outDir.FullName, $"{name}")); if (outfile.Directory == null) { return(-1); } var uncooksuccess = false; var(file, buffers) = ar.GetFileData(hash, true); var cr2w = new CR2WFile(); using var ms = new MemoryStream(file); using var br = new BinaryReader(ms); cr2w.ReadImportsAndBuffers(br); var ext = Path.GetExtension(name)[1..];
/// <summary>
/// Extracts all Files to the specified directory.
/// </summary>
/// <param name="ar">The archive to extract from.</param>
/// <param name="outDir">Destination root directory.</param>
/// <param name="pattern">Optional wildcard pattern on file names; empty matches everything.</param>
/// <param name="regex">Optional regex on file names; empty matches everything.</param>
/// <returns>Tuple of (names successfully extracted, total number of matched entries).</returns>
public static (List<string>, int) ExtractAll(this Archive.Archive ar, DirectoryInfo outDir, string pattern = "", string regex = "")
{
    var logger = ServiceLocator.Default.ResolveType<ILoggerService>();
    var extractedList = new ConcurrentBag<string>();
    var failedList = new ConcurrentBag<string>();

    // Narrow the candidate set: wildcard pattern first, then regex on top of it.
    IEnumerable<ArchiveItem> finalmatches = ar.Files.Values;
    if (!string.IsNullOrEmpty(pattern))
    {
        finalmatches = ar.Files.Values.MatchesWildcard(item => item.FileName, pattern);
    }
    if (!string.IsNullOrEmpty(regex))
    {
        // $@"{regex}" was identical to regex — dropped the redundant interpolation.
        var searchTerm = new System.Text.RegularExpressions.Regex(regex);
        finalmatches = finalmatches.Where(file => searchTerm.Matches(file.FileName).Count > 0);
    }

    var finalMatchesList = finalmatches.ToList();
    logger.LogString($"Found {finalMatchesList.Count} bundle entries to extract.", Logtype.Important);
    Thread.Sleep(1000);

    int progress = 0;
    logger.LogProgress(0);

    Parallel.ForEach(finalMatchesList, info =>
    {
        // BUG FIX: ExtractSingleInner returns -1 for a missing hash and 0 when nothing
        // was written; the old `extracted != 0` counted -1 as a success. Only a
        // positive result is a successful extraction.
        var extracted = ar.ExtractSingleInner(info.NameHash64, outDir);
        if (extracted > 0)
        {
            extractedList.Add(info.FileName);
        }
        else
        {
            failedList.Add(info.FileName);
        }
        Interlocked.Increment(ref progress);
        logger.LogProgress(progress / (float)finalMatchesList.Count);
    });

    return (extractedList.ToList(), finalMatchesList.Count);
}
/// <summary> /// Creates and archive from a folder and packs all files inside into it /// </summary> /// <param name="infolder"></param> /// <param name="outpath"></param> /// <returns></returns> public static Archive.Archive Pack(DirectoryInfo infolder, DirectoryInfo outpath) { if (!infolder.Exists) { return(null); } if (!outpath.Exists) { return(null); } var outfile = Path.Combine(outpath.FullName, $"basegame_{infolder.Name}.archive"); var ar = new Archive.Archive { Filepath = outfile, Table = new ArTable() }; using var fs = new FileStream(outfile, FileMode.Create); using var bw = new BinaryWriter(fs); #region write header ar.Header.Write(bw); bw.Write(new byte[132]); // some weird padding #endregion #region write files var exludedExtensions = new[] { ".buffer", ".dds", ".DS_Store", //Hooray for OSX }; var allfiles = infolder.GetFiles("*", SearchOption.AllDirectories); var parentfiles = allfiles .Where(_ => exludedExtensions.All(x => _.Extension != x)); var fileInfos = parentfiles .OrderBy(_ => FNV1A64HashAlgorithm.HashString(GetRelpath(_))) .ToList(); string GetRelpath(FileInfo infi) => infi.FullName[(infolder.FullName.Length + 1)..];
/// <summary>
/// Extracts a single file plus its appended buffers to the output directory.
/// </summary>
/// <param name="ar">The archive to read from.</param>
/// <param name="hash">Hash of the file's path inside the archive (key into <c>ar.Files</c>).</param>
/// <param name="outDir">Destination root; the file's relative path is recreated beneath it.</param>
/// <returns>1 on success, -1 if the hash is not in the archive or the output path is invalid.</returns>
private static int ExtractSingleInner(this Archive.Archive ar, ulong hash, DirectoryInfo outDir)
{
    // BUG FIX: validate the hash BEFORE fetching data. The original called
    // GetFileData first, performing the read even for hashes not present in
    // the archive, and only then returned -1. TryGetValue also avoids the
    // ContainsKey + indexer double lookup.
    if (!ar.Files.TryGetValue(hash, out var entry))
    {
        return -1;
    }

    var (file, buffers) = ar.GetFileData(hash, false);

    // Extension-less entries get a .bin suffix so the extracted file is usable on disk.
    var name = entry.FileName;
    if (string.IsNullOrEmpty(Path.GetExtension(name)))
    {
        name += ".bin";
    }

    var outfile = new FileInfo(Path.Combine(outDir.FullName, name));
    if (outfile.Directory == null)
    {
        return -1;
    }

    // write main file
    Directory.CreateDirectory(outfile.Directory.FullName);
    using var fs = new FileStream(outfile.FullName, FileMode.Create, FileAccess.Write);
    using var bw = new BinaryWriter(fs);
    bw.Write(file);

    // write buffers
    // buffers are usually(?) appended to the main file
    // TODO: dump all buffered files and check this
    foreach (var buffer in buffers)
    {
        bw.Write(buffer);
    }

    // Main file was written unconditionally above, so this path is always a success
    // (the original's extractsuccess flag could never reach `return 0` either).
    return 1;
}
/// <summary>
/// Creates an archive from a folder and packs all files inside into it.
/// </summary>
/// <param name="infolder">Source folder; every file below it (minus excluded extensions) is packed.</param>
/// <param name="outpath">Existing output folder; the archive is written as basegame_{folder}.archive.</param>
/// <returns>The Archive describing what was written, or null when either folder does not exist.</returns>
public static Archive.Archive Pack(DirectoryInfo infolder, DirectoryInfo outpath)
{
    // guard clauses: both folders must already exist
    if (!infolder.Exists)
    {
        return(null);
    }
    if (!outpath.Exists)
    {
        return(null);
    }

    var outfile = Path.Combine(outpath.FullName, $"basegame_{infolder.Name}.archive");
    var ar = new Archive.Archive
    {
        ArchiveAbsolutePath = outfile,
        Index = new Index()
    };
    using var fs = new FileStream(outfile, FileMode.Create);
    using var bw = new BinaryWriter(fs);

    #region write header
    // header is written twice: once now as a placeholder, again at the end
    // once the index offset/size/filesize are known (see "write footer")
    ar.Header.Write(bw);
    bw.Write(new byte[132]); // some weird padding
    #endregion

    #region write files
    // loose buffers/dds are repacked from their parent files, not stored standalone
    var exludedExtensions = new[]
    {
        ".buffer",
        ".dds",
        ".DS_Store", //Hooray for OSX
    };
    var allfiles = infolder.GetFiles("*", SearchOption.AllDirectories);
    var parentfiles = allfiles
        .Where(_ => exludedExtensions.All(x => _.Extension.ToLower() != x));
    // entries are ordered by the FNV1A64 hash of their relative path
    var fileInfos = parentfiles
        .OrderBy(_ => FNV1A64HashAlgorithm.HashString(_.FullName.RelativePath(infolder)))
        .ToList();

    Logger.LogString($"Found {fileInfos.Count} bundle entries to pack.", Logtype.Important);
    Thread.Sleep(1000);
    int progress = 0;
    Logger.LogProgress(0);

    foreach (var fileInfo in fileInfos)
    {
        var relpath = fileInfo.FullName.RelativePath(infolder);
        var hash = FNV1A64HashAlgorithm.HashString(relpath);
        // .bin files carry their numeric hash as the file name; reuse it instead of rehashing the path
        if (fileInfo.Extension.ToLower() == ".bin")
        {
            hash = ulong.Parse(Path.GetFileNameWithoutExtension(relpath));
        }

        using var fileStream = new FileStream(fileInfo.FullName, FileMode.Open);
        using var fileBinaryReader = new BinaryReader(fileStream);

        // fileinfo data: record index positions BEFORE this entry adds
        // dependencies/segments, so the entry can reference its own ranges
        uint firstimportidx = (uint)ar.Index.Dependencies.Count;
        uint lastimportidx = (uint)ar.Index.Dependencies.Count;
        uint firstoffsetidx = (uint)ar.Index.FileSegments.Count;
        uint lastoffsetidx = (uint)ar.Index.FileSegments.Count;
        int flags = 0;

        // null when the file is not a CR2W (redengine) file
        var cr2w = ModTools.TryReadCr2WFileHeaders(fileBinaryReader);
        if (cr2w != null)
        {
            //register imports (deduplicated by depot path)
            foreach (var cr2WImportWrapper in cr2w.Imports)
            {
                if (!ar.Index.Dependencies.Select(_ => _.HashStr).Contains(cr2WImportWrapper.DepotPathStr))
                {
                    ar.Index.Dependencies.Add(
                        new Dependency(FNV1A64HashAlgorithm.HashString(cr2WImportWrapper.DepotPathStr)));
                }
            }
            lastimportidx = (uint)ar.Index.Dependencies.Count;

            // kraken the file and write (only the cr2w part up to objectsEnd;
            // buffers follow as separate segments below)
            var cr2wfilesize = (int)cr2w.Header.objectsEnd;
            fileBinaryReader.BaseStream.Seek(0, SeekOrigin.Begin);
            var cr2winbuffer = fileBinaryReader.ReadBytes(cr2wfilesize);
            var offset = bw.BaseStream.Position;
            var (zsize, crc) = bw.CompressAndWrite(cr2winbuffer);
            ar.Index.FileSegments.Add(new FileSegment(
                (ulong)offset,
                zsize,
                (uint)cr2winbuffer.Length));

            // HINT: each cr2w needs to have the buffer already kraken'd
            // foreach buffer write (copied verbatim, NOT recompressed)
            var bufferOffsets = cr2w.Buffers.Select(_ => _.Buffer);
            foreach (var buffer in bufferOffsets)
            {
                var bsize = buffer.memSize;
                var bzsize = buffer.diskSize; //compressed size of the buffer inside the cr2wfile
                fileBinaryReader.BaseStream.Seek(buffer.offset, SeekOrigin.Begin);
                var b = fileBinaryReader.ReadBytes((int)bzsize); //read bzsize bytes from the cr2w
                var boffset = bw.BaseStream.Position;
                bw.Write(b);
                ar.Index.FileSegments.Add(new FileSegment(
                    (ulong)boffset,
                    bzsize,
                    bsize));
            }
            lastoffsetidx = (uint)ar.Index.FileSegments.Count;

            flags = cr2w.Buffers.Count > 0 ? cr2w.Buffers.Count - 1 : 0;
        }
        else
        {
            // kraken the file and write (non-CR2W: whole file as one segment)
            fileStream.Seek(0, SeekOrigin.Begin);
            var cr2winbuffer = Catel.IO.StreamExtensions.ToByteArray(fileStream);
            var (zsize, crc) = bw.CompressAndWrite(cr2winbuffer);
            // NOTE(review): Position is read AFTER CompressAndWrite here, unlike the
            // cr2w branch which captures the offset before writing — this looks like
            // the segment offset points past the written data; verify.
            ar.Index.FileSegments.Add(new FileSegment((ulong)bw.BaseStream.Position,
                zsize, (uint)cr2winbuffer.Length));
        }

        // save table data
        var sha1 = new System.Security.Cryptography.SHA1Managed();
        var sha1hash = sha1.ComputeHash(Catel.IO.StreamExtensions.ToByteArray(fileBinaryReader.BaseStream)); //TODO: this is only correct for files with no buffer
        var item = new FileEntry(hash, DateTime.Now, (uint)flags
            , firstoffsetidx, lastoffsetidx, firstimportidx, lastimportidx
            , sha1hash);
        ar.Index.FileEntries.Add(hash, item);

        Interlocked.Increment(ref progress);
        Logger.LogProgress(progress / (float)fileInfos.Count);
    }
    ; // stray empty statement (leftover), kept as-is
    #endregion

    #region write footer
    // padding to page (4096 bytes)
    bw.PadUntilPage();

    // write tables
    var tableoffset = bw.BaseStream.Position;
    ar.Index.Write(bw);
    var tablesize = bw.BaseStream.Position - tableoffset;

    // padding to page (4096 bytes)
    bw.PadUntilPage();
    var filesize = bw.BaseStream.Position;

    // write the header again, now with real index offset/size and total file size
    ar.Header.IndexPosition = (ulong)tableoffset;
    ar.Header.IndexSize = (uint)tablesize;
    ar.Header.Filesize = (ulong)filesize;
    bw.BaseStream.Seek(0, SeekOrigin.Begin);
    ar.Header.Write(bw);
    #endregion

    return(ar);

    #region Local Functions
    #endregion
}
/// <summary>
/// Extracts a single file + buffers.
/// </summary>
/// <param name="ar">The archive to extract from.</param>
/// <param name="hash">Hash of the file's path inside the archive.</param>
/// <param name="outDir">Destination root directory.</param>
/// <returns>The result code of the inner extraction (1 success, 0/-1 failure).</returns>
public static int ExtractSingle(this Archive.Archive ar, ulong hash, DirectoryInfo outDir) =>
    ar.ExtractSingleInner(hash, outDir);
/// <summary>
/// Uncooks a single file by hash.
/// </summary>
/// <param name="ar">The archive containing the file.</param>
/// <param name="hash">Hash of the file's path inside the archive.</param>
/// <param name="outDir">Destination root directory.</param>
/// <param name="uncookext">Texture export format (defaults to tga).</param>
/// <param name="flip">Whether to vertically flip exported textures.</param>
/// <returns>The result code of the inner uncook (negative on failure).</returns>
public static int UncookSingle(this Archive.Archive ar, ulong hash, DirectoryInfo outDir,
    EUncookExtension uncookext = EUncookExtension.tga, bool flip = false) =>
    ar.UncookSingleInner(hash, outDir, uncookext, flip);