/// <summary>
/// Serializes the unique directory names and all file names, compresses the
/// result with LZ10, and appends it to <paramref name="output"/> padded to a
/// 4-byte boundary.
/// </summary>
/// <param name="output">Stream that receives the compressed string table.</param>
/// <param name="afis">Files whose paths are serialized.</param>
private void WriteStrings(Stream output, IList<IArchiveFileInfo> afis)
{
    // Collect unique directory names (with trailing slash) and every file
    // name, both in archive order.
    var directoryNames = new HashSet<string>();
    var fileNames = new List<string>(afis.Count);

    foreach (var afi in OrderFiles(afis))
    {
        directoryNames.Add(afi.FilePath.GetDirectory().ToRelative().FullName + "/");
        fileNames.Add(afi.FilePath.GetName());
    }

    // Serialize directory names first, then file names, as ASCII.
    var nameStream = new MemoryStream();
    using var writer = new BinaryWriterX(nameStream, true);

    foreach (var name in directoryNames)
        writer.WriteString(name, Encoding.ASCII, false);

    foreach (var name in fileNames)
        writer.WriteString(name, Encoding.ASCII, false);

    // Compress the string table and append it to the output.
    var compressedNames = new MemoryStream();
    XfsaSupport.Compress(nameStream, compressedNames, Level5CompressionMethod.Lz10);
    compressedNames.CopyTo(output);

    // Pad to a 4-byte boundary.
    while (output.Position % 4 > 0)
        output.WriteByte(0);
}
/// <summary>
/// Writes the compressed file entry table to <paramref name="output"/>, padded
/// to a 4-byte boundary. Entries are grouped by directory; data offsets are
/// assigned in original file order, then each group is sorted by name hash.
/// </summary>
/// <param name="output">Stream that receives the compressed table.</param>
/// <param name="files">Files to describe in the table.</param>
private void WriteFileTable(Stream output, IList<IArchiveFileInfo> files)
{
    var crc16 = Crc16.X25;
    var fileEntries = new List<GfsaFileEntry>();

    var fileOffset = 0;
    foreach (var fileGroup in OrderFiles(files).Cast<GfsaArchiveFileInfo>().GroupBy(x => x.FilePath.GetDirectory().ToRelative() + "/"))
    {
        // FIX: the original built this group with a deferred LINQ Select whose
        // lambda mutated fileOffset; materializing eagerly removes the
        // multiple-enumeration hazard of a side-effecting deferred query
        // (re-enumerating it would have advanced the offsets twice).
        var localGroup = new List<GfsaFileEntry>();
        foreach (var file in fileGroup)
        {
            localGroup.Add(new GfsaFileEntry
            {
                hash = BinaryPrimitives.ReadUInt16BigEndian(crc16.Compute(Encoding.ASCII.GetBytes(file.FilePath.GetName()))),
                Offset = fileOffset,
                Size = (int)file.CompressedSize
            });

            // Advance by the compressed size rounded up to a 4-byte boundary.
            fileOffset += (int)((file.CompressedSize + 3) & ~3);
        }

        // Entries within a directory are stored sorted by name hash.
        fileEntries.AddRange(localGroup.OrderBy(x => x.hash));
    }

    XfsaSupport.WriteCompressedTableEntries(output, fileEntries);

    // Pad to a 4-byte boundary.
    while (output.Position % 4 > 0)
        output.WriteByte(0);
}
/// <summary>
/// Writes the compressed directory entry table to <paramref name="output"/>,
/// padded to a 4-byte boundary. Each entry records the directory name hash and
/// the index/count of its files; entries are stored sorted by hash.
/// </summary>
/// <param name="output">Stream that receives the compressed table.</param>
/// <param name="files">Files whose directories are tabulated.</param>
private void WriteDirectoryTable(Stream output, IList<IArchiveFileInfo> files)
{
    var crc16 = Crc16.X25;
    var directoryEntries = new List<GfsaDirectoryEntry>();

    var fileIndex = 0;
    foreach (var group in OrderFiles(files).GroupBy(x => x.FilePath.GetDirectory().ToRelative() + "/"))
    {
        var entry = new GfsaDirectoryEntry
        {
            hash = BinaryPrimitives.ReadUInt16BigEndian(crc16.Compute(Encoding.ASCII.GetBytes(group.Key))),
            fileIndex = fileIndex,
            fileCount = (short)group.Count()
        };
        directoryEntries.Add(entry);

        // Files of consecutive directories occupy consecutive index ranges.
        fileIndex += entry.fileCount;
    }

    XfsaSupport.WriteCompressedTableEntries(output, directoryEntries.OrderBy(x => x.hash));

    // Pad to a 4-byte boundary.
    while (output.Position % 4 > 0)
        output.WriteByte(0);
}
/// <summary>
/// Writes the complete XFSA2 archive: directory entries, directory hashes,
/// file entries, the compressed name table, the file data, and finally the
/// header at offset 0.
/// </summary>
/// <param name="output">Destination stream for the archive.</param>
/// <param name="files">Files to store; must be XfsaArchiveFileInfo&lt;Xfsa2FileEntry&gt;.</param>
/// <param name="progress">Progress context (not consumed here).</param>
public void Save(Stream output, IList<IArchiveFileInfo> files, IProgressContext progress)
{
    // Group files by directory
    var castedFiles = files.Cast<XfsaArchiveFileInfo<Xfsa2FileEntry>>();

    // Build directory, file, and name tables
    BuildTables(castedFiles, out var directoryEntries, out var directoryHashes, out var fileEntries, out var nameStream);

    // -- Write file --

    using var writer = new BinaryWriterX(output);
    var stream = writer.BaseStream;
    stream.Position = _headerSize;

    // Write directory entries (compressed); header records count and offset.
    _header.directoryEntriesCount = (short)directoryEntries.Count;
    _header.directoryEntriesOffset = _headerSize;
    XfsaSupport.WriteCompressedTableEntries(stream, directoryEntries);
    writer.WriteAlignment(4);

    // Write directory hashes (compressed).
    _header.directoryHashCount = (short)directoryHashes.Count;
    _header.directoryHashOffset = (int)stream.Position;
    XfsaSupport.WriteCompressedTableEntries(stream, directoryHashes);
    writer.WriteAlignment(4);

    // Write file entries (compressed).
    _header.fileEntriesCount = fileEntries.Count;
    _header.fileEntriesOffset = (int)stream.Position;
    XfsaSupport.WriteCompressedTableEntries(stream, fileEntries.Select(x => x.Entry));
    writer.WriteAlignment(4);

    // Write the LZ10-compressed name table.
    _header.nameOffset = (int)stream.Position;
    var compressedNames = new MemoryStream();
    XfsaSupport.Compress(nameStream, compressedNames, Level5CompressionMethod.Lz10);
    compressedNames.CopyTo(stream);
    writer.WriteAlignment(4);

    // Write each file's data at its entry's offset relative to dataOffset.
    _header.dataOffset = (int)stream.Position;
    foreach (var fileEntry in fileEntries)
    {
        stream.Position = _header.dataOffset + fileEntry.Entry.FileOffset;
        fileEntry.SaveFileData(stream, null);
    }

    // Write the finished header at the start of the stream.
    stream.Position = 0;
    writer.WriteType(_header);
}
/// <summary>
/// Reads an XFSA1 archive: header, compressed directory/hash/file tables and
/// the compressed name table, then materializes one file info per file entry.
/// </summary>
/// <param name="input">Archive stream; left open after loading (data is read lazily via SubStreams).</param>
/// <returns>The archive's files with their directory-qualified names.</returns>
public IList<IArchiveFileInfo> Load(Stream input)
{
    using var br = new BinaryReaderX(input, true);

    // Header
    _header = br.ReadType<XfsaHeader>();

    // Read directory entries (table sizes are derived from consecutive offsets)
    var directoryEntries = XfsaSupport.ReadCompressedTableEntries<Xfsa1DirectoryEntry>(input, _header.directoryEntriesOffset, _header.directoryHashOffset - _header.directoryEntriesOffset, _header.directoryEntriesCount);

    // Read directory hashes
    var directoryHashes = XfsaSupport.ReadCompressedTableEntries<uint>(input, _header.directoryHashOffset, _header.fileEntriesOffset - _header.directoryHashOffset, _header.directoryHashCount);

    // Read file entry table
    var entries = XfsaSupport.ReadCompressedTableEntries<Xfsa1FileEntry>(input, _header.fileEntriesOffset, _header.nameOffset - _header.fileEntriesOffset, _header.fileEntriesCount);

    // Read nameTable
    var nameComp = new SubStream(input, _header.nameOffset, _header.dataOffset - _header.nameOffset);
    var nameStream = new MemoryStream();
    Level5Compressor.Decompress(nameComp, nameStream);

    // Add Files
    // FIX: dispose the name reader (it was leaked); consistent with the
    // `using var br` above.
    using var names = new BinaryReaderX(nameStream);
    var result = new List<IArchiveFileInfo>();
    foreach (var directory in directoryEntries)
    {
        // Directory name, then each file's name relative to that directory.
        names.BaseStream.Position = directory.DirectoryNameOffset;
        var directoryName = names.ReadCStringSJIS();

        var filesInDirectory = entries.Skip(directory.firstFileIndex).Take(directory.FileCount);
        foreach (var file in filesInDirectory)
        {
            var fileStream = new SubStream(input, _header.dataOffset + file.FileOffset, file.FileSize);
            names.BaseStream.Position = directory.FileNameStartOffset + file.NameOffset;
            var fileName = names.ReadCStringSJIS();

            result.Add(new XfsaArchiveFileInfo<Xfsa1FileEntry>(fileStream, directoryName + fileName, file)
            {
                PluginIds = XfsaSupport.RetrievePluginMapping(fileStream, fileName)
            });
        }
    }

    return result;
}
/// <summary>
/// Reads a GFSA archive: header, compressed directory/file tables, an opaque
/// table kept verbatim for round-tripping, and the compressed string table,
/// then matches names to entries by CRC16 hash.
/// </summary>
/// <param name="input">Archive stream; left open after loading (data is read lazily via SubStreams).</param>
/// <returns>The archive's files with their directory-qualified names.</returns>
public IList<IArchiveFileInfo> Load(Stream input)
{
    using var br = new BinaryReaderX(input, true);

    // Read header
    _header = br.ReadType<GfsaHeader>();

    // Read tables (table sizes are derived from consecutive offsets)
    var directoryEntries = XfsaSupport.ReadCompressedTableEntries<GfsaDirectoryEntry>(input, _header.directoryOffset, _header.fileOffset - _header.directoryOffset, _header.directoryCount);
    var fileEntries = XfsaSupport.ReadCompressedTableEntries<GfsaFileEntry>(input, _header.fileOffset, _header.unkOffset - _header.fileOffset, _header.fileCount);

    // Preserve the unknown table verbatim so it can be written back on save.
    input.Position = _header.unkOffset;
    _unkTable = br.ReadBytes(_header.stringOffset - _header.unkOffset);

    // Read strings
    var nameComp = new SubStream(input, _header.stringOffset, _header.fileDataOffset - _header.stringOffset);
    var nameStream = new MemoryStream();
    Level5Compressor.Decompress(nameComp, nameStream);

    var (directories, files) = ReadStrings(nameStream);

    // Add files
    var result = new List<IArchiveFileInfo>();
    foreach (var dirEntry in directoryEntries)
    {
        // Directory and file names are matched to entries by CRC16 hash.
        // NOTE(review): FirstOrDefault yields null on a hash miss, which would
        // make Path.Combine throw — presumably hashes always match; verify.
        var dirName = directories.FirstOrDefault(x => x.Hash == dirEntry.hash)?.Value;

        // FIX: materialize this directory's slice of the name list once,
        // instead of re-running Skip/Take for every file in the inner loop
        // (the original re-evaluated the deferred query per file: O(n^2)).
        var dirFileNames = files.Skip(dirEntry.fileIndex).Take(dirEntry.fileCount).ToList();

        for (var fileIndex = dirEntry.fileIndex; fileIndex < dirEntry.fileIndex + dirEntry.fileCount; fileIndex++)
        {
            var fileEntry = fileEntries[fileIndex];
            var fileName = dirFileNames.FirstOrDefault(x => x.Hash == fileEntry.hash)?.Value;
            var fileData = new SubStream(input, _header.fileDataOffset + fileEntry.Offset, fileEntry.Size);

            result.Add(CreateAfi(fileData, Path.Combine(dirName, fileName), fileEntry));
        }
    }

    return result;
}