private static DataStream CompressV2(DataStream inputDataStream)
{
    // Pull the whole input into memory so it can be sliced into chunks.
    var rawInput = new byte[inputDataStream.Length];
    _ = inputDataStream.Read(rawInput, 0, rawInput.Length);

    DataStream result = DataStreamFactory.FromMemory();
    var output = new DataWriter(result)
    {
        Endianness = EndiannessMode.BigEndian,
    };

    int offset = 0;
    while (offset < rawInput.Length)
    {
        // Each chunk holds at most 0x10000 bytes of uncompressed data.
        int chunkLength = Math.Min(rawInput.Length - offset, 0x10000);
        var chunk = new byte[chunkLength];
        Array.Copy(rawInput, offset, chunk, 0, chunkLength);

        byte[] deflated = ZlibCompress(chunk);

        // Stored length covers the compressed payload plus the 5 chunk header bytes.
        int storedLength = deflated.Length + 5;
        output.Write((byte)(storedLength >> 16));
        output.Write((byte)(storedLength >> 8));
        output.Write((byte)storedLength);
        output.Write((ushort)(chunkLength - 1));
        output.Write(deflated);
        output.WriteTimes(0, 5);

        offset += chunkLength;
    }

    return result;
}
public void CreateFromMemoryAllowToExpand()
{
    // A freshly created memory stream has length zero; writing a byte
    // past the end must expand it rather than throw.
    var memoryStream = DataStreamFactory.FromMemory();

    Assert.That(() => memoryStream.WriteByte(0xFE), Throws.Nothing);

    memoryStream.Dispose();
}
private static DataStream Inflate(DataStream source, EndiannessMode endianness)
{
    DataStream result = DataStreamFactory.FromMemory();

    source.Seek(0);
    var reader = new DataReader(source)
    {
        Endianness = endianness,
    };

    // Header: total decompressed size, then the first chunk length.
    int expectedSize = reader.ReadInt32();
    int currentChunkSize = reader.ReadInt32();

    // A zero-length chunk marks the end of the compressed stream.
    while (currentChunkSize != 0)
    {
        using (var zlib = new ZlibStream(result, CompressionMode.Decompress, true))
        {
            source.WriteSegmentTo(source.Position, currentChunkSize, zlib);
            zlib.Close();
        }

        // Skip past the chunk we just inflated and read the next length.
        source.Seek(currentChunkSize, SeekOrigin.Current);
        currentChunkSize = reader.ReadInt32();
    }

    if (result.Length != expectedSize)
    {
        throw new ExtractionException("Result size doesn't match with expected size.");
    }

    return result;
}
private static void Decrypt(Options.Decrypt opts)
{
    WriteHeader();

    if (!File.Exists(opts.InputFile))
    {
        Console.WriteLine($"ERROR: \"{opts.InputFile}\" not found!!!!");
        return;
    }

    if (File.Exists(opts.OutputFile))
    {
        Console.WriteLine($"WARNING: \"{opts.OutputFile}\" already exists. It will be overwritten.");
        Console.Write("Continue? (y/N) ");
        string answer = Console.ReadLine();

        // BUGFIX: "(y/N)" advertises No as the default, but the old check let an
        // empty answer fall through and overwrite the file. Now only an explicit
        // "y"/"Y" continues; anything else (including Enter) cancels.
        if (!string.Equals(answer?.Trim(), "Y", StringComparison.OrdinalIgnoreCase))
        {
            Console.WriteLine("CANCELLED BY USER.");
            return;
        }
    }

    // Encryption uses 2 keys:
    // 1. First bytes in file, XORed with input file name without extension in upper case
    // 2. Fixed byte array.
    string fileWithoutExtension = Path.GetFileNameWithoutExtension(opts.InputFile).ToUpperInvariant();

    using DataStream input = DataStreamFactory.FromFile(opts.InputFile, FileOpenMode.Read);
    using DataStream output = DataStreamFactory.FromMemory();

    Console.Write($"Decrypting '{opts.InputFile}'...");
    Decrypt(input, output, fileWithoutExtension);
    output.WriteTo(opts.OutputFile);
    Console.WriteLine(" DONE!");
}
public void CreateFromMemoryUseMemoryStream()
{
    // Memory-backed data streams should sit on top of a RecyclableMemoryStream.
    var stream = DataStreamFactory.FromMemory();

    Assert.That(stream.BaseStream, Is.AssignableFrom<RecyclableMemoryStream>());

    stream.Dispose();
}
/// <summary>
/// Decompresses an ARC container.
/// </summary>
/// <param name="source">Source format.</param>
/// <returns>The uncompressed format.</returns>
public BinaryFormat Convert(BinaryFormat source)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }

    source.Stream.Position = 0;
    Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);

    var input = new DataReader(source.Stream)
    {
        DefaultEncoding = Encoding.GetEncoding(1252),
        Endianness = EndiannessMode.LittleEndian,
    };

    // Read and validate the container header.
    ArcHeader header = input.Read<ArcHeader>() as ArcHeader;
    this.CheckHeader(header);

    switch (header.CompressionType)
    {
        case 0x00: // Not compressed
            // Nothing to do; hand back the original binary.
            return source;

        case 0x02: // LZ4 Compression
        {
            DataStream resultStream = DataStreamFactory.FromMemory();
            var output = new DataWriter(resultStream)
            {
                DefaultEncoding = Encoding.GetEncoding(1252),
                Endianness = EndiannessMode.LittleEndian,
            };

            // Rewrite the header marking the result as uncompressed.
            header.CompressionType = 0x00;
            output.WriteOfType(header);

            // The 0x70-byte-per-file table is stored plain; only the data blob is LZ4.
            byte[] fileTable = input.ReadBytes(0x70 * header.FileCount);
            byte[] packedData = input.ReadBytes((int)(source.Stream.Length - source.Stream.Position));

            var unpackedData = new byte[header.OriginalSize];
            int decodedLength = LZ4Codec.Decode(packedData, unpackedData);
            if (decodedLength != header.OriginalSize)
            {
                throw new FormatException($"ARC: Bad LZ4 compression.");
            }

            output.Write(fileTable);
            output.Write(unpackedData);

            return new BinaryFormat(resultStream);
        }

        default:
            throw new FormatException($"ARC: Unknown compression {header.CompressionType:X4}");
    }
}
/// <summary>
/// Compresses a stream into the SLLZ container format (header + payload).
/// </summary>
/// <param name="inputDataStream">The raw data to compress.</param>
/// <param name="parameters">Compressor settings; null selects version 1, little endian.</param>
/// <returns>A memory stream holding the full SLLZ binary.</returns>
/// <exception cref="FormatException">Unknown version, or v2 input too small.</exception>
private static DataStream Compress(DataStream inputDataStream, CompressorParameters parameters)
{
    DataStream outputDataStream = DataStreamFactory.FromMemory();
    var writer = new DataWriter(outputDataStream)
    {
        DefaultEncoding = Encoding.ASCII,
    };

    // Default settings: version 1, little endian.
    parameters ??= new CompressorParameters
    {
        Version = 0x01,
        Endianness = 0x00,
    };

    writer.Endianness = parameters.Endianness == 0
        ? EndiannessMode.LittleEndian
        : EndiannessMode.BigEndian;

    // SLLZ header (0x10 bytes); the compressed size is patched in afterwards.
    writer.Write("SLLZ", false);
    writer.Write(parameters.Endianness);
    writer.Write(parameters.Version);
    writer.Write((ushort)0x10); // Header size
    writer.Write((int)inputDataStream.Length);
    writer.Stream.PushCurrentPosition();
    writer.Write(0x00000000); // Compressed size placeholder

    DataStream compressedDataStream;
    if (parameters.Version == 1)
    {
        compressedDataStream = CompressV1(inputDataStream);
    }
    else if (parameters.Version == 2)
    {
        // v2 requires a minimum input size of 0x1B bytes.
        if (inputDataStream.Length < 0x1B)
        {
            // Fixed message grammar: "must more than" -> "must be more than".
            throw new FormatException("SLLZv2: Input size must be more than 0x1A.");
        }

        compressedDataStream = CompressV2(inputDataStream);
    }
    else
    {
        throw new FormatException($"SLLZ: Unknown compression version {parameters.Version}.");
    }

    compressedDataStream.WriteTo(outputDataStream);

    // Patch the compressed size (payload + 0x10-byte header).
    writer.Stream.PopPosition();
    writer.Write((int)(compressedDataStream.Length + 0x10));
    compressedDataStream.Dispose();

    return outputDataStream;
}
/// <summary>
/// Serializes a DLC BIN archive.
/// </summary>
/// <param name="source">Collection of files as NodeContainerFormat.</param>
/// <returns>The BinaryFormat.</returns>
public BinaryFormat Convert(NodeContainerFormat source)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }

    if (source.Root.Children.Count == 0)
    {
        throw new FormatException("No files selected.");
    }

    _params.Stream ??= DataStreamFactory.FromMemory();
    _params.Stream.Seek(0);

    var writer = new DataWriter(_params.Stream)
    {
        Endianness = _params.Endianness,
    };

    var header = new BinFileHeader
    {
        MagicNumber = 0x00000064,
        FileCount = source.Root.Children.Count,
        BlockSize = 0,
        Padding = 0,
    };
    writer.WriteOfType(header);

    // Reserve the header area: 0x10 bytes of header plus the offset (0x80)
    // and size (0x80) tables.
    const int headerSize = 0x10 + 0x80 + 0x80;
    writer.Stream.SetLength(headerSize);

    int childCount = source.Root.Children.Count;
    for (int index = 0; index < childCount; index++)
    {
        Node child = source.Root.Children[index];

        // File data is appended at the current end of the stream.
        writer.Stream.Seek(0, SeekOrigin.End);
        long dataOffset = writer.Stream.Position;

        // Offset table entry at 0x10.
        writer.Stream.Seek(0x10 + (0x04 * index), SeekOrigin.Begin);
        writer.Write((int)dataOffset);

        // Size table entry at 0x90.
        writer.Stream.Seek(0x90 + (0x04 * index), SeekOrigin.Begin);
        writer.Write((int)child.Stream.Length);

        writer.Stream.Seek(0, SeekOrigin.End);
        child.Stream.WriteTo(writer.Stream);
        writer.WritePadding(0x00, 0x10);
    }

    return new BinaryFormat(_params.Stream);
}
public BinaryFormat Convert(BinaryFormat source)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }

    source.Stream.Position = 0;
    var reader = new DataReader(source.Stream)
    {
        DefaultEncoding = Encoding.ASCII,
        Endianness = EndiannessMode.LittleEndian,
    };

    // Pick the fields we need from fixed offsets in the source texture header.
    source.Stream.Seek(0x18, System.IO.SeekOrigin.Begin);
    long dataSize = reader.ReadInt64();

    source.Stream.Seek(0x40, System.IO.SeekOrigin.Begin);
    uint width = reader.ReadUInt32();
    uint height = reader.ReadUInt32();

    source.Stream.Seek(0x70, System.IO.SeekOrigin.Begin);
    uint mipmapCount = reader.ReadUInt32();

    source.Stream.Seek(0x80, System.IO.SeekOrigin.Begin);
    long dataOffset = reader.ReadInt64() + 0x80;

    // Copy the pixel data that follows the header.
    source.Stream.Seek(dataOffset, System.IO.SeekOrigin.Begin);
    byte[] pixelData = reader.ReadBytes((int)(dataSize - dataOffset + 0x30));

    DataStream output = DataStreamFactory.FromMemory();
    var writer = new DataWriter(output)
    {
        DefaultEncoding = Encoding.ASCII,
        Endianness = EndiannessMode.LittleEndian,
    };

    // Prepend the prebuilt DDS header, then patch its dimension/size fields.
    writer.Write(DdsHeader);
    writer.Write(pixelData);

    writer.Stream.Seek(0x0C, System.IO.SeekOrigin.Begin);
    writer.Write(height);
    writer.Write(width);
    // Linear size — assumes 4x4 blocks of 8 bytes each; TODO confirm pixel format.
    writer.Write(height / 4 * width / 4 * 8);

    writer.Stream.Seek(0x1C, System.IO.SeekOrigin.Begin);
    writer.Write(mipmapCount);

    return new BinaryFormat(output);
}
/// <summary>
/// Converts an Armp into a Binary Format.
/// </summary>
/// <param name="source">Input format.</param>
/// <returns>The binary format.</returns>
/// <exception cref="ArgumentNullException">Thrown if source is null.</exception>
public BinaryFormat Convert(ArmpTable source)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }

    DataStream binary = DataStreamFactory.FromMemory();
    var writer = new DataWriter(binary)
    {
        DefaultEncoding = Encoding.UTF8,
        Endianness = EndiannessMode.LittleEndian,
    };

    var header = new FileHeader
    {
        Magic = "armp",
        PlatformId = 0,
        Endianness = Endianness.LittleEndian,
        SizeExtended = 0,
        Relocated = 0,
        Version = 0x0001000C,
        Size = 0,
    };

    writer.WriteOfType(header);
    writer.WriteTimes(0x00, 0x10); // Reserved area holding the main table pointer.

    // Serialize the table body right after the 0x20-byte header area.
    (byte[] tableData, int tableOffset) = SerializeTable(source, 0x20);
    writer.Write(tableData);
    writer.WritePadding(0x00, 16);

    // Patch the main table pointer at 0x10.
    _ = writer.Stream.Seek(0x10, System.IO.SeekOrigin.Begin);
    writer.Write(tableOffset);

    return new BinaryFormat(binary);
}
private static DataStream Deflate(DataStream source, EndiannessMode endianness)
{
    DataStream result = DataStreamFactory.FromMemory();
    var writer = new DataWriter(result)
    {
        Endianness = endianness,
    };

    source.Seek(0);

    // Header: total decompressed size.
    writer.Write((int)source.Length);

    int pending = (int)source.Length;
    while (pending > 0)
    {
        // Compress the source in chunks of at most 0x8000 bytes.
        int chunkSize = Math.Min(pending, 0x8000);

        long sizePosition = result.Position;
        writer.Write(0); // Placeholder for the compressed chunk size.
        long dataStart = result.Position;

        using (var zlib = new ZlibStream(result, CompressionMode.Compress, CompressionLevel.BestCompression, true))
        {
            source.WriteSegmentTo(source.Position, chunkSize, zlib);
            zlib.Close();
        }

        // Patch in the real compressed size of this chunk.
        long compressedSize = result.Position - dataStart;
        result.PushToPosition(sizePosition);
        writer.Write((int)compressedSize);
        result.PopPosition();

        source.Seek(chunkSize, SeekOrigin.Current);
        pending -= chunkSize;
    }

    // A zero chunk size terminates the stream (see Inflate).
    writer.Write(0);

    return result;
}
public BinaryFormat Convert(BMFont source)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }

    DataStream stream = DataStreamFactory.FromMemory();
    var writer = new TextDataWriter(stream, Encoding.UTF8);

    // File prologue.
    writer.WriteLine("# AngelCode Bitmap Font Generator configuration file");
    writer.WriteLine("fileVersion=1");
    writer.WriteLine();

    // One writer call per section of the BMFont text format.
    WriteInfoBlock(source.Info, writer);
    WriteCommonBlock(source.Common, writer);
    WriteCharsBlock(source.Chars, writer);

    return new BinaryFormat(stream);
}
/// <summary>
/// Serializes a NodeContainerFormat into a PARC archive (ParFile).
/// Writes header, name tables, folder/file tables and padded data.
/// </summary>
/// <param name="source">The node tree to pack.</param>
/// <returns>The resulting ParFile, marked as not compressible.</returns>
/// <exception cref="ArgumentNullException">Thrown if source is null.</exception>
public ParFile Convert(NodeContainerFormat source)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }

    Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);

    // Write to a file when an output path was configured, otherwise to memory.
    DataStream dataStream = string.IsNullOrEmpty(this.parameters.OutputPath)
        ? DataStreamFactory.FromMemory()
        : DataStreamFactory.FromFile(this.parameters.OutputPath, FileOpenMode.Write);

    var writer = new DataWriter(dataStream)
    {
        DefaultEncoding = Encoding.GetEncoding(1252),
        Endianness = EndiannessMode.BigEndian,
    };

    var folders = new List<Node>();
    var files = new List<Node>();

    // Optionally wrap everything inside a "." root folder entry.
    if (this.parameters.IncludeDots)
    {
        var parFolderRootNode = new Node(".", new NodeContainerFormat());
        source.MoveChildrenTo(parFolderRootNode);
        folders.Add(parFolderRootNode);
    }

    GetFoldersAndFiles(source.Root, folders, files, this.parameters);
    CompressFiles(files, this.parameters.CompressorVersion);

    // Layout: 32-byte header + 64-byte name entries, then 32-byte folder
    // entries, then 32-byte file entries; data starts at the next 2048 boundary.
    int headerSize = 32 + (64 * folders.Count) + (64 * files.Count);
    int folderTableOffset = headerSize;
    int fileTableOffset = folderTableOffset + (folders.Count * 32);
    long dataPosition = fileTableOffset + (files.Count * 32);
    dataPosition = Align(dataPosition, 2048);

    writer.Write("PARC", 4, false);

    // Platform id byte: explicit tag wins, default 0x02.
    if (source.Root.Tags.ContainsKey("PlatformId"))
    {
        writer.Write((byte)source.Root.Tags["PlatformId"]);
    }
    else
    {
        writer.Write((byte)0x02);
    }

    // Endianness byte: an explicit tag also switches the writer for the
    // remaining fields; default is 0x01 (writer stays big endian).
    if (source.Root.Tags.ContainsKey("Endianness"))
    {
        var endianness = (byte)source.Root.Tags["Endianness"];
        writer.Write(endianness);
        writer.Endianness = endianness == 0x00 ? EndiannessMode.LittleEndian : EndiannessMode.BigEndian;
    }
    else
    {
        writer.Write((byte)0x01);
    }

    writer.Write((ushort)0x0000); // extended size and relocated

    // Version: explicit tag wins, default 0x00020001.
    if (source.Root.Tags.ContainsKey("Version"))
    {
        writer.Write((int)source.Root.Tags["Version"]);
    }
    else
    {
        writer.Write(0x00020001);
    }

    writer.Write(0x00000000); // data size
    writer.Write(folders.Count);
    writer.Write(folderTableOffset);
    writer.Write(files.Count);
    writer.Write(fileTableOffset);

    // Name tables, then the folder/file entry tables and the file data.
    WriteNames(writer, folders);
    WriteNames(writer, files);
    WriteFolders(writer, folders);
    WriteFiles(writer, files, dataPosition);

    // Pad the archive tail to a full 2048-byte sector.
    dataStream.Seek(0, SeekMode.End);
    writer.WritePadding(0, 2048);

    var result = new ParFile(dataStream)
    {
        CanBeCompressed = false,
    };

    return (result);
}
/// <summary>
/// Converts a NodeContainerFormat into a BinaryFormat.
/// </summary>
/// <param name="source">Input format.</param>
/// <returns>The node container format.</returns>
/// <exception cref="ArgumentNullException">Thrown if source is null.</exception>
public virtual BinaryFormat Convert(NodeContainerFormat source)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }

    // Reorder nodes
    source.Root.SortChildren((x, y) => string.CompareOrdinal(x.Name.ToLowerInvariant(), y.Name.ToLowerInvariant()));

    // Fill node indexes
    FillNodeIndexes(source.Root);

    Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);

    // Reuse the caller-provided stream when available, otherwise write to memory.
    DataStream stream = _writerParameters.OutputStream ?? DataStreamFactory.FromMemory();
    stream.Position = 0;

    var writer = new DataWriter(stream)
    {
        DefaultEncoding = Encoding.GetEncoding(1252),
        Endianness = _writerParameters.Endianness == Endianness.LittleEndian
            ? EndiannessMode.LittleEndian
            : EndiannessMode.BigEndian,
    };

    var directories = new List<Node>();
    var files = new List<Node>();

    // First pass: split nodes into files and directories, accumulating the
    // total rounded data size and tracking the largest single file.
    uint fileOffset = 0;
    uint maxFileSize = 0;
    foreach (Node node in Navigator.IterateNodes(source.Root))
    {
        if (!node.IsContainer)
        {
            uint compressedSize = (uint)node.Stream.Length;
            fileOffset = RoundSize(fileOffset, compressedSize);
            if (compressedSize > maxFileSize)
            {
                maxFileSize = compressedSize;
            }

            files.Add(node);
        }
        else
        {
            directories.Add(node);
        }
    }

    // NOTE(review): maxFileSize is a uint, so this only triggers at exactly
    // 0xFFFFFFFF; larger lengths already wrapped in the cast above — confirm intent.
    if (maxFileSize >= 0xFFFFFFFF)
    {
        throw new FormatException("Can not add files over 4GB");
    }

    var header = new FileHeader
    {
        Magic = "PARC",
        PlatformId = _writerParameters.PlatformId,
        Endianness = _writerParameters.Endianness,
        SizeExtended = 0,
        Relocated = 0,
        Version = _writerParameters.Version,
        Size = 0,
    };

    // Layout: 0x20 header + one 0x40-byte name per node, then 0x20-byte
    // directory entries, then 0x20-byte file entries, then the file data.
    uint directoryStartOffset = (uint)(0x20 + (0x40 * (directories.Count + files.Count)));
    uint fileStartOffset = (uint)(directoryStartOffset + (0x20 * directories.Count));
    var index = new ParIndex
    {
        DirectoryCount = (uint)directories.Count,
        DirectoryStartOffset = directoryStartOffset,
        FileCount = (uint)files.Count,
        FileStartOffset = fileStartOffset,
    };

    uint headerSize = RoundSize((uint)((0x20 * files.Count) + fileStartOffset));
    writer.Stream.SetLength(RoundSize(fileOffset + headerSize));
    uint currentOffset = headerSize;

    if (_writerParameters.WriteDataSize)
    {
        header.Size = headerSize + fileOffset;
    }

    writer.WriteOfType(header);
    writer.WriteOfType(index);

    // Directory entries: 0x40-byte name in the name table, info record in the
    // directory table (seek out and back so names stay sequential).
    for (int i = 0; i < directories.Count; i++)
    {
        Node node = directories[i];
        writer.Write(node.Name, 0x40, false);
        long returnPosition = writer.Stream.Position;
        _ = writer.Stream.Seek(directoryStartOffset + (i * 0x20), System.IO.SeekOrigin.Begin);

        var directoryInfo = new ParDirectoryInfo
        {
            SubdirectoryCount = (uint)node.Tags["SubdirectoryCount"],
            SubdirectoryStartIndex = (uint)node.Tags["SubdirectoryStartIndex"],
            FileCount = (uint)node.Tags["FileCount"],
            FileStartIndex = (uint)node.Tags["FileStartIndex"],
            RawAttributes = (uint)node.Tags["RawAttributes"],
        };

        writer.WriteOfType(directoryInfo);
        writer.WritePadding(0x00, 0x20);
        _ = writer.Stream.Seek(returnPosition, System.IO.SeekOrigin.Begin);
    }

    // File entries: 0x40-byte name, info record and the data payload.
    for (int i = 0; i < files.Count; i++)
    {
        Node node = files[i];
        writer.Write(node.Name, 0x40, false);
        long returnPosition = writer.Stream.Position;
        _ = writer.Stream.Seek(fileStartOffset + (i * 0x20), System.IO.SeekOrigin.Begin);

        ParFile file = node.GetFormatAs<ParFile>();
        currentOffset = RoundOffset(currentOffset, file.FileInfo.CompressedSize);
        file.FileInfo.DataOffset = currentOffset;
        file.FileInfo.ExtendedOffset = 0;

        // Attributes: explicit tag wins, then on-disk attributes, then 0x20.
        if (node.Tags.ContainsKey("RawAttributes"))
        {
            file.FileInfo.RawAttributes = node.Tags["RawAttributes"];
        }
        else if (node.Tags.ContainsKey("FileInfo"))
        {
            FileInfo info = node.Tags["FileInfo"];
            file.FileInfo.RawAttributes = (uint)info.Attributes;
        }
        else
        {
            file.FileInfo.RawAttributes = 0x20;
        }

        // Timestamp: explicit tag wins, then the file's last write time, then
        // "now" — stored as seconds since 1970-01-01.
        if (node.Tags.ContainsKey("Timestamp"))
        {
            file.FileInfo.Timestamp = node.Tags["Timestamp"];
        }
        else if (node.Tags.ContainsKey("FileInfo"))
        {
            DateTime baseDate = new DateTime(1970, 1, 1);
            FileInfo info = node.Tags["FileInfo"];
            file.FileInfo.Timestamp = (uint)(info.LastWriteTime - baseDate).TotalSeconds;
        }
        else
        {
            DateTime baseDate = new DateTime(1970, 1, 1);
            file.FileInfo.Timestamp = (uint)(DateTime.Now - baseDate).TotalSeconds;
        }

        writer.WriteOfType(file.FileInfo);
        _ = writer.Stream.Seek(currentOffset, SeekOrigin.Begin);
        node.Stream.WriteTo(writer.Stream);
        currentOffset += file.FileInfo.CompressedSize;
        _ = writer.Stream.Seek(returnPosition, System.IO.SeekOrigin.Begin);
    }

    return (new BinaryFormat(stream));
}
/// <summary>
/// Creates a SLLZ standard compressed BinaryFormat.
/// </summary>
/// <param name="source">Original format.</param>
/// <returns>The compressed binary.</returns>
public ParFile Convert(BinaryFormat source)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }

    // Load the whole input so the compressor can work on a byte array.
    source.Stream.Seek(0);
    byte[] inputData = new byte[source.Stream.Length];
    _ = source.Stream.Read(inputData, 0, inputData.Length);

    byte[] compressedData;
    try
    {
        compressedData = Compress(inputData);
    }
    catch (SllzException)
    {
        // Data can't be compressed — return the original stream untouched.
        return new ParFile(source.Stream);
    }

    DataStream output = compressorParameters.OutputStream ?? DataStreamFactory.FromMemory();
    output.Position = 0;

    var writer = new DataWriter(output)
    {
        DefaultEncoding = Encoding.ASCII,
        Endianness = compressorParameters.Endianness == Endianness.LittleEndian
            ? EndiannessMode.LittleEndian
            : EndiannessMode.BigEndian,
    };

    var header = new SllzHeader
    {
        Magic = "SLLZ",
        Endianness = compressorParameters.Endianness,
        CompressionType = compressorParameters.CompressionType,
        HeaderSize = 0x10,
        OriginalSize = (uint)source.Stream.Length,
        CompressedSize = (uint)compressedData.Length + 0x10, // includes header length
    };

    writer.WriteOfType(header);
    writer.Write(compressedData);

    // PAR entry info: flag 0x80000000 marks the entry as compressed.
    var fileInfo = new ParFileInfo
    {
        Flags = 0x80000000,
        OriginalSize = (uint)source.Stream.Length,
        CompressedSize = (uint)output.Length,
        DataOffset = 0,
        RawAttributes = 0,
        ExtendedOffset = 0,
        Timestamp = 0,
    };

    return new ParFile(fileInfo, output);
}
/// <summary>
/// Rebuilds a CSV-style binary from a Po file, applying translated entries
/// onto the original rows stored in each entry's context.
/// </summary>
/// <param name="source">The Po file to convert.</param>
/// <returns>The rebuilt CSV as a BinaryFormat.</returns>
/// <exception cref="ArgumentNullException">Thrown if source is null.</exception>
public BinaryFormat Convert(Po source)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }

    DataStream outputDataStream = DataStreamFactory.FromMemory();
    var writer = new Yarhl.IO.TextDataWriter(outputDataStream, Encoding.UTF8)
    {
        NewLine = "\r",
    };

    // The original CSV header line is kept in the Po header extensions.
    string csvHeader = source.Header.Extensions["CSVHeader"];
    writer.WriteLine(csvHeader);

    string[] fields = csvHeader.Split(',');
    string[] translationFieldArray = SakunaTools.Constants.TranslationFields.Split(',');

    // Map each known translation field name to its column index in this
    // particular CSV header (-1 when the column is absent).
    var translationIndexes = new int[translationFieldArray.Length];
    for (var i = 0; i < translationIndexes.Length; i++)
    {
        translationIndexes[i] = -1;
    }

    for (var i = 0; i < fields.Length; i++)
    {
        int index = Array.IndexOf(translationFieldArray, fields[i]);
        if (index != -1)
        {
            translationIndexes[index] = i;
        }
    }

    // Group values per CSV row (context) keyed by their column index.
    var translations = new Dictionary<string, Dictionary<int, string>>();
    foreach (PoEntry entry in source.Entries)
    {
        // Context is "<field>\n<row>", or just "<row>" (implies the English field).
        string[] split = entry.Context.Split('\n');
        string context;
        string field;
        if (split.Length == 1)
        {
            context = split[0];
            field = "English";
        }
        else
        {
            context = split[1];
            field = split[0];
        }

        if (!translations.ContainsKey(context))
        {
            translations.Add(context, new Dictionary<int, string>());
        }

        // Effective text: "<!empty>" marker -> empty string, untranslated
        // entry -> original text, otherwise the translation.
        string value;
        if (entry.Original == "<!empty>")
        {
            value = string.Empty;
        }
        else if (string.IsNullOrEmpty(entry.Translated))
        {
            value = entry.Original;
        }
        else
        {
            value = entry.Translated;
        }

        int index = Array.IndexOf(translationFieldArray, field);
        if (index >= 0 && translationIndexes[index] >= 0)
        {
            translations[context].Add(translationIndexes[index], value);
        }
    }

    // Rebuild each row: the context is the original CSV line; replace the
    // translated columns, escaping commas as "\1" and shielding embedded
    // newlines through a temporary "<NewLine>" token.
    foreach ((string context, Dictionary<int, string> translation) in translations)
    {
        string[] split = context.Split(',');
        foreach ((int field, string value) in translation)
        {
            split[field] = value.Replace(",", "\\1").Replace("\n", "<NewLine>");
        }

        var line = string.Join(',', split);
        writer.WriteLine(line.Replace("<NewLine>", "\n"));
    }

    return (new BinaryFormat(outputDataStream));
}
/// <summary>
/// Serializes a BIN archive.
/// </summary>
/// <param name="source">Collection of files as NodeContainerFormat.</param>
/// <returns>The BinaryFormat.</returns>
public BinaryFormat Convert(NodeContainerFormat source)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }

    if (source.Root.Children.Count == 0)
    {
        throw new FormatException("No files selected.");
    }

    _params.Stream ??= DataStreamFactory.FromMemory();
    _params.Stream.Seek(0);

    var writer = new DataWriter(_params.Stream)
    {
        Endianness = _params.Endianness,
    };

    // Default block size when the caller did not configure one.
    if (_params.BlockSize == 0)
    {
        _params.BlockSize = 0x800;
    }

    BinFileHeader header = new BinFileHeader
    {
        MagicNumber = 0x00077DF9,
        BlockSize = _params.BlockSize,
        FileCount = source.Root.Children.Count,
        Padding = 0,
    };

    writer.WriteOfType(header);

    // Reserve the file table: one 0x10-byte entry per file after the
    // 0x10-byte header.
    int headerSize = 0x10 + (header.FileCount * 0x10);
    writer.Stream.SetLength(headerSize);

    for (int i = 0; i < source.Root.Children.Count; i++)
    {
        Node node = source.Root.Children[i];

        // Align the data to a block boundary and record its start block
        // (8 bytes) in this file's table entry.
        writer.Stream.Seek(0, SeekOrigin.End);
        writer.WritePadding(0x00, header.BlockSize);
        long startBlock = writer.Stream.Position / header.BlockSize;
        writer.Stream.Seek(0x10 + (i * 0x10), SeekOrigin.Begin);
        writer.Write(startBlock);

        FileType type = node.Tags["Type"];
        switch (type)
        {
            case FileType.Empty:
                writer.Write(0); // size
                writer.Write(0); // inflated size
                break;
            case FileType.Dummy:
                // Fixed placeholder entry with hard-coded size and marker value.
                writer.Write(0x70); // size
                writer.Write(0x835F837E); // inflated size
                writer.Stream.Seek(0, SeekOrigin.End);
                writer.Write(_dummy);
                break;
            case FileType.Normal:
                writer.Write((int)node.Stream.Length); // size
                writer.Write(0); // inflated size
                writer.Stream.Seek(0, SeekOrigin.End);
                node.Stream.WriteTo(writer.Stream);
                break;
            case FileType.Compressed:
                writer.Write((int)node.Stream.Length); // size
                writer.Write((int)node.Tags["InflatedSize"]); // inflated size
                writer.Stream.Seek(0, SeekOrigin.End);
                node.Stream.WriteTo(writer.Stream);
                break;
            case FileType.CompressedAlternateEndian:
                // Only the inflated size field is written with the opposite
                // endianness; the writer is restored right after.
                var alternateEndianness = _params.Endianness == EndiannessMode.BigEndian
                    ? EndiannessMode.LittleEndian
                    : EndiannessMode.BigEndian;
                writer.Write((int)node.Stream.Length); // size
                writer.Endianness = alternateEndianness;
                writer.Write((int)node.Tags["InflatedSize"]); // inflated size
                writer.Endianness = _params.Endianness;
                writer.Stream.Seek(0, SeekOrigin.End);
                node.Stream.WriteTo(writer.Stream);
                break;
            default:
                throw new FormatException($"Unsupported file type: {type}");
        }
    }

    return (new BinaryFormat(_params.Stream));
}
/// <summary>
/// Write the mipmaps to a DataStream.
/// </summary>
/// <param name="height">Image height (in pixels).</param>
/// <param name="width">Image width (in pixels).</param>
/// <param name="mipmaps">The list of mipmaps.</param>
/// <returns>The DataStream.</returns>
protected virtual DataStream Write(uint height, uint width, List<byte[]> mipmaps)
{
    if (mipmaps == null)
    {
        throw new ArgumentNullException(nameof(mipmaps));
    }

    DataStream output = DataStreamFactory.FromMemory();
    var writer = new DataWriter(output)
    {
        DefaultEncoding = Encoding.ASCII,
        Endianness = EndiannessMode.LittleEndian,
    };

    // Fixed-layout header; the size field at 0x18 is patched at the end.
    writer.Write(0x20L);
    writer.Write(0x10L);
    writer.Write(0x20L);
    writer.Write(0x00L); // Size
    writer.Write(0x00L);
    writer.Write(0x00L);
    writer.Write(0x50);
    writer.Write(0x00);
    writer.Write(0x00);
    writer.Write(0x03);
    writer.Write(width);
    writer.Write(height);
    writer.Write(0x01);
    writer.Write(0x01);
    writer.Write((long)mipmaps.Count);
    writer.Write(0x00L);
    writer.Write(0x00L);
    writer.Write(0x10L);
    writer.Write((long)mipmaps.Count);

    // Mipmap index: one 0x18-byte entry per level, offsets relative to the
    // end of the index table.
    long indexStart = writer.Stream.Position;
    long indexSize = 0x18 * mipmaps.Count;
    long runningOffset = indexSize;

    for (var level = 0; level < mipmaps.Count; level++)
    {
        byte[] mipmap = mipmaps[level];

        // Pitch assumes 4-pixel-wide blocks of 8 bytes — TODO confirm block format.
        uint pitch = Math.Max(1, ((width >> level) + 3) / 4) * 8;

        writer.Write(pitch);
        writer.Write(mipmap.Length);
        writer.Write(runningOffset - (writer.Stream.Position - indexStart));
        writer.Write((long)mipmap.Length);

        runningOffset += mipmap.Length;
    }

    // Pixel data follows the index.
    foreach (byte[] mipmap in mipmaps)
    {
        writer.Write(mipmap);
    }

    // Patch the total size field.
    writer.Stream.Seek(0x18, System.IO.SeekOrigin.Begin);
    writer.Write(output.Length - 0x30);

    return output;
}
/// <summary>
/// Compresses an ARC container using LZ4.
/// </summary>
/// <param name="source">Source format.</param>
/// <returns>The compressed format.</returns>
public BinaryFormat Convert(BinaryFormat source)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }

    source.Stream.Position = 0;
    Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);

    var input = new DataReader(source.Stream)
    {
        DefaultEncoding = Encoding.GetEncoding(1252),
        Endianness = EndiannessMode.LittleEndian,
    };

    // Read and validate the container header.
    ArcHeader header = input.Read<ArcHeader>() as ArcHeader;
    this.CheckHeader(header);

    switch (header.CompressionType)
    {
        case 0x02: // LZ4 Compression
            // Already compressed
            return source;

        case 0x00: // Not compressed
        {
            DataStream resultStream = DataStreamFactory.FromMemory();
            var output = new DataWriter(resultStream)
            {
                DefaultEncoding = Encoding.GetEncoding(1252),
                Endianness = EndiannessMode.LittleEndian,
            };

            // Rewrite the header, marking the payload as LZ4 compressed.
            header.CompressionType = 0x02;
            output.WriteOfType(header);

            // The 0x70-byte-per-file table stays plain; only the data blob is encoded.
            byte[] fileTable = input.ReadBytes(0x70 * header.FileCount);
            byte[] plainData = input.ReadBytes((int)(source.Stream.Length - source.Stream.Position));

            var encodeBuffer = new byte[LZ4Codec.MaximumOutputSize(plainData.Length)];
            int encodedLength = LZ4Codec.Encode(plainData, encodeBuffer, LZ4Level.L11_OPT);
            if (encodedLength < 0)
            {
                throw new FormatException($"ARC: Error in LZ4 compression.");
            }

            output.Write(fileTable);

            // Trim the buffer down to the bytes actually produced.
            var trimmed = new byte[encodedLength];
            Array.Copy(encodeBuffer, trimmed, encodedLength);
            output.Write(trimmed);

            return new BinaryFormat(resultStream);
        }

        default:
            throw new FormatException($"ARC: Unknown compression {header.CompressionType:X4}");
    }
}
/// <summary>
/// Converts a NodeContainerFormat into an ARC container.
/// </summary>
/// <param name="source">The source format.</param>
/// <returns>The binary format.</returns>
public BinaryFormat Convert(NodeContainerFormat source)
{
    if (source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }

    var files = new List<Node>();
    var tags = new Dictionary<string, Dictionary<string, int>>();
    var totalSize = 0;

    // First pass: collect regular files and parse ".arcinfo" side files,
    // which hold per-file "key=value" integer metadata for the entry table.
    foreach (Node node in Navigator.IterateNodes(source.Root, NavigationMode.DepthFirst))
    {
        if (node.IsContainer)
        {
            continue;
        }

        if (node.Name.EndsWith(".arcinfo", StringComparison.InvariantCulture))
        {
            // Metadata is keyed by the path of the file it describes.
            string path = node.Path.Replace(".arcinfo", string.Empty);
            var dict = new Dictionary<string, int>();
            var reader = new TextDataReader(node.Stream);
            while (!node.Stream.EndOfStream)
            {
                string line = reader.ReadLine();
                if (string.IsNullOrEmpty(line))
                {
                    continue;
                }

                string[] split = line.Split('=');
                dict.Add(split[0], int.Parse(split[1], NumberStyles.Integer, CultureInfo.InvariantCulture));
            }

            tags.Add(path, dict);
        }
        else
        {
            files.Add(node);
            totalSize += (int)node.Stream.Length;
        }
    }

    var header = new ArcHeader
    {
        MagicId = "TGP0",
        Version = 3,
        CompressionType = 0,
        FileCount = files.Count,
        OriginalSize = totalSize,
    };

    Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
    DataStream outputStream = DataStreamFactory.FromMemory();
    var writer = new DataWriter(outputStream)
    {
        DefaultEncoding = Encoding.GetEncoding(1252),
        Endianness = EndiannessMode.LittleEndian,
    };

    writer.WriteOfType(header);

    // Entry table: 0x60-byte backslash-separated path, data offset, size and
    // the two metadata values (zero when no arcinfo was present).
    var currentOffset = 0;
    foreach (Node node in files)
    {
        string path = node.Path.Substring(source.Root.Path.Length + 1).Replace("/", "\\");
        var size = (int)node.Stream.Length;
        writer.Write(path, 0x60, false);
        writer.Write(currentOffset);
        writer.Write(size);
        if (tags.ContainsKey(node.Path))
        {
            writer.Write(tags[node.Path]["Unknown1"]);
            writer.Write(tags[node.Path]["Unknown2"]);
        }
        else
        {
            writer.Write(0);
            writer.Write(0);
        }

        currentOffset += size;
    }

    // File data, concatenated in entry order.
    foreach (Node node in files)
    {
        node.Stream.WriteTo(outputStream);
    }

    return (new BinaryFormat(outputStream));
}
/// <summary>
/// Write the swizzled mipmaps to a DataStream.
/// </summary>
/// <param name="height">Image height (in pixels).</param>
/// <param name="width">Image width (in pixels).</param>
/// <param name="mipmaps">The list of mipmaps.</param>
/// <returns>The DataStream.</returns>
protected override DataStream Write(uint height, uint width, List<byte[]> mipmaps)
{
    if (mipmaps == null)
    {
        throw new ArgumentNullException(nameof(mipmaps));
    }

    DataStream outputDataStream = DataStreamFactory.FromMemory();
    var writer = new DataWriter(outputDataStream)
    {
        DefaultEncoding = Encoding.ASCII,
        Endianness = EndiannessMode.LittleEndian,
    };

    // Fixed-layout header; the two size fields (at 0x18 and 0x48) are
    // patched once the total length is known.
    writer.Write(0x20L);
    writer.Write(0x10L);
    writer.Write(0x20L);
    writer.Write(0x00L); // Size
    writer.Write(0x00L);
    writer.Write(0x00L);
    writer.Write(0x20L);
    writer.Write(0x18L);
    writer.Write(0x28L);
    writer.Write(0x00L); // Size - 0x38
    writer.Write(width);
    writer.Write(height);
    writer.Write(0x01);
    writer.Write(0x01);
    writer.Write(0x49);
    writer.Write(mipmaps.Count);

    // Swizzle parameters: 4x4 texel blocks at 8 bytes per block (see the
    // constants below) — presumably a BC-style format; confirm with callers.
    var blockHeightShift = 0;
    const uint blkWidth = 4;
    const uint blkHeight = 4;
    const uint bpp = 8;
    uint blockHeight = SwitchSwizzler.GetBlockHeight(SwitchSwizzler.DivRoundUp(height, blkHeight));
    // log2(blockHeight) computed via the binary-string length.
    uint blockHeightLog2 = (uint)System.Convert.ToString(blockHeight, 2).Length - 1;
    uint linesPerBlockHeight = blockHeight * 8;

    for (var mipLevel = 0; mipLevel < mipmaps.Count; mipLevel++)
    {
        byte[] mipmap = mipmaps[mipLevel];
        uint mipmapWidth = Math.Max(1, width >> mipLevel);
        uint mipmapHeight = Math.Max(1, height >> mipLevel);

        // Reduce the effective block height once the mip level no longer
        // fills a full block column.
        uint roundedHeight = SwitchSwizzler.DivRoundUp(mipmapHeight, blkHeight);
        if (SwitchSwizzler.Pow2RoundUp(roundedHeight) < linesPerBlockHeight)
        {
            blockHeightShift += 1;
        }

        var info = new SwizzleInfo
        {
            Width = mipmapWidth,
            Height = mipmapHeight,
            Depth = 1,
            BlkWidth = blkWidth,
            BlkHeight = blkHeight,
            BlkDepth = 1,
            RoundPitch = 1,
            Bpp = bpp,
            TileMode = 0,
            BlockHeightLog2 = (int)Math.Max(0, blockHeightLog2 - blockHeightShift),
        };

        byte[] swizzled = SwitchSwizzler.Swizzle(info, mipmap);
        writer.Write(swizzled);
    }

    // Patch the size fields now that the total length is known.
    writer.Stream.Seek(0x18, System.IO.SeekOrigin.Begin);
    writer.Write(outputDataStream.Length - 0x30);
    writer.Stream.Seek(0x48, System.IO.SeekOrigin.Begin);
    writer.Write(outputDataStream.Length - 0x68);

    return (outputDataStream);
}