/// <summary>
/// Creates a ONE archive from a set of files.
/// </summary>
/// <param name="files">The files to create an archive from.</param>
/// <param name="version">The version of the archive. Heroes' default is 3.5.0.0. Consider using 3.3.0.0 to support all available prototypes.</param>
/// <param name="bufferSize">Size of the search buffer used in compression, between 0 and 8191.</param>
public static byte[] FromFiles(IList<ManagedOneFile> files, RwVersion version, int bufferSize = 255)
{
    // Compress all files.
    files = files.Select(x => new ManagedOneFile(x.Name, x.GetCompressedData(bufferSize), true)).ToArray();

    // Calculate sizes.
    var numberOfFiles     = files.Count + 2; // Two dummy entries.
    var sizeOfHeaders     = sizeof(OneArchiveHeader) + sizeof(OneNameSectionHeader);
    var sizeOfNameSection = sizeof(OneFileName) * numberOfFiles;
    var sizeOfFileSection = files.Sum(x => x.GetCompressedData().Length + sizeof(OneFileEntry));
    var totalSize         = sizeOfHeaders + sizeOfNameSection + sizeOfFileSection;

    // Make file.
    using var memStream = new ExtendedMemoryStream(totalSize);
    memStream.Append(new OneArchiveHeader(totalSize - sizeof(OneArchiveHeader), version));
    memStream.Append(new OneNameSectionHeader(sizeOfNameSection, version));

    // Dummy entries.
    memStream.Append(new OneFileName(""));
    memStream.Append(new OneFileName(""));

    foreach (var file in files)
        memStream.Append(new OneFileName(file.Name));

    int nameSectionIndex = 2;
    foreach (var file in files)
    {
        memStream.Append(new OneFileEntry(nameSectionIndex++, file.GetCompressedData().Length, file.RwVersion));
        memStream.Append(file.GetCompressedData());
    }

    return memStream.ToArray();
}
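// Usage sketch: pack two files into a .ONE archive. This is an assumption-laden
// example, not the project's own code: the containing class name (OneArchive),
// the RwVersion constant, and the third ManagedOneFile constructor argument
// (taken to mean "data is already compressed") are all hypothetical; only
// FromFiles itself is defined above. FromFiles compresses the inputs itself,
// so raw data is passed in here with the flag set to false.
var files = new List<ManagedOneFile>
{
    new ManagedOneFile("SONIC.DFF", File.ReadAllBytes("SONIC.DFF"), false),
    new ManagedOneFile("TAILS.DFF", File.ReadAllBytes("TAILS.DFF"), false)
};

byte[] archive = OneArchive.FromFiles(files, RwVersion.Rw3_5_0_0); // Hypothetical version constant.
File.WriteAllBytes("OBJECTS.ONE", archive);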
static unsafe void Inject(InjectOptions options)
{
    var elf      = File.ReadAllBytes(options.File);
    var scanner  = new DatScanner(elf);
    var files    = scanner.FindFiles(options.Dat);
    var jsonFile = JsonDatFile.FromFile(options.JsonPath);

    using var memoryStream         = new MemoryStream(elf, true);
    using var streamReader         = new BufferedStreamReader(memoryStream, 2048);
    using var extendedMemoryStream = new ExtendedMemoryStream(elf, true);

    // Patch all table entries.
    foreach (var offset in files.Keys)
    {
        Console.WriteLine($"Patching table at: {offset:X}, RAM: {scanner.RawToMemoryAddress(offset):X}");

        // Read the first entry, then peek the second to obtain the initial file name write pointer.
        streamReader.Seek(offset, SeekOrigin.Begin);
        streamReader.Read(out DatFileEntry firstEntry);
        streamReader.Peek(out DatFileEntry secondEntry);
        int fileNameWritePointer = scanner.MemoryToRawAddress(secondEntry.NamePtr);

        // Write archive entry.
        var newFirstEntry = new DatFileEntry(firstEntry.NamePtr, jsonFile.Files[0].Offset / DatFileEntry.SECTOR_SIZE_BYTES, jsonFile.Files[0].SizeBytes);
        extendedMemoryStream.Seek(offset, SeekOrigin.Begin);
        extendedMemoryStream.Write(newFirstEntry);

        // Now write each file in order, while keeping track of the pointer.
        foreach (var entry in jsonFile.Files)
        {
            // Make entry for the file.
            var datEntry = new DatFileEntry(scanner.RawToMemoryAddress(fileNameWritePointer), entry.Offset / DatFileEntry.SECTOR_SIZE_BYTES, entry.SizeBytes);
            extendedMemoryStream.Write(datEntry);

            // Get bytes attached to the name (with null terminator), aligned to 8 like in the original ELF.
            var alignedTextLength = Utilities.Utilities.RoundUp(entry.Name.Length + 1, 8);
            var nameBytes = new byte[alignedTextLength];
            Encoding.ASCII.GetBytes(entry.Name, nameBytes);

            // Write bytes to pointer, then advance to the next aligned slot.
            Array.Copy(nameBytes, 0, elf, fileNameWritePointer, nameBytes.Length);
            fileNameWritePointer += alignedTextLength;
        }
    }

    // Write new executable to file. The streams wrap `elf` directly,
    // so dispose them before serializing the patched bytes.
    memoryStream.Dispose();
    streamReader.Dispose();
    extendedMemoryStream.Dispose();

    Console.WriteLine($"Writing patched file to: {options.File}");
    File.WriteAllBytes(options.File, elf);
}
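// Usage sketch: patch a game executable in place. InjectOptions appears above
// only through its File/Dat/JsonPath properties; the object-initializer style
// and all paths here are assumptions for illustration.
var options = new InjectOptions
{
    File     = "SLUS_123.45",        // ELF to patch (illustrative path).
    Dat      = "GAME.DAT",           // Archive whose file table is located in the ELF.
    JsonPath = "rebuilt_files.json"  // New offsets/sizes from a prior rebuild step.
};

Inject(options);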
/// <summary>
/// Writes the contents of the archive to be generated to the stream.
/// </summary>
public void Write(Stream writeStream, bool bigEndian)
{
    using var stream = new ExtendedMemoryStream();
    using EndianMemoryStream endianStream = bigEndian
        ? (EndianMemoryStream) new BigEndianMemoryStream(stream)
        : new LittleEndianMemoryStream(stream);

    // Number of items.
    endianStream.Write<int>(Groups.Keys.Count);

    // Number of items for each id.
    foreach (var group in Groups)
        endianStream.Write<byte>((byte)group.Value.Files.Count);

    endianStream.AddPadding(0x00, 4);

    // Write first item index for each group.
    ushort totalItems = 0;
    foreach (var group in Groups)
    {
        endianStream.Write<ushort>(totalItems);
        totalItems += (ushort)group.Value.Files.Count;
    }

    // Write ID for each group.
    foreach (var group in Groups)
        endianStream.Write<ushort>(group.Value.Id);

    // Write offsets for each file and pad.
    int firstWriteOffset = Utilities.Utilities.RoundUp((int)endianStream.Stream.Position + (sizeof(int) * totalItems), 16);
    int fileWriteOffset  = firstWriteOffset;
    foreach (var group in Groups)
    {
        foreach (var file in group.Value.Files)
        {
            endianStream.Write<int>(file.Data.Length <= 0 ? 0 : fileWriteOffset);
            fileWriteOffset += file.Data.Length;
        }
    }

    // Write files.
    endianStream.Write(new byte[(int)(firstWriteOffset - endianStream.Stream.Position)]); // Alignment.
    foreach (var file in Groups.SelectMany(x => x.Value.Files))
        endianStream.Write(file.Data);

    writeStream.Write(endianStream.ToArray());
}
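// Usage sketch: serialize the grouped archive to disk. The builder variable
// and how Groups is populated are assumptions; only Write(Stream, bool) is
// taken from the method above. Passing bigEndian: true targets a big-endian
// platform, e.g. GameCube.
using var output = new FileStream("archive.bin", FileMode.Create);
builder.Write(output, bigEndian: true);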
/// <summary>
/// Writes the contents of the archive to be generated to the stream.
/// </summary>
public void Write(Stream writeStream, bool bigEndian)
{
    using var stream = new ExtendedMemoryStream();
    using EndianMemoryStream endianStream = bigEndian
        ? (EndianMemoryStream) new BigEndianMemoryStream(stream)
        : new LittleEndianMemoryStream(stream);

    // Precompute offsets. Name section size is each name's length plus one null terminator.
    var fileNameSize = Files.Sum(x => x.Name.Length) + Files.Count;
    Span<int> offsets = stackalloc int[Files.Count];
    PrecomputeFileOffsets(offsets, fileNameSize);

    // Texture count.
    endianStream.Write<short>((short)Files.Count);
    endianStream.Write((short)1);

    // Texture offsets.
    for (int x = 0; x < offsets.Length; x++)
        endianStream.Write(offsets[x]);

    // Texture flags.
    for (int x = 0; x < Files.Count; x++)
        endianStream.Write((byte)0x11);

    // Texture names (ASCII, null terminated).
    Span<byte> currentString = stackalloc byte[1024];
    foreach (var file in Files)
    {
        int numEncoded = Encoding.ASCII.GetBytes(file.Name, currentString);
        currentString[numEncoded] = 0x00;
        stream.Write(currentString.Slice(0, numEncoded + 1));
    }

    // Texture data.
    stream.AddPadding(FileDataAlignment);
    for (int x = 0; x < Files.Count; x++)
    {
        stream.Write(Files[x].Data);
        stream.AddPadding(FileDataAlignment);
    }

    writeStream.Write(stream.ToArray());
}
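// Usage sketch: emit a texture archive. The writer type, its Files collection
// element type, and the file names are all assumptions made for illustration;
// Write(Stream, bool) matches the method above.
var writer = new TexturePackWriter(); // Hypothetical containing type.
writer.Files.Add(new ManagedFile("metal01.dds", File.ReadAllBytes("metal01.dds"))); // Hypothetical file type.
using var output = new FileStream("textures.bin", FileMode.Create);
writer.Write(output, bigEndian: false);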
/// <summary>
/// Builds a virtual AFS based upon a supplied base AFS file.
/// </summary>
public VirtualAfs Build(string afsFilePath, int alignment = 2048)
{
    // Get entries from original AFS file.
    var entries = GetEntriesFromFile(afsFilePath);
    var files   = new Dictionary<int, VirtualFile>(entries.Length);

    // Get original file list and copy to new header.
    var maxCustomFileId = _customFiles.Count > 0 ? _customFiles.Max(x => x.Key) + 1 : 0;
    var numFiles        = Math.Max(maxCustomFileId, entries.Length);
    var newEntries      = new AfsFileEntry[numFiles];
    var headerLength    = Utilities.RoundUp(sizeof(AfsHeader) + (sizeof(AfsFileEntry) * entries.Length), alignment);

    // Create new virtual AFS header.
    for (int x = 0; x < entries.Length; x++)
    {
        var offset = x > 0
            ? Utilities.RoundUp(newEntries[x - 1].Offset + newEntries[x - 1].Length, alignment)
            : entries[0].Offset;

        int length;
        if (_customFiles.ContainsKey(x))
        {
            length        = _customFiles[x].Length;
            files[offset] = _customFiles[x];
        }
        else
        {
            length        = entries[x].Length;
            files[offset] = new VirtualFile(entries[x], afsFilePath);
        }

        newEntries[x] = new AfsFileEntry(offset, length);
    }

    var lastEntry = newEntries.Last();
    var fileSize  = Utilities.RoundUp(lastEntry.Offset + lastEntry.Length, alignment);

    // Make header.
    using var memStream = new ExtendedMemoryStream(headerLength);
    memStream.Append(AfsHeader.FromNumberOfFiles(newEntries.Length));
    memStream.Append(newEntries);
    memStream.Append(new AfsFileEntry(0, 0));
    memStream.AddPadding(alignment);

    return new VirtualAfs(memStream.ToArray(), files, alignment, fileSize);
}
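// Usage sketch: build a virtual AFS that swaps out file index 2 without
// rewriting the archive on disk. The builder type name and the AddFile
// registration call are assumptions (the method above only shows that custom
// files live in _customFiles keyed by index); Build(path, alignment) is real.
var builder = new VirtualAfsBuilder();            // Hypothetical containing type.
builder.AddFile(2, "bgm_custom.adx");             // Hypothetical registration API.
VirtualAfs virtualAfs = builder.Build("SOUND.AFS", alignment: 2048);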
/*
 * Inspired by: https://gist.github.com/darkfall/1656050
 * Originally licensed with CC-BY-SA.
 */

/// <summary>
/// Converts a PNG image to an icon (ico) with all supported Windows sizes.
/// </summary>
/// <param name="inputBitmap">The input image.</param>
/// <param name="output">The output stream.</param>
public static bool TryConvertToIcon(Bitmap inputBitmap, Stream output)
{
    if (inputBitmap == null)
        return false;

    int[] sizes = { 256, 64, 48, 32, 16 };

    // Generate PNGs for all sizes and toss them in streams.
    var streams = new List<MemoryStream>();
    foreach (int size in sizes)
    {
        var newBitmap = ResizeImage(inputBitmap, size, size);
        if (newBitmap == null)
            return false;

        var imageStream = new MemoryStream();
        newBitmap.Save(imageStream, ImageFormat.Png);
        streams.Add(imageStream);
    }

    using var iconWriter = new ExtendedMemoryStream();

    // Write ICO header.
    iconWriter.Write(new IcoHeader() { ImageType = 1, NumberOfImages = (short)sizes.Length });

    // Make image headers. Note: (byte)256 wraps to 0, which the ICO format defines as 256px.
    var imageDataOffset = Struct.GetSize<IcoHeader>() + (Struct.GetSize<IcoEntry>() * sizes.Length);
    for (int x = 0; x < sizes.Length; x++)
    {
        iconWriter.Write(new IcoEntry()
        {
            Width             = (byte)sizes[x],
            Height            = (byte)sizes[x],
            BitsPerPixel      = 32,
            SizeOfImageData   = (int)streams[x].Length,
            OffsetOfImageData = imageDataOffset
        });

        imageDataOffset += (int)streams[x].Length;
    }

    // Write image data.
    for (int i = 0; i < sizes.Length; i++)
    {
        iconWriter.Write(streams[i].ToArray());
        streams[i].Close();
    }

    iconWriter.Flush();
    output.Write(iconWriter.ToArray());
    return true;
}
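// Usage sketch: write a multi-size .ico from a PNG on disk. Only
// TryConvertToIcon comes from the method above; the paths are illustrative.
using var bitmap    = new Bitmap("app_logo.png");
using var icoStream = new FileStream("app_logo.ico", FileMode.Create);
if (!TryConvertToIcon(bitmap, icoStream))
    Console.WriteLine("Conversion failed (null bitmap or resize failure).");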
/// <inheritdoc />
public LittleEndianMemoryStream(ExtendedMemoryStream stream) : base(stream) { }
/// <inheritdoc />
public BigEndianMemoryStream(ExtendedMemoryStream stream) : base(stream) { }
/// <summary>
/// Constructs a <see cref="EndianMemoryStream"/> given an existing stream.
/// </summary>
protected EndianMemoryStream(ExtendedMemoryStream stream) => Stream = stream;
/// <inheritdoc />
public BigEndianMemoryStream(ExtendedMemoryStream stream, bool disposeUnderlyingStream = true) : base(stream, disposeUnderlyingStream) { }
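// Usage sketch: pick an endian wrapper at runtime, mirroring the ternary used
// by the Write methods above. ExtendedMemoryStream, both wrapper types, and
// the generic Write<T> call appear in the snippets; the payload is illustrative.
bool bigEndian = true; // e.g. targeting a big-endian console format.
using var backing = new ExtendedMemoryStream();
using EndianMemoryStream stream = bigEndian
    ? (EndianMemoryStream) new BigEndianMemoryStream(backing)
    : new LittleEndianMemoryStream(backing);

stream.Write<ushort>(0xCAFE); // Bytes are now ordered for the chosen target.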