/// <summary>
/// Creates a ONE archive from a set of files.
/// </summary>
/// <param name="files">The files to create an archive from.</param>
/// <param name="version">The version of the archive. Heroes' default is 3.5.0.0. Consider using 3.3.0.0 to support all available prototypes.</param>
/// <param name="bufferSize">Size of the search buffer used in compression between 0-8191.</param>
/// <returns>The raw bytes of the generated ONE archive.</returns>
public static byte[] FromFiles(IList<ManagedOneFile> files, RwVersion version, int bufferSize = 255)
{
    // Compress all files up front, then cache each compressed payload once.
    // The original called GetCompressedData() three times per file (size sum,
    // entry header, data write); caching avoids any repeated work per call.
    files = files.Select(x => new ManagedOneFile(x.Name, x.GetCompressedData(bufferSize), true)).ToArray();
    var compressedData = files.Select(x => x.GetCompressedData()).ToArray();

    // Calculate sizes.
    var numberOfFiles = files.Count + 2; // Two dummy entries.
    var sizeOfHeaders = sizeof(OneArchiveHeader) + sizeof(OneNameSectionHeader);
    var sizeOfNameSection = sizeof(OneFileName) * numberOfFiles;
    var sizeOfFileSection = compressedData.Sum(x => x.Length + sizeof(OneFileEntry));
    var totalSize = sizeOfHeaders + sizeOfNameSection + sizeOfFileSection;

    // Make file.
    using var memStream = new ExtendedMemoryStream(totalSize);
    memStream.Append(new OneArchiveHeader(totalSize - sizeof(OneArchiveHeader), version));
    memStream.Append(new OneNameSectionHeader(sizeOfNameSection, version));

    // Dummy entries.
    memStream.Append(new OneFileName(""));
    memStream.Append(new OneFileName(""));

    foreach (var file in files)
        memStream.Append(new OneFileName(file.Name));

    // File entries reference name-section slots starting at index 2
    // (the first two slots are the dummies written above).
    int nameSectionIndex = 2;
    for (int x = 0; x < files.Count; x++)
    {
        memStream.Append(new OneFileEntry(nameSectionIndex++, compressedData[x].Length, files[x].RwVersion));
        memStream.Append(compressedData[x]);
    }

    return memStream.ToArray();
}
/// <summary>
/// Writes the contents of the archive to be generated to the stream.
/// </summary>
public void Write(Stream writeStream)
{
    using var stream = new ExtendedMemoryStream();

    // Total number of groups.
    stream.Write<int>(Groups.Keys.Count);

    // Per-group file counts, padded to a 4-byte boundary.
    foreach (var group in Groups)
        stream.Write<byte>((byte)group.Value.Files.Count);

    stream.AddPadding(0x00, 4);

    // First item index of each group (running total of file counts).
    ushort runningIndex = 0;
    foreach (var group in Groups)
    {
        stream.Write<ushort>(runningIndex);
        runningIndex += (ushort)group.Value.Files.Count;
    }

    // Each group's id.
    foreach (var group in Groups)
        stream.Write<ushort>(group.Value.Id);

    // Per-file data offsets. Data begins after the offset table, rounded
    // up to 16 bytes; empty files get a zero offset.
    int dataStart = Utilities.Utilities.RoundUp((int)stream.Position + (sizeof(int) * runningIndex), 16);
    int nextOffset = dataStart;
    foreach (var file in Groups.SelectMany(g => g.Value.Files))
    {
        stream.Write<int>(file.Data.Length <= 0 ? 0 : nextOffset);
        nextOffset += file.Data.Length;
    }

    // Alignment up to the first file's position.
    stream.Write(new byte[(int)(dataStart - stream.Position)]);

    // File data, in group order.
    foreach (var file in Groups.SelectMany(g => g.Value.Files))
        stream.Write(file.Data);

    writeStream.Write(stream.ToArray());
}
/// <summary>
/// Writes the contents of the archive to be generated to the stream.
/// </summary>
/// <param name="writeStream">Destination stream for the finished archive.</param>
/// <param name="bigEndian">True to emit multi-byte fields in big-endian order.</param>
public void Write(Stream writeStream, bool bigEndian)
{
    using var stream = new ExtendedMemoryStream();
    using EndianMemoryStream endianStream = bigEndian
        ? (EndianMemoryStream)new BigEndianMemoryStream(stream)
        : new LittleEndianMemoryStream(stream);

    // Precompute Offsets.
    // fileNameSize assumes 1 byte per char (ASCII) plus a null terminator each.
    var fileNameSize = Files.Sum(x => x.Name.Length) + (Files.Count);
    Span<int> offsets = stackalloc int[Files.Count];
    PrecomputeFileOffsets(offsets, fileNameSize);

    // Texture Count.
    endianStream.Write<short>((short)Files.Count);
    endianStream.Write((short)1);

    // Texture Offsets.
    for (int x = 0; x < offsets.Length; x++)
        endianStream.Write(offsets[x]);

    // Texture Flags.
    for (int x = 0; x < Files.Count; x++)
        endianStream.Write((byte)0x11);

    // Texture Names (ASCII, null-terminated).
    // Size the scratch buffer from the actual longest name; the previous fixed
    // 1024-byte stackalloc overran (GetBytes threw / terminator index went out
    // of range) for any name of 1024+ ASCII bytes.
    int maxNameBytes = 0;
    foreach (var file in Files)
        maxNameBytes = Math.Max(maxNameBytes, Encoding.ASCII.GetByteCount(file.Name));

    Span<byte> currentString = maxNameBytes < 1024 ? stackalloc byte[1024] : new byte[maxNameBytes + 1];
    foreach (var file in Files)
    {
        int numEncoded = Encoding.ASCII.GetBytes(file.Name, currentString);
        currentString[numEncoded] = 0x00;
        stream.Write(currentString.Slice(0, numEncoded + 1));
    }

    // Texture Data, each entry padded to FileDataAlignment.
    stream.AddPadding(FileDataAlignment);
    for (int x = 0; x < Files.Count; x++)
    {
        stream.Write(Files[x].Data);
        stream.AddPadding(FileDataAlignment);
    }

    writeStream.Write(stream.ToArray());
}
/// <summary>
/// Builds a virtual AFS based upon a supplied base AFS file.
/// </summary>
/// <param name="afsFilePath">Path to the original AFS archive used as the base.</param>
/// <param name="alignment">Byte alignment for file offsets inside the archive. Default 2048.</param>
public VirtualAfs Build(string afsFilePath, int alignment = 2048)
{
    // Get entries from original AFS file.
    var entries = GetEntriesFromFile(afsFilePath);
    // Maps each file's new offset -> the virtual file served at that offset.
    var files = new Dictionary<int, VirtualFile>(entries.Length);

    // Get Original File List and Copy to New Header.
    var maxCustomFileId = _customFiles.Count > 0 ? _customFiles.Max(x => x.Key) + 1 : 0;
    var numFiles = Math.Max(maxCustomFileId, entries.Length);
    var newEntries = new AfsFileEntry[numFiles];
    // NOTE(review): headerLength is sized from entries.Length, but numFiles entries
    // (plus a terminator) are appended below. If a custom file id exceeds the original
    // entry count, this estimate is too small — confirm ExtendedMemoryStream can grow.
    var headerLength = Utilities.RoundUp(sizeof(AfsHeader) + (sizeof(AfsFileEntry) * entries.Length), alignment);

    // Create new Virtual AFS Header.
    // NOTE(review): this loop only covers the original entries; custom files with ids
    // >= entries.Length never get offsets and never land in `files` (their slots in
    // newEntries stay default/zero) — verify that this is intended.
    for (int x = 0; x < entries.Length; x++)
    {
        // Each file starts where the previous one ends, rounded up to the alignment;
        // the first file keeps the original archive's first offset.
        var offset = x > 0 ? Utilities.RoundUp(newEntries[x - 1].Offset + newEntries[x - 1].Length, alignment) : entries[0].Offset;
        int length = 0;
        if (_customFiles.ContainsKey(x))
        {
            // Replaced file: serve the injected custom data.
            length = _customFiles[x].Length;
            files[offset] = _customFiles[x];
        }
        else
        {
            // Unmodified file: serve bytes from the original archive on disk.
            length = entries[x].Length;
            files[offset] = new VirtualFile(entries[x], afsFilePath);
        }

        newEntries[x] = new AfsFileEntry(offset, length);
    }

    // Total archive size = end of the last file, aligned.
    var lastEntry = newEntries.Last();
    var fileSize = Utilities.RoundUp(lastEntry.Offset + lastEntry.Length, alignment);

    // Make Header.
    using var memStream = new ExtendedMemoryStream(headerLength);
    memStream.Append(AfsHeader.FromNumberOfFiles(newEntries.Length));
    memStream.Append(newEntries);
    memStream.Append(new AfsFileEntry(0, 0)); // Terminator entry.
    memStream.AddPadding(alignment);

    return(new VirtualAfs(memStream.ToArray(), files, alignment, fileSize));
}
/*
 * Inspired by: https://gist.github.com/darkfall/1656050
 * Originally licensed with CC-BY-SA.
 */

/// <summary>
/// Converts a PNG image to a icon (ico) with all supported Windows sizes.
/// </summary>
/// <param name="inputBitmap">The input image.</param>
/// <param name="output">The output stream.</param>
/// <returns>True on success; false if the input is null or a resize fails.</returns>
public static bool TryConvertToIcon(Bitmap inputBitmap, Stream output)
{
    if (inputBitmap == null)
        return false;

    int[] sizes = { 256, 64, 48, 32, 16 };

    // Generate PNGs for all sizes and toss them in streams.
    var streams = new List<MemoryStream>();
    try
    {
        foreach (int size in sizes)
        {
            // Dispose each resized bitmap; these GDI handles leaked previously.
            using var newBitmap = ResizeImage(inputBitmap, size, size);
            if (newBitmap == null)
                return false;

            var imageStream = new MemoryStream();
            newBitmap.Save(imageStream, ImageFormat.Png);
            streams.Add(imageStream);
        }

        using var iconWriter = new ExtendedMemoryStream();

        // Write ICO header. ImageType 1 = icon.
        iconWriter.Write(new IcoHeader() { ImageType = 1, NumberOfImages = (short)sizes.Length });

        // Make image directory entries. Width/Height of 0 encodes 256 in ICO.
        var imageDataOffset = Struct.GetSize<IcoHeader>() + (Struct.GetSize<IcoEntry>() * sizes.Length);
        for (int x = 0; x < sizes.Length; x++)
        {
            iconWriter.Write(new IcoEntry()
            {
                Width = (byte)sizes[x],
                Height = (byte)sizes[x],
                BitsPerPixel = 32,
                SizeOfImageData = (int)streams[x].Length,
                OffsetOfImageData = imageDataOffset
            });

            imageDataOffset += (int)streams[x].Length;
        }

        // Write image data.
        foreach (var imageStream in streams)
            iconWriter.Write(imageStream.ToArray());

        iconWriter.Flush();
        output.Write(iconWriter.ToArray());
        return true;
    }
    finally
    {
        // Dispose intermediate streams on all paths; the early-return path
        // previously leaked every stream created so far.
        foreach (var imageStream in streams)
            imageStream.Dispose();
    }
}