private long CalculateDataStartOffset()
{
    long offset = CalculateEntryNamesOffset();
    foreach (IPackfileEntry entry in Files)
    {
        offset = offset.Align(2);
        offset += entry.Name.Length;
        offset += 2;
        offset = offset.Align(2);
    }
    return offset;
}
public static long EstimateHeaderSize(IEnumerable<string> paths)
{
    long size = 4 + 4 + 4 + 4;
    foreach (var path in paths)
    {
        size += 4 + Encoding.ASCII.GetByteCount(path).Align(4) + 4 + 4;
    }
    return size.Align(16);
}
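// Both helpers above, and nearly every routine below, lean on an Align
// extension that rounds an integer up to the next multiple of an alignment.
// It is not part of this excerpt; the following is a minimal sketch of the
// behavior the call sites imply, not the repository's actual implementation.
// Every call site passes a positive alignment (2, 4, 16, 2048, 1 << n).
public static class AlignmentExtensions
{
    public static long Align(this long value, long align)
    {
        long remainder = value % align;
        return remainder == 0 ? value : value + (align - remainder);
    }

    public static int Align(this int value, int align)
    {
        int remainder = value % align;
        return remainder == 0 ? value : value + (align - remainder);
    }

    public static uint Align(this uint value, int align)
    {
        uint remainder = value % (uint)align;
        return remainder == 0 ? value : value + ((uint)align - remainder);
    }
}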
private IEnumerable<PartitionInstance> CreateInstances()
{
    const int dataOffsetDelta = -8;
    var endian = this._Partition.Endian;
    long offset = (int)this._Partition.StringTableSize;
    for (int i = 0; i < this._Partition.InstanceEntries.Count; i++)
    {
        var instance = this._Partition.InstanceEntries[i];
        var typeDefinition = this._Partition.TypeDefinitionEntries[instance.TypeIndex];
        var typeSize = typeDefinition.Alignment == 4
                           ? (typeDefinition.DataSize + dataOffsetDelta).Align(typeDefinition.Alignment)
                           : ((int)typeDefinition.DataSize).Align(typeDefinition.Alignment);
        var flattenedType = this._FlattenedTypes[instance.TypeIndex];
        for (int j = 0; j < instance.Count; j++)
        {
            offset = offset.Align(typeDefinition.Alignment);
            var guid = Guid.Empty;
            if (i < this._Partition.NamedInstanceCount)
            {
                this._Data.Position = offset;
                guid = this._Data.ReadValueGuid(endian);
                offset += 16;
            }
            var instanceOffset = offset;
            if (typeDefinition.Alignment == 4)
            {
                instanceOffset += dataOffsetDelta;
            }
            yield return new PartitionInstance(this, guid, instanceOffset, flattenedType);
            offset += typeSize;
        }
    }
}
protected void Build(TPackage package,
                     IEnumerable<KeyValuePair<string, string>> paths,
                     string outputPath,
                     bool ps3)
{
    var isCompressed = (package.Flags & Package.HeaderFlags.Compressed) != 0;
    var isCondensed = (package.Flags & Package.HeaderFlags.Condensed) != 0;

    // Populate the entries with names only so EstimateHeaderSize can size the header.
    package.Entries.Clear();
    foreach (var kv in paths)
    {
        package.Entries.Add(new TEntry()
        {
            Name = kv.Key,
        });
    }
    var baseOffset = package.EstimateHeaderSize();
    package.Entries.Clear();

    using (var output = File.Create(outputPath))
    {
        if (isCondensed == true && isCompressed == true)
        {
            // Condensed + compressed: all files share a single sync-flushed zlib
            // stream; entry offsets and sizes index into the uncompressed data.
            output.Seek(baseOffset, SeekOrigin.Begin);
            using (var compressed = new MemoryStream())
            {
                var z = new ZLIB.ZOutputStream(compressed, ZLIB.zlibConst.Z_BEST_COMPRESSION);
                z.FlushMode = ZLIB.zlibConst.Z_SYNC_FLUSH;
                long offset = 0;
                foreach (var kv in paths)
                {
                    using (var input = File.OpenRead(kv.Value))
                    {
                        var entry = new TEntry();
                        entry.Name = kv.Key;
                        entry.Offset = (uint)offset;
                        entry.UncompressedSize = (uint)input.Length;
                        long size = z.TotalOut;
                        z.WriteFromStream(input, input.Length);
                        size = z.TotalOut - size;
                        entry.CompressedSize = (uint)size;
                        offset += entry.UncompressedSize;
                        package.Entries.Add(entry);
                    }
                }
                package.CompressedSize = (uint)compressed.Length;
                package.UncompressedSize = (uint)offset;
                compressed.Position = 0;
                output.WriteFromStream(compressed, compressed.Length);
            }
            output.Seek(0, SeekOrigin.Begin);
            package.Serialize(output);
        }
        else if (ps3 == true && isCompressed == true)
        {
            // PS3 + compressed: each file is deflated in 64 KiB chunks, each chunk
            // prefixed with a [u16 compressedSize][u16 0][u32 uncompressedSize]
            // header (see the reader sketch after this method).
            output.Seek(baseOffset, SeekOrigin.Begin);
            long offset = 0;
            uint uncompressedSize = 0;
            foreach (var kv in paths)
            {
                using (var input = File.OpenRead(kv.Value))
                {
                    if (isCondensed == false)
                    {
                        var offsetPadding = offset.Align(2048) - offset;
                        if (offsetPadding > 0)
                        {
                            offset += offsetPadding;
                            output.Seek(offsetPadding, SeekOrigin.Current);
                        }
                        var sizePadding = uncompressedSize.Align(2048) - uncompressedSize;
                        if (sizePadding > 0)
                        {
                            uncompressedSize += sizePadding;
                        }
                    }
                    var entry = new TEntry();
                    entry.Name = kv.Key;
                    entry.Offset = (uint)offset;
                    entry.UncompressedSize = (uint)input.Length;
                    entry.CompressedSize = 0;
                    var left = input.Length;
                    while (left > 0)
                    {
                        using (var compressed = new MemoryStream())
                        {
                            var chunkUncompressedSize = (uint)Math.Min(0x10000, left);
                            var zlib = new DeflaterOutputStream(compressed, new Deflater(9, true));
                            zlib.WriteFromStream(input, chunkUncompressedSize);
                            zlib.Finish();
                            var chunkCompressedSize = (uint)compressed.Length;
                            if (chunkCompressedSize > 0xFFFF)
                            {
                                throw new InvalidOperationException();
                            }
                            output.WriteValueU16((ushort)chunkCompressedSize, package.Endian);
                            output.WriteValueU16(0, package.Endian);
                            output.WriteValueU32(chunkUncompressedSize, package.Endian);
                            entry.CompressedSize += 2 + 2 + 4;
                            entry.CompressedSize += chunkCompressedSize;
                            compressed.Position = 0;
                            output.WriteFromStream(compressed, compressed.Length);
                            left -= chunkUncompressedSize;
                        }
                    }
                    offset += entry.CompressedSize;
                    uncompressedSize += entry.UncompressedSize;
                    package.Entries.Add(entry);
                }
            }
            package.CompressedSize = (uint)offset;
            package.UncompressedSize = uncompressedSize;
        }
        else if (isCompressed == true)
        {
            // Compressed (not condensed, not PS3): each file is written as its own
            // whole-file zlib stream, padded to 2048-byte boundaries.
            output.Seek(baseOffset, SeekOrigin.Begin);
            long offset = 0;
            uint uncompressedSize = 0;
            foreach (var kv in paths)
            {
                using (var input = File.OpenRead(kv.Value))
                {
                    if (isCondensed == false)
                    {
                        var offsetPadding = offset.Align(2048) - offset;
                        if (offsetPadding > 0)
                        {
                            offset += offsetPadding;
                            output.Seek(offsetPadding, SeekOrigin.Current);
                        }
                        var sizePadding = uncompressedSize.Align(2048) - uncompressedSize;
                        if (sizePadding > 0)
                        {
                            uncompressedSize += sizePadding;
                        }
                    }
                    var entry = new TEntry();
                    entry.Name = kv.Key;
                    entry.Offset = (uint)offset;
                    entry.UncompressedSize = (uint)input.Length;
                    using (var compressed = new MemoryStream())
                    {
                        var zlib = new DeflaterOutputStream(compressed);
                        zlib.WriteFromStream(input, input.Length);
                        zlib.Finish();
                        entry.CompressedSize = (uint)compressed.Length;
                        compressed.Position = 0;
                        output.WriteFromStream(compressed, compressed.Length);
                    }
                    offset += entry.CompressedSize;
                    uncompressedSize += entry.UncompressedSize;
                    package.Entries.Add(entry);
                }
            }
            package.CompressedSize = (uint)offset;
            package.UncompressedSize = uncompressedSize;
        }
        else
        {
            // Uncompressed: raw file copies. 0xFFFFFFFF appears to serve as a
            // "not compressed" sentinel for the size fields.
            output.Seek(baseOffset, SeekOrigin.Begin);
            long offset = 0;
            foreach (var kv in paths)
            {
                using (var input = File.OpenRead(kv.Value))
                {
                    if (isCondensed == false)
                    {
                        var padding = offset.Align(2048) - offset;
                        if (padding > 0)
                        {
                            offset += padding;
                            output.Seek(padding, SeekOrigin.Current);
                        }
                    }
                    else if (isCondensed == true && isCompressed == false)
                    {
                        var padding = offset.Align(16) - offset;
                        if (padding > 0)
                        {
                            offset += padding;
                            output.Seek(padding, SeekOrigin.Current);
                        }
                    }
                    var entry = new TEntry();
                    entry.Name = kv.Key;
                    entry.Offset = (uint)offset;
                    entry.UncompressedSize = (uint)input.Length;
                    output.WriteFromStream(input, input.Length);
                    entry.CompressedSize = 0xFFFFFFFF;
                    offset += entry.UncompressedSize;
                    package.Entries.Add(entry);
                }
            }
            package.CompressedSize = 0xFFFFFFFF;
            package.UncompressedSize = (uint)offset;
        }

        // Write the final header now that the entries and TotalSize are known.
        package.TotalSize = (uint)output.Length;
        output.Seek(0, SeekOrigin.Begin);
        package.Serialize(output);
    }
}
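// Hypothetical reader-side sketch of the chunk framing the PS3 branch emits;
// this is not code from the repository. It assumes Gibbed.IO's ReadValueU16/
// ReadValueU32 extensions and SharpZipLib's Inflater, mirroring the raw
// deflate (new Deflater(9, true)) used by the writer above.
using System.IO;
using Gibbed.IO;
using ICSharpCode.SharpZipLib.Zip.Compression;
using ICSharpCode.SharpZipLib.Zip.Compression.Streams;

public static class Ps3ChunkReader
{
    public static byte[] ReadChunkedFile(Stream input, uint uncompressedTotal, Endian endian)
    {
        using (var result = new MemoryStream())
        {
            while (result.Length < uncompressedTotal)
            {
                // Chunk header: [u16 compressedSize][u16 zero][u32 uncompressedSize].
                ushort compressedSize = input.ReadValueU16(endian);
                input.ReadValueU16(endian); // always written as zero by the packer
                uint chunkUncompressedSize = input.ReadValueU32(endian);

                var buffer = new byte[compressedSize];
                int read = 0;
                while (read < compressedSize)
                {
                    int n = input.Read(buffer, read, compressedSize - read);
                    if (n <= 0)
                    {
                        throw new EndOfStreamException();
                    }
                    read += n;
                }

                // Raw deflate with no zlib header, matching Deflater(9, true).
                using (var inflater = new InflaterInputStream(new MemoryStream(buffer), new Inflater(true)))
                {
                    inflater.CopyTo(result);
                }
            }
            return result.ToArray();
        }
    }
}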
public static void Main(string[] args)
{
    bool showHelp = false;
    bool verbose = false;
    bool stripFileNames = false;

    OptionSet options = new OptionSet()
    {
        { "v|verbose", "be verbose", v => verbose = v != null },
        { "s|strip", "strip file names", v => stripFileNames = v != null },
        { "h|help", "show this message and exit", v => showHelp = v != null },
    };

    List<string> extras;
    try
    {
        extras = options.Parse(args);
    }
    catch (OptionException e)
    {
        Console.Write("{0}: ", GetExecutableName());
        Console.WriteLine(e.Message);
        Console.WriteLine("Try `{0} --help' for more information.", GetExecutableName());
        return;
    }

    if (extras.Count < 1 || showHelp == true)
    {
        Console.WriteLine("Usage: {0} [OPTIONS]+ output_erf input_directory+", GetExecutableName());
        Console.WriteLine("Pack files from input directories into an Encapsulated Resource File.");
        Console.WriteLine();
        Console.WriteLine("Options:");
        options.WriteOptionDescriptions(Console.Out);
        return;
    }

    var inputPaths = new List<string>();
    string outputPath;
    if (extras.Count == 1)
    {
        inputPaths.Add(extras[0]);
        outputPath = Path.ChangeExtension(extras[0], ".erf");
    }
    else
    {
        outputPath = extras[0];
        inputPaths.AddRange(extras.Skip(1));
    }

    var paths = new SortedDictionary<ulong, string>();
    var lookup = new Dictionary<ulong, string>();

    if (verbose == true)
    {
        Console.WriteLine("Finding files...");
    }

    foreach (var relPath in inputPaths)
    {
        string inputPath = Path.GetFullPath(relPath);
        if (inputPath.EndsWith(Path.DirectorySeparatorChar.ToString()) == true)
        {
            inputPath = inputPath.Substring(0, inputPath.Length - 1);
        }

        foreach (string path in Directory.GetFiles(inputPath, "*", SearchOption.AllDirectories))
        {
            bool hasName;
            string fullPath = Path.GetFullPath(path);
            string partPath = fullPath.Substring(inputPath.Length + 1).ToLowerInvariant();

            ulong hash = 0xFFFFFFFFFFFFFFFFul;
            if (partPath.ToUpper().StartsWith("__UNKNOWN") == true)
            {
                // Files dumped without a known name carry their hash in the file name.
                string partName = Path.GetFileNameWithoutExtension(partPath);
                if (partName.Length > 8)
                {
                    partName = partName.Substring(0, 8);
                }
                hash = ulong.Parse(partName, System.Globalization.NumberStyles.AllowHexSpecifier);
                hasName = false;
            }
            else
            {
                hash = partPath.ToLowerInvariant().HashFNV64();
                hasName = true;
            }

            if (paths.ContainsKey(hash) == true)
            {
                Console.WriteLine("Ignoring {0} duplicate.", partPath);
                continue;
            }

            paths[hash] = fullPath;

            if (hasName == false)
            {
                partPath = null;
            }
            else if (stripFileNames == true && Path.GetExtension(fullPath) != ".gda")
            {
                partPath = null;
            }

            lookup[hash] = partPath;
        }
    }

    using (var output = File.Open(outputPath, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite))
    {
        var erf = new ERF();
        erf.Version = EncapsulatedResourceFile.FileVersion.V3_0;
        erf.Compression = ERF.CompressionScheme.None;
        erf.Encryption = ERF.EncryptionScheme.None;
        erf.ContentId = 0;

        if (verbose == true)
        {
            Console.WriteLine("Adding files...");
        }

        long headerSize = ERF.CalculateHeaderSize(erf.Version, lookup.Values.ToArray(), paths.Count);
        long baseOffset = headerSize.Align(16);

        if (verbose == true)
        {
            Console.WriteLine("Writing to disk...");
        }

        erf.Entries.Clear();
        output.Seek(baseOffset, SeekOrigin.Begin);
        foreach (var kvp in paths)
        {
            if (verbose == true)
            {
                Console.WriteLine(kvp.Value);
            }

            using (var input = File.Open(kvp.Value, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
            {
                var entry = new ERF.Entry()
                {
                    Name = lookup[kvp.Key],
                    NameHash = kvp.Key,
                    TypeHash = 0,
                    Offset = output.Position,
                    CompressedSize = (uint)input.Length,
                    UncompressedSize = (uint)input.Length,
                };

                if (entry.Name != null)
                {
                    entry.CalculateHashes();
                }
                else
                {
                    var extension = Path.GetExtension(kvp.Value);
                    entry.TypeHash = extension == null ? 0 : extension.Trim('.').HashFNV32();
                }

                output.WriteFromStream(input, input.Length);
                output.Seek(output.Position.Align(16), SeekOrigin.Begin);
                erf.Entries.Add(entry);
            }
        }

        output.Seek(0, SeekOrigin.Begin);
        erf.Serialize(output);
        if (output.Position != headerSize)
        {
            throw new InvalidOperationException();
        }
    }
}
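// The packer above keys files by HashFNV64 of the lowercased relative path
// (falling back to a hash parsed out of "__UNKNOWN" file names). The extension
// itself is not part of this excerpt; the sketch below assumes the standard
// FNV-1a 64-bit parameters and an ASCII input encoding, either of which may
// differ from the project's actual variant.
public static ulong HashFNV64(this string input)
{
    ulong hash = 0xCBF29CE484222325ul;           // FNV-1a 64-bit offset basis
    foreach (byte b in System.Text.Encoding.ASCII.GetBytes(input))
    {
        hash ^= b;
        hash *= 0x100000001B3ul;                 // FNV-1a 64-bit prime
    }
    return hash;
}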
private static ulong PackDataBlock(Stream target, List<HyoutaArchiveFileInfo> files, byte packedAlignment, EndianUtils.Endianness endian)
{
    byte smallPackedAlignment = ToSmallPackedAlignment(packedAlignment);

    long startPosition = target.Position;
    target.WriteUInt16(0); // offsetToFirstFileInfo, fill in later

    bool hasDummyContent = files.Any(x => x.DummyContent != null);
    uint dummyContentLength = hasDummyContent
        ? ((uint)files.Max(x => x.DummyContent?.Length ?? 0)).Align(1 << smallPackedAlignment)
        : 0;
    bool hasFilename = files.Any(x => x.Filename != null);
    uint filenameLength = 0;
    //bool embedFilenamesInFileInfo = false;
    List<byte[]?>? encodedFilenames = null;
    if (hasFilename)
    {
        // figure out whether we want the strings to embed into the fileinfo directly
        // or whether to use an offset and write the string data at the end of the fileinfo
        // note that if a string is <= 8 bytes we can always embed it as we'd need 8 bytes for the offset anyway
        // so...
        encodedFilenames = new List<byte[]?>(files.Count);
        long longestBytecount = 0;
        long totalBytecount = 0;
        long filenameCountOver8Bytes = 0;
        for (int i = 0; i < files.Count; ++i)
        {
            var currentFilename = files[i].Filename;
            if (currentFilename == null)
            {
                encodedFilenames.Add(null);
            }
            else
            {
                byte[] stringbytes = EncodeString(currentFilename);
                encodedFilenames.Add(stringbytes);
                if (stringbytes.LongLength > 8)
                {
                    longestBytecount = Math.Max(longestBytecount, stringbytes.LongLength);
                    totalBytecount += stringbytes.LongLength;
                    ++filenameCountOver8Bytes;
                }
            }
        }

        // alright so we have, in practice, two options here
        // - make filenameLength == 16, store strings that are longer than that offsetted
        long nonEmbedSize = files.Count * 16 + totalBytecount.Align(1 << smallPackedAlignment);
        // - make filenameLength long enough so all strings can be embedded
        long embedSize = files.Count * (8 + longestBytecount).Align(1 << smallPackedAlignment);
        // pick whatever results in a smaller file; on a tie embed
        if (nonEmbedSize < embedSize)
        {
            //embedFilenamesInFileInfo = false;
            filenameLength = 16;
        }
        else
        {
            //embedFilenamesInFileInfo = true;
            filenameLength = (uint)(8 + longestBytecount).Align(1 << smallPackedAlignment);
        }
    }

    bool hasCompression = files.Any(x => x.CompressionInfo != null);
    uint compressionInfoLength = hasCompression
        ? files.Max(x => x.CompressionInfo?.MaximumCompressionInfoLength() ?? 0).Align(1 << smallPackedAlignment)
        : 0;
    bool hasBpsPatch = files.Any(x => x.BpsPatchInfo != null);
    uint bpsPatchInfoLength = hasBpsPatch ? 16u.Align(1 << smallPackedAlignment) : 0;
    bool hasCrc32 = files.Any(x => x.crc32 != null);
    uint crc32ContentLength = hasCrc32 ? 4u.Align(1 << smallPackedAlignment) : 0u;
    bool hasMd5 = files.Any(x => x.md5 != null);
    uint md5ContentLength = hasMd5 ? 16u.Align(1 << smallPackedAlignment) : 0u;
    bool hasSha1 = files.Any(x => x.sha1 != null);
    uint sha1ContentLength = hasSha1 ? 20u.Align(1 << smallPackedAlignment) : 0u;

    ushort contentBitfield1 = 0;
    contentBitfield1 |= (ushort)(hasDummyContent ? 0x0001u : 0);
    contentBitfield1 |= (ushort)(hasFilename ? 0x0002u : 0);
    contentBitfield1 |= (ushort)(hasCompression ? 0x0004u : 0);
    contentBitfield1 |= (ushort)(hasBpsPatch ? 0x0008u : 0);
    contentBitfield1 |= (ushort)(hasCrc32 ? 0x0010u : 0);
    contentBitfield1 |= (ushort)(hasMd5 ? 0x0020u : 0);
    contentBitfield1 |= (ushort)(hasSha1 ? 0x0040u : 0);
    target.WriteUInt16(contentBitfield1, endian);

    if (hasDummyContent) { WriteContentLength(dummyContentLength, target, endian); }
    if (hasFilename) { WriteContentLength(filenameLength, target, endian); }
    if (hasCompression) { WriteContentLength(compressionInfoLength, target, endian); }
    if (hasBpsPatch) { WriteContentLength(bpsPatchInfoLength, target, endian); }
    if (hasCrc32) { WriteContentLength(crc32ContentLength, target, endian); }
    if (hasMd5) { WriteContentLength(md5ContentLength, target, endian); }
    if (hasSha1) { WriteContentLength(sha1ContentLength, target, endian); }

    long offsetToFirstFileInfo = (target.Position - startPosition).Align(1 << smallPackedAlignment);
    StreamUtils.WriteZeros(target, offsetToFirstFileInfo - (target.Position - startPosition));
    target.Position = startPosition;
    WriteContentLength((uint)offsetToFirstFileInfo, target, endian);
    target.Position = startPosition + offsetToFirstFileInfo;

    long singleFileInfoLength = 16 + dummyContentLength + filenameLength + compressionInfoLength
                                + bpsPatchInfoLength + crc32ContentLength + md5ContentLength + sha1ContentLength;
    long totalFileInfoLength = singleFileInfoLength * files.Count;
    long offsetToEndOfFileInfo = (offsetToFirstFileInfo + totalFileInfoLength).Align(1 << smallPackedAlignment);
    StreamUtils.WriteZeros(target, offsetToEndOfFileInfo - offsetToFirstFileInfo);

    var filedata = new List<(long position, DuplicatableStream data)>(files.Count);
    long positionOfFreeSpace = offsetToEndOfFileInfo;
    for (int i = 0; i < files.Count; ++i)
    {
        HyoutaArchiveFileInfo fi = files[i];
        var fiData = fi.Data;
        if (fiData == null)
        {
            throw new Exception("Data of file " + i + " is null.");
        }
        using (DuplicatableStream fs = fiData.Duplicate())
        {
            DuplicatableStream streamToWrite = fs;
            bool streamIsInternallyCompressed = fi.StreamIsCompressed;
            if (fi.BpsPatchInfo != null && fi.CompressionInfo != null && streamIsInternallyCompressed && !fi.StreamIsBpsPatch)
            {
                // this is a weird case; the stream wants both bps patch and compression
                // and is already compressed but not already bps patched, which breaks the defined order
                // we can handle this by decompressing, creating patch, recompressing
                streamToWrite = fi.DataStream.Duplicate(); // this decompresses the stream
                streamIsInternallyCompressed = false; // and fake-set the stream as uncompressed for packing logic
            }

            byte[]? bpsPatchInfoBytes = null;
            byte[]? compressionInfoBytes = null;
            if (hasBpsPatch)
            {
                if (fi.BpsPatchInfo == null)
                {
                    // chunk has patches but this file is unpatched; we store this by pointing the file to itself
                    bpsPatchInfoBytes = new HyoutaArchiveBpsPatchInfo((ulong)i, (ulong)streamToWrite.Length, null).Serialize(endian);
                }
                else if (fi.StreamIsBpsPatch)
                {
                    bpsPatchInfoBytes = fi.BpsPatchInfo.Serialize(endian);
                }
                else
                {
                    var p = HyoutaArchiveBps.CreatePatch(fi.BpsPatchInfo, streamToWrite, endian);
                    bpsPatchInfoBytes = p.patchInfo;
                    streamToWrite = new DuplicatableByteArrayStream(p.patchData);
                }
            }
            if (hasCompression && fi.CompressionInfo != null)
            {
                if (streamIsInternallyCompressed)
                {
                    compressionInfoBytes = fi.CompressionInfo.Serialize(endian);
                }
                else
                {
                    var p = fi.CompressionInfo.Compress(streamToWrite, endian);
                    compressionInfoBytes = p.compressionInfo;
                    streamToWrite = new DuplicatableByteArrayStream(p.compressedData);
                }
            }

            // write file info
            target.Position = (singleFileInfoLength * i) + offsetToFirstFileInfo + startPosition;
            long positionPosition = target.Position;
            target.WriteUInt64(0); // position of file, will be filled later
            target.WriteUInt64((ulong)streamToWrite.Length, endian);
            if (hasDummyContent)
            {
                if (fi.DummyContent != null)
                {
                    target.Write(fi.DummyContent);
                    target.WriteZeros(dummyContentLength - fi.DummyContent.Length);
                }
                else
                {
                    target.WriteZeros(dummyContentLength);
                }
            }
            if (hasFilename)
            {
                if (fi.Filename != null)
                {
                    var efn = encodedFilenames![i];
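// For reference, contentBitfield1 above packs seven presence flags into a
// u16. Spelled out as a [Flags] enum: the bit values come straight from the
// code, while the enum and member names are descriptive inventions.
[Flags]
public enum ContentBitfield1 : ushort
{
    None = 0x0000,
    HasDummyContent = 0x0001,
    HasFilename = 0x0002,
    HasCompression = 0x0004,
    HasBpsPatch = 0x0008,
    HasCrc32 = 0x0010,
    HasMd5 = 0x0020,
    HasSha1 = 0x0040,
}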
public static void PrepareRawBnsfFromWav(string path, int targetSampleRate)
{
    try
    {
        using (var fs = new DuplicatableFileStream(path + ".wav"))
        {
            // Assumes a canonical 44-byte PCM16 WAV header: channel count at
            // 0x16, sample rate at 0x18, sample data starting at 0x2C.
            fs.Position = 0x16;
            ushort channels = fs.ReadUInt16(EndianUtils.Endianness.LittleEndian);
            uint samplerate = fs.ReadUInt32(EndianUtils.Endianness.LittleEndian);
            fs.Position = 0x2c;
            long samplecount = (fs.Length - 0x2c) / 2;
            long samplesPerChannel = samplecount / channels;

            // De-interleave the input into per-channel rows.
            short[,] samples = new short[channels, samplesPerChannel];
            for (long i = 0; i < samplesPerChannel; ++i)
            {
                for (long j = 0; j < channels; ++j)
                {
                    samples[j, i] = fs.ReadInt16(EndianUtils.Endianness.LittleEndian);
                }
            }

            short[,] outsamples;
            long outSampleCountPerChannel;
            if (samplerate == targetSampleRate)
            {
                outSampleCountPerChannel = samplesPerChannel;
                outsamples = samples;
            }
            else if (targetSampleRate * 3 == samplerate * 2)
            {
                // 3:2 downsample (e.g. 48000 Hz -> 32000 Hz): every three input
                // samples are interpolated down to two output samples.
                outSampleCountPerChannel = (samplesPerChannel.Align(3) * 2) / 3;
                outsamples = new short[channels, outSampleCountPerChannel];
                for (long ch = 0; ch < channels; ++ch)
                {
                    long sourcePos = 0;
                    for (long s = 0; s < outSampleCountPerChannel; s += 2)
                    {
                        int sample0 = sourcePos < samplesPerChannel ? samples[ch, sourcePos] : 0;
                        ++sourcePos;
                        int sample1 = sourcePos < samplesPerChannel ? samples[ch, sourcePos] : sample0;
                        ++sourcePos;
                        int sample2 = sourcePos < samplesPerChannel ? samples[ch, sourcePos] : sample1;
                        ++sourcePos;
                        outsamples[ch, s] = (short)LerpTwoThirds(sample0, sample1);
                        outsamples[ch, s + 1] = (short)LerpTwoThirds(sample2, sample1);
                    }
                }
            }
            else
            {
                throw new Exception("unsupported sample rate conversion");
            }

            // Write the raw samples channel-by-channel (planar, not interleaved),
            // plus the per-channel sample count in a sidecar file.
            using (var nfs = new FileStream(path + ".raw", FileMode.Create))
            {
                for (long ch = 0; ch < channels; ++ch)
                {
                    for (long s = 0; s < outSampleCountPerChannel; ++s)
                    {
                        nfs.WriteInt16(outsamples[ch, s], EndianUtils.Endianness.LittleEndian);
                    }
                }
            }
            using (var nfs = new FileStream(path + ".samplecount", FileMode.Create))
            {
                nfs.WriteUInt64((ulong)outSampleCountPerChannel, EndianUtils.Endianness.LittleEndian);
            }
            return;
        }
    }
    catch (Exception ex)
    {
        System.IO.File.WriteAllText(path + ".error", ex.ToString());
    }
}
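// LerpTwoThirds is not included in this excerpt. Given how it is called
// (once forward from sample0 toward sample1, once backward from sample2
// toward sample1), one plausible reading is plain linear interpolation
// two-thirds of the way from the first argument to the second; the real
// implementation, including its rounding behavior, may differ.
private static int LerpTwoThirds(int a, int b)
{
    // a + (b - a) * 2/3, i.e. two-thirds of the way from a to b.
    return a + ((b - a) * 2) / 3;
}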