public static DuplicatableStream ApplyPatch(HyoutaArchiveBpsPatchInfo patchInfo, Stream data)
        {
            var refchunk = patchInfo.ReferencedChunk;

            if (refchunk == null)
            {
                // implies that the file points at itself and is unpatched
                return data.CopyToByteArrayStream();
            }

            HyoutaArchiveFileInfo sourceFile = refchunk.GetFile((long)patchInfo.FileIndexToPatch);

            using (DuplicatableStream sourceStream = sourceFile.DataStream.Duplicate()) {
                sourceStream.Position = 0;
                data.Position         = 0;
                using (MemoryStream ms = new MemoryStream((int)patchInfo.TargetFilesize)) {
                    Bps.BpsPatcher.ApplyPatchToStream(sourceStream, data, ms);
                    return ms.CopyToByteArrayStreamAndDispose();
                }
            }
        }
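
For orientation, a minimal usage sketch of the method above (hypothetical caller code: the containing class is not shown here, chunk is assumed to be a parsed HyoutaArchiveChunk as in Example #4, and the entry is assumed to be stored without additional compression; only members that appear in these examples are used):

// Hypothetical usage sketch: resolve the patched content of one archive entry.
HyoutaArchiveFileInfo fi = chunk.GetFile(0);
if (fi.BpsPatchInfo != null && fi.StreamIsBpsPatch)
{
    // fi.Data holds the raw BPS patch data; ApplyPatch resolves the patch source via the referenced chunk
    using (DuplicatableStream raw = fi.Data.Duplicate())
    using (DuplicatableStream patched = ApplyPatch(fi.BpsPatchInfo, raw))
    {
        // patched now contains the reconstructed target data
    }
}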
Example #2
        private static ulong PackDataBlock(Stream target, List <HyoutaArchiveFileInfo> files, byte packedAlignment, EndianUtils.Endianness endian)
        {
            byte smallPackedAlignment = ToSmallPackedAlignment(packedAlignment);

            long startPosition = target.Position;

            target.WriteUInt16(0);             // offsetToFirstFileInfo, fill in later

            bool hasDummyContent    = files.Any(x => x.DummyContent != null);
            uint dummyContentLength = hasDummyContent ? ((uint)files.Max(x => x.DummyContent?.Length ?? 0)).Align(1 << smallPackedAlignment) : 0;
            bool hasFilename        = files.Any(x => x.Filename != null);
            uint filenameLength     = 0;
            //bool embedFilenamesInFileInfo = false;
            List <byte[]?>? encodedFilenames = null;

            if (hasFilename)
            {
                // figure out whether we want the strings to embed into the fileinfo directly
                // or whether to use an offset and write the string data at the end of the fileinfo
                // note that if a string is <= 8 bytes we can always embed it as we'd need 8 bytes for the offset anyway
                // so...
                encodedFilenames = new List <byte[]?>(files.Count);
                long longestBytecount        = 0;
                long totalBytecount          = 0;
                long filenameCountOver8Bytes = 0;
                for (int i = 0; i < files.Count; ++i)
                {
                    var currentFilename = files[i].Filename;
                    if (currentFilename == null)
                    {
                        encodedFilenames.Add(null);
                    }
                    else
                    {
                        byte[] stringbytes = EncodeString(currentFilename);
                        encodedFilenames.Add(stringbytes);

                        if (stringbytes.LongLength > 8)
                        {
                            longestBytecount = Math.Max(longestBytecount, stringbytes.LongLength);
                            totalBytecount  += stringbytes.LongLength;
                            ++filenameCountOver8Bytes;
                        }
                    }
                }

                // alright so we have, in practice, two options here
                // - make filenameLength == 16, and store strings longer than that via an offset
                long nonEmbedSize = files.Count * 16 + totalBytecount.Align(1 << smallPackedAlignment);
                // - make filenameLength long enough so all strings can be embedded
                long embedSize = files.Count * (8 + longestBytecount).Align(1 << smallPackedAlignment);

                // pick whatever results in a smaller file; on a tie embed
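                // e.g. (assuming Align(n) rounds up to a multiple of n and smallPackedAlignment == 0):
                // 3 files, longest encoded filename 24 bytes, 40 filename bytes total over the 8-byte limit
                // -> nonEmbedSize = 3 * 16 + 40 = 88, embedSize = 3 * (8 + 24) = 96, so the offset variant is chosen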
                if (nonEmbedSize < embedSize)
                {
                    //embedFilenamesInFileInfo = false;
                    filenameLength = 16;
                }
                else
                {
                    //embedFilenamesInFileInfo = true;
                    filenameLength = (uint)(8 + longestBytecount).Align(1 << smallPackedAlignment);
                }
            }
            bool hasCompression        = files.Any(x => x.CompressionInfo != null);
            uint compressionInfoLength = hasCompression ? files.Max(x => x.CompressionInfo?.MaximumCompressionInfoLength() ?? 0).Align(1 << smallPackedAlignment) : 0;
            bool hasBpsPatch           = files.Any(x => x.BpsPatchInfo != null);
            uint bpsPatchInfoLength    = hasBpsPatch ? 16u.Align(1 << smallPackedAlignment) : 0;
            bool hasCrc32           = files.Any(x => x.crc32 != null);
            uint crc32ContentLength = hasCrc32 ? 4u.Align(1 << smallPackedAlignment) : 0u;
            bool hasMd5             = files.Any(x => x.md5 != null);
            uint md5ContentLength   = hasMd5 ? 16u.Align(1 << smallPackedAlignment) : 0u;
            bool hasSha1            = files.Any(x => x.sha1 != null);
            uint sha1ContentLength  = hasSha1 ? 20u.Align(1 << smallPackedAlignment) : 0u;

            ushort contentBitfield1 = 0;

            contentBitfield1 |= (ushort)(hasDummyContent ? 0x0001u : 0);
            contentBitfield1 |= (ushort)(hasFilename ? 0x0002u : 0);
            contentBitfield1 |= (ushort)(hasCompression ? 0x0004u : 0);
            contentBitfield1 |= (ushort)(hasBpsPatch ? 0x0008u : 0);
            contentBitfield1 |= (ushort)(hasCrc32 ? 0x0010u : 0);
            contentBitfield1 |= (ushort)(hasMd5 ? 0x0020u : 0);
            contentBitfield1 |= (ushort)(hasSha1 ? 0x0040u : 0);
            target.WriteUInt16(contentBitfield1, endian);

            if (hasDummyContent)
            {
                WriteContentLength(dummyContentLength, target, endian);
            }
            if (hasFilename)
            {
                WriteContentLength(filenameLength, target, endian);
            }
            if (hasCompression)
            {
                WriteContentLength(compressionInfoLength, target, endian);
            }
            if (hasBpsPatch)
            {
                WriteContentLength(bpsPatchInfoLength, target, endian);
            }
            if (hasCrc32)
            {
                WriteContentLength(crc32ContentLength, target, endian);
            }
            if (hasMd5)
            {
                WriteContentLength(md5ContentLength, target, endian);
            }
            if (hasSha1)
            {
                WriteContentLength(sha1ContentLength, target, endian);
            }

            long offsetToFirstFileInfo = (target.Position - startPosition).Align(1 << smallPackedAlignment);

            StreamUtils.WriteZeros(target, offsetToFirstFileInfo - (target.Position - startPosition));
            target.Position = startPosition;
            WriteContentLength((uint)offsetToFirstFileInfo, target, endian);
            target.Position = startPosition + offsetToFirstFileInfo;

            long singleFileInfoLength  = 16 + dummyContentLength + filenameLength + compressionInfoLength + bpsPatchInfoLength + crc32ContentLength + md5ContentLength + sha1ContentLength;
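            // i.e. each file info record is 8 bytes position (filled in later) + 8 bytes length,
            // followed by the enabled optional fields in flag order (dummy content, filename,
            // compression info, bps patch info, crc32, md5, sha1), each padded to the aligned length computed above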
            long totalFileInfoLength   = singleFileInfoLength * files.Count;
            long offsetToEndOfFileInfo = (offsetToFirstFileInfo + totalFileInfoLength).Align(1 << smallPackedAlignment);

            StreamUtils.WriteZeros(target, offsetToEndOfFileInfo - offsetToFirstFileInfo);

            var filedata = new List <(long position, DuplicatableStream data)>(files.Count);

            long positionOfFreeSpace = offsetToEndOfFileInfo;

            for (int i = 0; i < files.Count; ++i)
            {
                HyoutaArchiveFileInfo fi = files[i];
                var fiData = fi.Data;
                if (fiData == null)
                {
                    throw new Exception("Data of file " + i + " is null.");
                }
                using (DuplicatableStream fs = fiData.Duplicate()) {
                    DuplicatableStream streamToWrite = fs;

                    bool streamIsInternallyCompressed = fi.StreamIsCompressed;
                    if (fi.BpsPatchInfo != null && fi.CompressionInfo != null && streamIsInternallyCompressed && !fi.StreamIsBpsPatch)
                    {
                        // this is a weird case; the stream wants both bps patch and compression
                        // and is already compressed but not already bps patched, which breaks the defined order
                        // we can handle this by decompressing, creating patch, recompressing
                        streamToWrite = fi.DataStream.Duplicate();                    // this decompresses the stream
                        streamIsInternallyCompressed = false;                         // and fake-set the stream as uncompressed for packing logic
                    }

                    byte[]? bpsPatchInfoBytes    = null;
                    byte[]? compressionInfoBytes = null;
                    if (hasBpsPatch)
                    {
                        if (fi.BpsPatchInfo == null)
                        {
                            // chunk has patches but this file is unpatched; we store this by pointing the file to itself
                            bpsPatchInfoBytes = new HyoutaArchiveBpsPatchInfo((ulong)i, (ulong)streamToWrite.Length, null).Serialize(endian);
                        }
                        else if (fi.StreamIsBpsPatch)
                        {
                            bpsPatchInfoBytes = fi.BpsPatchInfo.Serialize(endian);
                        }
                        else
                        {
                            var p = HyoutaArchiveBps.CreatePatch(fi.BpsPatchInfo, streamToWrite, endian);
                            bpsPatchInfoBytes = p.patchInfo;
                            streamToWrite     = new DuplicatableByteArrayStream(p.patchData);
                        }
                    }
                    if (hasCompression && fi.CompressionInfo != null)
                    {
                        if (streamIsInternallyCompressed)
                        {
                            compressionInfoBytes = fi.CompressionInfo.Serialize(endian);
                        }
                        else
                        {
                            var p = fi.CompressionInfo.Compress(streamToWrite, endian);
                            compressionInfoBytes = p.compressionInfo;
                            streamToWrite        = new DuplicatableByteArrayStream(p.compressedData);
                        }
                    }

                    // write file info
                    target.Position = (singleFileInfoLength * i) + offsetToFirstFileInfo + startPosition;
                    long positionPosition = target.Position;
                    target.WriteUInt64(0);                     // position of file, will be filled later
                    target.WriteUInt64((ulong)streamToWrite.Length, endian);
                    if (hasDummyContent)
                    {
                        if (fi.DummyContent != null)
                        {
                            target.Write(fi.DummyContent);
                            target.WriteZeros(dummyContentLength - fi.DummyContent.Length);
                        }
                        else
                        {
                            target.WriteZeros(dummyContentLength);
                        }
                    }
                    if (hasFilename)
                    {
                        if (fi.Filename != null)
                        {
                            var efn = encodedFilenames![i];
Example #3
 public static (byte[] patchInfo, byte[] patchData) CreatePatch(HyoutaArchiveBpsPatchInfo patchInfo, Stream data, EndianUtils.Endianness endian)
 {
     throw new Exception("not yet implemented");
 }
Example #4
        public HyoutaArchiveChunk(DuplicatableStream duplicatableStream, out ulong chunkLength)
        {
            using (DuplicatableStream data = duplicatableStream.Duplicate()) {
                data.Position = 0;

                // header
                ulong extraMagic = data.ReadUInt64(EndianUtils.Endianness.LittleEndian);
                ulong magic      = extraMagic & 0x00fffffffffffffful;
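                // the lower 7 bytes spell "hachunk" in ASCII; the top byte carries the packed alignment and flag bits decoded below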
                if (magic != 0x6b6e7568636168)
                {
                    throw new Exception("wrong magic");
                }
                byte extra                  = (byte)((extraMagic >> 56) & 0xffu);
                byte packedAlignment        = (byte)(extra & 0x1fu);
                long unpackedAlignment      = 1L << packedAlignment;
                bool hasMetadata            = (extra & 0x20) != 0;
                bool isCompressed           = (extra & 0x40) != 0;
                bool isBigEndian            = (extra & 0x80) != 0;
                EndianUtils.Endianness e    = isBigEndian ? EndianUtils.Endianness.BigEndian : EndianUtils.Endianness.LittleEndian;
                ulong endOfFileOffset       = data.ReadUInt64(e) << packedAlignment;
                ulong tableOfContentsOffset = data.ReadUInt64(e) << packedAlignment;
                ulong filecount             = data.ReadUInt64(e);
                chunkLength = endOfFileOffset;

                if (hasMetadata)
                {
                    // just skip past this for now
                    ulong metadataLength = data.ReadUInt64(e);
                    data.DiscardBytes(metadataLength);
                }

                DuplicatableStream dataBlockStream;
                if (isCompressed)
                {
                    ushort compressionInfoLengthRaw       = data.ReadUInt16(e);
                    uint   compressionInfoLength          = compressionInfoLengthRaw & 0xfffcu;
                    int    compressionInfoAlignmentPacked = (compressionInfoLengthRaw & 0x3) + 1;
                    data.ReadAlign(1u << compressionInfoAlignmentPacked);
                    Compression.IHyoutaArchiveCompressionInfo? compressionInfo = HyoutaArchiveCompression.Deserialize(data, compressionInfoLength == 0 ? 0x10000u : compressionInfoLength, e);
                    if (compressionInfo == null)
                    {
                        throw new Exception("File is indicated to be compressed, but no decompressor found.");
                    }
                    dataBlockStream = compressionInfo.Decompress(data);
                }
                else
                {
                    data.ReadAlign(unpackedAlignment);
                    dataBlockStream = new PartialStream(data, data.Position, (long)(endOfFileOffset - (ulong)data.Position));
                }

                try {
                    data.Dispose();

                    dataBlockStream.Position = (long)tableOfContentsOffset;
                    uint offsetToFirstFileInfo = ReadContentLength(dataBlockStream, e);

                    // decode content bitfield(s)
                    long   numberOfUnknownBits = 0;
                    ushort contentBitfield1    = dataBlockStream.ReadUInt16(e);
                    bool   hasDummyContent     = (contentBitfield1 & 0x0001u) != 0;
                    bool   hasFilename         = (contentBitfield1 & 0x0002u) != 0;
                    bool   hasCompression      = (contentBitfield1 & 0x0004u) != 0;
                    bool   hasBpsPatch         = (contentBitfield1 & 0x0008u) != 0;
                    bool   hasCrc32            = (contentBitfield1 & 0x0010u) != 0;
                    bool   hasMd5  = (contentBitfield1 & 0x0020u) != 0;
                    bool   hasSha1 = (contentBitfield1 & 0x0040u) != 0;
                    numberOfUnknownBits += (contentBitfield1 & 0x0080u) != 0 ? 1 : 0;
                    numberOfUnknownBits += (contentBitfield1 & 0x0100u) != 0 ? 1 : 0;
                    numberOfUnknownBits += (contentBitfield1 & 0x0200u) != 0 ? 1 : 0;
                    numberOfUnknownBits += (contentBitfield1 & 0x0400u) != 0 ? 1 : 0;
                    numberOfUnknownBits += (contentBitfield1 & 0x0800u) != 0 ? 1 : 0;
                    numberOfUnknownBits += (contentBitfield1 & 0x1000u) != 0 ? 1 : 0;
                    numberOfUnknownBits += (contentBitfield1 & 0x2000u) != 0 ? 1 : 0;
                    numberOfUnknownBits += (contentBitfield1 & 0x4000u) != 0 ? 1 : 0;
                    ushort currentBitfield = contentBitfield1;
                    while ((currentBitfield & 0x8000u) != 0)
                    {
                        // more bitfields, though we don't understand them since only the first handful of bits are defined at the moment, so just count and skip them
                        currentBitfield      = dataBlockStream.ReadUInt16(e);
                        numberOfUnknownBits += (currentBitfield & 0x0001u) != 0 ? 1 : 0;
                        numberOfUnknownBits += (currentBitfield & 0x0002u) != 0 ? 1 : 0;
                        numberOfUnknownBits += (currentBitfield & 0x0004u) != 0 ? 1 : 0;
                        numberOfUnknownBits += (currentBitfield & 0x0008u) != 0 ? 1 : 0;
                        numberOfUnknownBits += (currentBitfield & 0x0010u) != 0 ? 1 : 0;
                        numberOfUnknownBits += (currentBitfield & 0x0020u) != 0 ? 1 : 0;
                        numberOfUnknownBits += (currentBitfield & 0x0040u) != 0 ? 1 : 0;
                        numberOfUnknownBits += (currentBitfield & 0x0080u) != 0 ? 1 : 0;
                        numberOfUnknownBits += (currentBitfield & 0x0100u) != 0 ? 1 : 0;
                        numberOfUnknownBits += (currentBitfield & 0x0200u) != 0 ? 1 : 0;
                        numberOfUnknownBits += (currentBitfield & 0x0400u) != 0 ? 1 : 0;
                        numberOfUnknownBits += (currentBitfield & 0x0800u) != 0 ? 1 : 0;
                        numberOfUnknownBits += (currentBitfield & 0x1000u) != 0 ? 1 : 0;
                        numberOfUnknownBits += (currentBitfield & 0x2000u) != 0 ? 1 : 0;
                        numberOfUnknownBits += (currentBitfield & 0x4000u) != 0 ? 1 : 0;
                    }
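                    // each unknown bit corresponds to one extra content-length entry (read below)
                    // and that many extra bytes per file info, which are skipped via DiscardBytes further down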
                    uint dummyContentLength   = hasDummyContent ? ReadContentLength(dataBlockStream, e) : 0;
                    uint filenameLength       = hasFilename ? ReadContentLength(dataBlockStream, e) : 0;
                    uint compressionLength    = hasCompression ? ReadContentLength(dataBlockStream, e) : 0;
                    uint bpspatchLength       = hasBpsPatch ? ReadContentLength(dataBlockStream, e) : 0;
                    uint crc32Length          = hasCrc32 ? ReadContentLength(dataBlockStream, e) : 0;
                    uint md5Length            = hasMd5 ? ReadContentLength(dataBlockStream, e) : 0;
                    uint sha1Length           = hasSha1 ? ReadContentLength(dataBlockStream, e) : 0;
                    long unknownContentLength = 0;
                    for (long i = 0; i < numberOfUnknownBits; ++i)
                    {
                        unknownContentLength += ReadContentLength(dataBlockStream, e);
                    }

                    dataBlockStream.Position = (long)(tableOfContentsOffset + offsetToFirstFileInfo);
                    List <HyoutaArchiveFileInfo> files = new List <HyoutaArchiveFileInfo>((int)filecount);
                    for (ulong i = 0; i < filecount; ++i)
                    {
                        ulong offset             = dataBlockStream.ReadUInt64(e) << packedAlignment;
                        ulong filesize           = dataBlockStream.ReadUInt64(e);
                        HyoutaArchiveFileInfo fi = new HyoutaArchiveFileInfo();
                        if (hasDummyContent)
                        {
                            fi.DummyContent = dataBlockStream.ReadBytes(dummyContentLength);
                        }
                        if (hasFilename)
                        {
                            fi.Filename = ReadString(dataBlockStream, filenameLength, e);
                        }
                        if (hasCompression)
                        {
                            fi.CompressionInfo    = HyoutaArchiveCompression.Deserialize(dataBlockStream, compressionLength, e);
                            fi.StreamIsCompressed = true;
                        }
                        if (hasBpsPatch)
                        {
                            fi.BpsPatchInfo     = HyoutaArchiveBpsPatchInfo.Deserialize(dataBlockStream, bpspatchLength, e, i, this);
                            fi.StreamIsBpsPatch = fi.BpsPatchInfo != null;
                        }
                        if (hasCrc32)
                        {
                            if (crc32Length >= 4)
                            {
                                fi.crc32 = new CRC32(dataBlockStream.ReadUInt32(EndianUtils.Endianness.BigEndian));
                                dataBlockStream.DiscardBytes(crc32Length - 4);
                            }
                            else
                            {
                                dataBlockStream.DiscardBytes(crc32Length);
                            }
                        }
                        if (hasMd5)
                        {
                            if (md5Length >= 16)
                            {
                                ulong a = dataBlockStream.ReadUInt64(EndianUtils.Endianness.BigEndian);
                                ulong b = dataBlockStream.ReadUInt64(EndianUtils.Endianness.BigEndian);
                                fi.md5 = new MD5(a, b);
                                dataBlockStream.DiscardBytes(md5Length - 16);
                            }
                            else
                            {
                                dataBlockStream.DiscardBytes(md5Length);
                            }
                        }
                        if (hasSha1)
                        {
                            if (sha1Length >= 20)
                            {
                                ulong a = dataBlockStream.ReadUInt64(EndianUtils.Endianness.BigEndian);
                                ulong b = dataBlockStream.ReadUInt64(EndianUtils.Endianness.BigEndian);
                                uint  c = dataBlockStream.ReadUInt32(EndianUtils.Endianness.BigEndian);
                                fi.sha1 = new SHA1(a, b, c);
                                dataBlockStream.DiscardBytes(sha1Length - 20);
                            }
                            else
                            {
                                dataBlockStream.DiscardBytes(sha1Length);
                            }
                        }
                        dataBlockStream.DiscardBytes(unknownContentLength);

                        fi.Data = new PartialStream(dataBlockStream, (long)offset, (long)filesize);
                        files.Add(fi);
                    }

                    Files = files;
                } finally {
                    dataBlockStream.Dispose();
                }
            }
        }
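
Finally, a minimal end-to-end sketch for the reader above (hypothetical: DuplicatableFileStream is assumed to be a file-backed DuplicatableStream implementation in the same library, the file name is illustrative, and the Files list is assumed to be readable from outside; everything else is taken from the examples on this page):

// Hypothetical usage sketch: parse a chunk from disk and list its entries.
using (DuplicatableStream input = new DuplicatableFileStream(@"archive.hac"))
{
    var chunk = new HyoutaArchiveChunk(input, out ulong chunkLength);
    foreach (HyoutaArchiveFileInfo fi in chunk.Files)
    {
        Console.WriteLine("{0}: {1} bytes", fi.Filename ?? "(unnamed)", fi.Data.Length);
    }
}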