// UE4.23-4.24 changed the compressed data layout for streaming, so it's worth making a separate
// serializer function for it.
private void SerializeCompressedData2(FAssetArchive Ar)
{
    var compressedRawDataSize = Ar.Read<int>();
    CompressedTrackToSkeletonMapTable = Ar.ReadArray<FTrackToSkeletonMap>();
    var compressedCurveNames = Ar.ReadArray(() => new FSmartName(Ar));

    // Since 4.23, this is FUECompressedAnimData::SerializeCompressedData
    KeyEncodingFormat = Ar.Read<AnimationKeyFormat>();
    TranslationCompressionFormat = Ar.Read<AnimationCompressionFormat>();
    RotationCompressionFormat = Ar.Read<AnimationCompressionFormat>();
    ScaleCompressionFormat = Ar.Read<AnimationCompressionFormat>();
    var compressedNumFrames = Ar.Read<int>();

    // SerializeView() just serializes the array size
    var compressedTrackOffsetsNum = Ar.Read<int>();
    var compressedScaleOffsetsNum = Ar.Read<int>();
    CompressedScaleOffsets = new FCompressedOffsetData(Ar.Read<int>());
    var compressedByteStreamNum = Ar.Read<int>();
    // ... end of FUECompressedAnimData::SerializeCompressedData

    var numBytes = Ar.Read<int>();
    var bUseBulkDataForLoad = Ar.ReadBoolean();

    // In UE4.23 the CompressedByteStream field exists in FUECompressedAnimData (as TArrayView) and in
    // FCompressedAnimSequence (as a byte array). Serialization is done in FCompressedAnimSequence,
    // either as a TArray or as bulk data, and the array is then split into multiple "views" for
    // FUECompressedAnimData. We use a different name for the "joined" serialized array here to
    // avoid confusion.
    byte[] serializedByteStream;
    if (bUseBulkDataForLoad)
    {
        throw new NotImplementedException("Anim: bUseBulkDataForLoad not implemented");
        //todo: read from bulk to serializedByteStream
    }
    else
    {
        serializedByteStream = Ar.ReadBytes(numBytes);
    }

    // Set up all array views from the single array. In UE4 this is done in
    // FUECompressedAnimData::InitViewsFromBuffer. We simply copy the array data out of
    // serializedByteStream, which will then be released from memory as it is a local variable here.
    // Note: the copy is not byte-order aware, so if there are any problems in the future,
    // byte swap functions should be used.
    using (var tempAr = new FByteArchive("SerializedByteStream", serializedByteStream, Ar.Versions))
    {
        CompressedTrackOffsets = tempAr.ReadArray<int>(compressedTrackOffsetsNum);
        CompressedScaleOffsets.OffsetData = tempAr.ReadArray<int>(compressedScaleOffsetsNum);
        CompressedByteStream = tempAr.ReadBytes(compressedByteStreamNum);
    }

    var curveCodecPath = Ar.ReadFString();
    var compressedCurveByteStream = Ar.ReadArray<byte>();
}
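
// A minimal sketch (not part of the original code) of the byte-order-aware copy mentioned in the
// note above: if the serialized stream ever arrived big-endian, each int view would need a swap.
// ReadInt32ArrayWithSwap is a hypothetical helper, shown only to illustrate where the swap belongs.
private static int[] ReadInt32ArrayWithSwap(byte[] buffer, ref int offset, int count, bool swapBytes)
{
    var result = new int[count];
    for (var i = 0; i < count; i++)
    {
        var value = BitConverter.ToInt32(buffer, offset); // reads in host byte order (little-endian on supported platforms)
        if (swapBytes)
            value = System.Buffers.Binary.BinaryPrimitives.ReverseEndianness(value);
        result[i] = value;
        offset += sizeof(int);
    }
    return result;
}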
private IReadOnlyDictionary<string, GameFile> ReadIndexUpdated(bool caseInsensitive)
{
    // Prepare the primary index and decrypt it if necessary
    Ar.Position = Info.IndexOffset;
    FArchive primaryIndex = new FByteArchive($"{Name} - Primary Index", ReadAndDecrypt((int) Info.IndexSize));

    string mountPoint;
    try
    {
        mountPoint = primaryIndex.ReadFString();
    }
    catch (Exception e)
    {
        throw new InvalidAesKeyException($"Given aes key '{AesKey?.KeyString}' is not working with '{Name}'", e);
    }

    ValidateMountPoint(ref mountPoint);
    MountPoint = mountPoint;

    var fileCount = primaryIndex.Read<int>();
    EncryptedFileCount = 0;

    primaryIndex.Position += 8; // PathHashSeed

    if (!primaryIndex.ReadBoolean())
    {
        throw new ParserException(primaryIndex, "No path hash index");
    }

    primaryIndex.Position += 36; // PathHashIndexOffset (long) + PathHashIndexSize (long) + PathHashIndexHash (20 bytes)

    if (!primaryIndex.ReadBoolean())
    {
        throw new ParserException(primaryIndex, "No directory index");
    }

    var directoryIndexOffset = primaryIndex.Read<long>();
    var directoryIndexSize = primaryIndex.Read<long>();
    primaryIndex.Position += 20; // Directory Index hash

    var encodedPakEntriesSize = primaryIndex.Read<int>();
    var encodedPakEntries = primaryIndex.ReadBytes(encodedPakEntriesSize);

    if (primaryIndex.Read<int>() < 0)
    {
        throw new ParserException("Corrupt pak PrimaryIndex detected");
    }

    // Read FDirectoryIndex
    Ar.Position = directoryIndexOffset;
    var directoryIndex = new FByteArchive($"{Name} - Directory Index", ReadAndDecrypt((int) directoryIndexSize));

    unsafe
    {
        fixed (byte* ptr = encodedPakEntries)
        {
            var directoryIndexLength = directoryIndex.Read<int>();
            var files = new Dictionary<string, GameFile>(fileCount);

            for (var i = 0; i < directoryIndexLength; i++)
            {
                var dir = directoryIndex.ReadFString();
                var dirDictLength = directoryIndex.Read<int>();

                for (var j = 0; j < dirDictLength; j++)
                {
                    var name = directoryIndex.ReadFString();
                    var path = string.Concat(mountPoint, dir, name);
                    // The directory index stores the byte offset of each encoded FPakEntry
                    var entry = new FPakEntry(this, path, ptr + directoryIndex.Read<int>());
                    if (entry.IsEncrypted)
                    {
                        EncryptedFileCount++;
                    }
                    if (caseInsensitive)
                    {
                        files[path.ToLowerInvariant()] = entry;
                    }
                    else
                    {
                        files[path] = entry;
                    }
                }
            }

            Files = files;
            return files;
        }
    }
}
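
// Illustrative only: how a caller would look up a file in the dictionary built above. The key must
// follow the same ToLowerInvariant convention when the index was read with caseInsensitive = true.
// TryFindGameFile is a hypothetical helper, not part of the real reader API.
public bool TryFindGameFile(string path, bool caseInsensitive, out GameFile file)
{
    var key = caseInsensitive ? path.ToLowerInvariant() : path;
    return Files.TryGetValue(key, out file);
}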
public FIoStoreTocResource(FArchive Ar, EIoStoreTocReadOptions readOptions = EIoStoreTocReadOptions.Default)
{
    var streamBuffer = new byte[Ar.Length];
    Ar.Read(streamBuffer, 0, streamBuffer.Length);
    using var archive = new FByteArchive(Ar.Name, streamBuffer);

    // Header
    Header = new FIoStoreTocHeader(archive);

    if (Header.Version < EIoStoreTocVersion.PartitionSize)
    {
        Header.PartitionCount = 1;
        Header.PartitionSize = uint.MaxValue;
    }

    // Chunk IDs
    ChunkIds = archive.ReadArray<FIoChunkId>((int) Header.TocEntryCount);

    // Chunk offsets
    ChunkOffsetLengths = new FIoOffsetAndLength[Header.TocEntryCount];
    for (int i = 0; i < Header.TocEntryCount; i++)
    {
        ChunkOffsetLengths[i] = new FIoOffsetAndLength(archive);
    }

    // Chunk perfect hash map
    uint perfectHashSeedsCount = 0;
    uint chunksWithoutPerfectHashCount = 0;
    if (Header.Version >= EIoStoreTocVersion.PerfectHashWithOverflow)
    {
        perfectHashSeedsCount = Header.TocChunkPerfectHashSeedsCount;
        chunksWithoutPerfectHashCount = Header.TocChunksWithoutPerfectHashCount;
    }
    else if (Header.Version >= EIoStoreTocVersion.PerfectHash)
    {
        perfectHashSeedsCount = Header.TocChunkPerfectHashSeedsCount;
    }
    if (perfectHashSeedsCount > 0)
    {
        ChunkPerfectHashSeeds = archive.ReadArray<int>((int) perfectHashSeedsCount);
    }
    if (chunksWithoutPerfectHashCount > 0)
    {
        ChunkIndicesWithoutPerfectHash = archive.ReadArray<int>((int) chunksWithoutPerfectHashCount);
    }

    // Compression blocks
    CompressionBlocks = new FIoStoreTocCompressedBlockEntry[Header.TocCompressedBlockEntryCount];
    for (int i = 0; i < Header.TocCompressedBlockEntryCount; i++)
    {
        CompressionBlocks[i] = new FIoStoreTocCompressedBlockEntry(archive);
    }

    // Compression methods
    unsafe
    {
        var bufferSize = (int) (Header.CompressionMethodNameLength * Header.CompressionMethodNameCount);
        var buffer = stackalloc byte[bufferSize];
        archive.Serialize(buffer, bufferSize);
        // Slot 0 is reserved for "no compression"
        CompressionMethods = new CompressionMethod[Header.CompressionMethodNameCount + 1];
        CompressionMethods[0] = CompressionMethod.None;
        for (var i = 0; i < Header.CompressionMethodNameCount; i++)
        {
            var name = new string((sbyte*) buffer + i * Header.CompressionMethodNameLength, 0, (int) Header.CompressionMethodNameLength).TrimEnd('\0');
            if (string.IsNullOrEmpty(name))
            {
                continue;
            }
            if (!Enum.TryParse(name, true, out CompressionMethod method))
            {
                Log.Warning($"Unknown compression method '{name}' in {Ar.Name}");
                method = CompressionMethod.Unknown;
            }
            CompressionMethods[i + 1] = method;
        }
    }

    // Chunk block signatures
    if (Header.ContainerFlags.HasFlag(EIoContainerFlags.Signed))
    {
        var hashSize = archive.Read<int>();
        // tocSignature and blockSignature are both byte[hashSize],
        // followed by ChunkBlockSignatures of FSHAHash[Header.TocCompressedBlockEntryCount]
        archive.Position += hashSize + hashSize + FSHAHash.SIZE * Header.TocCompressedBlockEntryCount;

        // You could verify the hashes here, but nah
    }

    // Directory index
    if (Header.Version >= EIoStoreTocVersion.DirectoryIndex &&
        readOptions.HasFlag(EIoStoreTocReadOptions.ReadDirectoryIndex) &&
        Header.ContainerFlags.HasFlag(EIoContainerFlags.Indexed) &&
        Header.DirectoryIndexSize > 0)
    {
        DirectoryIndexBuffer = archive.ReadBytes((int) Header.DirectoryIndexSize);
    }

    // Meta
    if (readOptions.HasFlag(EIoStoreTocReadOptions.ReadTocMeta))
    {
        ChunkMetas = new FIoStoreTocEntryMeta[Header.TocEntryCount];
        for (int i = 0; i < Header.TocEntryCount; i++)
        {
            ChunkMetas[i] = new FIoStoreTocEntryMeta(archive);
        }
    }
}
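
// A hedged sketch of how the CompressionMethods table built above would typically be consumed:
// compressed block entries carry a method index, and index 0 maps to CompressionMethod.None
// because slot 0 was reserved for it. GetCompressionMethod is illustrative, not part of this type.
public CompressionMethod GetCompressionMethod(byte methodIndex)
{
    if (methodIndex >= CompressionMethods.Length)
        throw new ArgumentOutOfRangeException(nameof(methodIndex), $"Compression method index {methodIndex} is past the end of the TOC method table");
    return CompressionMethods[methodIndex];
}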