Example #1
 bool DecryptAndValidateIndex(ref byte[] IndexData, byte[] aesKey, FSHAHash ExpectedHash, out FSHAHash OutHash)
 {
     if (Info.bEncryptedIndex)
     {
         IndexData = AESDecryptor.DecryptAES(IndexData, aesKey);
     }
     OutHash = ExpectedHash; // too lazy to actually check against the hash
     // https://github.com/EpicGames/UnrealEngine/blob/79a64829237ae339118bb50b61d84e4599c14e8a/Engine/Source/Runtime/PakFile/Private/IPlatformFilePak.cpp#L5371
     return true;
 }
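The sample above skips the check entirely. Below is a minimal sketch of what the verification could look like, assuming FSHAHash wraps a 20-byte SHA-1 digest and exposes its raw bytes via a hypothetical Hash field and byte[] constructor (neither is confirmed by the examples here):
 // Hypothetical sketch: SHA-1 the (decrypted) index and compare it to the expected digest.
 // Requires System.Linq and System.Security.Cryptography; FSHAHash.Hash and the byte[] ctor are assumptions.
 bool DecryptAndValidateIndex(ref byte[] indexData, byte[] aesKey, FSHAHash expectedHash, out FSHAHash outHash)
 {
     if (Info.bEncryptedIndex)
     {
         indexData = AESDecryptor.DecryptAES(indexData, aesKey);
     }
     using var sha1 = System.Security.Cryptography.SHA1.Create();
     var computed = sha1.ComputeHash(indexData);
     outHash = new FSHAHash(computed);                  // assumed byte[] overload
     return computed.SequenceEqual(expectedHash.Hash);  // Enumerable.SequenceEqual; Hash accessor is assumed
 }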
Example #2
 public FPointerTableBase(FArchive Ar) // LoadFromArchive
 {
     NumDependencies = Ar.Read <int>();
     for (var i = 0; i < NumDependencies; ++i)
     {
         var Name            = Ar.ReadFName();
         var SavedLayoutSize = Ar.Read <uint>();
         var SavedLayoutHash = new FSHAHash(Ar);
     }
 }
        // MemoryImage.cpp
        private static void FPointerTableBase_LoadFromArchive(FMaterialResourceProxyReader Ar)
        {
            var numDependencies = Ar.Read <int>();

            for (var i = 0; i < numDependencies; ++i)
            {
                var nameHash        = Ar.Read <ulong>();
                var savedLayoutSize = Ar.Read <uint>();
                var savedLayoutHash = new FSHAHash(Ar);
            }
        }
Example #4
        public FMaterialShaderMapId(FArchive Ar)
        {
            var bIsLegacyPackage = Ar.Ver < EUnrealEngineObjectUE4Version.PURGED_FMATERIAL_COMPILE_OUTPUTS;

            if (!bIsLegacyPackage)
            {
                QualityLevel = (EMaterialQualityLevel)Ar.Read <int>();
                FeatureLevel = (ERHIFeatureLevel)Ar.Read <int>();
            }
            else
            {
                var LegacyQualityLevel = (EMaterialQualityLevel)Ar.Read <byte>(); // Is it an enum?
            }

            CookedShaderMapIdHash = new FSHAHash(Ar);

            if (!bIsLegacyPackage)
            {
                LayoutParams = new FPlatformTypeLayoutParameters(Ar);
            }
        }
Example #5
        public void Deserialize(FMaterialResourceProxyReader Ar)
        {
            var bUseNewFormat = Ar.Versions["ShaderMap.UseNewCookedFormat"];

            ImageResult = new FMemoryImageResult(Ar);

            var bShareCode = Ar.ReadBoolean();

            if (bUseNewFormat)
            {
                var ShaderPlatform = Ar.Read <EShaderPlatform>();
            }

            if (bShareCode)
            {
                ResourceHash = new FSHAHash(Ar);
            }
            else
            {
                Code = new FShaderMapResourceCode(Ar);
            }
        }
        public FMaterialShaderMapId(FMaterialResourceProxyReader Ar)
        {
            var bIsLegacyPackage = Ar.Ver < (UE4Version)260;

            if (!bIsLegacyPackage)
            {
                QualityLevel = (EMaterialQualityLevel)Ar.Read <int>();
                FeatureLevel = (ERHIFeatureLevel)Ar.Read <int>();
            }
            else
            {
                var legacyQualityLevel = Ar.Read <byte>();
            }

            // Cooked so can assume this is valid
            CookedShaderMapIdHash = new FSHAHash(Ar);

            if (!bIsLegacyPackage)
            {
                LayoutParams = Ar.Read <FPlatformTypeLayoutParameters>();
            }
        }
Example #7
        private FPakInfo(FArchive Ar, OffsetsToTry offsetToTry)
        {
            // New FPakInfo fields.
            EncryptionKeyGuid = Ar.Read <FGuid>();         // PakFile_Version_EncryptionKeyGuid
            EncryptedIndex    = Ar.Read <byte>() != 0;     // Do not replace by ReadFlag

            // Old FPakInfo fields
            Magic = Ar.Read <uint>();
            if (Magic != PAK_FILE_MAGIC)
            {
                // Stop immediately when magic is wrong
                return;
            }

            Version      = Ar.Read <EPakFileVersion>();
            IsSubVersion = (Version == EPakFileVersion.PakFile_Version_FNameBasedCompressionMethod && offsetToTry == OffsetsToTry.Size8a);
            IndexOffset  = Ar.Read <long>();
            IndexSize    = Ar.Read <long>();
            IndexHash    = new FSHAHash(Ar);

            if (Version == EPakFileVersion.PakFile_Version_FrozenIndex)
            {
                var bIndexIsFrozen = Ar.ReadFlag();
                // only used in 4.25, so don't add support unless it's really needed
                if (bIndexIsFrozen)
                {
                    throw new ParserException(Ar, "Pak index is frozen");
                }
            }

            if (Version < EPakFileVersion.PakFile_Version_FNameBasedCompressionMethod)
            {
                CompressionMethods = new List <CompressionMethod>
                {
                    CompressionMethod.None, CompressionMethod.Zlib, CompressionMethod.Gzip, CompressionMethod.Custom, CompressionMethod.Oodle
                };
            }
            else
            {
                var maxNumCompressionMethods = offsetToTry switch
                {
                    OffsetsToTry.Size8a => 5,
                    OffsetsToTry.Size8 => 4,
                    OffsetsToTry.Size8_1 => 1,
                    OffsetsToTry.Size8_2 => 2,
                    OffsetsToTry.Size8_3 => 3,
                    _ => 4
                };

                unsafe
                {
                    var bufferSize = COMPRESSION_METHOD_NAME_LEN * maxNumCompressionMethods;
                    var buffer     = stackalloc byte[bufferSize];
                    Ar.Serialize(buffer, bufferSize);
                    CompressionMethods = new List <CompressionMethod>(maxNumCompressionMethods + 1)
                    {
                        CompressionMethod.None
                    };
                    for (var i = 0; i < maxNumCompressionMethods; i++)
                    {
                        var name = new string((sbyte *)buffer + i * COMPRESSION_METHOD_NAME_LEN, 0, COMPRESSION_METHOD_NAME_LEN).TrimEnd('\0');
                        if (string.IsNullOrEmpty(name))
                        {
                            continue;
                        }
                        if (!Enum.TryParse(name, out CompressionMethod method))
                        {
                            Log.Warning($"Unknown compression method '{name}' in {Ar.Name}");
                            method = CompressionMethod.Unknown;
                        }
                        CompressionMethods.Add(method);
                    }
                }
            }

            // Reset new fields to their default states when serializing an older pak format.
            if (Version < EPakFileVersion.PakFile_Version_IndexEncryption)
            {
                EncryptedIndex = default;
            }

            if (Version < EPakFileVersion.PakFile_Version_EncryptionKeyGuid)
            {
                EncryptionKeyGuid = default;
            }
        }
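The unsafe stackalloc read above has a straightforward managed equivalent, sketched below; it assumes the archive exposes a ReadBytes(int) helper (as the readers in Examples #8 and #11 do) and drops the warning log for brevity:
        // Sketch: the same fixed-width name table, read without unsafe code.
        var nameBlock = Ar.ReadBytes(COMPRESSION_METHOD_NAME_LEN * maxNumCompressionMethods); // assumed helper
        CompressionMethods = new List<CompressionMethod>(maxNumCompressionMethods + 1) { CompressionMethod.None };
        for (var i = 0; i < maxNumCompressionMethods; i++)
        {
            var name = Encoding.ASCII.GetString(nameBlock, i * COMPRESSION_METHOD_NAME_LEN, COMPRESSION_METHOD_NAME_LEN).TrimEnd('\0');
            if (string.IsNullOrEmpty(name)) continue;
            CompressionMethods.Add(Enum.TryParse(name, out CompressionMethod method) ? method : CompressionMethod.Unknown);
        }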
Example #8
        public FIoStoreTocResource(Stream tocStream, EIoStoreTocReadOptions readOptions = EIoStoreTocReadOptions.Default)
        {
            var streamBuffer = new byte[tocStream.Length];

            tocStream.Read(streamBuffer, 0, streamBuffer.Length);
            using var reader           = new BinaryReader(new MemoryStream(streamBuffer));
            Header                     = new FIoStoreTocHeader(reader);
            reader.BaseStream.Position = Header.TocHeaderSize;

            var totalTocSize   = tocStream.Length - Header.TocHeaderSize;
            var tocMetaSize    = Header.TocEntryCount * FIoStoreTocEntryMeta.SIZE;
            var defaultTocSize = totalTocSize - Header.DirectoryIndexSize - tocMetaSize;

            var tocSize = defaultTocSize;

            if (readOptions.HasAnyFlags(EIoStoreTocReadOptions.ReadTocMeta))
            {
                tocSize = totalTocSize; // Meta data is at the end of the TOC file
            }

            if (readOptions.HasAnyFlags(EIoStoreTocReadOptions.ReadDirectoryIndex))
            {
                tocSize = defaultTocSize + Header.DirectoryIndexSize;
            }

            // Chunk IDs
            ChunkIds = new FIoChunkId[Header.TocEntryCount];
            for (var i = 0; i < Header.TocEntryCount; i++)
            {
                ChunkIds[i] = new FIoChunkId(reader);
            }

            // Chunk offsets
            ChunkOffsetLengths = new FIoOffsetAndLength[Header.TocEntryCount];
            for (var i = 0; i < Header.TocEntryCount; i++)
            {
                ChunkOffsetLengths[i] = new FIoOffsetAndLength(reader);
            }

            // Compression blocks
            CompressionBlocks = new FIoStoreTocCompressedBlockEntry[Header.TocCompressedBlockEntryCount];
            for (var i = 0; i < Header.TocCompressedBlockEntryCount; i++)
            {
                CompressionBlocks[i] = new FIoStoreTocCompressedBlockEntry(reader);
            }

            // Compression methods
            CompressionMethods = new string[Header.CompressionMethodNameCount]; // Not doing +1 or adding CompressionMethod.None here, since the FPakInfo implementation doesn't either
            for (var i = 0; i < Header.CompressionMethodNameCount; i++)
            {
                CompressionMethods[i] = Encoding.ASCII.GetString(reader.ReadBytes((int)Header.CompressionMethodNameLength)).TrimEnd('\0');
            }

            // Chunk block signatures
            if (Header.ContainerFlags.HasAnyFlags(EIoContainerFlags.Signed))
            {
                var hashSize = reader.ReadInt32();
                reader.BaseStream.Position += hashSize; // actually: var tocSignature = reader.ReadBytes(hashSize);
                reader.BaseStream.Position += hashSize; // actually: var blockSignature = reader.ReadBytes(hashSize);

                ChunkBlockSignatures = new FSHAHash[Header.TocCompressedBlockEntryCount];
                for (var i = 0; i < Header.TocCompressedBlockEntryCount; i++)
                {
                    ChunkBlockSignatures[i] = new FSHAHash(reader);
                }

                // You could verify hashes here but nah
            }

            // Directory index
            if (Header.Version >= EIoStoreTocVersion.DirectoryIndex &&
                readOptions.HasAnyFlags(EIoStoreTocReadOptions.ReadDirectoryIndex) &&
                Header.ContainerFlags.HasAnyFlags(EIoContainerFlags.Indexed) &&
                Header.DirectoryIndexSize > 0)
            {
                DirectoryIndexBuffer = reader.ReadBytes((int)Header.DirectoryIndexSize);
            }

            // Meta
            if (readOptions.HasAnyFlags(EIoStoreTocReadOptions.ReadTocMeta))
            {
                ChunkMetas = new FIoStoreTocEntryMeta[Header.TocEntryCount];
                for (var i = 0; i < Header.TocEntryCount; i++)
                {
                    ChunkMetas[i] = new FIoStoreTocEntryMeta(reader);
                }
            }
        }
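A usage sketch for the constructor above; the file name is only an example, and combining the two read options with | assumes EIoStoreTocReadOptions is a flags enum (consistent with the HasAnyFlags checks, but not confirmed here):
        // Hypothetical usage: load a TOC including the directory index and per-chunk meta.
        using var tocStream = File.OpenRead("global.utoc"); // example path
        var toc = new FIoStoreTocResource(tocStream, EIoStoreTocReadOptions.ReadDirectoryIndex | EIoStoreTocReadOptions.ReadTocMeta);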
Example #9
        void ReadIndexUpdated(BinaryReader reader, byte[] aesKey, out Dictionary <string, FPakEntry> dict, PakFilter filter)
        {
            MountPoint = reader.ReadFString();
            if (MountPoint.StartsWith("../../.."))
            {
                MountPoint = MountPoint.Substring(8);
            }
            else
            {
                // Weird mount point location...
                MountPoint = "/";
            }
            if (!CaseSensitive)
            {
                MountPoint = MountPoint.ToLowerInvariant();
            }
            var NumEntries   = reader.ReadInt32();
            var PathHashSeed = reader.ReadUInt64();

            if (reader.ReadInt32() == 0)
            {
                throw new FileLoadException("No path hash index");
            }

            /*
             * long PathHashIndexOffset = reader.ReadInt64();
             * long PathHashIndexSize = reader.ReadInt64();
             * FSHAHash PathHashIndexHash = new FSHAHash(reader);
             */
            reader.BaseStream.Position += 8L + 8L + 20L;

            if (reader.ReadInt32() == 0)
            {
                throw new FileLoadException("No directory index");
            }

            long     FullDirectoryIndexOffset = reader.ReadInt64();
            long     FullDirectoryIndexSize   = reader.ReadInt64();
            FSHAHash FullDirectoryIndexHash   = new FSHAHash(reader);

            byte[] EncodedPakEntries = reader.ReadTArray(() => reader.ReadByte());

            int FilesNum = reader.ReadInt32();

            if (FilesNum < 0)
            {
                // Should not be possible for any values in the PrimaryIndex to be invalid, since we verified the index hash
                throw new FileLoadException("Corrupt pak PrimaryIndex detected!");
            }

            reader.BaseStream.Position = FullDirectoryIndexOffset;
            byte[] PathHashIndexData = reader.ReadBytes((int)FullDirectoryIndexSize);

            if (!DecryptAndValidateIndex(ref PathHashIndexData, aesKey, FullDirectoryIndexHash, out var ComputedHash))
            {
                throw new FileLoadException("Corrupt pak PrimaryIndex detected!");
                //UE_LOG(LogPakFile, Log, TEXT(" Filename: %s"), *PakFilename);
                //UE_LOG(LogPakFile, Log, TEXT(" Encrypted: %d"), Info.bEncryptedIndex);
                //UE_LOG(LogPakFile, Log, TEXT(" Total Size: %d"), Reader->TotalSize());
                //UE_LOG(LogPakFile, Log, TEXT(" Index Offset: %d"), FullDirectoryIndexOffset);
                //UE_LOG(LogPakFile, Log, TEXT(" Index Size: %d"), FullDirectoryIndexSize);
                //UE_LOG(LogPakFile, Log, TEXT(" Stored Index Hash: %s"), *PathHashIndexHash.ToString());
                //UE_LOG(LogPakFile, Log, TEXT(" Computed Index Hash: %s"), *ComputedHash.ToString());
            }

            BinaryReader PathHashIndexReader = new BinaryReader(new MemoryStream(PathHashIndexData));

            FPakDirectoryEntry[] PathHashIndex = PathHashIndexReader.ReadTArray(() => new FPakDirectoryEntry(PathHashIndexReader));

            dict = new Dictionary <string, FPakEntry>(NumEntries);
            foreach (FPakDirectoryEntry directoryEntry in PathHashIndex)
            {
                foreach (FPathHashIndexEntry hashIndexEntry in directoryEntry.Entries)
                {
                    var path = directoryEntry.Directory + hashIndexEntry.Filename;
                    if (path.StartsWith("/"))
                    {
                        path = path.Substring(1);
                    }
                    if (!CaseSensitive)
                    {
                        path = path.ToLowerInvariant();
                    }
                    // if there is no filter OR the filter passes
                    if (filter == null || filter.CheckFilter(MountPoint + hashIndexEntry.Filename, CaseSensitive))
                    {
                        // Filename is stored without the MountPoint prepended, to save memory
                        dict[path] = GetEntry(path, hashIndexEntry.Location, EncodedPakEntries);
                    }
                }
            }
        }
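Every example here constructs the digest with new FSHAHash(Ar) or new FSHAHash(reader), and the 8L + 8L + 20L skip above (offset, size, hash) suggests the hash occupies 20 bytes on disk. A minimal sketch of such a wrapper, covering only what these call sites need (the real type may differ):
        // Sketch: FSHAHash as a fixed 20-byte SHA-1 digest read straight from a BinaryReader.
        public class FSHAHash
        {
            public const int SIZE = 20;
            public readonly byte[] Hash;

            public FSHAHash(BinaryReader reader) => Hash = reader.ReadBytes(SIZE);

            public override string ToString() => BitConverter.ToString(Hash).Replace("-", "");
        }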
Example #10
        void ReadIndexUpdated(BinaryReader reader, byte[] aesKey, long totalSize, PakFilter filter)
        {
            MountPoint = reader.ReadFString();
            if (MountPoint.StartsWith("../../.."))
            {
                MountPoint = MountPoint.Substring(8);
            }
            else
            {
                // Weird mount point location...
                MountPoint = "/";
            }
            if (!CaseSensitive)
            {
                MountPoint = MountPoint.ToLowerInvariant();
            }
            var NumEntries   = reader.ReadInt32();
            var PathHashSeed = reader.ReadUInt64();

            bool     bReaderHasPathHashIndex = false;
            long     PathHashIndexOffset     = -1; // INDEX_NONE
            long     PathHashIndexSize       = 0;
            FSHAHash PathHashIndexHash       = default;

            bReaderHasPathHashIndex = reader.ReadInt32() != 0;
            if (bReaderHasPathHashIndex)
            {
                PathHashIndexOffset     = reader.ReadInt64();
                PathHashIndexSize       = reader.ReadInt64();
                PathHashIndexHash       = new FSHAHash(reader);
                bReaderHasPathHashIndex = bReaderHasPathHashIndex && PathHashIndexOffset != -1;
            }

            bool     bReaderHasFullDirectoryIndex = false;
            long     FullDirectoryIndexOffset     = -1; // INDEX_NONE
            long     FullDirectoryIndexSize       = 0;
            FSHAHash FullDirectoryIndexHash       = default;

            bReaderHasFullDirectoryIndex = reader.ReadInt32() != 0;
            if (bReaderHasFullDirectoryIndex)
            {
                FullDirectoryIndexOffset     = reader.ReadInt64();
                FullDirectoryIndexSize       = reader.ReadInt64();
                FullDirectoryIndexHash       = new FSHAHash(reader);
                bReaderHasFullDirectoryIndex = bReaderHasFullDirectoryIndex && FullDirectoryIndexOffset != -1;
            }

            byte[] EncodedPakEntries = reader.ReadTArray(() => reader.ReadByte());
            File.WriteAllBytes("pakentryencoded", EncodedPakEntries);

            int FilesNum = reader.ReadInt32();

            if (FilesNum < 0)
            {
                // Should not be possible for any values in the PrimaryIndex to be invalid, since we verified the index hash
                throw new FileLoadException("Corrupt pak PrimaryIndex detected!");
            }
            FPakEntry[] Files = new FPakEntry[FilesNum]; // from what I can see, there aren't any???
            if (FilesNum > 0)
            {
                for (int FileIndex = 0; FileIndex < FilesNum; ++FileIndex)
                {
                    Files[FileIndex] = new FPakEntry(reader, Info.Version);
                }
            }

            // Decide which SecondaryIndex(es) to load
            bool bWillUseFullDirectoryIndex;
            bool bWillUsePathHashIndex;
            bool bReadFullDirectoryIndex;

            if (bReaderHasPathHashIndex && bReaderHasFullDirectoryIndex)
            {
                bWillUseFullDirectoryIndex = false; // https://github.com/EpicGames/UnrealEngine/blob/79a64829237ae339118bb50b61d84e4599c14e8a/Engine/Source/Runtime/PakFile/Private/IPlatformFilePak.cpp#L5628
                bWillUsePathHashIndex      = !bWillUseFullDirectoryIndex;
                bool bWantToReadFullDirectoryIndex = false;
                bReadFullDirectoryIndex = bReaderHasFullDirectoryIndex && bWantToReadFullDirectoryIndex;
            }
            else if (bReaderHasPathHashIndex)
            {
                bWillUsePathHashIndex      = true;
                bWillUseFullDirectoryIndex = false;
                bReadFullDirectoryIndex    = false;
            }
            else if (bReaderHasFullDirectoryIndex)
            {
                // We don't support creating the PathHash Index at runtime; we want to move to having only the PathHashIndex, so supporting not having it at all is not useful enough to write
                bWillUsePathHashIndex      = false;
                bWillUseFullDirectoryIndex = true;
                bReadFullDirectoryIndex    = true;
            }
            else
            {
                // It should not be possible for PrimaryIndexes to be built without a PathHashIndex AND without a FullDirectoryIndex; CreatePakFile in UnrealPak.exe has a check statement for it.
                throw new FileLoadException("Corrupt pak PrimaryIndex detected!");
            }

            // Load the Secondary Index(es)
            byte[] PathHashIndexData;
            Dictionary <ulong, int> PathHashIndex;
            BinaryReader            PathHashIndexReader = default;
            bool bHasPathHashIndex;

            if (bWillUsePathHashIndex)
            {
                if (PathHashIndexOffset < 0 || totalSize < (PathHashIndexOffset + PathHashIndexSize))
                {
                    // Should not be possible for these values (which came from the PrimaryIndex) to be invalid, since we verified the index hash of the PrimaryIndex
                    throw new FileLoadException("Corrupt pak PrimaryIndex detected!");
                    //UE_LOG(LogPakFile, Log, TEXT(" Filename: %s"), *PakFilename);
                    //UE_LOG(LogPakFile, Log, TEXT(" Total Size: %d"), Reader->TotalSize());
                    //UE_LOG(LogPakFile, Log, TEXT(" PathHashIndexOffset : %d"), PathHashIndexOffset);
                    //UE_LOG(LogPakFile, Log, TEXT(" PathHashIndexSize: %d"), PathHashIndexSize);
                }
                reader.BaseStream.Position = PathHashIndexOffset;
                PathHashIndexData          = reader.ReadBytes((int)PathHashIndexSize);
                File.WriteAllBytes("indexdata.daa", PathHashIndexData);

                {
                    if (!DecryptAndValidateIndex(reader, ref PathHashIndexData, aesKey, PathHashIndexHash, out var ComputedHash))
                    {
                        throw new FileLoadException("Corrupt pak PrimaryIndex detected!");
                        //UE_LOG(LogPakFile, Log, TEXT(" Filename: %s"), *PakFilename);
                        //UE_LOG(LogPakFile, Log, TEXT(" Encrypted: %d"), Info.bEncryptedIndex);
                        //UE_LOG(LogPakFile, Log, TEXT(" Total Size: %d"), Reader->TotalSize());
                        //UE_LOG(LogPakFile, Log, TEXT(" Index Offset: %d"), FullDirectoryIndexOffset);
                        //UE_LOG(LogPakFile, Log, TEXT(" Index Size: %d"), FullDirectoryIndexSize);
                        //UE_LOG(LogPakFile, Log, TEXT(" Stored Index Hash: %s"), *PathHashIndexHash.ToString());
                        //UE_LOG(LogPakFile, Log, TEXT(" Computed Index Hash: %s"), *ComputedHash.ToString());
                    }
                }

                PathHashIndexReader = new BinaryReader(new MemoryStream(PathHashIndexData));
                PathHashIndex       = ReadPathHashIndex(PathHashIndexReader);
                bHasPathHashIndex   = true;
            }

            var  DirectoryIndex = new Dictionary <string, Dictionary <string, int> >();
            bool bHasFullDirectoryIndex;

            if (!bReadFullDirectoryIndex)
            {
                DirectoryIndex         = ReadDirectoryIndex(PathHashIndexReader);
                bHasFullDirectoryIndex = false;
            }
            if (DirectoryIndex.Count == 0)
            {
                if (totalSize < (FullDirectoryIndexOffset + FullDirectoryIndexSize) ||
                    FullDirectoryIndexOffset < 0)
                {
                    // Should not be possible for these values (which came from the PrimaryIndex) to be invalid, since we verified the index hash of the PrimaryIndex
                    throw new FileLoadException("Corrupt pak PrimaryIndex detected!");
                    //UE_LOG(LogPakFile, Log, TEXT(" Filename: %s"), *PakFilename);
                    //UE_LOG(LogPakFile, Log, TEXT(" Total Size: %d"), Reader->TotalSize());
                    //UE_LOG(LogPakFile, Log, TEXT(" FullDirectoryIndexOffset : %d"), FullDirectoryIndexOffset);
                    //UE_LOG(LogPakFile, Log, TEXT(" FullDirectoryIndexSize: %d"), FullDirectoryIndexSize);
                }
                reader.BaseStream.Position = FullDirectoryIndexOffset;
                byte[] FullDirectoryIndexData = reader.ReadBytes((int)FullDirectoryIndexSize);

                {
                    if (!DecryptAndValidateIndex(reader, ref FullDirectoryIndexData, aesKey, FullDirectoryIndexHash, out var ComputedHash))
                    {
                        throw new FileLoadException("Corrupt pak PrimaryIndex detected!");
                        //UE_LOG(LogPakFile, Log, TEXT(" Filename: %s"), *PakFilename);
                        //UE_LOG(LogPakFile, Log, TEXT(" Encrypted: %d"), Info.bEncryptedIndex);
                        //UE_LOG(LogPakFile, Log, TEXT(" Total Size: %d"), Reader->TotalSize());
                        //UE_LOG(LogPakFile, Log, TEXT(" Index Offset: %d"), FullDirectoryIndexOffset);
                        //UE_LOG(LogPakFile, Log, TEXT(" Index Size: %d"), FullDirectoryIndexSize);
                        //UE_LOG(LogPakFile, Log, TEXT(" Stored Index Hash: %s"), *FullDirectoryIndexHash.ToString());
                        //UE_LOG(LogPakFile, Log, TEXT(" Computed Index Hash: %s"), *ComputedHash.ToString());
                    }
                }

                var SecondaryIndexReader = new BinaryReader(new MemoryStream(FullDirectoryIndexData));
                DirectoryIndex         = ReadDirectoryIndex(SecondaryIndexReader);
                bHasFullDirectoryIndex = true;
            }

            Entries = new Dictionary <string, FPakEntry>(NumEntries);
            foreach (var (dirname, dir) in DirectoryIndex)
            {
                foreach (var (filename, pakLocation) in dir)
                {
                    var path = dirname + filename;
                    if (!CaseSensitive)
                    {
                        path = path.ToLowerInvariant();
                    }
                    // if there is no filter OR the filter passes
                    if (filter == null || filter.CheckFilter(MountPoint + filename, CaseSensitive))
                    {
                        // Filename is stored without the MountPoint prepended, to save memory
                        Entries[path] = GetEntry(pakLocation, EncodedPakEntries);
                    }
                }
            }
        }
Example #11
 public FShaderMapResourceCode(FArchive Ar)
 {
     ResourceHash  = new FSHAHash(Ar);
     ShaderHashes  = Ar.ReadArray(() => new FSHAHash(Ar));
     ShaderEntries = Ar.ReadArray(() => new FShaderEntry(Ar));
 }
        public void Deserialize(FMaterialResourceProxyReader Ar)
        {
            #region FMemoryImageResult::LoadFromArchive, MemoryImage.cpp
            if (GIsHybridUE5)
            {
                var layoutParameters = Ar.Read <FPlatformTypeLayoutParameters>();
            }

            var frozenSize   = Ar.Read <int>();
            var frozenObject = Ar.ReadBytes(frozenSize);

            if (GIsHybridUE5)
            {
                //var bFrozenObjectIsValid = pointerTable.LoadFromArchive(Ar, layoutParameters, frozenObject);
                FShaderMapPointerTable_LoadFromArchive(Ar);
            }

            var numVTables      = Ar.Read <uint>();
            var numScriptNames  = Ar.Read <uint>();
            var numMinimalNames = Ar.Read <uint>();

            for (var i = 0; i < numVTables; i++)
            {
                var typeNameHash = Ar.Read <ulong>();
                var numPatches   = Ar.Read <uint>();

                for (var patchIndex = 0; patchIndex < numPatches; ++patchIndex)
                {
                    var vTableOffset = Ar.Read <uint>();
                    var offset       = Ar.Read <uint>();
                }
            }

            for (var i = 0; i < numScriptNames; i++)
            {
                var name       = Ar.ReadFName();
                var numPatches = Ar.Read <uint>();

                for (var patchIndex = 0; patchIndex < numPatches; ++patchIndex)
                {
                    var offset = Ar.Read <uint>();
                }
            }

            for (var i = 0; i < numMinimalNames; i++)
            {
                var name       = Ar.ReadFName();
                var numPatches = Ar.Read <uint>();

                for (var patchIndex = 0; patchIndex < numPatches; ++patchIndex)
                {
                    var offset = Ar.Read <uint>();
                }
            }

            #endregion

            if (!GIsHybridUE5)
            {
                FShaderMapPointerTable_LoadFromArchive(Ar);
            }

            var bShareCode = Ar.ReadBoolean();
            if (GIsHybridUE5)
            {
                var shaderPlatform = Ar.Read <byte>();
            }

            if (bShareCode)
            {
                var resourceHash = new FSHAHash(Ar);
            }
            else
            {
                throw new NotImplementedException();
            }
        }