Example #1
        public void Test_MultipleWithSubTasks()
        {
            var ct = new CancellationTokenSource().Token;
            var p  = new NefsProgress(ct);

            p.BeginTask(1.0f, "A");
            Assert.Equal(0.0f, p.Percent);
            Assert.Equal("A", p.StatusMessage);
            Assert.Equal("", p.StatusSubMessage);
            {
                p.BeginTask(0.2f);
                Assert.Equal(0.0f, p.Percent);
                Assert.Equal("A", p.StatusMessage);
                Assert.Equal("", p.StatusSubMessage);
                {
                    p.BeginSubTask(0.5f, "sub1");
                    Assert.Equal(0.0f, p.Percent);
                    Assert.Equal("A", p.StatusMessage);
                    Assert.Equal("sub1", p.StatusSubMessage);

                    p.EndTask();
                    Assert.Equal(0.1f, p.Percent);
                    Assert.Equal("A", p.StatusMessage);
                    Assert.Equal("", p.StatusSubMessage);

                    p.BeginSubTask(0.5f, "sub2");
                    Assert.Equal(0.1f, p.Percent);
                    Assert.Equal("A", p.StatusMessage);
                    Assert.Equal("sub2", p.StatusSubMessage);

                    p.EndTask();
                    Assert.Equal(0.2f, p.Percent);
                    Assert.Equal("A", p.StatusMessage);
                    Assert.Equal("", p.StatusSubMessage);
                }
                p.EndTask();
                Assert.Equal(0.2f, p.Percent);
                Assert.Equal("A", p.StatusMessage);
                Assert.Equal("", p.StatusSubMessage);

                p.BeginTask(0.8f, "B");
                Assert.Equal(0.2f, p.Percent);
                Assert.Equal("B", p.StatusMessage);
                Assert.Equal("", p.StatusSubMessage);

                p.EndTask();
                Assert.Equal(1.0f, p.Percent);
                Assert.Equal("A", p.StatusMessage);
                Assert.Equal("", p.StatusSubMessage);
            }
            p.EndTask();
            Assert.Equal(1.0f, p.Percent);
            Assert.Equal("", p.StatusMessage);
            Assert.Equal("", p.StatusSubMessage);
        }
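Note on the roll-up: a sub-task's contribution to Percent is its weight multiplied by the weights of every task above it, so the 0.5 sub-task inside the 0.2 task inside the 1.0 root adds 0.2 * 0.5 = 0.1 when it ends, exactly as the asserts above show. Below is a minimal sketch that reproduces the same nesting and logs updates through ProgressChanged; the using directive for the library namespace that defines NefsProgress is assumed (it is not shown in these excerpts), and Percent is also read directly rather than relying on an event per EndTask.

        using System;
        using System.Threading;
        // plus the namespace that defines NefsProgress (not shown in these excerpts)

        public static class ProgressRollupSketch
        {
            public static void Run()
            {
                var cts = new CancellationTokenSource();
                var p = new NefsProgress(cts.Token);

                // Log whatever the library reports when progress changes.
                p.ProgressChanged += (o, e) =>
                    Console.WriteLine($"{e.Progress:P0} | {e.Message} | {e.SubMessage}");

                p.BeginTask(1.0f, "A");        // root task, weight 1.0
                p.BeginTask(0.2f);             // child worth 20% of the root
                p.BeginSubTask(0.5f, "sub1");  // half of that 20%
                p.EndTask();                   // Percent is now 0.1
                p.BeginSubTask(0.5f, "sub2");
                p.EndTask();                   // Percent is now 0.2
                p.EndTask();                   // child done, still 0.2
                p.EndTask();                   // root done, Percent is 1.0

                Console.WriteLine(p.Percent);
            }
        }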
Example #2
        public void Test_MultipleTasks()
        {
            var ct = new CancellationTokenSource().Token;
            var p  = new NefsProgress(ct);

            p.BeginTask(1.0f);
            p.BeginTask(0.5f);
            Assert.Equal(0.0f, p.Percent);

            p.EndTask();
            Assert.Equal(0.5f, p.Percent);

            p.EndTask();
            Assert.Equal(1.0f, p.Percent);
        }
Example #3
        public void BeginTask_WeightTooBig_ArgumentOutOfRangeExceptionThrown()
        {
            var ct = new CancellationTokenSource().Token;
            var p  = new NefsProgress(ct);

            Assert.Throws <ArgumentOutOfRangeException>(() => p.BeginTask(2.0f));
        }
Example #4
        /// <inheritdoc/>
        public async Task DetransformAsync(
            Stream input,
            Int64 inputOffset,
            Stream output,
            Int64 outputOffset,
            uint extractedSize,
            IReadOnlyList <NefsDataChunk> chunks,
            NefsProgress p)
        {
            var numChunks      = chunks.Count;
            var bytesRemaining = extractedSize;

            input.Seek(inputOffset, SeekOrigin.Begin);
            output.Seek(outputOffset, SeekOrigin.Begin);

            using (var t = p.BeginTask(1.0f, $"Detransforming stream"))
            {
                for (int i = 0; i < numChunks; i++)
                {
                    using (var st = p.BeginSubTask(1.0f / numChunks, $"Detransforming chunk {i + 1}/{numChunks}..."))
                    {
                        // Determine the maximum output size for this chunk based on expected output size
                        var maxChunkSize = Math.Min(bytesRemaining, chunks[i].Transform.ChunkSize);

                        // Revert the transform
                        var chunkSize = await this.DetransformChunkAsync(input, output, chunks[i], maxChunkSize, p);

                        bytesRemaining -= chunkSize;
                    }
                }
            }
        }
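Here each chunk gets an equal share, 1.0f / numChunks, of the surrounding task, so Percent advances by one chunk's worth each time a sub-task is disposed. A hedged sketch of the same pattern around an arbitrary per-chunk operation follows; the doChunkAsync delegate is a placeholder, not a library call.

        // Sketch only: splits one parent task evenly across numChunks pieces of work.
        // doChunkAsync stands in for the real per-chunk operation.
        // Requires System and System.Threading.Tasks, plus the NefsProgress namespace.
        public static async Task ProcessChunksAsync(NefsProgress p, int numChunks, Func<int, Task> doChunkAsync)
        {
            using (var t = p.BeginTask(1.0f, "Processing chunks"))
            {
                for (var i = 0; i < numChunks; ++i)
                {
                    using (var st = p.BeginSubTask(1.0f / numChunks, $"Chunk {i + 1}/{numChunks}"))
                    {
                        await doChunkAsync(i);
                    }

                    // After chunk i completes, this task has advanced by roughly (i + 1) / numChunks.
                }
            }
        }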
Example #5
        /// <summary>
        /// Reads header part 7 from an input stream.
        /// </summary>
        /// <param name="stream">The stream to read from.</param>
        /// <param name="offset">The offset to the header part from the beginning of the stream.</param>
        /// <param name="numEntries">Number of entries.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>The loaded header part.</returns>
        internal async Task <NefsHeaderPart7> ReadHeaderPart7Async(Stream stream, uint offset, uint numEntries, NefsProgress p)
        {
            var entries = new List <NefsHeaderPart7Entry>();
            var size    = numEntries * NefsHeaderPart7Entry.Size;

            // Validate inputs
            if (!this.ValidateHeaderPartStream(stream, offset, (uint)size, "7"))
            {
                return(new NefsHeaderPart7(entries));
            }

            // Get entries in part 7
            var entryOffset = offset;

            for (var i = 0; i < numEntries; ++i)
            {
                using (p.BeginTask(1.0f / numEntries))
                {
                    // Read the entry data
                    var entry = new NefsHeaderPart7Entry();
                    await FileData.ReadDataAsync(stream, entryOffset, entry, NefsVersion.Version200, p);

                    entryOffset += NefsHeaderPart7Entry.Size;

                    entries.Add(entry);
                }
            }

            return(new NefsHeaderPart7(entries));
        }
Example #6
 /// <summary>
 /// Writes the header intro to an output stream.
 /// </summary>
 /// <param name="stream">The stream to write to.</param>
 /// <param name="offset">The absolute offset in the stream to write at.</param>
 /// <param name="intro">The intro to write.</param>
 /// <param name="p">Progress info.</param>
 /// <returns>An async task.</returns>
 internal async Task WriteHeaderIntroAsync(Stream stream, UInt64 offset, NefsHeaderIntro intro, NefsProgress p)
 {
     using (var t = p.BeginTask(1.0f))
     {
         await FileData.WriteDataAsync(stream, offset, intro, NefsVersion.Version200, p);
     }
 }
Example #7
        /// <summary>
        /// Reads header part 2 from an input stream.
        /// </summary>
        /// <param name="stream">The stream to read from.</param>
        /// <param name="offset">The offset to the header part from the beginning of the stream.</param>
        /// <param name="size">The size of the header part.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>The loaded header part.</returns>
        internal async Task <NefsHeaderPart2> ReadHeaderPart2Async(Stream stream, uint offset, uint size, NefsProgress p)
        {
            var entries = new List <NefsHeaderPart2Entry>();

            // Validate inputs
            if (!this.ValidateHeaderPartStream(stream, offset, size, "2"))
            {
                return(new NefsHeaderPart2(entries));
            }

            // Get entries in part 2
            var numEntries  = size / NefsHeaderPart2Entry.Size;
            var entryOffset = offset;

            for (var i = 0; i < numEntries; ++i)
            {
                using (p.BeginTask(1.0f / numEntries))
                {
                    var entry = new NefsHeaderPart2Entry();
                    await FileData.ReadDataAsync(stream, entryOffset, entry, NefsVersion.Version200, p);

                    entryOffset += NefsHeaderPart2Entry.Size;

                    entries.Add(entry);
                }
            }

            return(new NefsHeaderPart2(entries));
        }
Example #8
 /// <summary>
 /// Writes the header intro table of contents to an output stream.
 /// </summary>
 /// <param name="stream">The stream to write to.</param>
 /// <param name="offset">The absolute offset in the stream to write at.</param>
 /// <param name="toc">The table of contents to write.</param>
 /// <param name="p">Progress info.</param>
 /// <returns>An async task.</returns>
 internal async Task WriteHeaderIntroTocAsync(Stream stream, UInt64 offset, NefsHeaderIntroToc toc, NefsProgress p)
 {
     using (var t = p.BeginTask(1.0f))
     {
         await FileData.WriteDataAsync(stream, offset, toc, p);
     }
 }
Example #9
        /// <summary>
        /// Reads the header from an input stream.
        /// </summary>
        /// <param name="originalStream">The stream to read from.</param>
        /// <param name="offset">The offset to the header from the beginning of the stream.</param>
        /// <param name="part6Offset">
        /// The offset to the start of part 6 data from the beginning of the stream.
        /// </param>
        /// <param name="p">Progress info.</param>
        /// <returns>The loaded header.</returns>
        internal async Task <INefsHeader> ReadHeaderAsync(Stream originalStream, ulong offset, ulong part6Offset, NefsProgress p)
        {
            Stream          stream;
            Stream          part6Stream;
            INefsHeader     header = null;
            NefsHeaderIntro intro  = null;

            using (p.BeginTask(0.2f, "Reading header intro"))
            {
                // Decrypt header if needed
                (intro, stream) = await this.ReadHeaderIntroAsync(originalStream, offset, p);
            }

            // For now, assume that if the header is encrypted, then the part 6 data is not
            // separated. We've only seen encrypted headers in some nefs 2.0 archives (i.e., DLC content).
            part6Stream = intro.IsEncrypted ? stream : originalStream;

            using (p.BeginTask(0.8f))
            {
                if (intro.NefsVersion == 0x20000)
                {
                    // 2.0.0
                    Log.LogInformation("Detected NeFS version 2.0.");
                    header = await this.Read20HeaderAsync(stream, 0, part6Stream, part6Offset, intro, p);
                }
                else if (intro.NefsVersion == 0x10600)
                {
                    // 1.6.0
                    Log.LogInformation("Detected NeFS version 1.6.");
                    header = await this.Read16HeaderAsync(stream, 0, part6Stream, part6Offset, intro, p);
                }
                else
                {
                    Log.LogInformation($"Detected unkown NeFS version {intro.NefsVersion}.");
                    header = await this.Read20HeaderAsync(stream, 0, part6Stream, part6Offset, intro, p);
                }
            }

            // The header stream must be disposed
            stream.Dispose();

            return(header);
        }
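The branches above key off intro.NefsVersion: 0x20000 means NeFS 2.0, 0x10600 means NeFS 1.6, and anything else falls back to the 2.0 reader. A small illustrative helper (not a library method) that captures the same mapping for logging:

        // Illustrative only: mirrors the version dispatch in ReadHeaderAsync above.
        private static string DescribeNefsVersion(uint nefsVersion)
        {
            switch (nefsVersion)
            {
                case 0x20000: return "NeFS 2.0";
                case 0x10600: return "NeFS 1.6";
                default: return $"Unknown NeFS version 0x{nefsVersion:X} (read with the 2.0 reader)";
            }
        }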
Example #10
        /// <summary>
        /// Reads header part 3 from an input stream.
        /// </summary>
        /// <param name="stream">The stream to read from.</param>
        /// <param name="offset">The offset to the header part from the beginning of the stream.</param>
        /// <param name="size">The size of the header part.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>The loaded header part.</returns>
        internal async Task <NefsHeaderPart3> ReadHeaderPart3Async(Stream stream, uint offset, uint size, NefsProgress p)
        {
            var entries = new List <string>();

            // Validate inputs
            if (!this.ValidateHeaderPartStream(stream, offset, size, "3"))
            {
                return(new NefsHeaderPart3(entries));
            }

            // Read in header part 3
            var bytes = new byte[size];

            stream.Seek(offset, SeekOrigin.Begin);
            await stream.ReadAsync(bytes, 0, (int)size);

            // Process all strings in the strings table
            var nextOffset = 0;

            while (nextOffset < size)
            {
                using (p.BeginTask(nextOffset / size))
                {
                    // Find the next null terminator
                    var nullOffset = (int)size;
                    for (var i = nextOffset; i < size; ++i)
                    {
                        if (bytes[i] == 0)
                        {
                            nullOffset = i;
                            break;
                        }
                    }

                    if (nullOffset == size)
                    {
                        // No null terminator found, assume end of part 3. There can be a few
                        // garbage bytes at the end of this part.
                        break;
                    }

                    // Get the string
                    var str = Encoding.ASCII.GetString(bytes, nextOffset, nullOffset - nextOffset);

                    // Record entry
                    entries.Add(str);

                    // Find next string
                    nextOffset = nullOffset + 1;
                }
            }

            return(new NefsHeaderPart3(entries));
        }
Example #11
        public void Test_MultipleTasksWithMessage()
        {
            var ct = new CancellationTokenSource().Token;
            var p  = new NefsProgress(ct);

            p.BeginTask(1.0f, "A");
            Assert.Equal(0.0f, p.Percent);
            Assert.Equal("A", p.StatusMessage);
            Assert.Equal("", p.StatusSubMessage);

            p.BeginTask(0.25f, "B");
            Assert.Equal(0.0f, p.Percent);
            Assert.Equal("B", p.StatusMessage);
            Assert.Equal("", p.StatusSubMessage);

            p.EndTask();
            Assert.Equal(0.25f, p.Percent);

            p.EndTask();
            Assert.Equal(1.0f, p.Percent);
        }
Example #12
        public void Test_MoreTests()
        {
            var ct = new CancellationTokenSource().Token;
            var p  = new NefsProgress(ct);

            p.BeginTask(1.0f);
            {
                p.BeginTask(0.1f);
                this.Verify(p, 0.0f, "", "");
                {
                    p.BeginSubTask(0.4f, "sub");
                    this.Verify(p, 0.0f, "", "sub");
                    p.EndTask();
                    this.Verify(p, 0.04f, "", "");

                    p.BeginSubTask(0.6f, "sub");
                    this.Verify(p, 0.04f, "", "sub");
                    p.EndTask();
                    this.Verify(p, 0.1f, "", "");
                }
                p.EndTask();
                this.Verify(p, 0.1f, "", "");

                p.BeginTask(0.8f);
                this.Verify(p, 0.1f, "", "");
                p.EndTask();
                this.Verify(p, 0.9f, "", "");

                p.BeginTask(0.05f);
                this.Verify(p, 0.9f, "", "");
                p.EndTask();
                this.Verify(p, 0.95f, "", "");

                // 0.1 + 0.8 + 0.05 == 0.95 (does not add up to 1)
            }
            p.EndTask();
            this.Verify(p, 1.0f, "", "");
        }
Example #13
        public void BeginTask_Message_ProgressChangedRaised()
        {
            var ct = new CancellationTokenSource().Token;
            var p  = new NefsProgress(ct);

            NefsProgressEventArgs args = null;

            p.ProgressChanged += (o, e) => args = e;

            p.BeginTask(1.0f, "A");
            Assert.Equal(p.StatusMessage, args.Message);
            Assert.Equal(p.StatusSubMessage, args.SubMessage);
            Assert.Equal(p.Percent, args.Progress);
        }
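Because NefsProgressEventArgs carries the current progress, message, and sub-message, the event can be bridged to the standard IProgress&lt;T&gt; abstraction. A hedged sketch follows; the tuple shape is chosen here for illustration and is not a library type.

        // Sketch: forwards NefsProgress updates to any IProgress<T> consumer.
        // Requires System; the tuple element names are illustrative.
        public static void Forward(NefsProgress source, IProgress<(float Percent, string Message, string SubMessage)> target)
        {
            source.ProgressChanged += (o, e) => target.Report((e.Progress, e.Message, e.SubMessage));
        }

        // Example consumer that writes each update to the console:
        // Forward(p, new Progress<(float, string, string)>(r => Console.WriteLine($"{r.Item1:P0} {r.Item2} {r.Item3}")));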
Example #14
        public void Test_SingleTask()
        {
            var ct = new CancellationTokenSource().Token;
            var p  = new NefsProgress(ct);

            p.BeginTask(1.0f);
            Assert.Equal(0.0f, p.Percent);
            Assert.Equal("", p.StatusMessage);
            Assert.Equal("", p.StatusSubMessage);

            p.EndTask();
            Assert.Equal(1.0f, p.Percent);
            Assert.Equal("", p.StatusMessage);
            Assert.Equal("", p.StatusSubMessage);
        }
Example #15
 /// <inheritdoc/>
 public async Task DetransformFileAsync(
     string inputFile,
     Int64 inputOffset,
     string outputFile,
     Int64 outputOffset,
     uint extractedSize,
     IReadOnlyList <NefsDataChunk> chunks,
     NefsProgress p)
 {
     using (var t = p.BeginTask(1.0f))
         using (var inputStream = this.FileSystem.File.OpenRead(inputFile))
             using (var outputStream = this.FileSystem.File.OpenWrite(outputFile))
             {
                 await this.DetransformAsync(inputStream, inputOffset, outputStream, outputOffset, extractedSize, chunks, p);
             }
 }
Example #16
 /// <inheritdoc/>
 public async Task DecompressFileAsync(
     string inputFile,
     Int64 inputOffset,
     IReadOnlyList <UInt32> chunkSizes,
     string outputFile,
     Int64 outputOffset,
     NefsProgress p,
     byte[] aes256key = null)
 {
     using (var t = p.BeginTask(1.0f))
         using (var inputStream = this.FileSystem.File.OpenRead(inputFile))
             using (var outputStream = this.FileSystem.File.OpenWrite(outputFile))
             {
                 await this.DecompressAsync(inputStream, inputOffset, chunkSizes, outputStream, outputOffset, p, aes256key);
             }
 }
Example #17
        /// <inheritdoc/>
        public async Task <NefsItemSize> TransformAsync(
            Stream input,
            Int64 inputOffset,
            UInt32 inputLength,
            Stream output,
            Int64 outputOffset,
            NefsDataTransform transform,
            NefsProgress p)
        {
            var chunks       = new List <NefsDataChunk>();
            var rawChunkSize = transform.ChunkSize;

            input.Seek(inputOffset, SeekOrigin.Begin);
            output.Seek(outputOffset, SeekOrigin.Begin);

            // Split file into chunks and transform them
            using (var t = p.BeginTask(1.0f, $"Transforming stream"))
            {
                var cumulativeChunkSize = 0U;
                var bytesRemaining      = (int)inputLength;

                // Determine how many chunks to split file into
                var numChunks = (int)Math.Ceiling(inputLength / (double)rawChunkSize);

                for (var i = 0; i < numChunks; ++i)
                {
                    using (var st = p.BeginSubTask(1.0f / numChunks, $"Transforming chunk {i + 1}/{numChunks}"))
                    {
                        // The last chunk may not be exactly equal to the raw chunk size
                        var nextChunkSize = (int)Math.Min(rawChunkSize, bytesRemaining);
                        bytesRemaining -= nextChunkSize;

                        // Transform chunk and write to output stream
                        var chunkSize = await this.TransformChunkAsync(input, (uint)nextChunkSize, output, transform, p);

                        cumulativeChunkSize += chunkSize;

                        // Record chunk info
                        var chunk = new NefsDataChunk(chunkSize, cumulativeChunkSize, transform);
                        chunks.Add(chunk);
                    }
                }
            }

            // Return item size
            return(new NefsItemSize(inputLength, chunks));
        }
Example #18
        /// <summary>
        /// Looks through the game executable to find header offsets for game.dat files.
        /// </summary>
        /// <returns>A list of game.dat archive sources.</returns>
        private async Task <List <NefsArchiveSource> > FindGameDatHeaderOffsetsAsync(
            string gameDatDir,
            string gameExePath,
            NefsProgress p)
        {
            if (!this.FileSystem.File.Exists(gameExePath))
            {
                this.UiService.ShowMessageBox($"Cannot find executable file: {gameExePath}.");
                return(new List <NefsArchiveSource>());
            }

            // Search for headers in the exe
            using (var t = p.BeginTask(1.0f, "Searching for headers"))
            {
                return(await this.Reader.FindHeadersAsync(gameExePath, gameDatDir, p));
            }
        }
Example #19
        /// <inheritdoc/>
        public async Task <NefsItemSize> CompressAsync(
            Stream input,
            Int64 inputOffset,
            UInt32 inputLength,
            Stream output,
            Int64 outputOffset,
            UInt32 chunkSize,
            NefsProgress p)
        {
            var chunkSizes = new List <UInt32>();

            input.Seek(inputOffset, SeekOrigin.Begin);
            output.Seek(outputOffset, SeekOrigin.Begin);

            // Split file into chunks and compress them
            using (var t = p.BeginTask(1.0f, $"Compressing stream"))
            {
                var lastChunkSize  = 0;
                var totalChunkSize = 0;
                var lastBytesRead  = 0;
                var bytesRemaining = (int)inputLength;

                // Determine how many chunks to split file into
                var numChunks = (int)Math.Ceiling(inputLength / (double)chunkSize);

                for (var i = 0; i < numChunks; ++i)
                {
                    using (var st = p.BeginSubTask(1.0f / numChunks, $"Compressing chunk {i + 1}/{numChunks}"))
                    {
                        var nextBytes = Math.Min(chunkSize, bytesRemaining);

                        // Compress this chunk and write it to the output file
                        (lastBytesRead, lastChunkSize) = await DeflateHelper.DeflateAsync(input, (int)nextBytes, output, p.CancellationToken);

                        totalChunkSize += lastChunkSize;
                        bytesRemaining -= lastBytesRead;

                        // Record the total compressed size after this chunk
                        chunkSizes.Add((UInt32)totalChunkSize);
                    }
                }
            }

            // Return item size
            return(new NefsItemSize(inputLength, chunkSizes));
        }
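Note how p.CancellationToken is forwarded into DeflateHelper.DeflateAsync, so the token that seeded NefsProgress also cancels the per-chunk I/O. A minimal sketch of the same plumbing with plain streams is shown below; CopyToAsync is standard .NET, not a library call.

        // Sketch: the token given to NefsProgress is reused for the async I/O,
        // mirroring how the library forwards p.CancellationToken above.
        // Requires System.IO, System.Threading, and System.Threading.Tasks.
        public static async Task CopyWithProgressAsync(Stream input, Stream output, NefsProgress p)
        {
            using (var t = p.BeginTask(1.0f, "Copying"))
            {
                // Cancelling the CancellationTokenSource behind p.CancellationToken
                // makes this call throw OperationCanceledException.
                await input.CopyToAsync(output, 81920, p.CancellationToken);
            }
        }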
Example #20
        /// <summary>
        /// Reads header part 6 from an input stream.
        /// </summary>
        /// <param name="stream">The stream to read from.</param>
        /// <param name="offset">The offset to the header part from the beginning of the stream.</param>
        /// <param name="size">The size of the header part.</param>
        /// <param name="part2">
        /// Header part 2. This is used to lookup item ids since part 6 metadata does not store item ids.
        /// </param>
        /// <param name="p">Progress info.</param>
        /// <returns>The loaded header part.</returns>
        internal async Task <NefsHeaderPart6> ReadHeaderPart6Async(Stream stream, uint offset, uint size, NefsHeaderPart2 part2, NefsProgress p)
        {
            var entries = new List <NefsHeaderPart6Entry>();
            var ids     = new HashSet <NefsItemId>();

            // Validate inputs
            if (!this.ValidateHeaderPartStream(stream, offset, size, "6"))
            {
                return(new NefsHeaderPart6(entries));
            }

            // Get entries in part 6
            var numEntries  = size / NefsHeaderPart6Entry.Size;
            var entryOffset = offset;

            for (var i = 0; i < numEntries; ++i)
            {
                using (p.BeginTask(1.0f / numEntries))
                {
                    // Make sure there is a corresponding index in part 2
                    if (i >= part2.EntriesByIndex.Count)
                    {
                        Log.LogError($"Could not find matching item entry for part 6 index {i} in part 2.");
                        continue;
                    }

                    // Check for duplicate item ids
                    var id = new NefsItemId(part2.EntriesByIndex[i].Id.Value);
                    if (ids.Contains(id))
                    {
                        Log.LogError($"Found duplicate item id in part 6: {id.Value}");
                        continue;
                    }

                    var entry = new NefsHeaderPart6Entry(id);
                    await FileData.ReadDataAsync(stream, entryOffset, entry, p);

                    ids.Add(id);
                    entries.Add(entry);
                    entryOffset += NefsHeaderPart6Entry.Size;
                }
            }

            return(new NefsHeaderPart6(entries));
        }
Example #21
        /// <summary>
        /// Reads header part 6 from an input stream.
        /// </summary>
        /// <param name="stream">The stream to read from.</param>
        /// <param name="offset">The offset to the header part from the beginning of the stream.</param>
        /// <param name="part1">Header part 1. Used to match part 6 data with an item.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>The loaded header part.</returns>
        internal async Task <Nefs20HeaderPart6> Read20HeaderPart6Async(Stream stream, uint offset, NefsHeaderPart1 part1, NefsProgress p)
        {
            var entries  = new List <Nefs20HeaderPart6Entry>();
            var numItems = part1.EntriesByIndex.Count;
            var size     = numItems * Nefs20HeaderPart6Entry.Size;

            // Validate inputs
            if (!this.ValidateHeaderPartStream(stream, offset, (uint)size, "6"))
            {
                return(new Nefs20HeaderPart6(entries));
            }

            // Get entries in part 6
            var entryOffset = offset;

            for (var i = 0; i < numItems; ++i)
            {
                using (p.BeginTask(1.0f / numItems))
                {
                    // Make sure there is a corresponding index in part 1
                    if (i >= part1.EntriesByIndex.Count)
                    {
                        Log.LogError($"Could not find matching item entry for part 6 index {i} in part 1.");
                        continue;
                    }

                    // Get Guid from part 1. Part 1 entry order matches part 6 entry order.
                    var guid = part1.EntriesByIndex[i].Guid;

                    // Read the entry data
                    var entry = new Nefs20HeaderPart6Entry(guid);
                    await FileData.ReadDataAsync(stream, entryOffset, entry, NefsVersion.Version200, p);

                    entryOffset += Nefs20HeaderPart6Entry.Size;

                    entries.Add(entry);
                }
            }

            return(new Nefs20HeaderPart6(entries));
        }
Example #22
        public void BeginSubTask_Valid_ProgressChangedRaised()
        {
            var ct = new CancellationTokenSource().Token;
            var p  = new NefsProgress(ct);

            p.BeginTask(1.0f, "A");
            {
                NefsProgressEventArgs args = null;
                p.ProgressChanged += (o, e) => args = e;

                p.BeginSubTask(1.0f, "sub");
                this.Verify(p, 0.0f, "A", "sub");
                Assert.Equal("A", args.Message);
                Assert.Equal("sub", args.SubMessage);
                Assert.Equal(0.0f, args.Progress);

                p.EndTask();
                this.Verify(p, 1.0f, "A", "");
            }
            p.EndTask();
            this.Verify(p, 1.0f, "", "");
        }
Example #23
        /// <summary>
        /// Reads header part 2 from an input stream.
        /// </summary>
        /// <param name="stream">The stream to read from.</param>
        /// <param name="offset">The offset to the header part from the beginning of the stream.</param>
        /// <param name="size">The size of the header part.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>The loaded header part.</returns>
        internal async Task <NefsHeaderPart2> ReadHeaderPart2Async(Stream stream, uint offset, uint size, NefsProgress p)
        {
            var entries = new List <NefsHeaderPart2Entry>();
            var ids     = new HashSet <NefsItemId>();

            // Validate inputs
            if (!this.ValidateHeaderPartStream(stream, offset, size, "2"))
            {
                return(new NefsHeaderPart2(entries));
            }

            // Get entries in part 2
            var numEntries  = size / NefsHeaderPart2Entry.Size;
            var entryOffset = offset;

            for (var i = 0; i < numEntries; ++i)
            {
                using (p.BeginTask(1.0f / numEntries))
                {
                    var entry = new NefsHeaderPart2Entry();
                    await FileData.ReadDataAsync(stream, entryOffset, entry, p);

                    // Check for duplicate item ids
                    var id = new NefsItemId(entry.Id.Value);
                    if (ids.Contains(id))
                    {
                        Log.LogError($"Found duplicate item id in part 2: {id.Value}");
                        continue;
                    }

                    ids.Add(id);
                    entries.Add(entry);
                    entryOffset += NefsHeaderPart2Entry.Size;
                }
            }

            return(new NefsHeaderPart2(entries));
        }
Example #24
        /// <summary>
        /// Reads header part 4 from an input stream.
        /// </summary>
        /// <param name="stream">The stream to read from.</param>
        /// <param name="offset">The offset to the header part from the beginning of the stream.</param>
        /// <param name="size">The size of the header part.</param>
        /// <param name="part1">Header part 1.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>The loaded header part.</returns>
        internal async Task <Nefs20HeaderPart4> Read20HeaderPart4Async(Stream stream, uint offset, uint size, NefsHeaderPart1 part1, NefsProgress p)
        {
            var entries     = new List <Nefs20HeaderPart4Entry>();
            var indexLookup = new Dictionary <Guid, uint>();

            // Validate inputs
            if (!this.ValidateHeaderPartStream(stream, offset, size, "4"))
            {
                return(new Nefs20HeaderPart4(entries, indexLookup));
            }

            // Get entries in part 4
            var numEntries  = size / Nefs20HeaderPart4Entry.Size;
            var entryOffset = offset;

            for (var i = 0; i < numEntries; ++i)
            {
                using (p.BeginTask(1.0f / numEntries))
                {
                    var entry = new Nefs20HeaderPart4Entry();
                    await FileData.ReadDataAsync(stream, entryOffset, entry, NefsVersion.Version200, p);

                    entryOffset += Nefs20HeaderPart4Entry.Size;

                    entries.Add(entry);
                }
            }

            // Create a table to allow looking up a part 4 index by item Guid
            foreach (var p1 in part1.EntriesByIndex)
            {
                indexLookup.Add(p1.Guid, p1.IndexPart4);
            }

            return(new Nefs20HeaderPart4(entries, indexLookup));
        }
Example #25
        /// <summary>
        /// Looks through the game executable to find header offsets for game.dat files.
        /// </summary>
        /// <returns>A list of game.dat archive sources.</returns>
        private async Task <List <NefsArchiveSource> > FindGameDatHeaderOffsetsAsync(
            string gameDatDir,
            string gameExePath,
            NefsProgress p)
        {
            if (!this.FileSystem.File.Exists(gameExePath))
            {
                this.UiService.ShowMessageBox($"Cannot find executable file: {gameExePath}.");
                return(new List <NefsArchiveSource>());
            }

            var headerOffsets = new Dictionary <string, ulong>();
            var gameDatFiles  = new List <string>();

            // Read whole game exe into memory
            byte[] gameExeBuffer;
            using (var t = p.BeginTask(0.20f, "Reading game executable"))
                using (var reader = this.FileSystem.File.OpenRead(gameExePath))
                {
                    gameExeBuffer = new byte[reader.Length];
                    await reader.ReadAsync(gameExeBuffer, 0, (int)reader.Length, p.CancellationToken);
                }

            // Search for headers in the exe
            using (var t = p.BeginTask(0.50f, "Searching for headers"))
            {
                var searchOffset = 0UL;

                (string DataFileName, ulong Offset)? header;
                while ((header = await this.Reader.FindHeaderAsync(gameExeBuffer, searchOffset, p)) != null)
                {
                    headerOffsets.Add(header.Value.DataFileName, header.Value.Offset);
                    searchOffset = header.Value.Offset + 4;
                }
            }

            // Try to match offsets to game.dat files
            using (var t = p.BeginTask(0.30f, "Searching for game.dat files"))
            {
                foreach (var file in this.FileSystem.Directory.EnumerateFiles(gameDatDir))
                {
                    var fileName = Path.GetFileName(file);
                    if (headerOffsets.ContainsKey(fileName))
                    {
                        gameDatFiles.Add(file);
                    }
                }
            }

            // Match offsets and files
            if (gameDatFiles.Count != headerOffsets.Count)
            {
                Log.LogError($"Found {gameDatFiles.Count} game*.dat files, but found {headerOffsets.Count} headers in game exectuable.");
            }

            // Build data sources for the game.dat files
            var sources = new List <NefsArchiveSource>();

            for (var i = 0; i < gameDatFiles.Count; ++i)
            {
                var fileName        = Path.GetFileName(gameDatFiles[i]);
                var isDataEncrypted = true;
                var source          = new NefsArchiveSource(gameExePath, headerOffsets[fileName], gameDatFiles[i], isDataEncrypted);
                sources.Add(source);
            }

            return(sources);
        }
Example #26
        /// <summary>
        /// Writes the header to the output stream.
        /// </summary>
        /// <param name="stream">The stream to write to.</param>
        /// <param name="headerOffset">The offset into the stream to begin.</param>
        /// <param name="header">The header to write.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>The async task.</returns>
        private async Task WriteHeaderAsync(Stream stream, UInt64 headerOffset, Nefs20Header header, NefsProgress p)
        {
            // Calc weight of each task (8 parts + intro + table of contents)
            var weight = 1.0f / 10.0f;

            // Get table of contents
            var toc = header.TableOfContents;

            using (var t = p.BeginTask(weight, "Writing header intro"))
            {
                var offset = headerOffset + Nefs20Header.IntroOffset;
                await this.WriteHeaderIntroAsync(stream, offset, header.Intro, p);
            }

            using (var t = p.BeginTask(weight, "Writing header intro table of contents"))
            {
                var offset = headerOffset + Nefs20HeaderIntroToc.Offset;
                await this.WriteHeaderIntroTocAsync(stream, offset, header.TableOfContents, p);
            }

            using (var t = p.BeginTask(weight, "Writing header part 1"))
            {
                var offset = headerOffset + toc.OffsetToPart1;
                await this.WriteHeaderPart1Async(stream, offset, header.Part1, p);
            }

            using (var t = p.BeginTask(weight, "Writing header part 2"))
            {
                var offset = headerOffset + toc.OffsetToPart2;
                await this.WriteHeaderPart2Async(stream, offset, header.Part2, p);
            }

            using (var t = p.BeginTask(weight, "Writing header part 3"))
            {
                var offset = headerOffset + toc.OffsetToPart3;
                await this.WriteHeaderPart3Async(stream, offset, header.Part3, p);
            }

            using (var t = p.BeginTask(weight, "Writing header part 4"))
            {
                var offset = headerOffset + toc.OffsetToPart4;
                await this.WriteHeaderPart4Async(stream, offset, header.Part4, p);
            }

            using (var t = p.BeginTask(weight, "Writing header part 5"))
            {
                var offset = headerOffset + toc.OffsetToPart5;
                await this.WriteHeaderPart5Async(stream, offset, header.Part5, p);
            }

            using (var t = p.BeginTask(weight, "Writing header part 6"))
            {
                var offset = headerOffset + toc.OffsetToPart6;
                await this.WriteHeaderPart6Async(stream, offset, header.Part6, p);
            }

            using (var t = p.BeginTask(weight, "Writing header part 7"))
            {
                var offset = headerOffset + toc.OffsetToPart7;
                await this.WriteHeaderPart7Async(stream, offset, header.Part7, p);
            }

            using (var t = p.BeginTask(weight, "Writing header part 8"))
            {
                var offset = headerOffset + toc.OffsetToPart8;
                await this.WriteHeaderPart8Async(stream, offset, header.Part8, p);
            }
        }
Example #27
        /// <summary>
        /// Writes an archive to the specified stream. A new archive object is returned that
        /// contains the updated header and item metadata.
        /// </summary>
        /// <param name="stream">The stream to write to.</param>
        /// <param name="sourceHeader">Donor header information.</param>
        /// <param name="sourceItems">List of items to write. This list is not modified directly.</param>
        /// <param name="workDir">Temp working directory path.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>A new NefsArchive object containing the updated header and item metadata.</returns>
        private async Task <NefsArchive> WriteArchiveAsync(
            Stream stream,
            Nefs20Header sourceHeader,
            NefsItemList sourceItems,
            string workDir,
            NefsProgress p)
        {
            // Setup task weights
            var taskWeightPrepareItems = 0.45f;
            var taskWeightWriteItems   = 0.45f;
            var taskWeightHeader       = 0.1f;

            // Prepare items for writing
            NefsItemList items;

            using (var t = p.BeginTask(taskWeightPrepareItems, "Preparing items"))
            {
                items = await this.PrepareItemsAsync(sourceItems, workDir, p);
            }

            // Determine number of items
            var numItems = items.Count;

            // Update header parts 3 and 4 first (need to know their sizes)
            var p4 = new Nefs20HeaderPart4(items);
            var p3 = new NefsHeaderPart3(items);

            // Compute header size
            var introSize  = NefsHeaderIntro.Size;
            var tocSize    = Nefs20HeaderIntroToc.Size;
            var p1Size     = numItems * NefsHeaderPart1Entry.Size; // TODO : What about duplicates?
            var p2Size     = numItems * NefsHeaderPart2Entry.Size; // TODO : What about duplicates?
            var p3Size     = p3.Size;
            var p4Size     = p4.Size;
            var p5Size     = NefsHeaderPart5.Size;
            var p6Size     = numItems * Nefs20HeaderPart6Entry.Size;
            var p7Size     = numItems * NefsHeaderPart7Entry.Size;
            var p8Size     = sourceHeader.Intro.HeaderSize - sourceHeader.TableOfContents.OffsetToPart8;
            var headerSize = introSize + tocSize + p1Size + p2Size + p3Size + p4Size + p5Size + p6Size + p7Size + p8Size;

            // Determine first data offset. There are two known offset values. If the header is
            // large enough, the second (larger) offset is used.
            var firstDataOffset = Nefs20Header.DataOffsetDefault;

            if (headerSize > firstDataOffset)
            {
                firstDataOffset = Nefs20Header.DataOffsetLarge;
            }

            // Write item data
            UInt64 archiveSize;

            using (var t = p.BeginTask(taskWeightWriteItems, "Writing items"))
            {
                archiveSize = await this.WriteItemsAsync(stream, items, firstDataOffset, p);
            }

            // Update remaining header data
            var p1 = new NefsHeaderPart1(items, p4);
            var p2 = new NefsHeaderPart2(items, p3);
            var p6 = new Nefs20HeaderPart6(items);
            var p7 = new NefsHeaderPart7(items);

            // Compute total archive size
            var p5 = new NefsHeaderPart5();

            p5.Data0x00_ArchiveSize.Value             = archiveSize;
            p5.Data0x08_ArchiveNameStringOffset.Value = p3.OffsetsByFileName[items.DataFileName];
            p5.Data0x0C_FirstDataOffset.Value         = sourceHeader.Part5.FirstDataOffset;

            // Update header intro
            var intro = new NefsHeaderIntro();

            intro.Data0x00_MagicNumber.Value     = sourceHeader.Intro.MagicNumber;
            intro.Data0x24_AesKeyHexString.Value = sourceHeader.Intro.AesKeyHexString;
            intro.Data0x64_HeaderSize.Value      = (uint)headerSize;
            intro.Data0x68_NefsVersion.Value     = sourceHeader.Intro.NefsVersion;
            intro.Data0x6c_NumberOfItems.Value   = (uint)numItems;
            intro.Data0x70_UnknownZlib.Value     = sourceHeader.Intro.Unknown0x70zlib;
            intro.Data0x78_Unknown.Value         = sourceHeader.Intro.Unknown0x78;

            var toc = new Nefs20HeaderIntroToc();

            toc.Data0x00_NumVolumes.Value    = sourceHeader.TableOfContents.NumVolumes;
            toc.Data0x02_HashBlockSize.Value = sourceHeader.TableOfContents.Data0x02_HashBlockSize.Value;
            toc.Data0x04_OffsetToPart1.Value = introSize + tocSize;
            toc.Data0x0c_OffsetToPart2.Value = toc.OffsetToPart1 + (uint)p1Size;
            toc.Data0x14_OffsetToPart3.Value = toc.OffsetToPart2 + (uint)p2Size;
            toc.Data0x18_OffsetToPart4.Value = toc.OffsetToPart3 + (uint)p3Size;
            toc.Data0x1c_OffsetToPart5.Value = toc.OffsetToPart4 + (uint)p4Size;
            toc.Data0x08_OffsetToPart6.Value = toc.OffsetToPart5 + (uint)p5Size;
            toc.Data0x10_OffsetToPart7.Value = toc.OffsetToPart6 + (uint)p6Size;
            toc.Data0x20_OffsetToPart8.Value = toc.OffsetToPart7 + (uint)p7Size;
            toc.Data0x24_Unknown.Value       = sourceHeader.TableOfContents.Unknown0x24;

            // Part 8 - not writing anything for now
            var p8 = new NefsHeaderPart8(p8Size);

            // Create new header object
            var header = new Nefs20Header(intro, toc, p1, p2, p3, p4, p5, p6, p7, p8);

            // Write the header
            using (var t = p.BeginTask(taskWeightHeader, "Writing header"))
            {
                await this.WriteHeaderAsync(stream, 0, header, p);
            }

            // Update hash
            await this.UpdateHashAsync(stream, 0, header, p);

            // Create new archive object
            return(new NefsArchive(header, items));
        }
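The task weights here (0.45 for preparing items, 0.45 for writing items, 0.1 for the header) are chosen to reflect the relative cost of each phase and sum to 1.0, so Percent moves in proportion to the expensive steps. The same idea in a compact, hedged form; the phase delegates and weights below are placeholders, not library calls.

        // Sketch: runs named phases whose weights reflect their relative cost.
        // Requires System and System.Threading.Tasks; the weights are illustrative.
        public static async Task RunWeightedPhasesAsync(NefsProgress p, Func<Task> prepare, Func<Task> write, Func<Task> finish)
        {
            using (var t = p.BeginTask(0.45f, "Preparing items")) { await prepare(); }
            using (var t = p.BeginTask(0.45f, "Writing items")) { await write(); }
            using (var t = p.BeginTask(0.10f, "Writing header")) { await finish(); }
        }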
Example #28
        /// <summary>
        /// Reads a version 2.0 header from an input stream.
        /// </summary>
        /// <param name="stream">The stream to read from.</param>
        /// <param name="offset">The offset to the header from the beginning of the stream.</param>
        /// <param name="part6Stream">The stream that contains part 6/7 data.</param>
        /// <param name="part6Offset">The offset to the start of part 6/7 data.</param>
        /// <param name="intro">The pre-parsed header intro.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>The loaded header.</returns>
        internal async Task <Nefs20Header> Read20HeaderAsync(
            Stream stream,
            ulong offset,
            Stream part6Stream,
            ulong part6Offset,
            NefsHeaderIntro intro,
            NefsProgress p)
        {
            Nefs20HeaderIntroToc toc   = null;
            NefsHeaderPart1      part1 = null;
            NefsHeaderPart2      part2 = null;
            NefsHeaderPart3      part3 = null;
            Nefs20HeaderPart4    part4 = null;
            NefsHeaderPart5      part5 = null;
            Nefs20HeaderPart6    part6 = null;
            NefsHeaderPart7      part7 = null;
            NefsHeaderPart8      part8 = null;

            // Calc weight of each task (8 parts + table of contents)
            var weight = 1.0f / 10.0f;

            using (p.BeginTask(weight, "Reading header intro table of contents"))
            {
                toc = await this.Read20HeaderIntroTocAsync(stream, Nefs20HeaderIntroToc.Offset, p);
            }

            using (p.BeginTask(weight, "Reading header part 1"))
            {
                part1 = await this.ReadHeaderPart1Async(stream, toc.OffsetToPart1, toc.Part1Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 2"))
            {
                part2 = await this.ReadHeaderPart2Async(stream, toc.OffsetToPart2, toc.Part2Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 3"))
            {
                part3 = await this.ReadHeaderPart3Async(stream, toc.OffsetToPart3, toc.Part3Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 4"))
            {
                part4 = await this.Read20HeaderPart4Async(stream, toc.OffsetToPart4, toc.Part4Size, part1, p);
            }

            using (p.BeginTask(weight, "Reading header part 5"))
            {
                part5 = await this.ReadHeaderPart5Async(stream, toc.OffsetToPart5, NefsHeaderPart5.Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 6"))
            {
                part6 = await this.Read20HeaderPart6Async(part6Stream, (uint)part6Offset + toc.OffsetToPart6, part1, p);
            }

            using (p.BeginTask(weight, "Reading header part 7"))
            {
                var numEntries = (uint)part2.EntriesByIndex.Count;
                part7 = await this.ReadHeaderPart7Async(part6Stream, (uint)part6Offset + toc.OffsetToPart7, numEntries, p);
            }

            using (p.BeginTask(weight, "Reading header part 8"))
            {
                var part8Size = intro.HeaderSize - toc.OffsetToPart8;
                part8 = await this.ReadHeaderPart8Async(stream, toc.OffsetToPart8, part8Size, p);
            }

            // Validate header hash
            if (!this.ValidateHash(stream, offset, intro))
            {
                Log.LogWarning("Header hash does not match expected value.");
            }

            // The header stream must be disposed
            stream.Dispose();

            return(new Nefs20Header(intro, toc, part1, part2, part3, part4, part5, part6, part7, part8));
        }
Example #29
        /// <summary>
        /// Reads the header from an input stream.
        /// </summary>
        /// <param name="originalStream">The stream to read from.</param>
        /// <param name="offset">The offset to the header from the beginning of the stream.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>The loaded header.</returns>
        internal async Task <NefsHeader> ReadHeaderAsync(Stream originalStream, ulong offset, NefsProgress p)
        {
            Stream             stream;
            NefsHeaderIntro    intro = null;
            NefsHeaderIntroToc toc   = null;
            NefsHeaderPart1    part1 = null;
            NefsHeaderPart2    part2 = null;
            NefsHeaderPart3    part3 = null;
            NefsHeaderPart4    part4 = null;
            NefsHeaderPart5    part5 = null;
            NefsHeaderPart6    part6 = null;
            NefsHeaderPart7    part7 = null;
            NefsHeaderPart8    part8 = null;

            // Calc weight of each task (8 parts + intro + table of contents)
            var weight = 1.0f / 10.0f;

            using (p.BeginTask(weight, "Reading header intro"))
            {
                // Decrypt header if needed
                (intro, stream) = await this.ReadHeaderIntroAsync(originalStream, offset, p);
            }

            using (p.BeginTask(weight, "Reading header intro table of contents"))
            {
                toc = await this.ReadHeaderIntroTocAsync(stream, NefsHeaderIntroToc.Offset, p);
            }

            using (p.BeginTask(weight, "Reading header part 1"))
            {
                part1 = await this.ReadHeaderPart1Async(stream, toc.OffsetToPart1, toc.Part1Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 2"))
            {
                part2 = await this.ReadHeaderPart2Async(stream, toc.OffsetToPart2, toc.Part2Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 3"))
            {
                part3 = await this.ReadHeaderPart3Async(stream, toc.OffsetToPart3, toc.Part3Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 4"))
            {
                part4 = await this.ReadHeaderPart4Async(stream, toc.OffsetToPart4, toc.Part4Size, part1, part2, p);
            }

            using (p.BeginTask(weight, "Reading header part 5"))
            {
                part5 = await this.ReadHeaderPart5Async(stream, toc.OffsetToPart5, toc.Part5Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 6"))
            {
                if (toc.OffsetToPart6 == 0)
                {
                    // game.dat files don't have part 6
                    Log.LogDebug("Archive does not have header part 6.");
                    part6 = new NefsHeaderPart6(new List <NefsHeaderPart6Entry>());
                }
                else
                {
                    part6 = await this.ReadHeaderPart6Async(stream, toc.OffsetToPart6, toc.Part6Size, part2, p);
                }
            }

            using (p.BeginTask(weight, "Reading header part 7"))
            {
                if (toc.OffsetToPart6 == 0)
                {
                    // game.dat files don't have part 7. Still checking if part 6 offset is 0. For
                    // some reason, the part 7 offset still has a value, but doesn't appear to be a
                    // correct one, so skipping part 7 as well
                    Log.LogDebug("Archive does not have header part 7.");
                    part7 = new NefsHeaderPart7(new List <NefsHeaderPart7Entry>());
                }
                else
                {
                    part7 = await this.ReadHeaderPart7Async(stream, toc.OffsetToPart7, toc.Part7Size, p);
                }
            }

            using (p.BeginTask(weight, "Reading header part 8"))
            {
                var part8Size = intro.HeaderSize - toc.OffsetToPart8;
                part8 = await this.ReadHeaderPart8Async(stream, toc.OffsetToPart8, part8Size, p);
            }

            // Validate header hash
            if (!this.ValidateHash(stream, offset, intro))
            {
                Log.LogWarning("Header hash does not match expected value.");
            }

            // The header stream must be disposed
            stream.Dispose();

            return(new NefsHeader(intro, toc, part1, part2, part3, part4, part5, part6, part7, part8));
        }
Example #30
        /// <summary>
        /// Reads header part 4 from an input stream.
        /// </summary>
        /// <param name="stream">The stream to read from.</param>
        /// <param name="offset">The offset to the header part from the beginning of the stream.</param>
        /// <param name="size">The size of the header part.</param>
        /// <param name="part1">Header part 1.</param>
        /// <param name="part2">Header part 2.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>The loaded header part.</returns>
        internal async Task <NefsHeaderPart4> ReadHeaderPart4Async(
            Stream stream,
            uint offset,
            uint size,
            NefsHeaderPart1 part1,
            NefsHeaderPart2 part2,
            NefsProgress p)
        {
            var entries = new Dictionary <uint, NefsHeaderPart4Entry>();

            // Validate inputs
            if (!this.ValidateHeaderPartStream(stream, offset, size, "4"))
            {
                return(new NefsHeaderPart4(entries));
            }

            // Get the chunk sizes for each item in the archive
            var numItems = part1.EntriesById.Count;

            for (var i = 0; i < numItems; ++i)
            {
                using (p.BeginTask(1.0f / numItems))
                {
                    var id = new NefsItemId((uint)i);

                    // Part 1 entry
                    if (!part1.EntriesById.ContainsKey(id))
                    {
                        Log.LogError($"Failed to find part 1 entry for item {id} when reading part 4.");
                        continue;
                    }

                    var p1 = part1.EntriesById[id];

                    // Part 2 entry
                    if (!part2.EntriesById.ContainsKey(id))
                    {
                        Log.LogError($"Failed to find part 2 entry for item {id} when reading part 4.");
                        continue;
                    }

                    var p2 = part2.EntriesById[id];

                    // Create part 4 entry
                    var entry = new NefsHeaderPart4Entry(id);

                    // Check if item has part 4 entry
                    if (p1.IndexIntoPart4 == 0xFFFFFFFF)
                    {
                        // Item is most likely not compressed or has no data
                        continue;
                    }

                    if (p2.Data0x0c_ExtractedSize.Value == 0)
                    {
                        // Item is probably a directory
                        continue;
                    }

                    // Get number of chunks
                    var numChunks = (int)Math.Ceiling(p2.Data0x0c_ExtractedSize.Value / (double)NefsHeader.ChunkSize);
                    if (numChunks == 0)
                    {
                        Log.LogError($"Item {p1.Id} contains no compressed chunks but was expected to.");
                        continue;
                    }

                    // Seek stream to start of chunk sizes for this item
                    var itemOffset = offset + p1.OffsetIntoPart4;
                    if ((long)itemOffset + NefsHeaderPart4.DataSize > stream.Length)
                    {
                        Log.LogError($"Item {p1.Id} has part 4 entry that is outside the bounds of header part 4.");
                        continue;
                    }

                    // Seek stream
                    stream.Seek((long)itemOffset, SeekOrigin.Begin);

                    // Process the chunk sizes
                    for (var chunkIdx = 0; chunkIdx < numChunks; ++chunkIdx)
                    {
                        var bytes = new byte[NefsHeaderPart4.DataSize];
                        await stream.ReadAsync(bytes, 0, NefsHeaderPart4.DataSize);

                        entry.ChunkSizes.Add(BitConverter.ToUInt32(bytes, 0));
                    }

                    // Record entry
                    entries.Add(p1.IndexIntoPart4, entry);
                }
            }

            // Return part 4
            return(new NefsHeaderPart4(entries));
        }
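The chunk sizes stored in part 4 are running totals: CompressAsync above records the total compressed size after each chunk, and TransformAsync records cumulativeChunkSize for each NefsDataChunk. A small illustrative helper (not a library method) for recovering the size of each individual chunk from those cumulative values:

        // Illustrative only: converts the cumulative sizes read from part 4 into
        // per-chunk sizes by differencing consecutive entries.
        // Requires System.Collections.Generic.
        public static List<uint> ToIndividualChunkSizes(IReadOnlyList<uint> cumulativeSizes)
        {
            var sizes = new List<uint>(cumulativeSizes.Count);
            var previous = 0u;

            foreach (var cumulative in cumulativeSizes)
            {
                sizes.Add(cumulative - previous);
                previous = cumulative;
            }

            return sizes;
        }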