public void NefsHeaderPart1_MultipleItems_EntriesPopulated()
        {
            var items = new NefsItemList(@"C:\archive.nefs");

            var file1DataSource = new NefsItemListDataSource(items, 123, new NefsItemSize(456, new List <UInt32> {
                11, 12, 13
            }));
            var file1 = new NefsItem(new NefsItemId(0), "file1", new NefsItemId(0), NefsItemType.File, file1DataSource, TestHelpers.CreateUnknownData());

            items.Add(file1);

            var file2DataSource = new NefsItemListDataSource(items, 456, new NefsItemSize(789, new List <UInt32> {
                14, 15, 16
            }));
            var file2 = new NefsItem(new NefsItemId(1), "file2", new NefsItemId(1), NefsItemType.File, file2DataSource, TestHelpers.CreateUnknownData());

            items.Add(file2);

            var dir1DataSource = new NefsEmptyDataSource();
            var dir1           = new NefsItem(new NefsItemId(2), "dir1", new NefsItemId(2), NefsItemType.Directory, dir1DataSource, TestHelpers.CreateUnknownData());

            items.Add(dir1);

            var p4 = new NefsHeaderPart4(items);
            var p1 = new NefsHeaderPart1(items, p4);

            Assert.Equal(3, p1.EntriesById.Count);

            /*
             * dir1
             */

            // Offset to data and index to p4 are both 0 since this is a directory
            Assert.Equal(2, (int)p1.EntriesById[dir1.Id].Id.Value);
            Assert.Equal(0, (int)p1.EntriesById[dir1.Id].OffsetToData);
            Assert.Equal(0, (int)p1.EntriesById[dir1.Id].MetadataIndex);
            Assert.Equal(0, (int)p1.EntriesById[dir1.Id].IndexIntoPart4);

            /*
             * file1
             */

            Assert.Equal(0, (int)p1.EntriesById[file1.Id].Id.Value);
            Assert.Equal(123, (int)p1.EntriesById[file1.Id].OffsetToData);
            Assert.Equal(1, (int)p1.EntriesById[file1.Id].MetadataIndex);
            Assert.Equal(0, (int)p1.EntriesById[file1.Id].IndexIntoPart4);

            /*
             * file2
             */

            Assert.Equal(1, (int)p1.EntriesById[file2.Id].Id.Value);
            Assert.Equal(456, (int)p1.EntriesById[file2.Id].OffsetToData);
            Assert.Equal(2, (int)p1.EntriesById[file2.Id].MetadataIndex);

            // There are 3 chunks for file1, so file2's chunks start right after that (hence p4
            // index == 3)
            Assert.Equal(3, (int)p1.EntriesById[file2.Id].IndexIntoPart4);
        }
        public void NefsHeaderPart1_NoItems_EntriesEmpty()
        {
            var items = new NefsItemList(@"C:\archive.nefs");
            var p4    = new NefsHeaderPart4(items);
            var p1    = new NefsHeaderPart1(items, p4);

            Assert.Empty(p1.EntriesById);
        }
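
The IndexIntoPart4 values asserted above come from how part 4 stores chunk-size lists back to back: a file's index is the running count of chunks belonging to the files written before it, while directories have no chunk data and simply get 0. A minimal sketch of that accumulation (illustration only, with a made-up chunk-count list rather than the real item types):

        // Hypothetical illustration: chunk counts of the files, in item order
        // (file1 has 3 chunks, file2 has 3).
        var fileChunkCounts = new List<int> { 3, 3 };

        var indicesIntoPart4 = new List<int>();
        var runningTotal = 0;

        foreach (var count in fileChunkCounts)
        {
            indicesIntoPart4.Add(runningTotal);
            runningTotal += count;
        }

        // indicesIntoPart4 is now [0, 3]: file1's chunk sizes start at index 0 and
        // file2's start right after file1's three entries, at index 3 — matching
        // the assertions in the test above.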
Example #3
        /// <summary>
        /// Writes the header part to an output stream.
        /// </summary>
        /// <param name="stream">The stream to write to.</param>
        /// <param name="offset">The absolute offset in the stream to write at.</param>
        /// <param name="part1">The data to write.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>An async task.</returns>
        internal async Task WriteHeaderPart1Async(Stream stream, UInt64 offset, NefsHeaderPart1 part1, NefsProgress p)
        {
            foreach (var entry in part1.EntriesByIndex)
            {
                await FileData.WriteDataAsync(stream, offset, entry, NefsVersion.Version200, p);

                offset += NefsHeaderPart1Entry.Size;
            }
        }
Example #4
        public void NefsHeaderPart1_MultipleItems_EntriesPopulated()
        {
            var items = new NefsItemList(@"C:\archive.nefs");

            var file1Chunks = NefsDataChunk.CreateChunkList(new List <UInt32> {
                11, 12, 13
            }, TestHelpers.TestTransform);
            var file1DataSource = new NefsItemListDataSource(items, 123, new NefsItemSize(456, file1Chunks));
            var file1           = TestHelpers.CreateFile(0, 0, "file1", file1DataSource);

            items.Add(file1);

            var file2Chunks = NefsDataChunk.CreateChunkList(new List <UInt32> {
                14, 15, 16
            }, TestHelpers.TestTransform);
            var file2DataSource = new NefsItemListDataSource(items, 456, new NefsItemSize(789, file2Chunks));
            var file2           = TestHelpers.CreateFile(1, 1, "file2", file2DataSource);

            items.Add(file2);

            var dir1 = TestHelpers.CreateDirectory(2, 2, "dir1");

            items.Add(dir1);

            var p4 = new Nefs20HeaderPart4(items);
            var p1 = new NefsHeaderPart1(items, p4);

            Assert.Equal(3, p1.EntriesByGuid.Count);
            Assert.Equal(3, p1.EntriesByIndex.Count);

            /*
             * dir1
             */

            // Offset to data and index to p4 are both 0 since this is a directory
            Assert.Equal(2, (int)p1.EntriesByGuid[dir1.Guid].Id.Value);
            Assert.Equal(0, (int)p1.EntriesByGuid[dir1.Guid].OffsetToData);
            Assert.Equal(0, (int)p1.EntriesByGuid[dir1.Guid].IndexPart2);
            Assert.Equal(0, (int)p1.EntriesByGuid[dir1.Guid].IndexPart4);

            /*
             * file1
             */

            Assert.Equal(0, (int)p1.EntriesByGuid[file1.Guid].Id.Value);
            Assert.Equal(123, (int)p1.EntriesByGuid[file1.Guid].OffsetToData);
            Assert.Equal(1, (int)p1.EntriesByGuid[file1.Guid].IndexPart2);
            Assert.Equal(0, (int)p1.EntriesByGuid[file1.Guid].IndexPart4);

            /*
             * file2
             */

            Assert.Equal(1, (int)p1.EntriesByGuid[file2.Guid].Id.Value);
            Assert.Equal(456, (int)p1.EntriesByGuid[file2.Guid].OffsetToData);
            Assert.Equal(2, (int)p1.EntriesByGuid[file2.Guid].IndexPart2);

            // There are 3 chunks for file1, so file2's chunks start right after that (hence p4
            // index == 3)
            Assert.Equal(3, (int)p1.EntriesByGuid[file2.Guid].IndexPart4);
        }
Example #5
        /// <summary>
        /// Reads header part 6 from an input stream.
        /// </summary>
        /// <param name="stream">The stream to read from.</param>
        /// <param name="offset">The offset to the header part from the beginning of the stream.</param>
        /// <param name="part1">Header part 1. Used to match part 6 data with an item.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>The loaded header part.</returns>
        internal async Task <Nefs20HeaderPart6> Read20HeaderPart6Async(Stream stream, uint offset, NefsHeaderPart1 part1, NefsProgress p)
        {
            var entries  = new List <Nefs20HeaderPart6Entry>();
            var numItems = part1.EntriesByIndex.Count;
            var size     = numItems * Nefs20HeaderPart6Entry.Size;

            // Validate inputs
            if (!this.ValidateHeaderPartStream(stream, offset, (uint)size, "6"))
            {
                return(new Nefs20HeaderPart6(entries));
            }

            // Get entries in part 6
            var entryOffset = offset;

            for (var i = 0; i < numItems; ++i)
            {
                using (p.BeginTask(1.0f / numItems))
                {
                    // Make sure there is a corresponding index in part 1
                    if (i >= part1.EntriesByIndex.Count)
                    {
                        Log.LogError($"Could not find matching item entry for part 6 index {i} in part 1.");
                        continue;
                    }

                    // Get Guid from part 1. Part 1 entry order matches part 6 entry order.
                    var guid = part1.EntriesByIndex[i].Guid;

                    // Read the entry data
                    var entry = new Nefs20HeaderPart6Entry(guid);
                    await FileData.ReadDataAsync(stream, entryOffset, entry, NefsVersion.Version200, p);

                    entryOffset += Nefs20HeaderPart6Entry.Size;

                    entries.Add(entry);
                }
            }

            return(new Nefs20HeaderPart6(entries));
        }
Example #6
        /// <summary>
        /// Reads header part 4 from an input stream.
        /// </summary>
        /// <param name="stream">The stream to read from.</param>
        /// <param name="offset">The offset to the header part from the beginning of the stream.</param>
        /// <param name="size">The size of the header part.</param>
        /// <param name="part1">Header part 1.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>The loaded header part.</returns>
        internal async Task <Nefs20HeaderPart4> Read20HeaderPart4Async(Stream stream, uint offset, uint size, NefsHeaderPart1 part1, NefsProgress p)
        {
            var entries     = new List <Nefs20HeaderPart4Entry>();
            var indexLookup = new Dictionary <Guid, uint>();

            // Validate inputs
            if (!this.ValidateHeaderPartStream(stream, offset, size, "4"))
            {
                return(new Nefs20HeaderPart4(entries, indexLookup));
            }

            // Get entries in part 4
            var numEntries  = size / Nefs20HeaderPart4Entry.Size;
            var entryOffset = offset;

            for (var i = 0; i < numEntries; ++i)
            {
                using (p.BeginTask(1.0f / numEntries))
                {
                    var entry = new Nefs20HeaderPart4Entry();
                    await FileData.ReadDataAsync(stream, entryOffset, entry, NefsVersion.Version200, p);

                    entryOffset += Nefs20HeaderPart4Entry.Size;

                    entries.Add(entry);
                }
            }

            // Create a table to allow looking up a part 4 index by item Guid
            foreach (var p1 in part1.EntriesByIndex)
            {
                indexLookup.Add(p1.Guid, p1.IndexPart4);
            }

            return(new Nefs20HeaderPart4(entries, indexLookup));
        }
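
With the index lookup in place, an item's chunk-size entries can be walked by starting at its part 1 index and stepping through the part 4 entries. A rough sketch, given the part1 passed to this method and the Nefs20HeaderPart4 it returns (called part4 here); the chunk count is assumed to be already known (it normally comes from part 2's extracted size and the archive chunk size), and the whole snippet is illustrative rather than the library's own accessor surface:

        // Hypothetical: read the cumulative chunk sizes for the first item.
        var p1Entry = part1.EntriesByIndex[0];
        var start = (int)p1Entry.IndexPart4;
        var numChunks = 3; // assumed for illustration

        for (var c = 0; c < numChunks; ++c)
        {
            // Each part 4 entry holds the running total of compressed bytes
            // through that chunk.
            var cumulativeSize = part4.EntriesByIndex[start + c].CumulativeChunkSize;
        }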
Example #7
        /// <summary>
        /// Reads a version 2.0 header from an input stream.
        /// </summary>
        /// <param name="stream">The stream to read from.</param>
        /// <param name="offset">The offset to the header from the beginning of the stream.</param>
        /// <param name="part6Stream">The stream that contains part 6/7 data.</param>
        /// <param name="part6Offset">The offset to the start of part 6/7 data.</param>
        /// <param name="intro">The pre-parsed header intro.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>The loaded header.</returns>
        internal async Task <Nefs20Header> Read20HeaderAsync(
            Stream stream,
            ulong offset,
            Stream part6Stream,
            ulong part6Offset,
            NefsHeaderIntro intro,
            NefsProgress p)
        {
            Nefs20HeaderIntroToc toc   = null;
            NefsHeaderPart1      part1 = null;
            NefsHeaderPart2      part2 = null;
            NefsHeaderPart3      part3 = null;
            Nefs20HeaderPart4    part4 = null;
            NefsHeaderPart5      part5 = null;
            Nefs20HeaderPart6    part6 = null;
            NefsHeaderPart7      part7 = null;
            NefsHeaderPart8      part8 = null;

            // Calc weight of each task (8 parts + table of contents)
            var weight = 1.0f / 10.0f;

            using (p.BeginTask(weight, "Reading header intro table of contents"))
            {
                toc = await this.Read20HeaderIntroTocAsync(stream, Nefs20HeaderIntroToc.Offset, p);
            }

            using (p.BeginTask(weight, "Reading header part 1"))
            {
                part1 = await this.ReadHeaderPart1Async(stream, toc.OffsetToPart1, toc.Part1Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 2"))
            {
                part2 = await this.ReadHeaderPart2Async(stream, toc.OffsetToPart2, toc.Part2Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 3"))
            {
                part3 = await this.ReadHeaderPart3Async(stream, toc.OffsetToPart3, toc.Part3Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 4"))
            {
                part4 = await this.Read20HeaderPart4Async(stream, toc.OffsetToPart4, toc.Part4Size, part1, p);
            }

            using (p.BeginTask(weight, "Reading header part 5"))
            {
                part5 = await this.ReadHeaderPart5Async(stream, toc.OffsetToPart5, NefsHeaderPart5.Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 6"))
            {
                part6 = await this.Read20HeaderPart6Async(part6Stream, (uint)part6Offset + toc.OffsetToPart6, part1, p);
            }

            using (p.BeginTask(weight, "Reading header part 7"))
            {
                var numEntries = (uint)part2.EntriesByIndex.Count;
                part7 = await this.ReadHeaderPart7Async(part6Stream, (uint)part6Offset + toc.OffsetToPart7, numEntries, p);
            }

            using (p.BeginTask(weight, "Reading header part 8"))
            {
                var part8Size = intro.HeaderSize - toc.OffsetToPart8;
                part8 = await this.ReadHeaderPart8Async(stream, toc.OffsetToPart8, part8Size, p);
            }

            // Validate header hash
            if (!this.ValidateHash(stream, offset, intro))
            {
                Log.LogWarning("Header hash does not match expected value.");
            }

            // The header stream must be disposed
            stream.Dispose();

            return(new Nefs20Header(intro, toc, part1, part2, part3, part4, part5, part6, part7, part8));
        }
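
A hypothetical driver for this 2.0 read path, assuming internal access (e.g., from within the library or its test project), a fileSystem dependency like the one used by the reader tests below, and that the part 6/7 data lives in the same stream as the rest of the header; archives that split that data out would pass a different part6Stream/part6Offset:

        var reader = new NefsReader(fileSystem);
        var progress = new NefsProgress();

        using (var file = File.OpenRead(@"C:\archive.nefs"))
        {
            // ReadHeaderIntroAsync decrypts the header if necessary and returns
            // the stream to keep reading from.
            var (intro, headerStream) = await reader.ReadHeaderIntroAsync(file, 0, progress);
            var header = await reader.Read20HeaderAsync(headerStream, 0, headerStream, 0, intro, progress);

            // Read20HeaderAsync disposes headerStream itself.
        }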
Example #8
        public async Task WriteHeaderPart1Async_ValidData_Written()
        {
            var items = new NefsItemList(@"C:\hi.txt");
            var file1 = TestHelpers.CreateItem(0, 0, "file1", 10, 11, new List <UInt32> {
                12, 13
            }, NefsItemType.File);
            var file2 = TestHelpers.CreateItem(1, 1, "file2", 20, 21, new List <UInt32> {
                22, 23
            }, NefsItemType.File);
            var dir1 = TestHelpers.CreateItem(2, 2, "dir1", 0, 0, new List <UInt32> {
                0
            }, NefsItemType.Directory);

            items.Add(file1);
            items.Add(file2);
            items.Add(dir1);

            var part4 = new Nefs20HeaderPart4(items);
            var part1 = new NefsHeaderPart1(items, part4);

            /*
             * Write
             */

            var writer = this.CreateWriter();

            byte[] buffer;
            var    offset = 5;

            using (var ms = new MemoryStream())
            {
                await writer.WriteHeaderPart1Async(ms, (uint)offset, part1, new NefsProgress());

                buffer = ms.ToArray();
            }

            /*
             * Verify
             */

            /*
             * file1
             */

            // Data offset (8 bytes)
            Assert.Equal(10, BitConverter.ToInt64(buffer, offset + 0));

            // Index part 2
            Assert.Equal(1, BitConverter.ToInt32(buffer, offset + 8));

            // Index part 4
            Assert.Equal(0, BitConverter.ToInt32(buffer, offset + 0x0c));

            // Item id
            Assert.Equal(0, BitConverter.ToInt32(buffer, offset + 0x10));

            /*
             * file2
             */

            offset += (int)NefsHeaderPart1Entry.Size;

            // Data offset (8 bytes)
            Assert.Equal(20, BitConverter.ToInt64(buffer, offset + 0));

            // Index part 2
            Assert.Equal(2, BitConverter.ToInt32(buffer, offset + 8));

            // Index part 4
            Assert.Equal(2, BitConverter.ToInt32(buffer, offset + 0x0c));

            // Item id
            Assert.Equal(1, BitConverter.ToInt32(buffer, offset + 0x10));

            /*
             * dir1
             */

            offset += (int)NefsHeaderPart1Entry.Size;

            // Data offset (8 bytes)
            Assert.Equal(0, BitConverter.ToInt64(buffer, offset + 0));

            // Index part 2
            Assert.Equal(0, BitConverter.ToInt32(buffer, offset + 8));

            // Index part 4
            Assert.Equal(0, BitConverter.ToInt32(buffer, offset + 0x0c));

            // Item id
            Assert.Equal(2, BitConverter.ToInt32(buffer, offset + 0x10));
        }
Example #9
        /// <summary>
        /// Reads header part 4 from an input stream.
        /// </summary>
        /// <param name="stream">The stream to read from.</param>
        /// <param name="offset">The offset to the header part from the beginning of the stream.</param>
        /// <param name="size">The size of the header part.</param>
        /// <param name="part1">Header part 1.</param>
        /// <param name="part2">Header part 2.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>The loaded header part.</returns>
        internal async Task <NefsHeaderPart4> ReadHeaderPart4Async(
            Stream stream,
            uint offset,
            uint size,
            NefsHeaderPart1 part1,
            NefsHeaderPart2 part2,
            NefsProgress p)
        {
            var entries = new Dictionary <uint, NefsHeaderPart4Entry>();

            // Validate inputs
            if (!this.ValidateHeaderPartStream(stream, offset, size, "4"))
            {
                return(new NefsHeaderPart4(entries));
            }

            // Get the chunk sizes for each item in the archive
            var numItems = part1.EntriesById.Count;

            for (var i = 0; i < numItems; ++i)
            {
                using (p.BeginTask(1.0f / numItems))
                {
                    var id = new NefsItemId((uint)i);

                    // Part 1 entry
                    if (!part1.EntriesById.ContainsKey(id))
                    {
                        Log.LogError($"Failed to find part 1 entry for item {id} when reading part 4.");
                        continue;
                    }

                    var p1 = part1.EntriesById[id];

                    // Part 2 entry
                    if (!part2.EntriesById.ContainsKey(id))
                    {
                        Log.LogError($"Failed to find part 2 entry for item {id} when reading part 4.");
                        continue;
                    }

                    var p2 = part2.EntriesById[id];

                    // Create part 4 entry
                    var entry = new NefsHeaderPart4Entry(id);

                    // Check if item has part 4 entry
                    if (p1.IndexIntoPart4 == 0xFFFFFFFF)
                    {
                        // Item is most likely not compressed or has no data
                        continue;
                    }

                    if (p2.Data0x0c_ExtractedSize.Value == 0)
                    {
                        // Item is probably a directory
                        continue;
                    }

                    // Get number of chunks
                    var numChunks = (int)Math.Ceiling(p2.Data0x0c_ExtractedSize.Value / (double)NefsHeader.ChunkSize);
                    if (numChunks == 0)
                    {
                        Log.LogError($"Item {p1.Id} contains no compressed chunks but was expected to.");
                        continue;
                    }

                    // Seek stream to start of chunk sizes for this item
                    var itemOffset = offset + p1.OffsetIntoPart4;
                    if ((long)itemOffset + NefsHeaderPart4.DataSize > stream.Length)
                    {
                        Log.LogError($"Item {p1.Id} has part 4 entry that is outside the bounds of header part 4.");
                        continue;
                    }

                    // Seek stream
                    stream.Seek((long)itemOffset, SeekOrigin.Begin);

                    // Process the chunk sizes
                    for (var chunkIdx = 0; chunkIdx < numChunks; ++chunkIdx)
                    {
                        var bytes = new byte[NefsHeaderPart4.DataSize];
                        await stream.ReadAsync(bytes, 0, NefsHeaderPart4.DataSize);

                        entry.ChunkSizes.Add(BitConverter.ToUInt32(bytes, 0));
                    }

                    // Record entry
                    entries.Add(p1.IndexIntoPart4, entry);
                }
            }

            // Return part 4
            return(new NefsHeaderPart4(entries));
        }
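
The chunk count above is simply the extracted size divided by the fixed chunk size, rounded up. A quick worked example; the 0x10000-byte chunk size is assumed here purely for illustration:

        // Hypothetical numbers: a 150,000 byte file split into 0x10000 (65,536)
        // byte chunks needs ceil(150000 / 65536) = 3 chunk-size entries in part 4.
        const double chunkSize = 0x10000;
        var extractedSize = 150000u;
        var numChunks = (int)Math.Ceiling(extractedSize / chunkSize); // == 3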
Example #10
        /// <summary>
        /// Reads the header from an input stream.
        /// </summary>
        /// <param name="originalStream">The stream to read from.</param>
        /// <param name="offset">The offset to the header from the beginning of the stream.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>The loaded header.</returns>
        internal async Task <NefsHeader> ReadHeaderAsync(Stream originalStream, ulong offset, NefsProgress p)
        {
            Stream             stream;
            NefsHeaderIntro    intro = null;
            NefsHeaderIntroToc toc   = null;
            NefsHeaderPart1    part1 = null;
            NefsHeaderPart2    part2 = null;
            NefsHeaderPart3    part3 = null;
            NefsHeaderPart4    part4 = null;
            NefsHeaderPart5    part5 = null;
            NefsHeaderPart6    part6 = null;
            NefsHeaderPart7    part7 = null;
            NefsHeaderPart8    part8 = null;

            // Calc weight of each task (8 parts + intro + table of contents)
            var weight = 1.0f / 10.0f;

            using (p.BeginTask(weight, "Reading header intro"))
            {
                // Decrypt header if needed
                (intro, stream) = await this.ReadHeaderIntroAsync(originalStream, offset, p);
            }

            using (p.BeginTask(weight, "Reading header intro table of contents"))
            {
                toc = await this.ReadHeaderIntroTocAsync(stream, NefsHeaderIntroToc.Offset, p);
            }

            using (p.BeginTask(weight, "Reading header part 1"))
            {
                part1 = await this.ReadHeaderPart1Async(stream, toc.OffsetToPart1, toc.Part1Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 2"))
            {
                part2 = await this.ReadHeaderPart2Async(stream, toc.OffsetToPart2, toc.Part2Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 3"))
            {
                part3 = await this.ReadHeaderPart3Async(stream, toc.OffsetToPart3, toc.Part3Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 4"))
            {
                part4 = await this.ReadHeaderPart4Async(stream, toc.OffsetToPart4, toc.Part4Size, part1, part2, p);
            }

            using (p.BeginTask(weight, "Reading header part 5"))
            {
                part5 = await this.ReadHeaderPart5Async(stream, toc.OffsetToPart5, toc.Part5Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 6"))
            {
                if (toc.OffsetToPart6 == 0)
                {
                    // game.dat files don't have part 6
                    Log.LogDebug("Archive does not have header part 6.");
                    part6 = new NefsHeaderPart6(new List <NefsHeaderPart6Entry>());
                }
                else
                {
                    part6 = await this.ReadHeaderPart6Async(stream, toc.OffsetToPart6, toc.Part6Size, part2, p);
                }
            }

            using (p.BeginTask(weight, "Reading header part 7"))
            {
                if (toc.OffsetToPart6 == 0)
                {
                    // game.dat files don't have part 7. We still key off the part 6 offset here:
                    // for some reason the part 7 offset has a value, but it doesn't appear to be
                    // correct, so part 7 is skipped as well.
                    Log.LogDebug("Archive does not have header part 7.");
                    part7 = new NefsHeaderPart7(new List <NefsHeaderPart7Entry>());
                }
                else
                {
                    part7 = await this.ReadHeaderPart7Async(stream, toc.OffsetToPart7, toc.Part7Size, p);
                }
            }

            using (p.BeginTask(weight, "Reading header part 8"))
            {
                var part8Size = intro.HeaderSize - toc.OffsetToPart8;
                part8 = await this.ReadHeaderPart8Async(stream, toc.OffsetToPart8, part8Size, p);
            }

            // Validate header hash
            if (!this.ValidateHash(stream, offset, intro))
            {
                Log.LogWarning("Header hash does not match expected value.");
            }

            // The header stream must be disposed
            stream.Dispose();

            return(new NefsHeader(intro, toc, part1, part2, part3, part4, part5, part6, part7, part8));
        }
Example #11
        public async Task ReadHeaderPart4Async_ValidData_DataRead()
        {
            // Item 1 has 2 chunk sizes
            var e1p1 = new NefsHeaderPart1Entry(Guid.NewGuid());

            e1p1.Data0x10_Id.Value         = 0;
            e1p1.Data0x0c_IndexPart4.Value = 0;

            // Item 2 has 1 chunk size
            var e2p1 = new NefsHeaderPart1Entry(Guid.NewGuid());

            e2p1.Data0x10_Id.Value         = 1;
            e2p1.Data0x0c_IndexPart4.Value = 2;

            // Item 3 has no chunks
            var e3p1 = new NefsHeaderPart1Entry(Guid.NewGuid());

            e3p1.Data0x10_Id.Value         = 2;
            e3p1.Data0x0c_IndexPart4.Value = 0xFFFFFFFF;

            // Item 4 is a directory (extracted size == 0)
            var e4p1 = new NefsHeaderPart1Entry(Guid.NewGuid());

            e4p1.Data0x10_Id.Value         = 3;
            e4p1.Data0x0c_IndexPart4.Value = 0;

            // Item 5 has 3 chunks
            var e5p1 = new NefsHeaderPart1Entry(Guid.NewGuid());

            e5p1.Data0x10_Id.Value         = 4;
            e5p1.Data0x0c_IndexPart4.Value = 3;

            var part1Items = new List <NefsHeaderPart1Entry>
            {
                e1p1,
                e2p1,
                e3p1,
                e4p1,
                e5p1,
            };

            var part1 = new NefsHeaderPart1(part1Items);

            // Setup data
            byte[] bytes =
            {
                // Offset
                0xFF, 0xFF,

                // Item 1
                0x11, 0x12, 0x13, 0x14,
                0x15, 0x16, 0x17, 0x18,

                // Item 2
                0x21, 0x22, 0x23, 0x24,

                // Item 5
                0x31, 0x32, 0x33, 0x34,
                0x35, 0x36, 0x37, 0x38,
                0x39, 0x3A, 0x3B, 0x3C,

                // Last four bytes
                0x01, 0x02, 0x03, 0x04,
            };

            var stream = new MemoryStream(bytes);
            var reader = new NefsReader(this.fileSystem);
            var size   = (uint)28;
            var offset = (uint)2;

            // Test
            var part4 = await reader.Read20HeaderPart4Async(stream, offset, size, part1, this.p);

            // Verify
            Assert.Equal(7, part4.EntriesByIndex.Count);

            // Item 1
            Assert.Equal((uint)0x14131211, part4.EntriesByIndex[0].CumulativeChunkSize);
            Assert.Equal((uint)0x18171615, part4.EntriesByIndex[1].CumulativeChunkSize);

            // Item 2
            Assert.Equal((uint)0x24232221, part4.EntriesByIndex[2].CumulativeChunkSize);

            // Item 5
            Assert.Equal((uint)0x34333231, part4.EntriesByIndex[3].CumulativeChunkSize);
            Assert.Equal((uint)0x38373635, part4.EntriesByIndex[4].CumulativeChunkSize);
            Assert.Equal((uint)0x3C3B3A39, part4.EntriesByIndex[5].CumulativeChunkSize);
        }
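
The values asserted above are cumulative: each part 4 entry stores the running total of compressed bytes through that chunk, so an individual chunk's size is the difference between consecutive entries. A small illustrative conversion with made-up numbers:

        // Hypothetical cumulative sizes for one item's three chunks.
        var cumulative = new List<uint> { 0x100, 0x250, 0x300 };

        var chunkSizes = new List<uint>();
        var previous = 0u;

        foreach (var c in cumulative)
        {
            chunkSizes.Add(c - previous);
            previous = c;
        }

        // chunkSizes is now { 0x100, 0x150, 0xB0 }.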
Example #12
        /// <summary>
        /// Writes an archive to the specified stream. A new archive object is returned that
        /// contains the updated header and item metadata.
        /// </summary>
        /// <param name="stream">The stream to write to.</param>
        /// <param name="sourceHeader">Donor header information.</param>
        /// <param name="sourceItems">List of items to write. This list is not modified directly.</param>
        /// <param name="workDir">Temp working directory path.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>A new NefsArchive object containing the updated header and item metadata.</returns>
        private async Task <NefsArchive> WriteArchiveAsync(
            Stream stream,
            Nefs20Header sourceHeader,
            NefsItemList sourceItems,
            string workDir,
            NefsProgress p)
        {
            // Setup task weights
            var taskWeightPrepareItems = 0.45f;
            var taskWeightWriteItems   = 0.45f;
            var taskWeightHeader       = 0.1f;

            // Prepare items for writing
            NefsItemList items;

            using (var t = p.BeginTask(taskWeightPrepareItems, "Preparing items"))
            {
                items = await this.PrepareItemsAsync(sourceItems, workDir, p);
            }

            // Determine number of items
            var numItems = items.Count;

            // Update header parts 3 and 4 first (need to know their sizes)
            var p4 = new Nefs20HeaderPart4(items);
            var p3 = new NefsHeaderPart3(items);

            // Compute header size
            var introSize  = NefsHeaderIntro.Size;
            var tocSize    = Nefs20HeaderIntroToc.Size;
            var p1Size     = numItems * NefsHeaderPart1Entry.Size; // TODO : What about duplicates?
            var p2Size     = numItems * NefsHeaderPart2Entry.Size; // TODO : What about duplicates?
            var p3Size     = p3.Size;
            var p4Size     = p4.Size;
            var p5Size     = NefsHeaderPart5.Size;
            var p6Size     = numItems * Nefs20HeaderPart6Entry.Size;
            var p7Size     = numItems * NefsHeaderPart7Entry.Size;
            var p8Size     = sourceHeader.Intro.HeaderSize - sourceHeader.TableOfContents.OffsetToPart8;
            var headerSize = introSize + tocSize + p1Size + p2Size + p3Size + p4Size + p5Size + p6Size + p7Size + p8Size;

            // Determine first data offset. There are two known offset values. If the header is
            // large enough, the second (larger) offset is used.
            var firstDataOffset = Nefs20Header.DataOffsetDefault;

            if (headerSize > firstDataOffset)
            {
                firstDataOffset = Nefs20Header.DataOffsetLarge;
            }

            // Write item data
            UInt64 archiveSize;

            using (var t = p.BeginTask(taskWeightWriteItems, "Writing items"))
            {
                archiveSize = await this.WriteItemsAsync(stream, items, firstDataOffset, p);
            }

            // Update remaining header data
            var p1 = new NefsHeaderPart1(items, p4);
            var p2 = new NefsHeaderPart2(items, p3);
            var p6 = new Nefs20HeaderPart6(items);
            var p7 = new NefsHeaderPart7(items);

            // Compute total archive size
            var p5 = new NefsHeaderPart5();

            p5.Data0x00_ArchiveSize.Value             = archiveSize;
            p5.Data0x08_ArchiveNameStringOffset.Value = p3.OffsetsByFileName[items.DataFileName];
            p5.Data0x0C_FirstDataOffset.Value         = sourceHeader.Part5.FirstDataOffset;

            // Update header intro
            var intro = new NefsHeaderIntro();

            intro.Data0x00_MagicNumber.Value     = sourceHeader.Intro.MagicNumber;
            intro.Data0x24_AesKeyHexString.Value = sourceHeader.Intro.AesKeyHexString;
            intro.Data0x64_HeaderSize.Value      = (uint)headerSize;
            intro.Data0x68_NefsVersion.Value     = sourceHeader.Intro.NefsVersion;
            intro.Data0x6c_NumberOfItems.Value   = (uint)numItems;
            intro.Data0x70_UnknownZlib.Value     = sourceHeader.Intro.Unknown0x70zlib;
            intro.Data0x78_Unknown.Value         = sourceHeader.Intro.Unknown0x78;

            var toc = new Nefs20HeaderIntroToc();

            toc.Data0x00_NumVolumes.Value    = sourceHeader.TableOfContents.NumVolumes;
            toc.Data0x02_HashBlockSize.Value = sourceHeader.TableOfContents.Data0x02_HashBlockSize.Value;
            toc.Data0x04_OffsetToPart1.Value = introSize + tocSize;
            toc.Data0x0c_OffsetToPart2.Value = toc.OffsetToPart1 + (uint)p1Size;
            toc.Data0x14_OffsetToPart3.Value = toc.OffsetToPart2 + (uint)p2Size;
            toc.Data0x18_OffsetToPart4.Value = toc.OffsetToPart3 + (uint)p3Size;
            toc.Data0x1c_OffsetToPart5.Value = toc.OffsetToPart4 + (uint)p4Size;
            toc.Data0x08_OffsetToPart6.Value = toc.OffsetToPart5 + (uint)p5Size;
            toc.Data0x10_OffsetToPart7.Value = toc.OffsetToPart6 + (uint)p6Size;
            toc.Data0x20_OffsetToPart8.Value = toc.OffsetToPart7 + (uint)p7Size;
            toc.Data0x24_Unknown.Value       = sourceHeader.TableOfContents.Unknown0x24;

            // Part 8 - not writing anything for now
            var p8 = new NefsHeaderPart8(p8Size);

            // Create new header object
            var header = new Nefs20Header(intro, toc, p1, p2, p3, p4, p5, p6, p7, p8);

            // Write the header
            using (var t = p.BeginTask(taskWeightHeader, "Writing header"))
            {
                await this.WriteHeaderAsync(stream, 0, header, p);
            }

            // Update hash
            await this.UpdateHashAsync(stream, 0, header, p);

            // Create new archive object
            return(new NefsArchive(header, items));
        }
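
The table-of-contents fields above chain the part offsets one after another, starting right after the intro and TOC. A worked illustration of that chaining with made-up sizes (only the pattern matters, not the values):

        // Hypothetical sizes, in bytes.
        uint introSize = 0x80, tocSize = 0x28;
        uint p1Size = 0x3C, p2Size = 0x30, p3Size = 0x20, p4Size = 0x18, p5Size = 0x10;

        var offsetToPart1 = introSize + tocSize;    // 0xA8
        var offsetToPart2 = offsetToPart1 + p1Size; // 0xE4
        var offsetToPart3 = offsetToPart2 + p2Size; // 0x114
        var offsetToPart4 = offsetToPart3 + p3Size; // 0x134
        var offsetToPart5 = offsetToPart4 + p4Size; // 0x14C
        var offsetToPart6 = offsetToPart5 + p5Size; // 0x15C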