Example #1
        public void NefsHeaderPart2_NoItems_EntriesEmpty()
        {
            var items = new NefsItemList(@"C:\archive.nefs");
            var p3    = new NefsHeaderPart3(items);
            var p2    = new NefsHeaderPart2(items, p3);

            Assert.Empty(p2.EntriesByIndex);
        }
Example #2
        /// <summary>
        /// Writes the header part to an output stream.
        /// </summary>
        /// <param name="stream">The stream to write to.</param>
        /// <param name="offset">The absolute offset in the stream to write at.</param>
        /// <param name="part2">The data to write.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>An async task.</returns>
        internal async Task WriteHeaderPart2Async(Stream stream, UInt64 offset, NefsHeaderPart2 part2, NefsProgress p)
        {
            foreach (var entry in part2.EntriesByIndex)
            {
                await FileData.WriteDataAsync(stream, offset, entry, NefsVersion.Version200, p);

                offset += NefsHeaderPart2Entry.Size;
            }
        }
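
For context, a minimal hypothetical call site for the method above. This is illustrative only: "writer" stands for whatever instance exposes this internal method (the tests obtain one via CreateWriter()), and the offset of 0 is arbitrary; the real callers live alongside WriteHeaderAsync and in the write test in Example #5 below.

            var items = new NefsItemList(@"C:\archive.nefs");
            var part3 = new NefsHeaderPart3(items);
            var part2 = new NefsHeaderPart2(items, part3);

            using (var ms = new MemoryStream())
            {
                // Entries are written back to back, so entry i starts at
                // offset + i * NefsHeaderPart2Entry.Size.
                await writer.WriteHeaderPart2Async(ms, 0, part2, new NefsProgress());
            }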
Example #3
        public void NefsHeaderPart2_MultipleItems_EntriesPopulated()
        {
            var items = new NefsItemList(@"C:\archive.nefs");

            var file1Chunks = NefsDataChunk.CreateChunkList(new List <UInt32> { 11, 12, 13 }, TestHelpers.TestTransform);
            var file1DataSource = new NefsItemListDataSource(items, 123, new NefsItemSize(456, file1Chunks));
            var file1           = TestHelpers.CreateFile(0, 0, "file1", file1DataSource);

            items.Add(file1);

            var file2Chunks = NefsDataChunk.CreateChunkList(new List <UInt32> { 14, 15, 16 }, TestHelpers.TestTransform);
            var file2DataSource = new NefsItemListDataSource(items, 456, new NefsItemSize(789, file2Chunks));
            var file2           = TestHelpers.CreateFile(1, 1, "file2", file2DataSource);

            items.Add(file2);

            var dir1 = TestHelpers.CreateDirectory(2, 2, "dir1");

            items.Add(dir1);

            var file3Chunks = NefsDataChunk.CreateChunkList(new List <UInt32> { 22, 23, 24 }, TestHelpers.TestTransform);
            var file3DataSource = new NefsItemListDataSource(items, 222, new NefsItemSize(333, file3Chunks));
            var file3           = TestHelpers.CreateFile(3, dir1.Id.Value, "file3", file3DataSource);

            items.Add(file3);

            var p3 = new NefsHeaderPart3(items);
            var p2 = new NefsHeaderPart2(items, p3);

            Assert.Equal(4, p2.EntriesByIndex.Count);

            // NOTE: Part 3 is the strings table, so the offset into part 3 must account for the
            // null-terminated file/dir names. The strings table is alphabetized, and the data
            // file name is also added to it.

            // NOTE: Part 2 is ordered by a depth-first traversal of the file tree, with items
            // sorted by file name.
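
            // Worked example of the part 3 offsets asserted below (strings table is
            // alphabetized and null terminated, and includes the data file name):
            //   "archive.nefs" -> 0   (12 chars + null = 13 bytes)
            //   "dir1"         -> 13  (4 + 1 = 5 bytes)
            //   "file1"        -> 18  (5 + 1 = 6 bytes)
            //   "file2"        -> 24  (5 + 1 = 6 bytes)
            //   "file3"        -> 30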

            /*
             * dir1
             */

            Assert.Equal(2, (int)p2.EntriesByIndex[0].Id.Value);
            Assert.Equal(2, (int)p2.EntriesByIndex[0].Data0x00_DirectoryId.Value);
            Assert.Equal(3, (int)p2.EntriesByIndex[0].Data0x04_FirstChildId.Value);
            Assert.Equal(0, (int)p2.EntriesByIndex[0].Data0x0c_ExtractedSize.Value);
            Assert.Equal(13, (int)p2.EntriesByIndex[0].Data0x08_OffsetIntoPart3.Value);

            /*
             * file3
             */

            Assert.Equal(3, (int)p2.EntriesByIndex[1].Id.Value);
            Assert.Equal(2, (int)p2.EntriesByIndex[1].Data0x00_DirectoryId.Value);
            Assert.Equal(3, (int)p2.EntriesByIndex[1].Data0x04_FirstChildId.Value);
            Assert.Equal(333, (int)p2.EntriesByIndex[1].Data0x0c_ExtractedSize.Value);
            Assert.Equal(30, (int)p2.EntriesByIndex[1].Data0x08_OffsetIntoPart3.Value);

            /*
             * file1
             */

            Assert.Equal(0, (int)p2.EntriesByIndex[2].Id.Value);
            Assert.Equal(0, (int)p2.EntriesByIndex[2].DirectoryId.Value);
            Assert.Equal(0, (int)p2.EntriesByIndex[2].FirstChildId.Value);
            Assert.Equal(456, (int)p2.EntriesByIndex[2].ExtractedSize);
            Assert.Equal(18, (int)p2.EntriesByIndex[2].OffsetIntoPart3);

            /*
             * file2
             */

            Assert.Equal(1, (int)p2.EntriesByIndex[3].Id.Value);
            Assert.Equal(1, (int)p2.EntriesByIndex[3].DirectoryId.Value);
            Assert.Equal(1, (int)p2.EntriesByIndex[3].FirstChildId.Value);
            Assert.Equal(789, (int)p2.EntriesByIndex[3].ExtractedSize);
            Assert.Equal(24, (int)p2.EntriesByIndex[3].OffsetIntoPart3);
        }
Example #4
        /// <summary>
        /// Reads a version 2.0 header from an input stream.
        /// </summary>
        /// <param name="stream">The stream to read from.</param>
        /// <param name="offset">The offset to the header from the beginning of the stream.</param>
        /// <param name="part6Stream">The stream that contains part 6/7 data.</param>
        /// <param name="part6Offset">The offset to the start of part 6/7 data.</param>
        /// <param name="intro">The pre-parsed header intro.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>The loaded header.</returns>
        internal async Task <Nefs20Header> Read20HeaderAsync(
            Stream stream,
            ulong offset,
            Stream part6Stream,
            ulong part6Offset,
            NefsHeaderIntro intro,
            NefsProgress p)
        {
            Nefs20HeaderIntroToc toc   = null;
            NefsHeaderPart1      part1 = null;
            NefsHeaderPart2      part2 = null;
            NefsHeaderPart3      part3 = null;
            Nefs20HeaderPart4    part4 = null;
            NefsHeaderPart5      part5 = null;
            Nefs20HeaderPart6    part6 = null;
            NefsHeaderPart7      part7 = null;
            NefsHeaderPart8      part8 = null;

            // Calc weight of each task (8 parts + table of contents)
            var weight = 1.0f / 10.0f;

            using (p.BeginTask(weight, "Reading header intro table of contents"))
            {
                toc = await this.Read20HeaderIntroTocAsync(stream, Nefs20HeaderIntroToc.Offset, p);
            }

            using (p.BeginTask(weight, "Reading header part 1"))
            {
                part1 = await this.ReadHeaderPart1Async(stream, toc.OffsetToPart1, toc.Part1Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 2"))
            {
                part2 = await this.ReadHeaderPart2Async(stream, toc.OffsetToPart2, toc.Part2Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 3"))
            {
                part3 = await this.ReadHeaderPart3Async(stream, toc.OffsetToPart3, toc.Part3Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 4"))
            {
                part4 = await this.Read20HeaderPart4Async(stream, toc.OffsetToPart4, toc.Part4Size, part1, p);
            }

            using (p.BeginTask(weight, "Reading header part 5"))
            {
                part5 = await this.ReadHeaderPart5Async(stream, toc.OffsetToPart5, NefsHeaderPart5.Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 6"))
            {
                part6 = await this.Read20HeaderPart6Async(part6Stream, (uint)part6Offset + toc.OffsetToPart6, part1, p);
            }

            using (p.BeginTask(weight, "Reading header part 7"))
            {
                var numEntries = (uint)part2.EntriesByIndex.Count;
                part7 = await this.ReadHeaderPart7Async(part6Stream, (uint)part6Offset + toc.OffsetToPart7, numEntries, p);
            }

            using (p.BeginTask(weight, "Reading header part 8"))
            {
                var part8Size = intro.HeaderSize - toc.OffsetToPart8;
                part8 = await this.ReadHeaderPart8Async(stream, toc.OffsetToPart8, part8Size, p);
            }

            // Validate header hash
            if (!this.ValidateHash(stream, offset, intro))
            {
                Log.LogWarning("Header hash does not match expected value.");
            }

            // The header stream must be disposed
            stream.Dispose();

            return(new Nefs20Header(intro, toc, part1, part2, part3, part4, part5, part6, part7, part8));
        }
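
A hypothetical call site for the reader above, pieced together from the generic reader in Example #9 (which shows ReadHeaderIntroAsync returning the decrypted header stream). Treat the stream and offset passed for part 6/7 as assumptions; the real dispatch logic is not part of this snippet.

            // Illustrative only -- the part 6/7 stream and offset choices are assumptions.
            var (intro, headerStream) = await this.ReadHeaderIntroAsync(originalStream, 0, p);
            var header = await this.Read20HeaderAsync(headerStream, 0, originalStream, 0, intro, p);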
Example #5
        public async Task WriteHeaderPart2Async_ValidData_Written()
        {
            var items = new NefsItemList(@"C:\hi.txt");
            var file1 = TestHelpers.CreateItem(0, 0, "file1", 10, 11, new List <UInt32> { 12, 13 }, NefsItemType.File);
            var file2 = TestHelpers.CreateItem(1, 1, "file2", 20, 21, new List <UInt32> { 22, 23 }, NefsItemType.File);
            var dir1 = TestHelpers.CreateItem(2, 2, "dir1", 0, 0, new List <UInt32> { 0 }, NefsItemType.Directory);
            var file3 = TestHelpers.CreateItem(3, 2, "file3", 30, 31, new List <UInt32> { 32, 33 }, NefsItemType.File);

            items.Add(file1);
            items.Add(file2);
            items.Add(dir1);
            items.Add(file3);

            var part3 = new NefsHeaderPart3(items);
            var part2 = new NefsHeaderPart2(items, part3);

            /*
             * Write
             */

            var writer = this.CreateWriter();

            byte[] buffer;
            var    offset = 5;

            using (var ms = new MemoryStream())
            {
                await writer.WriteHeaderPart2Async(ms, (uint)offset, part2, new NefsProgress());

                buffer = ms.ToArray();
            }

            /*
             * Verify
             */
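
            // Each part 2 entry occupies NefsHeaderPart2Entry.Size bytes; the reads below
            // exercise its layout: 0x00 directory id, 0x04 first child id,
            // 0x08 offset into part 3, 0x0c extracted size, 0x10 item id.
            //
            // Part 3 string offsets for this archive (alphabetized, null terminated):
            // "dir1" -> 0, "file1" -> 5, "file2" -> 11, "file3" -> 17, and the data
            // file name "hi.txt" presumably at 23.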

            /*
             * dir1
             */

            // Dir id
            Assert.Equal(2, BitConverter.ToInt32(buffer, offset + 0));

            // First child id
            Assert.Equal(3, BitConverter.ToInt32(buffer, offset + 0x04));

            // Part 3 offset
            Assert.Equal(0, BitConverter.ToInt32(buffer, offset + 0x08));

            // Extracted size
            Assert.Equal(0, BitConverter.ToInt32(buffer, offset + 0x0c));

            // Item id
            Assert.Equal(2, BitConverter.ToInt32(buffer, offset + 0x10));

            /*
             * file3
             */

            offset += (int)NefsHeaderPart2Entry.Size;

            // Dir id
            Assert.Equal(2, BitConverter.ToInt32(buffer, offset + 0));

            // First child id
            Assert.Equal(3, BitConverter.ToInt32(buffer, offset + 0x04));

            // Part 3 offset
            Assert.Equal(17, BitConverter.ToInt32(buffer, offset + 0x08));

            // Extracted size
            Assert.Equal(31, BitConverter.ToInt32(buffer, offset + 0x0c));

            // Item id
            Assert.Equal(3, BitConverter.ToInt32(buffer, offset + 0x10));

            /*
             * file1
             */

            offset += (int)NefsHeaderPart2Entry.Size;

            // Dir id
            Assert.Equal(0, BitConverter.ToInt32(buffer, offset + 0));

            // First child id
            Assert.Equal(0, BitConverter.ToInt32(buffer, offset + 0x04));

            // Part 3 offset
            Assert.Equal(5, BitConverter.ToInt32(buffer, offset + 0x08));

            // Extracted size
            Assert.Equal(11, BitConverter.ToInt32(buffer, offset + 0x0c));

            // Item id
            Assert.Equal(0, BitConverter.ToInt32(buffer, offset + 0x10));

            /*
             * file2
             */

            offset += (int)NefsHeaderPart2Entry.Size;

            // Dir id
            Assert.Equal(1, BitConverter.ToInt32(buffer, offset + 0));

            // First child id
            Assert.Equal(1, BitConverter.ToInt32(buffer, offset + 0x04));

            // Part 3 offset
            Assert.Equal(11, BitConverter.ToInt32(buffer, offset + 0x08));

            // Extracted size
            Assert.Equal(21, BitConverter.ToInt32(buffer, offset + 0x0c));

            // Item id
            Assert.Equal(1, BitConverter.ToInt32(buffer, offset + 0x10));
        }
Example #6
        public void NefsHeaderPart2_MultipleItems_EntriesPopulated()
        {
            var items = new NefsItemList(@"C:\archive.nefs");

            var file1DataSource = new NefsItemListDataSource(items, 123, new NefsItemSize(456, new List <UInt32> { 11, 12, 13 }));
            var file1 = new NefsItem(new NefsItemId(0), "file1", new NefsItemId(0), NefsItemType.File, file1DataSource, TestHelpers.CreateUnknownData());

            items.Add(file1);

            var file2DataSource = new NefsItemListDataSource(items, 456, new NefsItemSize(789, new List <UInt32> { 14, 15, 16 }));
            var file2 = new NefsItem(new NefsItemId(1), "file2", new NefsItemId(1), NefsItemType.File, file2DataSource, TestHelpers.CreateUnknownData());

            items.Add(file2);

            var dir1DataSource = new NefsEmptyDataSource();
            var dir1           = new NefsItem(new NefsItemId(2), "dir1", new NefsItemId(2), NefsItemType.Directory, dir1DataSource, TestHelpers.CreateUnknownData());

            items.Add(dir1);

            var file3DataSource = new NefsItemListDataSource(items, 222, new NefsItemSize(333, new List <UInt32> { 22, 23, 24 }));
            var file3 = new NefsItem(new NefsItemId(3), "file3", dir1.Id, NefsItemType.File, file3DataSource, TestHelpers.CreateUnknownData());

            items.Add(file3);

            var p3 = new NefsHeaderPart3(items);
            var p2 = new NefsHeaderPart2(items, p3);

            Assert.Equal(4, p2.EntriesById.Count);

            // NOTE: Part 3 is the strings table, so the offset into part 3 must account for the
            // null-terminated file/dir names. The strings table is alphabetized, and the data
            // file name is also added to it.
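
            // For reference, the resulting part 3 offsets (alphabetized, null terminated,
            // including the data file name): "archive.nefs" -> 0, "dir1" -> 13,
            // "file1" -> 18, "file2" -> 24, "file3" -> 30.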

            /*
             * file1
             */

            Assert.Equal(0, (int)p2.EntriesById[file1.Id].Id.Value);
            Assert.Equal(0, (int)p2.EntriesById[file1.Id].DirectoryId.Value);
            Assert.Equal(0, (int)p2.EntriesById[file1.Id].FirstChildId.Value);
            Assert.Equal(456, (int)p2.EntriesById[file1.Id].ExtractedSize);
            Assert.Equal(18, (int)p2.EntriesById[file1.Id].OffsetIntoPart3);

            /*
             * file2
             */

            Assert.Equal(1, (int)p2.EntriesById[file2.Id].Id.Value);
            Assert.Equal(1, (int)p2.EntriesById[file2.Id].DirectoryId.Value);
            Assert.Equal(1, (int)p2.EntriesById[file2.Id].FirstChildId.Value);
            Assert.Equal(789, (int)p2.EntriesById[file2.Id].ExtractedSize);
            Assert.Equal(24, (int)p2.EntriesById[file2.Id].OffsetIntoPart3);

            /*
             * dir1
             */

            Assert.Equal(2, (int)p2.EntriesById[dir1.Id].Id.Value);
            Assert.Equal(2, (int)p2.EntriesById[dir1.Id].Data0x00_DirectoryId.Value);
            Assert.Equal(3, (int)p2.EntriesById[dir1.Id].Data0x04_FirstChildId.Value);
            Assert.Equal(0, (int)p2.EntriesById[dir1.Id].Data0x0c_ExtractedSize.Value);
            Assert.Equal(13, (int)p2.EntriesById[dir1.Id].Data0x08_OffsetIntoPart3.Value);

            /*
             * file3
             */

            Assert.Equal(3, (int)p2.EntriesById[file3.Id].Id.Value);
            Assert.Equal(2, (int)p2.EntriesById[file3.Id].Data0x00_DirectoryId.Value);
            Assert.Equal(3, (int)p2.EntriesById[file3.Id].Data0x04_FirstChildId.Value);
            Assert.Equal(333, (int)p2.EntriesById[file3.Id].Data0x0c_ExtractedSize.Value);
            Assert.Equal(30, (int)p2.EntriesById[file3.Id].Data0x08_OffsetIntoPart3.Value);
        }
Example #7
        /// <summary>
        /// Reads header part 6 from an input stream.
        /// </summary>
        /// <param name="stream">The stream to read from.</param>
        /// <param name="offset">The offset to the header part from the beginning of the stream.</param>
        /// <param name="size">The size of the header part.</param>
        /// <param name="part2">
        /// Header part 2. This is used to lookup item ids since part 6 metadata does not store item ids.
        /// </param>
        /// <param name="p">Progress info.</param>
        /// <returns>The loaded header part.</returns>
        internal async Task <NefsHeaderPart6> ReadHeaderPart6Async(Stream stream, uint offset, uint size, NefsHeaderPart2 part2, NefsProgress p)
        {
            var entries = new List <NefsHeaderPart6Entry>();
            var ids     = new HashSet <NefsItemId>();

            // Validate inputs
            if (!this.ValidateHeaderPartStream(stream, offset, size, "6"))
            {
                return(new NefsHeaderPart6(entries));
            }

            // Get entries in part 6
            var numEntries  = size / NefsHeaderPart6Entry.Size;
            var entryOffset = offset;

            for (var i = 0; i < numEntries; ++i)
            {
                using (p.BeginTask(1.0f / numEntries))
                {
                    // Make sure there is a corresponding index in part 2
                    if (i >= part2.EntriesByIndex.Count)
                    {
                        Log.LogError($"Could not find matching item entry for part 6 index {i} in part 2.");
                        continue;
                    }

                    // Check for duplicate item ids
                    var id = new NefsItemId(part2.EntriesByIndex[i].Id.Value);
                    if (ids.Contains(id))
                    {
                        Log.LogError($"Found duplicate item id in part 6: {id.Value}");
                        continue;
                    }

                    var entry = new NefsHeaderPart6Entry(id);
                    await FileData.ReadDataAsync(stream, entryOffset, entry, p);

                    ids.Add(id);
                    entries.Add(entry);
                    entryOffset += NefsHeaderPart6Entry.Size;
                }
            }

            return(new NefsHeaderPart6(entries));
        }
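
The id lookup above implies part 6 entries are index-parallel to part 2. A brief illustration using the archive from Example #3, where the part 2 order is dir1, file3, file1, file2:

            // Part 6 entry i describes the same item as part 2 entry i, so for the
            // Example #3 archive: entry 0 -> dir1 (id 2), entry 1 -> file3 (id 3),
            // entry 2 -> file1 (id 0), entry 3 -> file2 (id 1).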
Example #8
        /// <summary>
        /// Reads header part 4 from an input stream.
        /// </summary>
        /// <param name="stream">The stream to read from.</param>
        /// <param name="offset">The offset to the header part from the beginning of the stream.</param>
        /// <param name="size">The size of the header part.</param>
        /// <param name="part1">Header part 1.</param>
        /// <param name="part2">Header part 2.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>The loaded header part.</returns>
        internal async Task <NefsHeaderPart4> ReadHeaderPart4Async(
            Stream stream,
            uint offset,
            uint size,
            NefsHeaderPart1 part1,
            NefsHeaderPart2 part2,
            NefsProgress p)
        {
            var entries = new Dictionary <uint, NefsHeaderPart4Entry>();

            // Validate inputs
            if (!this.ValidateHeaderPartStream(stream, offset, size, "4"))
            {
                return(new NefsHeaderPart4(entries));
            }

            // Get the chunk sizes for each item in the archive
            var numItems = part1.EntriesById.Count;

            for (var i = 0; i < numItems; ++i)
            {
                using (p.BeginTask(1.0f / numItems))
                {
                    var id = new NefsItemId((uint)i);

                    // Part 1 entry
                    if (!part1.EntriesById.ContainsKey(id))
                    {
                        Log.LogError($"Failed to find part 1 entry for item {id} when reading part 4.");
                        continue;
                    }

                    var p1 = part1.EntriesById[id];

                    // Part 2 entry
                    if (!part2.EntriesById.ContainsKey(id))
                    {
                        Log.LogError($"Failed to find part 2 entry for item {id} when reading part 4.");
                        continue;
                    }

                    var p2 = part2.EntriesById[id];

                    // Create part 4 entry
                    var entry = new NefsHeaderPart4Entry(id);

                    // Check if item has part 4 entry
                    if (p1.IndexIntoPart4 == 0xFFFFFFFF)
                    {
                        // Item is most likely not compressed or has no data
                        continue;
                    }

                    if (p2.Data0x0c_ExtractedSize.Value == 0)
                    {
                        // Item is probably a directory
                        continue;
                    }

                    // Get number of chunks
                    var numChunks = (int)Math.Ceiling(p2.Data0x0c_ExtractedSize.Value / (double)NefsHeader.ChunkSize);
                    if (numChunks == 0)
                    {
                        Log.LogError($"Item {p1.Id} contains no compressed chunks but was expected to.");
                        continue;
                    }

                    // Seek stream to start of chunk sizes for this item
                    var itemOffset = offset + p1.OffsetIntoPart4;
                    if ((long)itemOffset + NefsHeaderPart4.DataSize > stream.Length)
                    {
                        Log.LogError($"Item {p1.Id} has part 4 entry that is outside the bounds of header part 4.");
                        continue;
                    }

                    // Seek stream
                    stream.Seek((long)itemOffset, SeekOrigin.Begin);

                    // Process the chunk sizes
                    for (var chunkIdx = 0; chunkIdx < numChunks; ++chunkIdx)
                    {
                        var bytes = new byte[NefsHeaderPart4.DataSize];
                        await stream.ReadAsync(bytes, 0, NefsHeaderPart4.DataSize);

                        entry.ChunkSizes.Add(BitConverter.ToUInt32(bytes, 0));
                    }

                    // Record entry
                    entries.Add(p1.IndexIntoPart4, entry);
                }
            }

            // Return part 4
            return(new NefsHeaderPart4(entries));
        }
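
A worked pass through the chunk-count arithmetic above, using the byte layout from the read test in Example #10. In that test, OffsetIntoPart4 corresponds to IndexIntoPart4 * 4, i.e. each chunk size is a 4-byte value; the exact NefsHeader.ChunkSize constant is not shown in this snippet.

            // Item 1 in Example #10: extracted size = ChunkSize * 2, index into part 4 = 0.
            //   numChunks = ceil((ChunkSize * 2) / ChunkSize) = 2
            //   -> two 4-byte chunk sizes read starting at part 4 offset 0: 0x14131211, 0x18171615.
            // Item 2: extracted size = ChunkSize, index = 2
            //   -> one chunk size read at part 4 offset 8 (2 * 4 bytes): 0x24232221.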
Example #9
        /// <summary>
        /// Reads the header from an input stream.
        /// </summary>
        /// <param name="originalStream">The stream to read from.</param>
        /// <param name="offset">The offset to the header from the beginning of the stream.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>The loaded header.</returns>
        internal async Task <NefsHeader> ReadHeaderAsync(Stream originalStream, ulong offset, NefsProgress p)
        {
            Stream             stream;
            NefsHeaderIntro    intro = null;
            NefsHeaderIntroToc toc   = null;
            NefsHeaderPart1    part1 = null;
            NefsHeaderPart2    part2 = null;
            NefsHeaderPart3    part3 = null;
            NefsHeaderPart4    part4 = null;
            NefsHeaderPart5    part5 = null;
            NefsHeaderPart6    part6 = null;
            NefsHeaderPart7    part7 = null;
            NefsHeaderPart8    part8 = null;

            // Calc weight of each task (8 parts + intro + table of contents)
            var weight = 1.0f / 10.0f;

            using (p.BeginTask(weight, "Reading header intro"))
            {
                // Decrypt header if needed
                (intro, stream) = await this.ReadHeaderIntroAsync(originalStream, offset, p);
            }

            using (p.BeginTask(weight, "Reading header intro table of contents"))
            {
                toc = await this.ReadHeaderIntroTocAsync(stream, NefsHeaderIntroToc.Offset, p);
            }

            using (p.BeginTask(weight, "Reading header part 1"))
            {
                part1 = await this.ReadHeaderPart1Async(stream, toc.OffsetToPart1, toc.Part1Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 2"))
            {
                part2 = await this.ReadHeaderPart2Async(stream, toc.OffsetToPart2, toc.Part2Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 3"))
            {
                part3 = await this.ReadHeaderPart3Async(stream, toc.OffsetToPart3, toc.Part3Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 4"))
            {
                part4 = await this.ReadHeaderPart4Async(stream, toc.OffsetToPart4, toc.Part4Size, part1, part2, p);
            }

            using (p.BeginTask(weight, "Reading header part 5"))
            {
                part5 = await this.ReadHeaderPart5Async(stream, toc.OffsetToPart5, toc.Part5Size, p);
            }

            using (p.BeginTask(weight, "Reading header part 6"))
            {
                if (toc.OffsetToPart6 == 0)
                {
                    // game.dat files don't have part 6
                    Log.LogDebug("Archive does not have header part 6.");
                    part6 = new NefsHeaderPart6(new List <NefsHeaderPart6Entry>());
                }
                else
                {
                    part6 = await this.ReadHeaderPart6Async(stream, toc.OffsetToPart6, toc.Part6Size, part2, p);
                }
            }

            using (p.BeginTask(weight, "Reading header part 7"))
            {
                if (toc.OffsetToPart6 == 0)
                {
                    // game.dat files don't have part 7. We still key off the part 6 offset here;
                    // for some reason the part 7 offset has a value, but it doesn't appear to be
                    // correct, so part 7 is skipped as well.
                    Log.LogDebug("Archive does not have header part 7.");
                    part7 = new NefsHeaderPart7(new List <NefsHeaderPart7Entry>());
                }
                else
                {
                    part7 = await this.ReadHeaderPart7Async(stream, toc.OffsetToPart7, toc.Part7Size, p);
                }
            }

            using (p.BeginTask(weight, "Reading header part 8"))
            {
                var part8Size = intro.HeaderSize - toc.OffsetToPart8;
                part8 = await this.ReadHeaderPart8Async(stream, toc.OffsetToPart8, part8Size, p);
            }

            // Validate header hash
            if (!this.ValidateHash(stream, offset, intro))
            {
                Log.LogWarning("Header hash does not match expected value.");
            }

            // The header stream must be disposed
            stream.Dispose();

            return(new NefsHeader(intro, toc, part1, part2, part3, part4, part5, part6, part7, part8));
        }
Example #10
        public async Task ReadHeaderPart4Async_ValidData_DataRead()
        {
            // Item 1 has 2 chunk sizes
            var e1p1 = new NefsHeaderPart1Entry();

            e1p1.Data0x10_Id.Value             = 0;
            e1p1.Data0x0c_IndexIntoPart4.Value = 0;
            var e1p2 = new NefsHeaderPart2Entry();

            e1p2.Data0x10_Id.Value            = e1p1.Id.Value;
            e1p2.Data0x0c_ExtractedSize.Value = NefsHeader.ChunkSize * 2;

            // Item 2 has 1 chunk size
            var e2p1 = new NefsHeaderPart1Entry();

            e2p1.Data0x10_Id.Value             = 1;
            e2p1.Data0x0c_IndexIntoPart4.Value = 2;
            var e2p2 = new NefsHeaderPart2Entry();

            e2p2.Data0x10_Id.Value            = e2p1.Id.Value;
            e2p2.Data0x0c_ExtractedSize.Value = NefsHeader.ChunkSize;

            // Item 3 has no chunks
            var e3p1 = new NefsHeaderPart1Entry();

            e3p1.Data0x10_Id.Value             = 2;
            e3p1.Data0x0c_IndexIntoPart4.Value = 0xFFFFFFFF;
            var e3p2 = new NefsHeaderPart2Entry();

            e3p2.Data0x10_Id.Value            = e3p1.Id.Value;
            e3p2.Data0x0c_ExtractedSize.Value = NefsHeader.ChunkSize;

            // Item 4 is a directory (extracted size == 0)
            var e4p1 = new NefsHeaderPart1Entry();

            e4p1.Data0x10_Id.Value             = 3;
            e4p1.Data0x0c_IndexIntoPart4.Value = 0;
            var e4p2 = new NefsHeaderPart2Entry();

            e4p2.Data0x10_Id.Value            = e4p1.Id.Value;
            e4p2.Data0x0c_ExtractedSize.Value = 0;

            // Item 5 has 3 chunks
            var e5p1 = new NefsHeaderPart1Entry();

            e5p1.Data0x10_Id.Value             = 4;
            e5p1.Data0x0c_IndexIntoPart4.Value = 3;
            var e5p2 = new NefsHeaderPart2Entry();

            e5p2.Data0x10_Id.Value            = e5p1.Id.Value;
            e5p2.Data0x0c_ExtractedSize.Value = (NefsHeader.ChunkSize * 2) + 5;

            var part1Items = new List <NefsHeaderPart1Entry>
            {
                e1p1,
                e2p1,
                e3p1,
                e4p1,
                e5p1,
            };

            var part2Items = new List <NefsHeaderPart2Entry>
            {
                e1p2,
                e2p2,
                e3p2,
                e4p2,
                e5p2,
            };

            var part1 = new NefsHeaderPart1(part1Items);
            var part2 = new NefsHeaderPart2(part2Items);

            // Setup data
            byte[] bytes =
            {
                // Offset
                0xFF, 0xFF,

                // Item 1
                0x11, 0x12, 0x13, 0x14,
                0x15, 0x16, 0x17, 0x18,

                // Item 2
                0x21, 0x22, 0x23, 0x24,

                // Item 5
                0x31, 0x32, 0x33, 0x34,
                0x35, 0x36, 0x37, 0x38,
                0x39, 0x3A, 0x3B, 0x3C,

                // Last four bytes
                0x01, 0x02, 0x03, 0x04,
            };

            var stream = new MemoryStream(bytes);
            var reader = new NefsReader(this.fileSystem);
            var size   = (uint)28;
            var offset = (uint)2;

            // Test
            var part4 = await reader.ReadHeaderPart4Async(stream, offset, size, part1, part2, this.p);

            // Verify
            Assert.Equal(3, part4.EntriesByIndex.Count);

            Assert.Equal((uint)0x14131211, part4.EntriesByIndex[0].ChunkSizes[0]);
            Assert.Equal((uint)0x18171615, part4.EntriesByIndex[0].ChunkSizes[1]);

            Assert.Equal((uint)0x24232221, part4.EntriesByIndex[2].ChunkSizes[0]);

            Assert.Equal((uint)0x34333231, part4.EntriesByIndex[3].ChunkSizes[0]);
            Assert.Equal((uint)0x38373635, part4.EntriesByIndex[3].ChunkSizes[1]);
            Assert.Equal((uint)0x3C3B3A39, part4.EntriesByIndex[3].ChunkSizes[2]);
        }
Example #11
        /// <summary>
        /// Writes an archive to the specified stream. A new archive object is returned that
        /// contains the updated header and item metadata.
        /// </summary>
        /// <param name="stream">The stream to write to.</param>
        /// <param name="sourceHeader">Donor header information.</param>
        /// <param name="sourceItems">List of items to write. This list is not modified directly.</param>
        /// <param name="workDir">Temp working directory path.</param>
        /// <param name="p">Progress info.</param>
        /// <returns>A new NefsArchive object containing the updated header and item metadata.</returns>
        private async Task <NefsArchive> WriteArchiveAsync(
            Stream stream,
            Nefs20Header sourceHeader,
            NefsItemList sourceItems,
            string workDir,
            NefsProgress p)
        {
            // Setup task weights
            var taskWeightPrepareItems = 0.45f;
            var taskWeightWriteItems   = 0.45f;
            var taskWeightHeader       = 0.1f;

            // Prepare items for writing
            NefsItemList items;

            using (var t = p.BeginTask(taskWeightPrepareItems, "Preparing items"))
            {
                items = await this.PrepareItemsAsync(sourceItems, workDir, p);
            }

            // Determine number of items
            var numItems = items.Count;

            // Update header parts 3 and 4 first (need to know their sizes)
            var p4 = new Nefs20HeaderPart4(items);
            var p3 = new NefsHeaderPart3(items);

            // Compute header size
            var introSize  = NefsHeaderIntro.Size;
            var tocSize    = Nefs20HeaderIntroToc.Size;
            var p1Size     = numItems * NefsHeaderPart1Entry.Size; // TODO : What about duplicates?
            var p2Size     = numItems * NefsHeaderPart2Entry.Size; // TODO : What about duplicates?
            var p3Size     = p3.Size;
            var p4Size     = p4.Size;
            var p5Size     = NefsHeaderPart5.Size;
            var p6Size     = numItems * Nefs20HeaderPart6Entry.Size;
            var p7Size     = numItems * NefsHeaderPart7Entry.Size;
            var p8Size     = sourceHeader.Intro.HeaderSize - sourceHeader.TableOfContents.OffsetToPart8;
            var headerSize = introSize + tocSize + p1Size + p2Size + p3Size + p4Size + p5Size + p6Size + p7Size + p8Size;

            // Determine first data offset. There are two known offset values. If the header is
            // large enough, the second (larger) offset is used.
            var firstDataOffset = Nefs20Header.DataOffsetDefault;

            if (headerSize > firstDataOffset)
            {
                firstDataOffset = Nefs20Header.DataOffsetLarge;
            }
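
            // Illustration with made-up numbers (the real constants are defined on
            // Nefs20Header, not here): if DataOffsetDefault were 0x10000 and the
            // computed headerSize were 0x12000, the header would overlap the first
            // item's data, so firstDataOffset is bumped to DataOffsetLarge.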

            // Write item data
            UInt64 archiveSize;

            using (var t = p.BeginTask(taskWeightWriteItems, "Writing items"))
            {
                archiveSize = await this.WriteItemsAsync(stream, items, firstDataOffset, p);
            }

            // Update remaining header data
            var p1 = new NefsHeaderPart1(items, p4);
            var p2 = new NefsHeaderPart2(items, p3);
            var p6 = new Nefs20HeaderPart6(items);
            var p7 = new NefsHeaderPart7(items);

            // Compute total archive size
            var p5 = new NefsHeaderPart5();

            p5.Data0x00_ArchiveSize.Value             = archiveSize;
            p5.Data0x08_ArchiveNameStringOffset.Value = p3.OffsetsByFileName[items.DataFileName];
            p5.Data0x0C_FirstDataOffset.Value         = sourceHeader.Part5.FirstDataOffset;

            // Update header intro
            var intro = new NefsHeaderIntro();

            intro.Data0x00_MagicNumber.Value     = sourceHeader.Intro.MagicNumber;
            intro.Data0x24_AesKeyHexString.Value = sourceHeader.Intro.AesKeyHexString;
            intro.Data0x64_HeaderSize.Value      = (uint)headerSize;
            intro.Data0x68_NefsVersion.Value     = sourceHeader.Intro.NefsVersion;
            intro.Data0x6c_NumberOfItems.Value   = (uint)numItems;
            intro.Data0x70_UnknownZlib.Value     = sourceHeader.Intro.Unknown0x70zlib;
            intro.Data0x78_Unknown.Value         = sourceHeader.Intro.Unknown0x78;

            var toc = new Nefs20HeaderIntroToc();

            toc.Data0x00_NumVolumes.Value    = sourceHeader.TableOfContents.NumVolumes;
            toc.Data0x02_HashBlockSize.Value = sourceHeader.TableOfContents.Data0x02_HashBlockSize.Value;
            toc.Data0x04_OffsetToPart1.Value = introSize + tocSize;
            toc.Data0x0c_OffsetToPart2.Value = toc.OffsetToPart1 + (uint)p1Size;
            toc.Data0x14_OffsetToPart3.Value = toc.OffsetToPart2 + (uint)p2Size;
            toc.Data0x18_OffsetToPart4.Value = toc.OffsetToPart3 + (uint)p3Size;
            toc.Data0x1c_OffsetToPart5.Value = toc.OffsetToPart4 + (uint)p4Size;
            toc.Data0x08_OffsetToPart6.Value = toc.OffsetToPart5 + (uint)p5Size;
            toc.Data0x10_OffsetToPart7.Value = toc.OffsetToPart6 + (uint)p6Size;
            toc.Data0x20_OffsetToPart8.Value = toc.OffsetToPart7 + (uint)p7Size;
            toc.Data0x24_Unknown.Value       = sourceHeader.TableOfContents.Unknown0x24;

            // Part 8 - not writing anything for now
            var p8 = new NefsHeaderPart8(p8Size);

            // Create new header object
            var header = new Nefs20Header(intro, toc, p1, p2, p3, p4, p5, p6, p7, p8);

            // Write the header
            using (var t = p.BeginTask(taskWeightHeader, "Writing header"))
            {
                await this.WriteHeaderAsync(stream, 0, header, p);
            }

            // Update hash
            await this.UpdateHashAsync(stream, 0, header, p);

            // Create new archive object
            return(new NefsArchive(header, items));
        }