public void NefsHeaderPart2_NoItems_EntriesEmpty()
{
    // An item list with no items in it.
    var itemList = new NefsItemList(@"C:\archive.nefs");
    var part3 = new NefsHeaderPart3(itemList);

    var part2 = new NefsHeaderPart2(itemList, part3);

    // Part 2 built from an empty item list has no entries.
    Assert.Empty(part2.EntriesByIndex);
}
public void NefsHeaderPart3_NoItems_EntriesEmpty()
{
    // An item list with no items in it.
    var itemList = new NefsItemList(@"C:\archive.nefs");

    var part3 = new NefsHeaderPart3(itemList);

    // Even with no items, the strings table contains the archive's data file
    // name: "archive.nefs" plus a null terminator is 13 bytes.
    Assert.Single(part3.OffsetsByFileName);
    Assert.Single(part3.FileNamesByOffset);
    Assert.Equal(13, (int)part3.Size);
}
public async Task WriteHeaderPart3Async_ValidData_Written()
{
    // Build an item list with three files and one directory.
    var itemList = new NefsItemList(@"C:\hi.txt");
    var file1 = TestHelpers.CreateItem(0, 0, "file1", 10, 11, new List<UInt32> { 12, 13 }, NefsItemType.File);
    var file2 = TestHelpers.CreateItem(1, 1, "file2", 20, 21, new List<UInt32> { 22, 23 }, NefsItemType.File);
    var dir1 = TestHelpers.CreateItem(2, 2, "dir1", 0, 0, new List<UInt32> { 0 }, NefsItemType.Directory);
    var file3 = TestHelpers.CreateItem(3, 2, "file3", 30, 31, new List<UInt32> { 32, 33 }, NefsItemType.File);
    itemList.Add(file1);
    itemList.Add(file2);
    itemList.Add(dir1);
    itemList.Add(file3);

    var part3 = new NefsHeaderPart3(itemList);

    // Write part 3 at a non-zero offset into an in-memory stream.
    var writer = this.CreateWriter();
    const int offset = 5;
    byte[] buffer;

    using (var ms = new MemoryStream())
    {
        await writer.WriteHeaderPart3Async(ms, (uint)offset, part3, new NefsProgress());
        buffer = ms.ToArray();
    }

    // Verify null-terminated strings written back-to-back in sorted order
    // ("dir1" sorts before the file names).
    var pos = offset;
    foreach (var expectedName in new[] { "dir1", "file1", "file2", "file3" })
    {
        Assert.Equal(expectedName, Encoding.ASCII.GetString(buffer, pos, expectedName.Length));
        Assert.Equal(0, buffer[pos + expectedName.Length]);
        pos += expectedName.Length + 1;
    }
}
public void NefsHeaderPart3_MultipleItems_EntriesPopulated()
{
    var itemList = new NefsItemList(@"C:\archive.nefs");

    // file1 in the root directory.
    var chunks1 = NefsDataChunk.CreateChunkList(new List<UInt32> { 11, 12, 13 }, TestHelpers.TestTransform);
    var ds1 = new NefsItemListDataSource(itemList, 123, new NefsItemSize(456, chunks1));
    var file1 = TestHelpers.CreateFile(0, 0, "file1", ds1);
    itemList.Add(file1);

    // file2 in the root directory.
    var chunks2 = NefsDataChunk.CreateChunkList(new List<UInt32> { 14, 15, 16 }, TestHelpers.TestTransform);
    var ds2 = new NefsItemListDataSource(itemList, 456, new NefsItemSize(789, chunks2));
    var file2 = TestHelpers.CreateFile(1, 1, "file2", ds2);
    itemList.Add(file2);

    // dir1 in the root directory.
    var dir1 = TestHelpers.CreateDirectory(2, 2, "dir1");
    itemList.Add(dir1);

    // file3 inside dir1.
    var chunks3 = NefsDataChunk.CreateChunkList(new List<UInt32> { 22, 23, 24 }, TestHelpers.TestTransform);
    var ds3 = new NefsItemListDataSource(itemList, 222, new NefsItemSize(333, chunks3));
    var file3 = TestHelpers.CreateFile(3, dir1.Id.Value, "file3", ds3);
    itemList.Add(file3);

    var p3 = new NefsHeaderPart3(itemList);

    // Four item names plus the archive data file name.
    Assert.Equal(5, p3.OffsetsByFileName.Count);
    Assert.Equal(5, p3.FileNamesByOffset.Count);

    // Five file names plus a null terminator for each.
    Assert.Equal(36, (int)p3.Size);

    // Strings table is sorted alphabetically - and also contains data file name
    Assert.Equal("archive.nefs", p3.FileNamesByOffset[0]);
    Assert.Equal("dir1", p3.FileNamesByOffset[13]);
    Assert.Equal("file1", p3.FileNamesByOffset[18]);
    Assert.Equal("file2", p3.FileNamesByOffset[24]);
    Assert.Equal("file3", p3.FileNamesByOffset[30]);

    Assert.Equal(18, (int)p3.OffsetsByFileName[file1.FileName]);
    Assert.Equal(24, (int)p3.OffsetsByFileName[file2.FileName]);
    Assert.Equal(13, (int)p3.OffsetsByFileName[dir1.FileName]);
    Assert.Equal(30, (int)p3.OffsetsByFileName[file3.FileName]);
}
/// <summary>
/// Writes the header part to an output stream.
/// </summary>
/// <param name="stream">The stream to write to.</param>
/// <param name="offset">The absolute offset in the stream to write at.</param>
/// <param name="part3">The data to write.</param>
/// <param name="p">Progress info.</param>
/// <returns>An async task.</returns>
internal async Task WriteHeaderPart3Async(Stream stream, UInt64 offset, NefsHeaderPart3 part3, NefsProgress p)
{
    stream.Seek((long)offset, SeekOrigin.Begin);

    // Reuse a single one-byte buffer for the null terminator instead of
    // allocating a new array on every loop iteration.
    var nullTerminator = new byte[] { 0 };

    foreach (var entry in part3.FileNames)
    {
        // Each file name is written as a null-terminated ASCII string.
        var fileNameBytes = Encoding.ASCII.GetBytes(entry);
        await stream.WriteAsync(fileNameBytes, 0, fileNameBytes.Length, p.CancellationToken);

        // Write null terminator
        await stream.WriteAsync(nullTerminator, 0, 1, p.CancellationToken);
    }
}
public void NefsHeaderPart3_MultipleItems_EntriesPopulated()
{
    var itemList = new NefsItemList(@"C:\archive.nefs");

    // Two files in the root, one directory in the root, one file in that directory.
    var ds1 = new NefsItemListDataSource(itemList, 123, new NefsItemSize(456, new List<UInt32> { 11, 12, 13 }));
    var file1 = new NefsItem(new NefsItemId(0), "file1", new NefsItemId(0), NefsItemType.File, ds1, TestHelpers.CreateUnknownData());
    itemList.Add(file1);

    var ds2 = new NefsItemListDataSource(itemList, 456, new NefsItemSize(789, new List<UInt32> { 14, 15, 16 }));
    var file2 = new NefsItem(new NefsItemId(1), "file2", new NefsItemId(1), NefsItemType.File, ds2, TestHelpers.CreateUnknownData());
    itemList.Add(file2);

    var dirDs = new NefsEmptyDataSource();
    var dir1 = new NefsItem(new NefsItemId(2), "dir1", new NefsItemId(2), NefsItemType.Directory, dirDs, TestHelpers.CreateUnknownData());
    itemList.Add(dir1);

    var ds3 = new NefsItemListDataSource(itemList, 222, new NefsItemSize(333, new List<UInt32> { 22, 23, 24 }));
    var file3 = new NefsItem(new NefsItemId(3), "file3", dir1.Id, NefsItemType.File, ds3, TestHelpers.CreateUnknownData());
    itemList.Add(file3);

    var p3 = new NefsHeaderPart3(itemList);

    // Four item names plus the archive data file name.
    Assert.Equal(5, p3.OffsetsByFileName.Count);
    Assert.Equal(5, p3.FileNamesByOffset.Count);

    // Five file names plus a null terminator for each.
    Assert.Equal(36, (int)p3.Size);

    // Strings table is sorted alphabetically - and also contains data file name
    Assert.Equal("archive.nefs", p3.FileNamesByOffset[0]);
    Assert.Equal("dir1", p3.FileNamesByOffset[13]);
    Assert.Equal("file1", p3.FileNamesByOffset[18]);
    Assert.Equal("file2", p3.FileNamesByOffset[24]);
    Assert.Equal("file3", p3.FileNamesByOffset[30]);

    Assert.Equal(18, (int)p3.OffsetsByFileName[file1.FileName]);
    Assert.Equal(24, (int)p3.OffsetsByFileName[file2.FileName]);
    Assert.Equal(13, (int)p3.OffsetsByFileName[dir1.FileName]);
    Assert.Equal(30, (int)p3.OffsetsByFileName[file3.FileName]);
}
public void NefsHeaderPart2_MultipleItems_EntriesPopulated()
{
    var itemList = new NefsItemList(@"C:\archive.nefs");

    var chunks1 = NefsDataChunk.CreateChunkList(new List<UInt32> { 11, 12, 13 }, TestHelpers.TestTransform);
    var ds1 = new NefsItemListDataSource(itemList, 123, new NefsItemSize(456, chunks1));
    var file1 = TestHelpers.CreateFile(0, 0, "file1", ds1);
    itemList.Add(file1);

    var chunks2 = NefsDataChunk.CreateChunkList(new List<UInt32> { 14, 15, 16 }, TestHelpers.TestTransform);
    var ds2 = new NefsItemListDataSource(itemList, 456, new NefsItemSize(789, chunks2));
    var file2 = TestHelpers.CreateFile(1, 1, "file2", ds2);
    itemList.Add(file2);

    var dir1 = TestHelpers.CreateDirectory(2, 2, "dir1");
    itemList.Add(dir1);

    var chunks3 = NefsDataChunk.CreateChunkList(new List<UInt32> { 22, 23, 24 }, TestHelpers.TestTransform);
    var ds3 = new NefsItemListDataSource(itemList, 222, new NefsItemSize(333, chunks3));
    var file3 = TestHelpers.CreateFile(3, dir1.Id.Value, "file3", ds3);
    itemList.Add(file3);

    var p3 = new NefsHeaderPart3(itemList);
    var p2 = new NefsHeaderPart2(itemList, p3);
    var entries = p2.EntriesByIndex;

    Assert.Equal(4, entries.Count);

    // NOTE: Part 3 is the strings table. So offset into p3 must take into account null
    // terminated file/dir names. Also note strings table is alphabetized. Also note the
    // data file name is added to the strings table.

    // NOTE: The order of part 2 is generated by depth first traversal of the file tree,
    // with items sorted by filename.

    // dir1
    Assert.Equal(2, (int)entries[0].Id.Value);
    Assert.Equal(2, (int)entries[0].Data0x00_DirectoryId.Value);
    Assert.Equal(3, (int)entries[0].Data0x04_FirstChildId.Value);
    Assert.Equal(0, (int)entries[0].Data0x0c_ExtractedSize.Value);
    Assert.Equal(13, (int)entries[0].Data0x08_OffsetIntoPart3.Value);

    // file3
    Assert.Equal(3, (int)entries[1].Id.Value);
    Assert.Equal(2, (int)entries[1].Data0x00_DirectoryId.Value);
    Assert.Equal(3, (int)entries[1].Data0x04_FirstChildId.Value);
    Assert.Equal(333, (int)entries[1].Data0x0c_ExtractedSize.Value);
    Assert.Equal(30, (int)entries[1].Data0x08_OffsetIntoPart3.Value);

    // file1
    Assert.Equal(0, (int)entries[2].Id.Value);
    Assert.Equal(0, (int)entries[2].DirectoryId.Value);
    Assert.Equal(0, (int)entries[2].FirstChildId.Value);
    Assert.Equal(456, (int)entries[2].ExtractedSize);
    Assert.Equal(18, (int)entries[2].OffsetIntoPart3);

    // file2
    Assert.Equal(1, (int)entries[3].Id.Value);
    Assert.Equal(1, (int)entries[3].DirectoryId.Value);
    Assert.Equal(1, (int)entries[3].FirstChildId.Value);
    Assert.Equal(789, (int)entries[3].ExtractedSize);
    Assert.Equal(24, (int)entries[3].OffsetIntoPart3);
}
/// <summary>
/// Reads a version 2.0 header from an input stream.
/// </summary>
/// <param name="stream">The stream to read from.</param>
/// <param name="offset">The offset to the header from the beginning of the stream.</param>
/// <param name="part6Stream">The stream that contains part 6/7 data.</param>
/// <param name="part6Offset">The offset to the start of part 6/7 data.</param>
/// <param name="intro">The pre-parsed header intro.</param>
/// <param name="p">Progress info.</param>
/// <returns>The loaded header.</returns>
internal async Task<Nefs20Header> Read20HeaderAsync(
    Stream stream,
    ulong offset,
    Stream part6Stream,
    ulong part6Offset,
    NefsHeaderIntro intro,
    NefsProgress p)
{
    Nefs20HeaderIntroToc toc = null;
    NefsHeaderPart1 part1 = null;
    NefsHeaderPart2 part2 = null;
    NefsHeaderPart3 part3 = null;
    Nefs20HeaderPart4 part4 = null;
    NefsHeaderPart5 part5 = null;
    Nefs20HeaderPart6 part6 = null;
    NefsHeaderPart7 part7 = null;
    NefsHeaderPart8 part8 = null;

    // Calc weight of each task (8 parts + table of contents = 9 tasks).
    // FIX: was 1.0f / 10.0f, which left total progress at 90% — the intro is
    // pre-parsed for this overload, so there is no tenth task here.
    var weight = 1.0f / 9.0f;

    using (p.BeginTask(weight, "Reading header intro table of contents"))
    {
        toc = await this.Read20HeaderIntroTocAsync(stream, Nefs20HeaderIntroToc.Offset, p);
    }

    using (p.BeginTask(weight, "Reading header part 1"))
    {
        part1 = await this.ReadHeaderPart1Async(stream, toc.OffsetToPart1, toc.Part1Size, p);
    }

    using (p.BeginTask(weight, "Reading header part 2"))
    {
        part2 = await this.ReadHeaderPart2Async(stream, toc.OffsetToPart2, toc.Part2Size, p);
    }

    using (p.BeginTask(weight, "Reading header part 3"))
    {
        part3 = await this.ReadHeaderPart3Async(stream, toc.OffsetToPart3, toc.Part3Size, p);
    }

    using (p.BeginTask(weight, "Reading header part 4"))
    {
        part4 = await this.Read20HeaderPart4Async(stream, toc.OffsetToPart4, toc.Part4Size, part1, p);
    }

    using (p.BeginTask(weight, "Reading header part 5"))
    {
        part5 = await this.ReadHeaderPart5Async(stream, toc.OffsetToPart5, NefsHeaderPart5.Size, p);
    }

    using (p.BeginTask(weight, "Reading header part 6"))
    {
        // Parts 6 and 7 live in a separate stream at part6Offset.
        part6 = await this.Read20HeaderPart6Async(part6Stream, (uint)part6Offset + toc.OffsetToPart6, part1, p);
    }

    using (p.BeginTask(weight, "Reading header part 7"))
    {
        // Part 7 has one entry per part 2 entry.
        var numEntries = (uint)part2.EntriesByIndex.Count;
        part7 = await this.ReadHeaderPart7Async(part6Stream, (uint)part6Offset + toc.OffsetToPart7, numEntries, p);
    }

    using (p.BeginTask(weight, "Reading header part 8"))
    {
        // Part 8 runs from its offset to the end of the header.
        var part8Size = intro.HeaderSize - toc.OffsetToPart8;
        part8 = await this.ReadHeaderPart8Async(stream, toc.OffsetToPart8, part8Size, p);
    }

    // Validate header hash
    if (!this.ValidateHash(stream, offset, intro))
    {
        Log.LogWarning("Header hash does not match expected value.");
    }

    // The header stream must be disposed
    stream.Dispose();

    return new Nefs20Header(intro, toc, part1, part2, part3, part4, part5, part6, part7, part8);
}
public async Task WriteHeaderPart2Async_ValidData_Written()
{
    // Build an item list with three files and one directory.
    var itemList = new NefsItemList(@"C:\hi.txt");
    var file1 = TestHelpers.CreateItem(0, 0, "file1", 10, 11, new List<UInt32> { 12, 13 }, NefsItemType.File);
    var file2 = TestHelpers.CreateItem(1, 1, "file2", 20, 21, new List<UInt32> { 22, 23 }, NefsItemType.File);
    var dir1 = TestHelpers.CreateItem(2, 2, "dir1", 0, 0, new List<UInt32> { 0 }, NefsItemType.Directory);
    var file3 = TestHelpers.CreateItem(3, 2, "file3", 30, 31, new List<UInt32> { 32, 33 }, NefsItemType.File);
    itemList.Add(file1);
    itemList.Add(file2);
    itemList.Add(dir1);
    itemList.Add(file3);

    var part3 = new NefsHeaderPart3(itemList);
    var part2 = new NefsHeaderPart2(itemList, part3);

    // Write part 2 at a non-zero offset into an in-memory stream.
    var writer = this.CreateWriter();
    const int baseOffset = 5;
    byte[] buffer;

    using (var ms = new MemoryStream())
    {
        await writer.WriteHeaderPart2Async(ms, (uint)baseOffset, part2, new NefsProgress());
        buffer = ms.ToArray();
    }

    var entrySize = (int)NefsHeaderPart2Entry.Size;

    // Verifies one serialized entry: directory id (0x00), first child id (0x04),
    // part 3 offset (0x08), extracted size (0x0c), item id (0x10).
    void AssertEntry(int entryIndex, int dirId, int firstChildId, int part3Offset, int extractedSize, int itemId)
    {
        var o = baseOffset + (entryIndex * entrySize);
        Assert.Equal(dirId, BitConverter.ToInt32(buffer, o + 0x00));
        Assert.Equal(firstChildId, BitConverter.ToInt32(buffer, o + 0x04));
        Assert.Equal(part3Offset, BitConverter.ToInt32(buffer, o + 0x08));
        Assert.Equal(extractedSize, BitConverter.ToInt32(buffer, o + 0x0c));
        Assert.Equal(itemId, BitConverter.ToInt32(buffer, o + 0x10));
    }

    // Entries are ordered by depth-first traversal sorted by filename:
    // dir1, file3 (inside dir1), file1, file2.
    AssertEntry(0, 2, 3, 0, 0, 2);   // dir1
    AssertEntry(1, 2, 3, 17, 31, 3); // file3
    AssertEntry(2, 0, 0, 5, 11, 0);  // file1
    AssertEntry(3, 1, 1, 11, 21, 1); // file2
}
public void NefsHeaderPart2_MultipleItems_EntriesPopulated()
{
    var itemList = new NefsItemList(@"C:\archive.nefs");

    var ds1 = new NefsItemListDataSource(itemList, 123, new NefsItemSize(456, new List<UInt32> { 11, 12, 13 }));
    var file1 = new NefsItem(new NefsItemId(0), "file1", new NefsItemId(0), NefsItemType.File, ds1, TestHelpers.CreateUnknownData());
    itemList.Add(file1);

    var ds2 = new NefsItemListDataSource(itemList, 456, new NefsItemSize(789, new List<UInt32> { 14, 15, 16 }));
    var file2 = new NefsItem(new NefsItemId(1), "file2", new NefsItemId(1), NefsItemType.File, ds2, TestHelpers.CreateUnknownData());
    itemList.Add(file2);

    var dirDs = new NefsEmptyDataSource();
    var dir1 = new NefsItem(new NefsItemId(2), "dir1", new NefsItemId(2), NefsItemType.Directory, dirDs, TestHelpers.CreateUnknownData());
    itemList.Add(dir1);

    var ds3 = new NefsItemListDataSource(itemList, 222, new NefsItemSize(333, new List<UInt32> { 22, 23, 24 }));
    var file3 = new NefsItem(new NefsItemId(3), "file3", dir1.Id, NefsItemType.File, ds3, TestHelpers.CreateUnknownData());
    itemList.Add(file3);

    var p3 = new NefsHeaderPart3(itemList);
    var p2 = new NefsHeaderPart2(itemList, p3);
    var entries = p2.EntriesById;

    Assert.Equal(4, entries.Count);

    // NOTE: Part 3 is the strings table. So offset into p3 must take into account null
    // terminated file/dir names. Also note strings table is alphabetized. Also note the
    // data file name is added to the strings table.

    // file1
    Assert.Equal(0, (int)entries[file1.Id].Id.Value);
    Assert.Equal(0, (int)entries[file1.Id].DirectoryId.Value);
    Assert.Equal(0, (int)entries[file1.Id].FirstChildId.Value);
    Assert.Equal(456, (int)entries[file1.Id].ExtractedSize);
    Assert.Equal(18, (int)entries[file1.Id].OffsetIntoPart3);

    // file2
    Assert.Equal(1, (int)entries[file2.Id].Id.Value);
    Assert.Equal(1, (int)entries[file2.Id].DirectoryId.Value);
    Assert.Equal(1, (int)entries[file2.Id].FirstChildId.Value);
    Assert.Equal(789, (int)entries[file2.Id].ExtractedSize);
    Assert.Equal(24, (int)entries[file2.Id].OffsetIntoPart3);

    // dir1
    Assert.Equal(2, (int)entries[dir1.Id].Id.Value);
    Assert.Equal(2, (int)entries[dir1.Id].Data0x00_DirectoryId.Value);
    Assert.Equal(3, (int)entries[dir1.Id].Data0x04_FirstChildId.Value);
    Assert.Equal(0, (int)entries[dir1.Id].Data0x0c_ExtractedSize.Value);
    Assert.Equal(13, (int)entries[dir1.Id].Data0x08_OffsetIntoPart3.Value);

    // file3
    Assert.Equal(3, (int)entries[file3.Id].Id.Value);
    Assert.Equal(2, (int)entries[file3.Id].Data0x00_DirectoryId.Value);
    Assert.Equal(3, (int)entries[file3.Id].Data0x04_FirstChildId.Value);
    Assert.Equal(333, (int)entries[file3.Id].Data0x0c_ExtractedSize.Value);
    Assert.Equal(30, (int)entries[file3.Id].Data0x08_OffsetIntoPart3.Value);
}
/// <summary>
/// Reads the header from an input stream.
/// </summary>
/// <param name="originalStream">The stream to read from.</param>
/// <param name="offset">The offset to the header from the beginning of the stream.</param>
/// <param name="p">Progress info.</param>
/// <returns>The loaded header.</returns>
internal async Task<NefsHeader> ReadHeaderAsync(Stream originalStream, ulong offset, NefsProgress p)
{
    // The intro read may decrypt the header into a new stream; all further
    // reads go through `stream`, not `originalStream`.
    Stream stream;
    NefsHeaderIntro intro = null;
    NefsHeaderIntroToc toc = null;
    NefsHeaderPart1 part1 = null;
    NefsHeaderPart2 part2 = null;
    NefsHeaderPart3 part3 = null;
    NefsHeaderPart4 part4 = null;
    NefsHeaderPart5 part5 = null;
    NefsHeaderPart6 part6 = null;
    NefsHeaderPart7 part7 = null;
    NefsHeaderPart8 part8 = null;

    // Calc weight of each task (8 parts + intro + table of contents)
    var weight = 1.0f / 10.0f;

    using (p.BeginTask(weight, "Reading header intro"))
    {
        // Decrypt header if needed
        (intro, stream) = await this.ReadHeaderIntroAsync(originalStream, offset, p);
    }

    using (p.BeginTask(weight, "Reading header intro table of contents"))
    {
        toc = await this.ReadHeaderIntroTocAsync(stream, NefsHeaderIntroToc.Offset, p);
    }

    using (p.BeginTask(weight, "Reading header part 1"))
    {
        part1 = await this.ReadHeaderPart1Async(stream, toc.OffsetToPart1, toc.Part1Size, p);
    }

    using (p.BeginTask(weight, "Reading header part 2"))
    {
        part2 = await this.ReadHeaderPart2Async(stream, toc.OffsetToPart2, toc.Part2Size, p);
    }

    using (p.BeginTask(weight, "Reading header part 3"))
    {
        part3 = await this.ReadHeaderPart3Async(stream, toc.OffsetToPart3, toc.Part3Size, p);
    }

    using (p.BeginTask(weight, "Reading header part 4"))
    {
        // Part 4 depends on parts 1 and 2, which is why those are read first.
        part4 = await this.ReadHeaderPart4Async(stream, toc.OffsetToPart4, toc.Part4Size, part1, part2, p);
    }

    using (p.BeginTask(weight, "Reading header part 5"))
    {
        part5 = await this.ReadHeaderPart5Async(stream, toc.OffsetToPart5, toc.Part5Size, p);
    }

    using (p.BeginTask(weight, "Reading header part 6"))
    {
        if (toc.OffsetToPart6 == 0)
        {
            // game.dat files don't have part 6
            Log.LogDebug("Archive does not have header part 6.");
            part6 = new NefsHeaderPart6(new List<NefsHeaderPart6Entry>());
        }
        else
        {
            part6 = await this.ReadHeaderPart6Async(stream, toc.OffsetToPart6, toc.Part6Size, part2, p);
        }
    }

    using (p.BeginTask(weight, "Reading header part 7"))
    {
        if (toc.OffsetToPart6 == 0)
        {
            // game.dat files don't have part 7. Still checking if part 6 offset is 0. For
            // some reason, the part 7 offset still has a value, but doesn't appear to be a
            // correct one, so skipping part 7 as well
            Log.LogDebug("Archive does not have header part 7.");
            part7 = new NefsHeaderPart7(new List<NefsHeaderPart7Entry>());
        }
        else
        {
            part7 = await this.ReadHeaderPart7Async(stream, toc.OffsetToPart7, toc.Part7Size, p);
        }
    }

    using (p.BeginTask(weight, "Reading header part 8"))
    {
        // Part 8 size is not in the TOC; it runs from its offset to the end
        // of the header.
        var part8Size = intro.HeaderSize - toc.OffsetToPart8;
        part8 = await this.ReadHeaderPart8Async(stream, toc.OffsetToPart8, part8Size, p);
    }

    // Validate header hash; a mismatch is logged but does not abort the read.
    if (!this.ValidateHash(stream, offset, intro))
    {
        Log.LogWarning("Header hash does not match expected value.");
    }

    // The header stream must be disposed (it may be a temporary decrypted
    // copy produced by ReadHeaderIntroAsync, distinct from originalStream).
    stream.Dispose();

    return (new NefsHeader(intro, toc, part1, part2, part3, part4, part5, part6, part7, part8));
}
/// <summary>
/// Writes an archive to the specified stream. A new archive object is returned that
/// contains the updated header and item metadata.
/// </summary>
/// <param name="stream">The stream to write to.</param>
/// <param name="sourceHeader">Donor header information.</param>
/// <param name="sourceItems">List of items to write. This list is not modified directly.</param>
/// <param name="workDir">Temp working directory path.</param>
/// <param name="p">Progress info.</param>
/// <returns>A new NefsArchive object containing the updated header and item metadata.</returns>
private async Task<NefsArchive> WriteArchiveAsync(
    Stream stream,
    Nefs20Header sourceHeader,
    NefsItemList sourceItems,
    string workDir,
    NefsProgress p)
{
    // Setup task weights
    var taskWeightPrepareItems = 0.45f;
    var taskWeightWriteItems = 0.45f;
    var taskWeightHeader = 0.1f;

    // Prepare items for writing
    NefsItemList items;
    using (var t = p.BeginTask(taskWeightPrepareItems, "Preparing items"))
    {
        items = await this.PrepareItemsAsync(sourceItems, workDir, p);
    }

    // Determine number of items
    var numItems = items.Count;

    // Update header parts 3 and 4 first (need to know their sizes)
    var p4 = new Nefs20HeaderPart4(items);
    var p3 = new NefsHeaderPart3(items);

    // Compute header size
    var introSize = NefsHeaderIntro.Size;
    var tocSize = Nefs20HeaderIntroToc.Size;
    var p1Size = numItems * NefsHeaderPart1Entry.Size; // TODO : What about duplicates?
    var p2Size = numItems * NefsHeaderPart2Entry.Size; // TODO : What about duplicates?
    var p3Size = p3.Size;
    var p4Size = p4.Size;
    var p5Size = NefsHeaderPart5.Size;
    var p6Size = numItems * Nefs20HeaderPart6Entry.Size;
    var p7Size = numItems * NefsHeaderPart7Entry.Size;

    // Part 8 size is carried over from the donor header (offset-to-end).
    var p8Size = sourceHeader.Intro.HeaderSize - sourceHeader.TableOfContents.OffsetToPart8;
    var headerSize = introSize + tocSize + p1Size + p2Size + p3Size + p4Size + p5Size + p6Size + p7Size + p8Size;

    // Determine first data offset. There are two known offset values. If the header is
    // large enough, the second (larger) offset is used.
    var firstDataOffset = Nefs20Header.DataOffsetDefault;
    if (headerSize > firstDataOffset)
    {
        firstDataOffset = Nefs20Header.DataOffsetLarge;
    }

    // Write item data
    UInt64 archiveSize;
    using (var t = p.BeginTask(taskWeightWriteItems, "Writing items"))
    {
        archiveSize = await this.WriteItemsAsync(stream, items, firstDataOffset, p);
    }

    // Update remaining header data (part 1 needs part 4; part 2 needs part 3)
    var p1 = new NefsHeaderPart1(items, p4);
    var p2 = new NefsHeaderPart2(items, p3);
    var p6 = new Nefs20HeaderPart6(items);
    var p7 = new NefsHeaderPart7(items);

    // Compute total archive size
    var p5 = new NefsHeaderPart5();
    p5.Data0x00_ArchiveSize.Value = archiveSize;
    p5.Data0x08_ArchiveNameStringOffset.Value = p3.OffsetsByFileName[items.DataFileName];

    // NOTE(review): items were written at firstDataOffset (computed above), but part 5
    // records the donor header's FirstDataOffset — confirm these agree when headerSize
    // grows past DataOffsetDefault; otherwise the recorded offset may be stale.
    p5.Data0x0C_FirstDataOffset.Value = sourceHeader.Part5.FirstDataOffset;

    // Update header intro; unknown fields are copied from the donor header.
    var intro = new NefsHeaderIntro();
    intro.Data0x00_MagicNumber.Value = sourceHeader.Intro.MagicNumber;
    intro.Data0x24_AesKeyHexString.Value = sourceHeader.Intro.AesKeyHexString;
    intro.Data0x64_HeaderSize.Value = (uint)headerSize;
    intro.Data0x68_NefsVersion.Value = sourceHeader.Intro.NefsVersion;
    intro.Data0x6c_NumberOfItems.Value = (uint)numItems;
    intro.Data0x70_UnknownZlib.Value = sourceHeader.Intro.Unknown0x70zlib;
    intro.Data0x78_Unknown.Value = sourceHeader.Intro.Unknown0x78;

    // Table of contents: each part's offset is the previous part's offset
    // plus the previous part's size, starting right after intro + toc.
    var toc = new Nefs20HeaderIntroToc();
    toc.Data0x00_NumVolumes.Value = sourceHeader.TableOfContents.NumVolumes;
    toc.Data0x02_HashBlockSize.Value = sourceHeader.TableOfContents.Data0x02_HashBlockSize.Value;
    toc.Data0x04_OffsetToPart1.Value = introSize + tocSize;
    toc.Data0x0c_OffsetToPart2.Value = toc.OffsetToPart1 + (uint)p1Size;
    toc.Data0x14_OffsetToPart3.Value = toc.OffsetToPart2 + (uint)p2Size;
    toc.Data0x18_OffsetToPart4.Value = toc.OffsetToPart3 + (uint)p3Size;
    toc.Data0x1c_OffsetToPart5.Value = toc.OffsetToPart4 + (uint)p4Size;
    toc.Data0x08_OffsetToPart6.Value = toc.OffsetToPart5 + (uint)p5Size;
    toc.Data0x10_OffsetToPart7.Value = toc.OffsetToPart6 + (uint)p6Size;
    toc.Data0x20_OffsetToPart8.Value = toc.OffsetToPart7 + (uint)p7Size;
    toc.Data0x24_Unknown.Value = sourceHeader.TableOfContents.Unknown0x24;

    // Part 8 - not writing anything for now
    var p8 = new NefsHeaderPart8(p8Size);

    // Create new header object
    var header = new Nefs20Header(intro, toc, p1, p2, p3, p4, p5, p6, p7, p8);

    // Write the header
    using (var t = p.BeginTask(taskWeightHeader, "Writing header"))
    {
        await this.WriteHeaderAsync(stream, 0, header, p);
    }

    // Update hash
    await this.UpdateHashAsync(stream, 0, header, p);

    // Create new archive object
    return (new NefsArchive(header, items));
}