public void BeginTask_Message_ProgressChangedRaised()
{
    // Arrange: subscribe to the progress event before any task begins.
    var token = new CancellationTokenSource().Token;
    var progress = new NefsProgress(token);
    NefsProgressEventArgs received = null;
    progress.ProgressChanged += (sender, e) => received = e;

    // Act: starting a task must raise ProgressChanged.
    progress.BeginTask(1.0f, "A");

    // Assert: the event args mirror the progress object's current state.
    Assert.Equal(progress.StatusMessage, received.Message);
    Assert.Equal(progress.StatusSubMessage, received.SubMessage);
    Assert.Equal(progress.Percent, received.Progress);
}
/// <inheritdoc/>
public async Task<NefsItemSize> TransformFileAsync(
    string inputFile,
    Int64 inputOffset,
    UInt32 inputLength,
    string outputFile,
    Int64 outputOffset,
    NefsDataTransform transform,
    NefsProgress p)
{
    // Open both files and delegate to the stream-based overload.
    using (var source = this.FileSystem.File.OpenRead(inputFile))
    using (var destination = this.FileSystem.File.OpenWrite(outputFile))
    {
        var itemSize = await this.TransformAsync(source, inputOffset, inputLength, destination, outputOffset, transform, p);
        return itemSize;
    }
}
/// <inheritdoc/>
public async Task<NefsItemSize> CompressFileAsync(
    string inputFile,
    Int64 inputOffset,
    UInt32 inputLength,
    string outputFile,
    Int64 outputOffset,
    UInt32 chunkSize,
    NefsProgress p)
{
    // Open both files and delegate to the stream-based overload.
    using (var source = this.FileSystem.File.OpenRead(inputFile))
    using (var destination = this.FileSystem.File.OpenWrite(outputFile))
    {
        var itemSize = await this.CompressAsync(source, inputOffset, inputLength, destination, outputOffset, chunkSize, p);
        return itemSize;
    }
}
public void Test_MultipleTasks()
{
    // Two nested tasks: the inner task's completion contributes its weight
    // (0.5) to the outer task; ending the outer task reaches 100%.
    var progress = new NefsProgress(new CancellationTokenSource().Token);

    progress.BeginTask(1.0f);
    progress.BeginTask(0.5f);
    Assert.Equal(0.0f, progress.Percent);

    progress.EndTask();
    Assert.Equal(0.5f, progress.Percent);

    progress.EndTask();
    Assert.Equal(1.0f, progress.Percent);
}
public void Test_SingleTask()
{
    // A single task with no message: percent goes 0 -> 1 and the status
    // messages stay empty throughout.
    var progress = new NefsProgress(new CancellationTokenSource().Token);

    progress.BeginTask(1.0f);
    Assert.Equal(0.0f, progress.Percent);
    Assert.Equal("", progress.StatusMessage);
    Assert.Equal("", progress.StatusSubMessage);

    progress.EndTask();
    Assert.Equal(1.0f, progress.Percent);
    Assert.Equal("", progress.StatusMessage);
    Assert.Equal("", progress.StatusSubMessage);
}
/// <inheritdoc/>
public async Task DetransformFileAsync(
    string inputFile,
    Int64 inputOffset,
    string outputFile,
    Int64 outputOffset,
    uint extractedSize,
    IReadOnlyList<NefsDataChunk> chunks,
    NefsProgress p)
{
    // Wrap the whole operation in a single progress task, then delegate to
    // the stream-based overload.
    using (var task = p.BeginTask(1.0f))
    using (var source = this.FileSystem.File.OpenRead(inputFile))
    using (var destination = this.FileSystem.File.OpenWrite(outputFile))
    {
        await this.DetransformAsync(source, inputOffset, destination, outputOffset, extractedSize, chunks, p);
    }
}
/// <inheritdoc/>
public async Task DecompressFileAsync(
    string inputFile,
    Int64 inputOffset,
    IReadOnlyList<UInt32> chunkSizes,
    string outputFile,
    Int64 outputOffset,
    NefsProgress p,
    byte[] aes256key = null)
{
    // Wrap the whole operation in a single progress task, then delegate to
    // the stream-based overload.
    using (var task = p.BeginTask(1.0f))
    using (var source = this.FileSystem.File.OpenRead(inputFile))
    using (var destination = this.FileSystem.File.OpenWrite(outputFile))
    {
        await this.DecompressAsync(source, inputOffset, chunkSizes, destination, outputOffset, p, aes256key);
    }
}
/// <inheritdoc/>
public async Task<NefsItemSize> TransformAsync(
    Stream input,
    Int64 inputOffset,
    UInt32 inputLength,
    Stream output,
    Int64 outputOffset,
    NefsDataTransform transform,
    NefsProgress p)
{
    var chunks = new List<NefsDataChunk>();
    var rawChunkSize = transform.ChunkSize;

    input.Seek(inputOffset, SeekOrigin.Begin);
    output.Seek(outputOffset, SeekOrigin.Begin);

    // Split file into chunks and transform them
    using (var t = p.BeginTask(1.0f, $"Transforming stream"))
    {
        // Chunk sizes recorded in the archive are cumulative, so track a running total.
        var cumulativeChunkSize = 0U;
        var bytesRemaining = (int)inputLength;

        // Determine how many chunks to split file into
        var numChunks = (int)Math.Ceiling(inputLength / (double)rawChunkSize);

        for (var i = 0; i < numChunks; ++i)
        {
            using (var st = p.BeginSubTask(1.0f / numChunks, $"Transforming chunk {i + 1}/{numChunks}"))
            {
                // The last chunk may not be exactly equal to the raw chunk size
                var nextChunkSize = (int)Math.Min(rawChunkSize, bytesRemaining);
                bytesRemaining -= nextChunkSize;

                // Transform chunk and write to output stream; returns the transformed
                // (e.g. compressed) size of this chunk.
                var chunkSize = await this.TransformChunkAsync(input, (uint)nextChunkSize, output, transform, p);
                cumulativeChunkSize += chunkSize;

                // Record chunk info (cumulative size, per archive format convention)
                var chunk = new NefsDataChunk(chunkSize, cumulativeChunkSize, transform);
                chunks.Add(chunk);
            }
        }
    }

    // Return item size: extracted size plus the per-chunk layout
    return (new NefsItemSize(inputLength, chunks));
}
/// <inheritdoc/>
public async Task DecompressAsync(
    Stream input,
    Int64 inputOffset,
    IReadOnlyList<UInt32> chunkSizes,
    Stream output,
    Int64 outputOffset,
    NefsProgress p,
    byte[] aes256key = null)
{
    var numChunks = chunkSizes.Count;
    input.Seek(inputOffset, SeekOrigin.Begin);

    // For each compressed chunk, decompress it and write it to the output file
    for (int i = 0; i < numChunks; i++)
    {
        using (var st = p.BeginSubTask(1.0f / numChunks, $"Extracting chunk {i + 1}/{numChunks}..."))
        {
            // Get chunk size
            var chunkSize = chunkSizes[i];

            // Remember that values in the ChunkSize list are cumulative, so to get the
            // actual chunk size we need to subtract the previous ChunkSize entry
            if (i > 0)
            {
                chunkSize -= chunkSizes[i - 1];
            }

            // Read in the compressed chunk
            var chunk = new byte[chunkSize];
            await input.ReadAsync(chunk, 0, (int)chunkSize, p.CancellationToken);

            // Decompress the chunk (and decrypt if an AES-256 key was supplied)
            if (aes256key == null)
            {
                // Not encrypted
                await this.DecompressChunkAsync(chunk, output, p);
            }
            else
            {
                // Encrypted
                await this.DecompressChunkAsync(chunk, aes256key, output, p);
            }
        }
    }
}
/// <summary>
/// Looks through the game executable to find header offsets for game.dat files.
/// </summary>
/// <returns>A list of game.dat archive sources.</returns>
private async Task<List<NefsArchiveSource>> FindGameDatHeaderOffsetsAsync(
    string gameDatDir,
    string gameExePath,
    NefsProgress p)
{
    // Bail out early (with a user notification) when the executable is missing.
    if (!this.FileSystem.File.Exists(gameExePath))
    {
        this.UiService.ShowMessageBox($"Cannot find executable file: {gameExePath}.");
        return new List<NefsArchiveSource>();
    }

    // Search for headers in the exe
    using (var task = p.BeginTask(1.0f, "Searching for headers"))
    {
        var headers = await this.Reader.FindHeadersAsync(gameExePath, gameDatDir, p);
        return headers;
    }
}
/// <inheritdoc/>
public async Task<NefsItemSize> CompressAsync(
    Stream input,
    Int64 inputOffset,
    UInt32 inputLength,
    Stream output,
    Int64 outputOffset,
    UInt32 chunkSize,
    NefsProgress p)
{
    var chunkSizes = new List<UInt32>();

    input.Seek(inputOffset, SeekOrigin.Begin);
    output.Seek(outputOffset, SeekOrigin.Begin);

    // Split file into chunks and compress them
    using (var t = p.BeginTask(1.0f, $"Compressing stream"))
    {
        var lastChunkSize = 0;
        var totalChunkSize = 0;
        var lastBytesRead = 0;
        var bytesRemaining = (int)inputLength;

        // Determine how many chunks to split file into
        var numChunks = (int)Math.Ceiling(inputLength / (double)chunkSize);

        for (var i = 0; i < numChunks; ++i)
        {
            using (var st = p.BeginSubTask(1.0f / numChunks, $"Compressing chunk {i + 1}/{numChunks}"))
            {
                // The last chunk may be smaller than the configured chunk size
                var nextBytes = Math.Min(chunkSize, bytesRemaining);

                // Compress this chunk and write it to the output file
                (lastBytesRead, lastChunkSize) = await DeflateHelper.DeflateAsync(input, (int)nextBytes, output, p.CancellationToken);
                totalChunkSize += lastChunkSize;
                bytesRemaining -= lastBytesRead;

                // Record the total compressed size after this chunk (sizes are cumulative)
                chunkSizes.Add((UInt32)totalChunkSize);
            }
        }
    }

    // Return item size: extracted size plus cumulative chunk sizes
    return (new NefsItemSize(inputLength, chunkSizes));
}
/// <summary>
/// Writes the stored data in little endian format.
/// </summary>
/// <param name="file">The file stream to write to.</param>
/// <param name="baseOffset">Base offset to write at.</param>
/// <param name="p">Progress info.</param>
/// <returns>The async task.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="file"/> is null.</exception>
/// <exception cref="InvalidOperationException">Thrown when the computed offset is negative.</exception>
public async Task WriteAsync(Stream file, UInt64 baseOffset, NefsProgress p)
{
    // Validate inputs. NOTE: the original passed the message text as the
    // paramName argument of ArgumentNullException and described the operation
    // as "read"; use the proper (paramName, message) overload here.
    if (file == null)
    {
        throw new ArgumentNullException(nameof(file), "File stream required to write data to.");
    }

    var actualOffset = (long)baseOffset + this.Offset;
    if (actualOffset < 0)
    {
        throw new InvalidOperationException("Invalid offset into file.");
    }

    // Seek to the target location and write this value's bytes.
    file.Seek(actualOffset, SeekOrigin.Begin);
    await file.WriteAsync(this.GetBytes(), 0, (int)this.Size, p.CancellationToken);
}
/// <summary>
/// Writes items' data to the output stream.
/// </summary>
/// <param name="stream">The stream to write to.</param>
/// <param name="items">List of items to write.</param>
/// <param name="firstDataOffset">
/// The offset from the beginning of the stream to write the first data.
/// </param>
/// <param name="p">Progress info.</param>
/// <returns>The offset to the end of the last data written.</returns>
private async Task<UInt64> WriteItemsAsync(
    Stream stream,
    NefsItemList items,
    UInt64 firstDataOffset,
    NefsProgress p)
{
    var nextDataOffset = firstDataOffset;

    // Prepare stream
    stream.Seek((long)firstDataOffset, SeekOrigin.Begin);

    // Update item info and write out item data
    var i = 0;
    foreach (var item in items.EnumerateById())
    {
        // Increment at the top of the loop. The original incremented at the
        // bottom, which `continue` (taken for directories) skipped, causing the
        // progress message to repeat item numbers.
        i++;

        using (var t = p.BeginSubTask(1.0f / items.Count, $"Writing data for item {i}/{items.Count}"))
        {
            // Get item
            var itemOffset = nextDataOffset;
            var itemSize = item.DataSource.Size;

            // Nothing to write if item is directory
            if (item.Type == NefsItemType.Directory)
            {
                continue;
            }

            // Write out item data
            nextDataOffset = await this.WriteItemAsync(stream, itemOffset, item, p);

            // Update item data source to point to the newly written data
            var dataSource = new NefsItemListDataSource(items, itemOffset, itemSize);
            item.UpdateDataSource(dataSource, NefsItemState.None);
        }
    }

    // Return the next data offset, which is the end of the written data
    return nextDataOffset;
}
/// <inheritdoc/>
public NefsItemList CreateItemList(string dataFilePath, NefsProgress p)
{
    var items = new NefsItemList(dataFilePath);

    for (var i = 0; i < this.Part1.EntriesByIndex.Count; ++i)
    {
        // Honor cancellation between items.
        p.CancellationToken.ThrowIfCancellationRequested();

        try
        {
            var item = this.CreateItemInfo((uint)i, items);
            items.Add(item);
        }
        catch (Exception ex)
        {
            // Log the failure reason instead of discarding the exception
            // (matches the logging style used by FindHeadersAsync).
            Log.LogError(ex, $"Failed to create item with part 1 index {i}, skipping.");
        }
    }

    return items;
}
public void Test_MultipleTasksWithMessage()
{
    var progress = new NefsProgress(new CancellationTokenSource().Token);

    // Outer task publishes message "A"; sub message stays empty.
    progress.BeginTask(1.0f, "A");
    Assert.Equal(0.0f, progress.Percent);
    Assert.Equal("A", progress.StatusMessage);
    Assert.Equal("", progress.StatusSubMessage);

    // Inner task replaces the status message with "B".
    progress.BeginTask(0.25f, "B");
    Assert.Equal(0.0f, progress.Percent);
    Assert.Equal("B", progress.StatusMessage);
    Assert.Equal("", progress.StatusSubMessage);

    // Ending the inner task contributes its weight to the total.
    progress.EndTask();
    Assert.Equal(0.25f, progress.Percent);

    // Ending the outer task completes the whole operation.
    progress.EndTask();
    Assert.Equal(1.0f, progress.Percent);
}
public void BeginSubTask_Valid_ProgressChangedRaised()
{
    var progress = new NefsProgress(new CancellationTokenSource().Token);

    progress.BeginTask(1.0f, "A");
    {
        // Subscribe once the outer task is active so we capture the sub task event.
        NefsProgressEventArgs received = null;
        progress.ProgressChanged += (sender, e) => received = e;

        // A sub task keeps the parent's message and contributes its own sub message.
        progress.BeginSubTask(1.0f, "sub");
        this.Verify(progress, 0.0f, "A", "sub");
        Assert.Equal("A", received.Message);
        Assert.Equal("sub", received.SubMessage);
        Assert.Equal(0.0f, received.Progress);

        // Ending the sub task clears the sub message.
        progress.EndTask();
        this.Verify(progress, 1.0f, "A", "");
    }

    progress.EndTask();
    this.Verify(progress, 1.0f, "", "");
}
// Exercises nested tasks, sub tasks, and sibling task weights that do not sum to 1.
public void Test_MoreTests()
{
    var ct = new CancellationTokenSource().Token;
    var p = new NefsProgress(ct);

    p.BeginTask(1.0f);
    {
        p.BeginTask(0.1f);
        this.Verify(p, 0.0f, "", "");
        {
            p.BeginSubTask(0.4f, "sub");
            this.Verify(p, 0.0f, "", "sub");
            p.EndTask();
            // 0.4 of the 0.1-weight parent task == 0.04 overall
            this.Verify(p, 0.04f, "", "");

            p.BeginSubTask(0.6f, "sub");
            this.Verify(p, 0.04f, "", "sub");
            p.EndTask();
            // Both sub tasks done (0.4 + 0.6) == full 0.1 parent weight
            this.Verify(p, 0.1f, "", "");
        }
        p.EndTask();
        this.Verify(p, 0.1f, "", "");

        p.BeginTask(0.8f);
        this.Verify(p, 0.1f, "", "");
        p.EndTask();
        this.Verify(p, 0.9f, "", "");

        p.BeginTask(0.05f);
        this.Verify(p, 0.9f, "", "");
        p.EndTask();
        this.Verify(p, 0.95f, "", "");

        // 0.1 + 0.8 + 0.05 == 0.95 (does not add up to 1)
    }
    p.EndTask();
    // Ending the outermost task reports 100% even though children summed to 0.95.
    this.Verify(p, 1.0f, "", "");
}
/// <summary>
/// Reads header part 5 from an input stream.
/// </summary>
/// <param name="stream">The stream to read from.</param>
/// <param name="offset">The offset to the header part from the beginning of the stream.</param>
/// <param name="size">The size of the header part.</param>
/// <param name="p">Progress info.</param>
/// <returns>The loaded header part.</returns>
internal async Task<NefsHeaderPart5> ReadHeaderPart5Async(Stream stream, uint offset, uint size, NefsProgress p)
{
    var part5 = new NefsHeaderPart5();

    // Only read when the stream/offset/size validate; otherwise return the
    // default-constructed part.
    if (this.ValidateHeaderPartStream(stream, offset, size, "5"))
    {
        await FileData.ReadDataAsync(stream, offset, part5, NefsVersion.Version200, p);
    }

    return part5;
}
/// <summary>
/// Reads header part 3 (the strings table) from an input stream.
/// </summary>
/// <param name="stream">The stream to read from.</param>
/// <param name="offset">The offset to the header part from the beginning of the stream.</param>
/// <param name="size">The size of the header part.</param>
/// <param name="p">Progress info.</param>
/// <returns>The loaded header part.</returns>
internal async Task<NefsHeaderPart3> ReadHeaderPart3Async(Stream stream, uint offset, uint size, NefsProgress p)
{
    var entries = new List<string>();

    // Validate inputs
    if (!this.ValidateHeaderPartStream(stream, offset, size, "3"))
    {
        return new NefsHeaderPart3(entries);
    }

    // Read in header part 3. Pass the cancellation token, consistent with the
    // other read paths (the original omitted it here).
    var bytes = new byte[size];
    stream.Seek(offset, SeekOrigin.Begin);
    await stream.ReadAsync(bytes, 0, (int)size, p.CancellationToken);

    // Process all null-terminated strings in the strings table
    var nextOffset = 0;
    while (nextOffset < size)
    {
        // BUG FIX: the original used integer division (nextOffset / size), which is
        // 0 for every iteration; cast to float so the task weight is meaningful.
        using (p.BeginTask((float)nextOffset / size))
        {
            // Find the next null terminator
            var nullOffset = (int)size;
            for (var i = nextOffset; i < size; ++i)
            {
                if (bytes[i] == 0)
                {
                    nullOffset = i;
                    break;
                }
            }

            if (nullOffset == size)
            {
                // No null terminator found, assume end of part 3. There can be a few
                // garbage bytes at the end of this part.
                break;
            }

            // Get the string
            var str = Encoding.ASCII.GetString(bytes, nextOffset, nullOffset - nextOffset);

            // Record entry
            entries.Add(str);

            // Find next string
            nextOffset = nullOffset + 1;
        }
    }

    return new NefsHeaderPart3(entries);
}
/// <inheritdoc/>
/// <remarks>
/// This method isn't pretty, but it works. It's based on assumptions that may break.
/// </remarks>
public async Task<List<NefsArchiveSource>> FindHeadersAsync(string exePath, string dataFileDir, NefsProgress p)
{
    var sources = new List<NefsArchiveSource>();
    var nextPart6Offset = 0U;

    // Load exe into memory
    var exeBytes = this.FileSystem.File.ReadAllBytes(exePath);

    // Search for the part 6 base offset. For NeFS version 1.6 and 2.0 (maybe others?)
    // header parts 6 and 7 are stored separate from the other header parts. So far all the
    // part 6/7 data has been in the ".data" section of the exe. So we can get that offset
    // from the PE header. Some games (e.g. Grid 2) have other data that comes before the
    // part 6/7 data in the ".data" section. So we have to look for a pattern that looks
    // like the data we are looking for.
    try
    {
        // NOTE(review): when GetRawOffsetToSection returns false, dataSectionOffset is
        // still assigned below — presumably the out value defaults to 0, matching the
        // "using 0 as offset" log message. Confirm PeHelper's contract.
        if (!PeHelper.GetRawOffsetToSection(exeBytes, ".data", out var dataSectionOffset))
        {
            Log.LogError("Failed to find part 6 offset; using 0 as offset.");
        }

        nextPart6Offset = (uint)dataSectionOffset;
    }
    catch (Exception ex)
    {
        Log.LogError(ex, "Failed to find part 6 offset; using 0 as offset.");
    }

    // Search for headers: scan the exe 4 bytes at a time.
    var i = 0;
    while (i + 4 <= exeBytes.Length)
    {
        var offset = i;
        i += 4;

        // Check for cancel
        p.CancellationToken.ThrowIfCancellationRequested();

        // Searching for a NeFS header: Look for 4E 65 46 53 (NeFS). This is the NeFS header
        // magic number.
        if (exeBytes[offset] != 0x4E
            || exeBytes[offset + 1] != 0x65
            || exeBytes[offset + 2] != 0x46
            || exeBytes[offset + 3] != 0x53)
        {
            continue;
        }

        // Check for a known version number (2.0.0 or 1.6.0) at offset 0x68 past the magic.
        var version = BitConverter.ToUInt32(exeBytes, offset + 0x68);
        if (version != 0x20000 && version != 0x10600)
        {
            continue;
        }

        // Try to read header intro
        try
        {
            using (var byteStream = new MemoryStream(exeBytes))
            {
                var (intro, headerStream) = await this.ReadHeaderIntroAsync(byteStream, (ulong)offset, p);

                using (headerStream)
                {
                    INefsHeaderIntroToc toc;
                    uint p6Size;
                    uint p7Size;

                    // Find next part 6 offset - there may be padding or other data before
                    // the part 6/7 data
                    nextPart6Offset = this.FindNextPart6Offset(nextPart6Offset, exeBytes);

                    // Read table of contents; part 6/7 sizes are derived from the
                    // part 1/2 entry counts (entry sizes differ between 1.6 and 2.0).
                    if (version == (int)NefsVersion.Version200)
                    {
                        toc = await this.Read20HeaderIntroTocAsync(headerStream, Nefs20HeaderIntroToc.Offset, p);
                        var numPart1Entries = toc.Part1Size / NefsHeaderPart1Entry.Size;
                        var numPart2Entries = toc.Part2Size / NefsHeaderPart2Entry.Size;
                        p6Size = numPart1Entries * Nefs20HeaderPart6Entry.Size;
                        p7Size = numPart2Entries * NefsHeaderPart7Entry.Size;
                    }
                    else
                    {
                        toc = await this.Read16HeaderIntroTocAsync(headerStream, Nefs16HeaderIntroToc.Offset, p);
                        var numPart1Entries = toc.Part1Size / NefsHeaderPart1Entry.Size;
                        var numPart2Entries = toc.Part2Size / NefsHeaderPart2Entry.Size;
                        p6Size = numPart1Entries * Nefs16HeaderPart6Entry.Size;
                        p7Size = numPart2Entries * NefsHeaderPart7Entry.Size;
                    }

                    // Read part 5 (contains the archive name string offset)
                    var p5 = await this.ReadHeaderPart5Async(headerStream, toc.OffsetToPart5, NefsHeaderPart5.Size, p);

                    // Find file name: seek to part 3 (strings table), then to the name string.
                    headerStream.Seek(toc.OffsetToPart3, SeekOrigin.Begin);
                    headerStream.Seek(p5.ArchiveNameStringOffset, SeekOrigin.Current);

                    // Read 256 bytes - this is overkill, probably won't have a filename
                    // that big
                    var nameBytes = new byte[256];
                    await headerStream.ReadAsync(nameBytes, 0, 256, p.CancellationToken);
                    var name = StringHelper.TryReadNullTerminatedAscii(nameBytes, 0, nameBytes.Length);
                    if (string.IsNullOrWhiteSpace(name))
                    {
                        // Failed to get name
                        Log.LogError($"Thought we found a header at {offset}, but could not read data file name.");
                        continue;
                    }

                    // Create archive source for this header
                    var dataFilePath = Path.Combine(dataFileDir, name);
                    var source = new NefsArchiveSource(exePath, (ulong)offset, nextPart6Offset, dataFilePath);
                    sources.Add(source);

                    // Keep looking
                    offset += (int)intro.HeaderSize;

                    // Update part 6 search offset to skip the one we just used
                    nextPart6Offset += p6Size + p7Size;
                }
            }
        }
        catch (Exception)
        {
            // Failed to read header, so assume not a header
            continue;
        }
    }

    return sources;
}
/// <summary>
/// Reads header part 2 from an input stream.
/// </summary>
/// <param name="stream">The stream to read from.</param>
/// <param name="offset">The offset to the header part from the beginning of the stream.</param>
/// <param name="size">The size of the header part.</param>
/// <param name="p">Progress info.</param>
/// <returns>The loaded header part.</returns>
internal async Task<NefsHeaderPart2> ReadHeaderPart2Async(Stream stream, uint offset, uint size, NefsProgress p)
{
    var entries = new List<NefsHeaderPart2Entry>();

    // Validate inputs; return an empty part when the stream is not usable.
    if (!this.ValidateHeaderPartStream(stream, offset, size, "2"))
    {
        return new NefsHeaderPart2(entries);
    }

    // Read each fixed-size entry sequentially.
    var numEntries = size / NefsHeaderPart2Entry.Size;
    var nextEntryOffset = offset;

    for (var index = 0; index < numEntries; ++index)
    {
        using (p.BeginTask(1.0f / numEntries))
        {
            var entry = new NefsHeaderPart2Entry();
            await FileData.ReadDataAsync(stream, nextEntryOffset, entry, NefsVersion.Version200, p);
            entries.Add(entry);
            nextEntryOffset += NefsHeaderPart2Entry.Size;
        }
    }

    return new NefsHeaderPart2(entries);
}
/// <summary>
/// Reads the header intro from an input stream. Returns a new stream that contains the
/// header data. This stream must be disposed by the caller. If the header is encrypted, the
/// header data is decrypted before being placed in the new stream.
/// </summary>
/// <param name="stream">The stream to read from.</param>
/// <param name="offset">The offset to the header intro from the beginning of the stream.</param>
/// <param name="p">Progress info.</param>
/// <returns>The loaded header intro and the stream to use for the rest of the header.</returns>
internal async Task<(NefsHeaderIntro Intro, Stream HeaderStream)> ReadHeaderIntroAsync(
    Stream stream,
    ulong offset,
    NefsProgress p)
{
    // The decrypted stream will need to be disposed by the caller
    var decryptedStream = new MemoryStream();
    NefsHeaderIntro intro;

    // Read magic number (first four bytes)
    stream.Seek((long)offset, SeekOrigin.Begin);
    var magicNum = new UInt32Type(0);
    await magicNum.ReadAsync(stream, offset, p);

    // Reset stream position
    stream.Seek((long)offset, SeekOrigin.Begin);

    // Check magic number
    if (magicNum.Value == NefsHeaderIntro.NefsMagicNumber)
    {
        // This is a non-encrypted NeFS header
        intro = new NefsHeaderIntro();
        await FileData.ReadDataAsync(stream, offset, intro, NefsVersion.Version200, p);

        // Copy the entire header to the decrypted stream (nothing to decrypt)
        stream.Seek((long)offset, SeekOrigin.Begin);
        await stream.CopyPartialAsync(decryptedStream, intro.HeaderSize, p.CancellationToken);
    }
    else
    {
        // Magic number is incorrect, assume file is encrypted
        Log.LogInformation("Header magic number mismatch, assuming header is encrypted.");

        // Encrypted headers:
        // - Headers are "encrypted" in a two-step process. RSA-1024. No padding is used.
        // - First 0x80 bytes are signed with an RSA private key (data -> decrypt ->
        //   scrambled data).
        // - Must use an RSA 1024-bit public key to unscramble the data (scrambled data ->
        //   encrypt -> data).
        // - For DiRT Rally 2 this public key is stored in the main executable.
        byte[] encryptedHeader = new byte[NefsHeaderIntro.Size + 1]; // TODO : Why the +1?
        await stream.ReadAsync(encryptedHeader, 0, (int)NefsHeaderIntro.Size, p.CancellationToken);

        // NOTE(review): the extra trailing zero byte presumably keeps BigInteger
        // (little-endian) from treating the buffer as a negative number — confirm.
        encryptedHeader[NefsHeaderIntro.Size] = 0;

        // Use big integers instead of RSA since the c# implementation forces the use of padding.
        var n = new BigInteger(this.RsaPublicKey);
        var e = new BigInteger(this.RsaExponent);
        var m = new BigInteger(encryptedHeader);

        // Decrypt the header intro
        byte[] decrypted = BigInteger.ModPow(m, e, n).ToByteArray();
        decryptedStream.Write(decrypted, 0, decrypted.Length);

        // Fill any leftover space with zeros
        if (decrypted.Length != NefsHeaderIntro.Size)
        {
            for (int i = 0; i < (NefsHeaderIntro.Size - decrypted.Length); i++)
            {
                decryptedStream.WriteByte(0);
            }
        }

        // Read header intro data from decrypted stream
        intro = new NefsHeaderIntro(isEncrpyted: true);
        await FileData.ReadDataAsync(decryptedStream, 0, intro, NefsVersion.Version200, p);

        // The rest of the header is encrypted using AES-256, decrypt using the key from the
        // header intro
        byte[] key = intro.GetAesKey();
        var headerSize = intro.HeaderSize;

        // Decrypt the rest of the header
        using (var rijAlg = new RijndaelManaged())
        {
            rijAlg.KeySize = 256;
            rijAlg.Key = key;
            rijAlg.Mode = CipherMode.ECB;
            rijAlg.BlockSize = 128;
            rijAlg.Padding = PaddingMode.Zeros;

            var decryptor = rijAlg.CreateDecryptor();

            // Append the AES-decrypted remainder after the RSA-decrypted intro
            decryptedStream.Seek(0, SeekOrigin.End);

            // Decrypt the data - make sure to leave open the base stream
            using (var cryptoStream = new CryptoStream(stream, decryptor, CryptoStreamMode.Read, true))
            {
                // Decrypt data from input stream and copy to the decrypted stream
                await cryptoStream.CopyPartialAsync(decryptedStream, headerSize, p.CancellationToken);
            }
        }
    }

    return (intro, decryptedStream);
}
/// <summary>
/// Reads the header from an input stream.
/// </summary>
/// <param name="originalStream">The stream to read from.</param>
/// <param name="offset">The offset to the header from the beginning of the stream.</param>
/// <param name="part6Offset">
/// The offset to the start of part 6 data from the beginning of the stream.
/// </param>
/// <param name="p">Progress info.</param>
/// <returns>The loaded header.</returns>
internal async Task<INefsHeader> ReadHeaderAsync(Stream originalStream, ulong offset, ulong part6Offset, NefsProgress p)
{
    Stream stream;
    NefsHeaderIntro intro;
    INefsHeader header = null;

    using (p.BeginTask(0.2f, "Reading header intro"))
    {
        // Decrypt header if needed
        (intro, stream) = await this.ReadHeaderIntroAsync(originalStream, offset, p);
    }

    // The header stream must be disposed. Use `using` so it is disposed even if
    // a nested read throws (the original leaked it on exception).
    using (stream)
    {
        // For now, assume that if the header is encrypted, then the part 6 data is not
        // separated. We've only seen encrypted headers in some nefs 2.0 archives (i.e.,
        // DLC content).
        var part6Stream = intro.IsEncrypted ? stream : originalStream;

        using (p.BeginTask(0.8f))
        {
            if (intro.NefsVersion == 0x20000)
            {
                // 2.0.0
                Log.LogInformation("Detected NeFS version 2.0.");
                header = await this.Read20HeaderAsync(stream, 0, part6Stream, part6Offset, intro, p);
            }
            else if (intro.NefsVersion == 0x10600)
            {
                // 1.6.0
                Log.LogInformation("Detected NeFS version 1.6.");
                header = await this.Read16HeaderAsync(stream, 0, part6Stream, part6Offset, intro, p);
            }
            else
            {
                // Unrecognized version; fall back to the 2.0 reader.
                // (Also fixes the "unkown" typo in the original log message.)
                Log.LogInformation($"Detected unknown NeFS version {intro.NefsVersion}.");
                header = await this.Read20HeaderAsync(stream, 0, part6Stream, part6Offset, intro, p);
            }
        }

        return header;
    }
}
/// <summary>
/// Reads header part 4 from an input stream.
/// </summary>
/// <param name="stream">The stream to read from.</param>
/// <param name="offset">The offset to the header part from the beginning of the stream.</param>
/// <param name="size">The size of the header part.</param>
/// <param name="part1">Header part 1.</param>
/// <param name="p">Progress info.</param>
/// <returns>The loaded header part.</returns>
internal async Task<Nefs20HeaderPart4> Read20HeaderPart4Async(Stream stream, uint offset, uint size, NefsHeaderPart1 part1, NefsProgress p)
{
    var entries = new List<Nefs20HeaderPart4Entry>();
    var indexLookup = new Dictionary<Guid, uint>();

    // Validate inputs; return an empty part when the stream is not usable.
    if (!this.ValidateHeaderPartStream(stream, offset, size, "4"))
    {
        return new Nefs20HeaderPart4(entries, indexLookup);
    }

    // Read each fixed-size entry sequentially.
    var numEntries = size / Nefs20HeaderPart4Entry.Size;
    var nextEntryOffset = offset;

    for (var index = 0; index < numEntries; ++index)
    {
        using (p.BeginTask(1.0f / numEntries))
        {
            var entry = new Nefs20HeaderPart4Entry();
            await FileData.ReadDataAsync(stream, nextEntryOffset, entry, NefsVersion.Version200, p);
            entries.Add(entry);
            nextEntryOffset += Nefs20HeaderPart4Entry.Size;
        }
    }

    // Create a table to allow looking up a part 4 index by item Guid
    foreach (var p1Entry in part1.EntriesByIndex)
    {
        indexLookup.Add(p1Entry.Guid, p1Entry.IndexPart4);
    }

    return new Nefs20HeaderPart4(entries, indexLookup);
}
/// <summary>
/// Reads the header intro table of contents from an input stream.
/// </summary>
/// <param name="stream">The stream to read from.</param>
/// <param name="offset">
/// The offset to the header intro table of contents from the beginning of the stream.
/// </param>
/// <param name="p">Progress info.</param>
/// <returns>The loaded header intro offsets data.</returns>
internal async Task<Nefs20HeaderIntroToc> Read20HeaderIntroTocAsync(Stream stream, uint offset, NefsProgress p)
{
    // The table of contents is a fixed-layout structure read directly from the stream.
    var tableOfContents = new Nefs20HeaderIntroToc();
    await FileData.ReadDataAsync(stream, offset, tableOfContents, NefsVersion.Version200, p);
    return tableOfContents;
}
/// <summary>
/// Reads a version 2.0 header from an input stream.
/// </summary>
/// <param name="stream">The stream to read from.</param>
/// <param name="offset">The offset to the header from the beginning of the stream.</param>
/// <param name="part6Stream">The stream that contains part 6/7 data.</param>
/// <param name="part6Offset">The offset to the start of part 6/7 data.</param>
/// <param name="intro">The pre-parsed header intro.</param>
/// <param name="p">Progress info.</param>
/// <returns>The loaded header.</returns>
internal async Task<Nefs20Header> Read20HeaderAsync(
    Stream stream,
    ulong offset,
    Stream part6Stream,
    ulong part6Offset,
    NefsHeaderIntro intro,
    NefsProgress p)
{
    Nefs20HeaderIntroToc toc = null;
    NefsHeaderPart1 part1 = null;
    NefsHeaderPart2 part2 = null;
    NefsHeaderPart3 part3 = null;
    Nefs20HeaderPart4 part4 = null;
    NefsHeaderPart5 part5 = null;
    Nefs20HeaderPart6 part6 = null;
    NefsHeaderPart7 part7 = null;
    NefsHeaderPart8 part8 = null;

    // Calc weight of each task (8 parts + table of contents)
    // NOTE(review): there are 9 tasks but the weight is 1/10, so this method only
    // advances progress to 90% of its allotted weight — confirm whether intentional.
    var weight = 1.0f / 10.0f;

    using (p.BeginTask(weight, "Reading header intro table of contents"))
    {
        toc = await this.Read20HeaderIntroTocAsync(stream, Nefs20HeaderIntroToc.Offset, p);
    }

    using (p.BeginTask(weight, "Reading header part 1"))
    {
        part1 = await this.ReadHeaderPart1Async(stream, toc.OffsetToPart1, toc.Part1Size, p);
    }

    using (p.BeginTask(weight, "Reading header part 2"))
    {
        part2 = await this.ReadHeaderPart2Async(stream, toc.OffsetToPart2, toc.Part2Size, p);
    }

    using (p.BeginTask(weight, "Reading header part 3"))
    {
        part3 = await this.ReadHeaderPart3Async(stream, toc.OffsetToPart3, toc.Part3Size, p);
    }

    using (p.BeginTask(weight, "Reading header part 4"))
    {
        // Part 4 needs part 1 to build its Guid index lookup.
        part4 = await this.Read20HeaderPart4Async(stream, toc.OffsetToPart4, toc.Part4Size, part1, p);
    }

    using (p.BeginTask(weight, "Reading header part 5"))
    {
        part5 = await this.ReadHeaderPart5Async(stream, toc.OffsetToPart5, NefsHeaderPart5.Size, p);
    }

    using (p.BeginTask(weight, "Reading header part 6"))
    {
        // Parts 6/7 may live in a separate stream, at part6Offset.
        part6 = await this.Read20HeaderPart6Async(part6Stream, (uint)part6Offset + toc.OffsetToPart6, part1, p);
    }

    using (p.BeginTask(weight, "Reading header part 7"))
    {
        // Part 7 has one entry per part 2 entry.
        var numEntries = (uint)part2.EntriesByIndex.Count;
        part7 = await this.ReadHeaderPart7Async(part6Stream, (uint)part6Offset + toc.OffsetToPart7, numEntries, p);
    }

    using (p.BeginTask(weight, "Reading header part 8"))
    {
        // Part 8 occupies the remainder of the header.
        var part8Size = intro.HeaderSize - toc.OffsetToPart8;
        part8 = await this.ReadHeaderPart8Async(stream, toc.OffsetToPart8, part8Size, p);
    }

    // Validate header hash
    if (!this.ValidateHash(stream, offset, intro))
    {
        Log.LogWarning("Header hash does not match expected value.");
    }

    // The header stream must be disposed
    stream.Dispose();

    return (new Nefs20Header(intro, toc, part1, part2, part3, part4, part5, part6, part7, part8));
}
/// <inheritdoc/>
public async Task<NefsArchive> ReadArchiveAsync(NefsArchiveSource source, NefsProgress p)
{
    // Unpack the source record and delegate to the explicit-parameter overload.
    var archive = await this.ReadArchiveAsync(
        source.HeaderFilePath,
        source.HeaderOffset,
        source.HeaderPart6Offset,
        source.DataFilePath,
        p);
    return archive;
}
/// <inheritdoc/>
public async Task<NefsArchive> ReadArchiveAsync(string filePath, NefsProgress p)
{
    // Header and data share the same file; header starts at the standard intro
    // offset with no separate part 6 offset.
    var archive = await this.ReadArchiveAsync(filePath, Nefs20Header.IntroOffset, 0, filePath, p);
    return archive;
}
/// <inheritdoc/>
public override async Task ReadAsync(Stream file, UInt64 baseOffset, NefsProgress p) =>
    this.Value = await this.ReadFileAsync(file, baseOffset, p);
/// <summary>
/// Reads header part 6 from an input stream.
/// </summary>
/// <param name="stream">The stream to read from.</param>
/// <param name="offset">The offset to the header part from the beginning of the stream.</param>
/// <param name="part1">Header part 1. Used to match part 6 data with an item.</param>
/// <param name="p">Progress info.</param>
/// <returns>The loaded header part.</returns>
internal async Task<Nefs20HeaderPart6> Read20HeaderPart6Async(Stream stream, uint offset, NefsHeaderPart1 part1, NefsProgress p)
{
    var entries = new List<Nefs20HeaderPart6Entry>();

    // Part 6 has one entry per part 1 entry, so its size is derived from part 1.
    var numItems = part1.EntriesByIndex.Count;
    var size = numItems * Nefs20HeaderPart6Entry.Size;

    // Validate inputs
    if (!this.ValidateHeaderPartStream(stream, offset, (uint)size, "6"))
    {
        return (new Nefs20HeaderPart6(entries));
    }

    // Get entries in part 6
    var entryOffset = offset;

    for (var i = 0; i < numItems; ++i)
    {
        using (p.BeginTask(1.0f / numItems))
        {
            // Make sure there is a corresponding index in part 1.
            // NOTE(review): this guard looks unreachable since the loop bound is
            // part1.EntriesByIndex.Count itself; kept as defensive code.
            if (i >= part1.EntriesByIndex.Count)
            {
                Log.LogError($"Could not find matching item entry for part 6 index {i} in part 1.");
                continue;
            }

            // Get Guid from part 1. Part 1 entry order matches part 6 entry order.
            var guid = part1.EntriesByIndex[i].Guid;

            // Read the entry data
            var entry = new Nefs20HeaderPart6Entry(guid);
            await FileData.ReadDataAsync(stream, entryOffset, entry, NefsVersion.Version200, p);
            entryOffset += Nefs20HeaderPart6Entry.Size;

            entries.Add(entry);
        }
    }

    return (new Nefs20HeaderPart6(entries));
}