/// <inheritdoc/>
public async Task<NefsItemSize> TransformFileAsync(
    INefsDataSource input,
    string outputFile,
    NefsDataTransform transform,
    NefsProgress p)
{
    return await this.TransformFileAsync(input.FilePath, outputFile, transform, p);
}
/// <inheritdoc/>
public async Task<UInt32> TransformChunkAsync(
    Stream input,
    UInt32 inputChunkSize,
    Stream output,
    NefsDataTransform transform,
    NefsProgress p)
{
    using (var transformedStream = new MemoryStream())
    {
        // Copy raw chunk to temp stream
        await input.CopyPartialAsync(transformedStream, inputChunkSize, p.CancellationToken);
        transformedStream.Seek(0, SeekOrigin.Begin);

        // Compress
        if (transform.IsZlibCompressed)
        {
            using (var tempStream = new MemoryStream())
            {
                await DeflateHelper.DeflateAsync(transformedStream, (int)inputChunkSize, tempStream);

                tempStream.Seek(0, SeekOrigin.Begin);
                transformedStream.Seek(0, SeekOrigin.Begin);

                await tempStream.CopyPartialAsync(transformedStream, tempStream.Length, p.CancellationToken);
                transformedStream.Seek(0, SeekOrigin.Begin);
                transformedStream.SetLength(tempStream.Length);
            }
        }

        // Encrypt
        if (transform.IsAesEncrypted)
        {
            using (var aesManager = this.CreateAesManager(transform.Aes256Key))
            using (var cryptoStream = new CryptoStream(transformedStream, aesManager.CreateEncryptor(), CryptoStreamMode.Read, leaveOpen: true))
            using (var tempStream = new MemoryStream())
            {
                await cryptoStream.CopyToAsync(tempStream, p.CancellationToken);

                tempStream.Seek(0, SeekOrigin.Begin);
                transformedStream.Seek(0, SeekOrigin.Begin);

                await tempStream.CopyPartialAsync(transformedStream, tempStream.Length, p.CancellationToken);
                transformedStream.Seek(0, SeekOrigin.Begin);
                transformedStream.SetLength(tempStream.Length);
            }
        }

        // Copy transformed chunk to output stream
        await transformedStream.CopyToAsync(output, p.CancellationToken);

        // Return size of transformed chunk
        return (uint)transformedStream.Length;
    }
}
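The following is an illustrative usage sketch, not part of the library source: it shows one way the public TransformChunkAsync above could be driven directly with in-memory streams and a zlib-only transform. The helper name and the 64 KiB chunk size are hypothetical, and the sketch assumes the usual usings (System.IO, System.Threading.Tasks) plus a NefsTransformer constructed from an IFileSystem, as in the tests further down.

// Illustrative sketch only; helper name and chunk size are arbitrary.
public static async Task<uint> ExampleTransformSingleChunkAsync(NefsTransformer transformer, byte[] rawChunk)
{
    // Zlib-compress, no AES key, 64 KiB raw chunk size.
    var transform = new NefsDataTransform(0x10000U, true);

    using (var input = new MemoryStream(rawChunk))
    using (var output = new MemoryStream())
    {
        // Transform exactly rawChunk.Length bytes; the return value is the transformed (compressed) size.
        return await transformer.TransformChunkAsync(input, (uint)rawChunk.Length, output, transform, new NefsProgress());
    }
}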
/// <summary> /// Initializes a new instance of the <see cref="NefsItemSize"/> class. This constructor can /// be used if the item is not compressed (i.e., extracted size == compressed size). /// </summary> /// <param name="extractedSize">The size of the item's data when extracted from the archive.</param> public NefsItemSize(UInt32 extractedSize) { this.ExtractedSize = extractedSize; var transform = new NefsDataTransform(extractedSize); var chunk = new NefsDataChunk(extractedSize, extractedSize, transform); this.Chunks = new List <NefsDataChunk> { chunk }; }
/// <inheritdoc/>
public async Task<NefsItemSize> TransformFileAsync(
    string inputFile,
    string outputFile,
    NefsDataTransform transform,
    NefsProgress p)
{
    using (var inputStream = this.FileSystem.File.OpenRead(inputFile))
    using (var outputStream = this.FileSystem.File.OpenWrite(outputFile))
    {
        return await this.TransformAsync(inputStream, 0, (uint)inputStream.Length, outputStream, 0, transform, p);
    }
}
public async Task DetransformAsync_ExtractedSizeSmallerThanTransformed_DataExtracted()
{
    // There are situations in version 1.6 headers where the extracted size is smaller than
    // the compressed size, resulting in extra garbage data/padding at the end of an
    // extracted file. Need to make sure this extra garbage data is ignored.
    const string Data = "Hello there!";
    var dataBytes = Encoding.ASCII.GetBytes(Data);

    var aesStr = "542E5211BD8A3AE494554DA4A18884B1C546258BCCA4B76D055D52602819525A";
    var aes = StringHelper.FromHexString(aesStr);
    var chunkSize = 0x10000U;
    var transform = new NefsDataTransform(chunkSize, false, aes);
    var transformer = new NefsTransformer(this.fileSystem);

    using (var inputStream = new MemoryStream())
    using (var transformedStream = new MemoryStream())
    using (var outputStream = new MemoryStream())
    {
        // Copy data to input stream
        inputStream.Write(dataBytes, 0, dataBytes.Length);

        // Add some garbage data to end of stream
        await transformedStream.WriteAsync(Encoding.ASCII.GetBytes("HAHAHAHAHA"), 0, 10);

        // Transform
        await transformer.TransformAsync(inputStream, 0, (uint)dataBytes.Length, transformedStream, 0, transform, new NefsProgress());
        transformedStream.Seek(0, SeekOrigin.Begin);

        // Setup chunk info
        var extractedSize = Data.Length;
        var transformedSize = transformedStream.Length;
        var chunk = new NefsDataChunk((uint)transformedSize, (uint)transformedSize, transform);
        var chunks = new List<NefsDataChunk> { chunk };

        // Extract
        await transformer.DetransformAsync(transformedStream, 0, outputStream, 0, (uint)extractedSize, chunks, new NefsProgress());
        outputStream.Seek(0, SeekOrigin.Begin);

        var outputBytes = new byte[Data.Length];
        await outputStream.ReadAsync(outputBytes, 0, (int)outputStream.Length);
        var outputStr = Encoding.ASCII.GetString(outputBytes);

        // Verify
        Assert.Equal(extractedSize, outputStream.Length);
        Assert.Equal(Data, outputStr);
    }
}
/// <inheritdoc/>
public NefsItem CreateItemInfo(Guid guid, NefsItemList dataSourceList)
{
    var p1 = this.Part1.EntriesByGuid[guid];
    var p2 = this.Part2.EntriesByIndex[(int)p1.IndexPart2];
    var p6 = this.Part6.EntriesByGuid[guid];
    var id = p1.Id;

    // Gather attributes
    var attributes = p6.CreateAttributes();

    // Find parent
    var parentId = this.GetItemDirectoryId(p1.IndexPart2);

    // Offset and size
    var dataOffset = p1.Data0x00_OffsetToData.Value;
    var extractedSize = p2.Data0x0c_ExtractedSize.Value;

    // Transform
    var transform = new NefsDataTransform(this.TableOfContents.BlockSize, true, this.Intro.IsEncrypted ? this.Intro.GetAesKey() : null);

    // Data source
    INefsDataSource dataSource;
    if (attributes.IsDirectory)
    {
        // Item is a directory
        dataSource = new NefsEmptyDataSource();
        transform = null;
    }
    else if (p1.IndexPart4 == 0xFFFFFFFFU)
    {
        // Item is not compressed
        var size = new NefsItemSize(extractedSize);
        dataSource = new NefsItemListDataSource(dataSourceList, dataOffset, size);
    }
    else
    {
        // Item is compressed
        var numChunks = this.TableOfContents.ComputeNumChunks(p2.ExtractedSize);
        var chunkSize = this.TableOfContents.BlockSize;
        var chunks = this.Part4.CreateChunksList(p1.IndexPart4, numChunks, chunkSize, this.Intro.GetAesKey());
        var size = new NefsItemSize(extractedSize, chunks);
        dataSource = new NefsItemListDataSource(dataSourceList, dataOffset, size);
    }

    // File name and path
    var fileName = this.GetItemFileName(p1.IndexPart2);

    // Create item
    return new NefsItem(p1.Guid, id, fileName, parentId, dataSource, transform, attributes);
}
/// <inheritdoc/>
public async Task<NefsItemSize> TransformFileAsync(
    string inputFile,
    Int64 inputOffset,
    UInt32 inputLength,
    Stream output,
    Int64 outputOffset,
    NefsDataTransform transform,
    NefsProgress p)
{
    using (var inputStream = this.FileSystem.File.OpenRead(inputFile))
    {
        return await this.TransformAsync(inputStream, inputOffset, inputLength, output, outputOffset, transform, p);
    }
}
/// <inheritdoc/>
public async Task<NefsItemSize> TransformAsync(
    Stream input,
    Int64 inputOffset,
    UInt32 inputLength,
    Stream output,
    Int64 outputOffset,
    NefsDataTransform transform,
    NefsProgress p)
{
    var chunks = new List<NefsDataChunk>();
    var rawChunkSize = transform.ChunkSize;

    input.Seek(inputOffset, SeekOrigin.Begin);
    output.Seek(outputOffset, SeekOrigin.Begin);

    // Split file into chunks and transform them
    using (var t = p.BeginTask(1.0f, "Transforming stream"))
    {
        var cumulativeChunkSize = 0U;
        var bytesRemaining = (int)inputLength;

        // Determine how many chunks to split file into
        var numChunks = (int)Math.Ceiling(inputLength / (double)rawChunkSize);

        for (var i = 0; i < numChunks; ++i)
        {
            using (var st = p.BeginSubTask(1.0f / numChunks, $"Transforming chunk {i + 1}/{numChunks}"))
            {
                // The last chunk may not be exactly equal to the raw chunk size
                var nextChunkSize = (int)Math.Min(rawChunkSize, bytesRemaining);
                bytesRemaining -= nextChunkSize;

                // Transform chunk and write to output stream
                var chunkSize = await this.TransformChunkAsync(input, (uint)nextChunkSize, output, transform, p);
                cumulativeChunkSize += chunkSize;

                // Record chunk info
                var chunk = new NefsDataChunk(chunkSize, cumulativeChunkSize, transform);
                chunks.Add(chunk);
            }
        }
    }

    // Return item size
    return new NefsItemSize(inputLength, chunks);
}
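As a quick illustration of the chunking arithmetic in TransformAsync above (hypothetical numbers, not taken from the library): with a 64 KiB raw chunk size and a 150,000-byte input, the loop runs three times and only the final chunk is short.

// Hypothetical example of the chunk-count math used by TransformAsync.
var exampleRawChunkSize = 0x10000U;   // 65,536 bytes
var exampleInputLength = 150000U;
var exampleNumChunks = (int)Math.Ceiling(exampleInputLength / (double)exampleRawChunkSize);
// exampleNumChunks == 3; raw chunk sizes are 65,536 + 65,536 + 18,928 = 150,000 bytes.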
/// <summary>
/// Creates a test archive. Does not write an archive to disk. Just creates a <see
/// cref="NefsArchive"/> object.
/// </summary>
/// <param name="filePath">The file path to use for the archive.</param>
/// <returns>A <see cref="NefsArchive"/>.</returns>
public static NefsArchive Create(string filePath)
{
    var items = new NefsItemList(filePath);
    var aesString = "44927647059D3D73CDCC8D4C6E808538CAD7622D076A507E16C43A8DD8E3B5AB";

    var file1Attributes = new NefsItemAttributes(v20IsZlib: true);
    var file1Chunks = NefsDataChunk.CreateChunkList(File1ChunkSizes, TestHelpers.TestTransform);
    var file1DataSource = new NefsItemListDataSource(items, File1Offset, new NefsItemSize(File1ExtractedSize, file1Chunks));
    var file1 = new NefsItem(File1Guid, new NefsItemId(File1ItemId), File1Name, new NefsItemId(File1DirectoryId), file1DataSource, TestHelpers.TestTransform, file1Attributes);
    items.Add(file1);

    var dir1Attributes = new NefsItemAttributes(isDirectory: true);
    var dir1DataSource = new NefsEmptyDataSource();
    var dir1 = new NefsItem(Dir1Guid, new NefsItemId(Dir1ItemId), Dir1Name, new NefsItemId(Dir1DirectoryId), dir1DataSource, null, dir1Attributes);
    items.Add(dir1);

    var file2Attributes = new NefsItemAttributes(v20IsZlib: true);
    var file2Chunks = NefsDataChunk.CreateChunkList(File2ChunkSizes, TestHelpers.TestTransform);
    var file2DataSource = new NefsItemListDataSource(items, File2Offset, new NefsItemSize(File2ExtractedSize, file2Chunks));
    var file2 = new NefsItem(File2Guid, new NefsItemId(File2ItemId), File2Name, new NefsItemId(File2DirectoryId), file2DataSource, TestHelpers.TestTransform, file2Attributes);
    items.Add(file2);

    var file3Attributes = new NefsItemAttributes(v20IsZlib: true);
    var file3Transform = new NefsDataTransform(File3ExtractedSize);
    var file3Chunks = NefsDataChunk.CreateChunkList(File3ChunkSizes, file3Transform);
    var file3DataSource = new NefsItemListDataSource(items, File3Offset, new NefsItemSize(File3ExtractedSize, file3Chunks));
    var file3 = new NefsItem(File3Guid, new NefsItemId(File3ItemId), File3Name, new NefsItemId(File3DirectoryId), file3DataSource, file3Transform, file3Attributes);
    items.Add(file3);

    Assert.Equal((int)NumItems, items.Count);

    var intro = new NefsHeaderIntro();
    intro.Data0x6c_NumberOfItems.Value = (uint)items.Count;
    intro.Data0x24_AesKeyHexString.Value = Encoding.ASCII.GetBytes(aesString);

    var toc = new Nefs20HeaderIntroToc();
    var header = new Nefs20Header(intro, toc, items);

    return new NefsArchive(header, items);
}
/// <summary> /// Creates a list of chunk metadata for an item. /// </summary> /// <param name="index">The part 4 index where the chunk list starts at.</param> /// <param name="numChunks">The number of chunks.</param> /// <param name="chunkSize">The raw chunk size used in the transform.</param> /// <param name="aes256key">The AES 256 key to use if chunk is encrypted.</param> /// <returns>A list of chunk data.</returns> public List <NefsDataChunk> CreateChunksList(uint index, uint numChunks, uint chunkSize, byte[] aes256key) { var chunks = new List <NefsDataChunk>(); for (var i = index; i < index + numChunks; ++i) { var entry = this.entriesByIndex[(int)i]; var cumulativeSize = entry.CumulativeBlockSize; var size = cumulativeSize; if (i > index) { size -= this.entriesByIndex[(int)i - 1].CumulativeBlockSize; } // Determine transform -- need to clean this up NefsDataTransform transform; var transformVal = entry.Data0x04_TransformType.Value; switch (transformVal) { case (int)Nefs16HeaderPart4TransformType.Zlib: transform = new NefsDataTransform(chunkSize, true); break; case (int)Nefs16HeaderPart4TransformType.Aes: transform = new NefsDataTransform(chunkSize, false, aes256key); break; case (int)Nefs16HeaderPart4TransformType.None: transform = new NefsDataTransform(chunkSize, false); break; default: Log.LogError("Found v1.6 data chunk with unknown transform; aborting."); return(new List <NefsDataChunk>()); } // Create data chunk info var chunk = new NefsDataChunk(size, cumulativeSize, transform); chunks.Add(chunk); } return(chunks); }
public async Task DetransformFileAsync_NotEncrypted_DataDecompressed()
{
    const string Data = @"Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.
Hello. This is the input data. It is not encrypted.";

    var sourceFilePath = @"C:\source.txt";
    var compressedFilePath = @"C:\compressed.dat";
    var destFilePath = @"C:\dest.txt";

    var chunkSize = 0x10000U;
    var transform = new NefsDataTransform(chunkSize, true);

    this.fileSystem.AddFile(sourceFilePath, new MockFileData(Data));

    // Compress the source data
    var transformer = new NefsTransformer(this.fileSystem);
    var size = await transformer.TransformFileAsync(sourceFilePath, compressedFilePath, transform, new NefsProgress());

    // Decompress the data
    await transformer.DetransformFileAsync(compressedFilePath, 0, destFilePath, 0, (uint)Data.Length, size.Chunks, new NefsProgress());

    // Verify
    var decompressedText = this.fileSystem.File.ReadAllText(destFilePath);
    Assert.Equal(Data, decompressedText);
}
private Nefs16HeaderPart4TransformType GetTransformType(NefsDataTransform transform)
{
    // Can v1.6 have both aes and zlib simultaneously?
    if (transform.IsAesEncrypted && transform.IsZlibCompressed)
    {
        Log.LogWarning("Found multiple data transforms for header part 4 entry.");
    }

    if (transform.IsAesEncrypted)
    {
        return Nefs16HeaderPart4TransformType.Aes;
    }
    else if (transform.IsZlibCompressed)
    {
        return Nefs16HeaderPart4TransformType.Zlib;
    }

    return Nefs16HeaderPart4TransformType.None;
}
/// <summary> /// Creates a list of chunk metadata for an item. /// </summary> /// <param name="index">The part 4 index where the chunk list starts at.</param> /// <param name="numChunks">The number of chunks.</param> /// <param name="transform">The transform used for data chunks.</param> /// <returns>A list of chunk data.</returns> public List <NefsDataChunk> CreateChunksList(uint index, uint numChunks, NefsDataTransform transform) { var chunks = new List <NefsDataChunk>(); for (var i = index; i < index + numChunks; ++i) { var cumulativeSize = this.entriesByIndex[(int)i].CumulativeChunkSize; var size = cumulativeSize; if (i > index) { size -= this.entriesByIndex[(int)i - 1].CumulativeChunkSize; } var chunk = new NefsDataChunk(size, cumulativeSize, transform); chunks.Add(chunk); } return(chunks); }
/// <summary> /// Initializes a new instance of the <see cref="NefsItem"/> class. /// </summary> /// <param name="guid">The unique identifier for this item.</param> /// <param name="id">The item id.</param> /// <param name="fileName">The file name within the archive.</param> /// <param name="directoryId">The directory id the item is in.</param> /// <param name="dataSource">The data source for the item's data.</param> /// <param name="transform"> /// The transform that is applied to this item's data. Can be null if no transform. /// </param> /// <param name="attributes">Additional attributes.</param> /// <param name="state">The item state.</param> public NefsItem( Guid guid, NefsItemId id, string fileName, NefsItemId directoryId, INefsDataSource dataSource, NefsDataTransform transform, NefsItemAttributes attributes, NefsItemState state = NefsItemState.None) { this.Guid = guid; this.Id = id; this.DirectoryId = directoryId; this.DataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource)); this.State = state; this.Transform = transform; this.Attributes = attributes; // Save file name this.FileName = fileName ?? throw new ArgumentNullException(nameof(fileName)); }
/// <summary> /// Creates a <see cref="NefsArchive"/> to be used for testing. /// </summary> /// <param name="filePath">The file path to associate with the archive.</param> /// <returns>An archive object.</returns> /// <remarks><![CDATA[ Test archive items: /file1 /dir1 /dir1/file2 ]]></remarks> internal static NefsArchive CreateTestArchive(string filePath) { var items = new NefsItemList(filePath); var transform = new NefsDataTransform(50, true); var file1Attributes = new NefsItemAttributes(v20IsZlib: true); var file1Chunks = NefsDataChunk.CreateChunkList(new List <UInt32> { 2, 3, 4 }, transform); var file1DataSource = new NefsItemListDataSource(items, 100, new NefsItemSize(20, file1Chunks)); var file1 = new NefsItem(Guid.NewGuid(), new NefsItemId(0), "file1", new NefsItemId(0), file1DataSource, transform, file1Attributes); items.Add(file1); var dir1Attributes = new NefsItemAttributes(isDirectory: true); var dir1DataSource = new NefsEmptyDataSource(); var dir1 = new NefsItem(Guid.NewGuid(), new NefsItemId(1), "dir1", new NefsItemId(1), dir1DataSource, null, dir1Attributes); items.Add(dir1); var file2Attributes = new NefsItemAttributes(v20IsZlib: true); var file2Chunks = NefsDataChunk.CreateChunkList(new List <UInt32> { 5, 6, 7 }, transform); var file2DataSource = new NefsItemListDataSource(items, 104, new NefsItemSize(15, file2Chunks)); var file2 = new NefsItem(Guid.NewGuid(), new NefsItemId(2), "file2", dir1.Id, file2DataSource, transform, file2Attributes); items.Add(file2); var intro = new NefsHeaderIntro(); var toc = new Nefs20HeaderIntroToc(); var header = new Nefs20Header(intro, toc, items); return(new NefsArchive(header, items)); }
public async Task TransformAsync_VariousData_DataCompressed(CompressAsyncTestData test)
{
    var input = Encoding.ASCII.GetBytes(InputDataString);

    using (var inputStream = new MemoryStream(input))
    using (var outputStream = new MemoryStream())
    {
        var transformer = new NefsTransformer(this.fileSystem);
        var transform = new NefsDataTransform(test.ChunkSize, true);
        var size = await transformer.TransformAsync(inputStream, test.Offset, test.Length, outputStream, 0, transform, new NefsProgress());

        // Read data from output stream
        var resultData = new byte[outputStream.Length];
        outputStream.Seek(0, SeekOrigin.Begin);
        await outputStream.ReadAsync(resultData, 0, (int)outputStream.Length);

        // Verify
        Assert.Equal(test.Length, size.ExtractedSize);
        Assert.Equal(test.ExpectedChunks.Count, size.Chunks.Count);
        Assert.True(test.ExpectedChunks.SequenceEqual(size.Chunks.Select(c => c.CumulativeSize)));
        Assert.Equal(test.ExpectedData.Length, resultData.Length);
        Assert.True(test.ExpectedData.SequenceEqual(resultData));
    }
}