public async Task Decompress(CompressionType type)
{
    string testFilePath = CreateCompressedFile(type);
    int _bufferSize = 1024;
    var bytes = new byte[_bufferSize];
    using (MemoryStream gzStream = await LocalMemoryStream.readAppFileAsync(testFilePath))
    using (MemoryStream strippedMs = StripHeaderAndFooter.Strip(gzStream))
        foreach (var iteration in Benchmark.Iterations)
        {
            using (iteration.StartMeasurement())
                for (int i = 0; i < Benchmark.InnerIterationCount; i++)
                {
                    int retCount = -1;
                    using (DeflateStream zip = new DeflateStream(strippedMs, CompressionMode.Decompress, leaveOpen: true))
                    {
                        while (retCount != 0)
                        {
                            retCount = zip.Read(bytes, 0, _bufferSize);
                        }
                    }
                    strippedMs.Seek(0, SeekOrigin.Begin);
                }
        }
    File.Delete(testFilePath);
}
public static async Task ZipArchiveEntry_CorruptedStream_ReadMode_Read_UpToUncompressedSize()
{
    MemoryStream stream = await LocalMemoryStream.readAppFileAsync(zfile("normal.zip"));

    int nameOffset = PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 8); // patch uncompressed size in file header
    PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 22, nameOffset + s_tamperedFileName.Length); // patch in central directory too

    using (ZipArchive archive = new ZipArchive(stream, ZipArchiveMode.Read))
    {
        ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName);
        using (MemoryStream ms = new MemoryStream())
        using (Stream source = e.Open())
        {
            byte[] buffer = new byte[s_bufferSize];
            int read;
            while ((read = source.Read(buffer, 0, buffer.Length)) != 0)
            {
                ms.Write(buffer, 0, read);
            }
            Assert.Equal(e.Length, ms.Length); // should only decompress up to the uncompressed size
            Assert.Equal(0, source.Read(buffer, 0, s_bufferSize)); // shouldn't be able to read any more

            ms.Seek(0, SeekOrigin.Begin);
            while ((read = ms.Read(buffer, 0, buffer.Length)) != 0)
            {
                // No need to do anything, just making sure all bytes are readable from the output stream
            }
            Assert.Equal(ms.Position, ms.Length); // all bytes must be read
        }
    }
}
public async Task OverlappingFlushAsync_DuringWriteAsync()
{
    byte[] buffer = null;
    string testFilePath = gzTestFile("GZTestDocument.pdf");
    using (var origStream = await LocalMemoryStream.readAppFileAsync(testFilePath))
    {
        buffer = origStream.ToArray();
    }

    using (var writeStream = new ManualSyncMemoryStream(false))
    using (var zip = CreateStream(writeStream, CompressionMode.Compress))
    {
        Task task = null;
        try
        {
            task = WriteAsync(zip, buffer, 0, buffer.Length);
            Assert.True(writeStream.WriteHit);
            Assert.Throws<InvalidOperationException>(() => { zip.FlushAsync(); }); // "overlapping flushes"
        }
        finally
        {
            // Unblock async operations
            writeStream.manualResetEvent.Set();
            // The original WriteAsync should be able to complete
            Assert.True(task.Wait(100 * 500));
        }
    }
}
public static async Task Zip64ArchiveEntry_CorruptedStream_CopyTo_UpToUncompressedSize()
{
    MemoryStream stream = await LocalMemoryStream.readAppFileAsync(compat("deflate64.zip"));

    int nameOffset = PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 8); // patch uncompressed size in file header
    PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 22, nameOffset + s_tamperedFileName.Length); // patch in central directory too

    using (ZipArchive archive = new ZipArchive(stream, ZipArchiveMode.Read))
    {
        ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName);
        using (var ms = new MemoryStream())
        using (Stream source = e.Open())
        {
            source.CopyTo(ms);
            Assert.Equal(e.Length, ms.Length); // should only decompress up to the uncompressed size

            ms.Seek(0, SeekOrigin.Begin);
            int read;
            byte[] buffer = new byte[s_bufferSize];
            while ((read = ms.Read(buffer, 0, buffer.Length)) != 0)
            {
                // No need to do anything, just making sure all bytes are readable
            }
            Assert.Equal(ms.Position, ms.Length); // all bytes must be read
        }
    }
}
public static async Task Zip64ArchiveEntry_CorruptedFile_Read_UpToUncompressedSize()
{
    MemoryStream stream = await LocalMemoryStream.readAppFileAsync(compat("deflate64.zip"));

    int nameOffset = PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 8); // patch uncompressed size in file header
    PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 22, nameOffset + s_tamperedFileName.Length); // patch in central directory too

    using (ZipArchive archive = new ZipArchive(stream, ZipArchiveMode.Read))
    {
        ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName);
        using (var ms = new MemoryStream())
        using (Stream source = e.Open())
        {
            byte[] buffer = new byte[s_bufferSize];
            int read;
            while ((read = source.Read(buffer, 0, buffer.Length)) != 0)
            {
                ms.Write(buffer, 0, read);
            }
            Assert.Equal(e.Length, ms.Length); // should only decompress up to the uncompressed size
            Assert.Equal(0, source.Read(buffer, 0, buffer.Length)); // shouldn't be able to read any more
        }
    }
}
public async Task CompressionLevel_SizeInOrder(string testFile)
{
    using var uncompressedStream = await LocalMemoryStream.readAppFileAsync(testFile);

    async Task<long> GetLengthAsync(CompressionLevel compressionLevel)
    {
        uncompressedStream.Position = 0; // rewind so every compression level sees the full input
        using var mms = new MemoryStream();
        using var compressor = CreateStream(mms, compressionLevel);
        await uncompressedStream.CopyToAsync(compressor);
        compressor.Flush();
        return mms.Length;
    }

    long noCompressionLength = await GetLengthAsync(CompressionLevel.NoCompression);
    long fastestLength = await GetLengthAsync(CompressionLevel.Fastest);
    long optimalLength = await GetLengthAsync(CompressionLevel.Optimal);
    long smallestLength = await GetLengthAsync(CompressionLevel.SmallestSize);

    Assert.True(noCompressionLength >= fastestLength);
    Assert.True(fastestLength >= optimalLength);
    Assert.True(optimalLength >= smallestLength);
}
public static async Task LargeArchive_DataDescriptor_Read_NonZip64_FileLengthGreaterThanIntMax()
{
    MemoryStream stream = await LocalMemoryStream.readAppFileAsync(strange("fileLengthGreaterIntLessUInt.zip"));

    using (ZipArchive archive = new ZipArchive(stream, ZipArchiveMode.Read))
    {
        ZipArchiveEntry e = archive.GetEntry("large.bin");
        Assert.Equal(3_600_000_000, e.Length);
        Assert.Equal(3_499_028, e.CompressedLength);

        using (Stream source = e.Open())
        {
            byte[] buffer = new byte[s_bufferSize];
            int read = source.Read(buffer, 0, buffer.Length);
            // We don't want to inflate this large archive entirely,
            // just making sure it read successfully
            Assert.Equal(s_bufferSize, read);
            foreach (byte b in buffer)
            {
                if (b != '0')
                {
                    Assert.True(false, $"The file should be all '0's, but found '{(char)b}'");
                }
            }
        }
    }
}
public async Task DecompressWorksWithPdf()
{
    var compareStream = await LocalMemoryStream.readAppFileAsync(gzTestFile("GZTestDocument.pdf"));
    var gzStream = await LocalMemoryStream.readAppFileAsync(gzTestFile("GZTestDocument.pdf.gz"));

    await DecompressAsync(compareStream, gzStream);
}
public static async Task DecompressWorksWithBinaryFile()
{
    var compareStream = await LocalMemoryStream.readAppFileAsync(gzTestFile("GZTestDocument.doc"));
    var gzStream = await LocalMemoryStream.readAppFileAsync(gzTestFile("GZTestDocument.doc.gz"));

    await DecompressAsync(compareStream, gzStream);
}
public async Task ModifyBaseStream()
{
    var ms = await LocalMemoryStream.readAppFileAsync(gzTestFile("GZTestDocument.txt.gz"));

    var zip = new GZipStream(ms, CompressionMode.Decompress);
    int size = 1024;
    byte[] bytes = new byte[size];
    zip.BaseStream.Read(bytes, 0, size); // This will throw if the underlying stream is not readable as expected
}
public static async Task ZipArchive_CorruptedLocalHeader_CompressedSize_NotMatchWithCentralDirectory()
{
    MemoryStream stream = await LocalMemoryStream.readAppFileAsync(zfile("normal.zip"));
    PatchDataRelativeToFileName(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 12); // patch compressed size in file header

    using (ZipArchive archive = new ZipArchive(stream, ZipArchiveMode.Read))
    {
        ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName);
        Assert.Throws<InvalidDataException>(() => e.Open());
    }
}
public async Task DecompressFailsWithRealGzStream(string uncompressedPath)
{
    string fileName = Path.Combine("GZipTestData", Path.GetFileName(uncompressedPath) + ".gz");
    var baseStream = await LocalMemoryStream.readAppFileAsync(fileName);
    var zip = CreateStream(baseStream, CompressionMode.Decompress);
    int _bufferSize = 2048;
    var bytes = new byte[_bufferSize];
    Assert.Throws<InvalidDataException>(() => { zip.Read(bytes, 0, _bufferSize); });

    zip.Dispose();
}
public async Task DecompressFailsWithWrapperStream(string uncompressedPath, string newDirectory, string newSuffix)
{
    string fileName = Path.Combine(newDirectory, Path.GetFileName(uncompressedPath) + newSuffix);
    using (LocalMemoryStream baseStream = await LocalMemoryStream.readAppFileAsync(fileName))
    using (Stream cs = CreateStream(baseStream, CompressionMode.Decompress))
    {
        int _bufferSize = 2048;
        var bytes = new byte[_bufferSize];
        Assert.Throws<InvalidDataException>(() => { cs.Read(bytes, 0, _bufferSize); });
    }
}
public async Task DecompressFailsWithRealGzStream()
{
    string[] files = { gzTestFile("GZTestDocument.doc.gz"), gzTestFile("GZTestDocument.txt.gz") };
    foreach (string fileName in files)
    {
        var baseStream = await LocalMemoryStream.readAppFileAsync(fileName);
        var zip = new DeflateStream(baseStream, CompressionMode.Decompress);
        int _bufferSize = 2048;
        var bytes = new byte[_bufferSize];
        Assert.Throws<InvalidDataException>(() => { zip.Read(bytes, 0, _bufferSize); });

        zip.Dispose();
    }
}
public async Task CanReadBaseStreamAfterDispose()
{
    var ms = await LocalMemoryStream.readAppFileAsync(gzTestFile("GZTestDocument.txt.gz"));

    var zip = new GZipStream(ms, CompressionMode.Decompress, leaveOpen: true);
    var baseStream = zip.BaseStream;
    zip.Dispose();

    int size = 1024;
    byte[] bytes = new byte[size];
    baseStream.Read(bytes, 0, size); // This will throw if the underlying stream is not readable as expected
}
public async Task ModifyBaseStream()
{
    var ms = await LocalMemoryStream.readAppFileAsync(gzTestFile("GZTestDocument.txt.gz"));
    var newMs = StripHeaderAndFooter.Strip(ms);

    var zip = new DeflateStream(newMs, CompressionMode.Decompress);
    int size = 1024;
    byte[] bytes = new byte[size];
    zip.BaseStream.Read(bytes, 0, size); // This will throw if the underlying stream is not readable as expected
    zip.BaseStream.Position = 0;
    await zip.BaseStream.ReadAsync(bytes, 0, size);
}
public async Task TestLeaveOpenAfterValidDecompress()
{
    // Create the stream
    int _bufferSize = 1024;
    var bytes = new byte[_bufferSize];
    Stream compressedStream = await LocalMemoryStream.readAppFileAsync(CompressedTestFile(UncompressedTestFile()));
    Stream decompressor = CreateStream(compressedStream, CompressionMode.Decompress, leaveOpen: false);

    // Read some data and dispose the stream
    decompressor.Read(bytes, 0, _bufferSize);
    decompressor.Flush();
    decompressor.Dispose();

    // Check that disposing the decompressor has really closed the underlying stream
    Assert.Throws<ObjectDisposedException>(() => compressedStream.Read(bytes, 0, bytes.Length));
}
public async Task BaseStream_Modify(CompressionMode mode)
{
    using (var baseStream = await LocalMemoryStream.readAppFileAsync(CompressedTestFile(UncompressedTestFile())))
    using (var compressor = CreateStream(baseStream, mode))
    {
        int size = 1024;
        byte[] bytes = new byte[size];
        if (mode == CompressionMode.Compress)
        {
            BaseStream(compressor).Write(bytes, 0, size); // This will throw if the underlying stream is not writable as expected
        }
        else
        {
            BaseStream(compressor).Read(bytes, 0, size); // This will throw if the underlying stream is not readable as expected
        }
    }
}
public async Task CanReadBaseStreamAfterDispose()
{
    var ms = await LocalMemoryStream.readAppFileAsync(gzTestFile("GZTestDocument.txt.gz"));
    var newMs = StripHeaderAndFooter.Strip(ms);

    var zip = new DeflateStream(newMs, CompressionMode.Decompress, leaveOpen: true);
    var baseStream = zip.BaseStream;
    zip.Dispose();

    int size = 1024;
    byte[] bytes = new byte[size];
    baseStream.Read(bytes, 0, size); // This will throw if the underlying stream is not readable as expected
    baseStream.Position = 0;
    await baseStream.ReadAsync(bytes, 0, size);
}
public static async Task UnseekableVeryLargeArchive_DataDescriptor_Read_Zip64()
{
    MemoryStream stream = await LocalMemoryStream.readAppFileAsync(strange("veryLarge.zip"));

    using (ZipArchive archive = new ZipArchive(stream, ZipArchiveMode.Read))
    {
        ZipArchiveEntry e = archive.GetEntry("bigFile.bin");
        Assert.Equal(6_442_450_944, e.Length);
        Assert.Equal(6_261_752, e.CompressedLength);

        using (Stream source = e.Open())
        {
            byte[] buffer = new byte[s_bufferSize];
            int read = source.Read(buffer, 0, buffer.Length);
            // We don't want to inflate this large archive entirely,
            // just making sure it read successfully
            Assert.Equal(s_bufferSize, read);
        }
    }
}
public static async Task ZipArchiveEntry_CorruptedStream_UnCompressedSizeBiggerThanExpected_NothingShouldBreak()
{
    MemoryStream stream = await LocalMemoryStream.readAppFileAsync(zfile("normal.zip"));

    int nameOffset = PatchDataRelativeToFileNameFillBytes(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 8); // patch uncompressed size in file header
    PatchDataRelativeToFileNameFillBytes(Encoding.ASCII.GetBytes(s_tamperedFileName), stream, 22, nameOffset + s_tamperedFileName.Length); // patch in central directory too

    using (ZipArchive archive = new ZipArchive(stream, ZipArchiveMode.Read))
    {
        ZipArchiveEntry e = archive.GetEntry(s_tamperedFileName);
        using (MemoryStream ms = new MemoryStream())
        using (Stream source = e.Open())
        {
            source.CopyTo(ms);
            // Even though the claimed uncompressed size is bigger than the actual decompressed size, there should be no error
            Assert.True(e.Length > ms.Length);
            Assert.True(e.CompressedLength < ms.Length);
        }
    }
}
public async Task Read_BaseStreamSlowly()
{
    string testFile = UncompressedTestFile();
    var uncompressedStream = await LocalMemoryStream.readAppFileAsync(testFile);
    var compressedStream = new BadWrappedStream(BadWrappedStream.Mode.ReadSlowly, File.ReadAllBytes(CompressedTestFile(testFile)));
    using var decompressor = CreateStream(compressedStream, CompressionMode.Decompress);
    var decompressorOutput = new MemoryStream();

    int _bufferSize = 1024;
    var bytes = new byte[_bufferSize];
    bool finished = false;
    int retCount;
    while (!finished)
    {
        retCount = await decompressor.ReadAsync(bytes, 0, _bufferSize);
        if (retCount != 0)
        {
            await decompressorOutput.WriteAsync(bytes, 0, retCount);
        }
        else
        {
            finished = true;
        }
    }
    decompressor.Dispose();
    decompressorOutput.Position = 0;
    uncompressedStream.Position = 0;

    byte[] uncompressedStreamBytes = uncompressedStream.ToArray();
    byte[] decompressorOutputBytes = decompressorOutput.ToArray();

    Assert.Equal(uncompressedStreamBytes.Length, decompressorOutputBytes.Length);
    for (int i = 0; i < uncompressedStreamBytes.Length; i++)
    {
        Assert.Equal(uncompressedStreamBytes[i], decompressorOutputBytes[i]);
    }
}
public async Task FlushAsync_DuringFlushAsync()
{
    byte[] buffer = null;
    string testFilePath = CompressedTestFile(UncompressedTestFile());
    using (var origStream = await LocalMemoryStream.readAppFileAsync(testFilePath))
    {
        buffer = origStream.ToArray();
    }

    using (var writeStream = new ManualSyncMemoryStream(false))
    using (var zip = CreateStream(writeStream, CompressionMode.Compress))
    {
        Task task = null;
        try
        {
            writeStream.manualResetEvent.Set();
            await zip.WriteAsync(buffer, 0, buffer.Length);
            writeStream.manualResetEvent.Reset();
            writeStream.WriteHit = false;

            task = zip.FlushAsync();
            while (!writeStream.WriteHit && task.IsCompleted)
            {
                zip.Write(buffer, 0, 1);
                task = zip.FlushAsync();
            }
            Assert.Throws<InvalidOperationException>(() => { zip.FlushAsync(); }); // "overlapping flushes"
        }
        finally
        {
            // Unblock async operations
            writeStream.manualResetEvent.Set();
            // The pending FlushAsync should be able to complete
            Assert.True(task.Wait(TaskTimeout), "The pending flush Task did not complete in time");
            Assert.True(writeStream.WriteHit, "The underlying WriteAsync was not called.");
        }
    }
}
public async Task Read_EndOfStreamPosition()
{
    var compressedStream = await LocalMemoryStream.readAppFileAsync(CompressedTestFile(UncompressedTestFile()));
    int compressedEndPosition = (int)compressedStream.Length;
    var rand = new Random(1024);
    int _bufferSize = BufferSize * 2 - 568;
    var bytes = new byte[_bufferSize];
    rand.NextBytes(bytes);
    compressedStream.Position = compressedEndPosition;
    compressedStream.Write(bytes, 0, _bufferSize);
    compressedStream.Write(bytes, 0, _bufferSize);
    compressedStream.Position = 0;

    var decompressor = CreateStream(compressedStream, CompressionMode.Decompress);
    while (decompressor.Read(bytes, 0, _bufferSize) > 0)
    {
    }

    Assert.Equal(((compressedEndPosition / BufferSize) + 1) * BufferSize, compressedStream.Position);
}
public async Task BaseStream_ValidAfterDisposeWithTrueLeaveOpen(CompressionMode mode)
{
    var ms = await LocalMemoryStream.readAppFileAsync(CompressedTestFile(UncompressedTestFile()));
    var decompressor = CreateStream(ms, mode, leaveOpen: true);
    var baseStream = BaseStream(decompressor);
    Assert.Same(ms, baseStream);
    decompressor.Dispose();

    int size = 1024;
    byte[] bytes = new byte[size];
    if (mode == CompressionMode.Compress)
    {
        baseStream.Write(bytes, 0, size);
    }
    else
    {
        baseStream.Read(bytes, 0, size);
    }
}
public static async Task UpdateModeInvalidOperations()
{
    using (LocalMemoryStream ms = await LocalMemoryStream.readAppFileAsync(zfile("normal.zip")))
    {
        ZipArchive target = new ZipArchive(ms, ZipArchiveMode.Update, leaveOpen: true);

        ZipArchiveEntry edeleted = target.GetEntry("first.txt");

        Stream s = edeleted.Open();
        // Invalid ops while the entry is open
        Assert.Throws<IOException>(() => edeleted.Open());
        Assert.Throws<InvalidOperationException>(() => { var x = edeleted.Length; });
        Assert.Throws<InvalidOperationException>(() => { var x = edeleted.CompressedLength; });
        Assert.Throws<IOException>(() => edeleted.Delete());
        s.Dispose();

        // Invalid ops on the stream after the entry is closed
        Assert.Throws<ObjectDisposedException>(() => s.ReadByte());
        Assert.Throws<InvalidOperationException>(() => { var x = edeleted.Length; });
        Assert.Throws<InvalidOperationException>(() => { var x = edeleted.CompressedLength; });

        edeleted.Delete();
        // Invalid ops while the entry is deleted
        Assert.Throws<InvalidOperationException>(() => edeleted.Open());
        Assert.Throws<InvalidOperationException>(() => { edeleted.LastWriteTime = new DateTimeOffset(); });

        ZipArchiveEntry e = target.GetEntry("notempty/second.txt");

        target.Dispose();
        // Invalid ops after the archive is disposed
        Assert.Throws<ObjectDisposedException>(() => { var x = target.Entries; });
        Assert.Throws<ObjectDisposedException>(() => target.CreateEntry("dirka"));
        Assert.Throws<ObjectDisposedException>(() => e.Open());
        Assert.Throws<ObjectDisposedException>(() => e.Delete());
        Assert.Throws<ObjectDisposedException>(() => { e.LastWriteTime = new DateTimeOffset(); });
    }
}
private static async Task<MemoryStream> populateStream()
{
    return await LocalMemoryStream.readAppFileAsync(zfile("normal.zip"));
}