public void CompressAndDecompress_worksCorrectly_advanced([Values(false, true)] bool useDictionary)
{
    var data = GenerateSample();
    var dict = useDictionary ? BuildDictionary() : null;

    byte[] compressed1, compressed2;
    using (var options = new CompressionOptions(dict, new Dictionary<ZSTD_cParameter, int> { { ZSTD_cParameter.ZSTD_c_checksumFlag, 0 } }))
    using (var compressor = new Compressor(options))
        compressed1 = compressor.Wrap(data);
    using (var options = new CompressionOptions(dict, new Dictionary<ZSTD_cParameter, int> { { ZSTD_cParameter.ZSTD_c_checksumFlag, 1 } }))
    using (var compressor = new Compressor(options))
        compressed2 = compressor.Wrap(data);

    // With the checksum flag set, zstd appends a 4-byte content checksum to the frame
    Assert.AreEqual(compressed1.Length + 4, compressed2.Length);

    using (var options = new DecompressionOptions(dict, new Dictionary<ZSTD_dParameter, int>()))
    using (var decompressor = new Decompressor(options))
    {
        CollectionAssert.AreEqual(data, decompressor.Unwrap(compressed1));
        CollectionAssert.AreEqual(data, decompressor.Unwrap(compressed2));
    }
}
public void Decompress_throwsZstdException_onMalformedDecompressedSize([Values(false, true)] bool useDictionary)
{
    var data = GenerateSample();
    var dict = useDictionary ? BuildDictionary() : null;
    byte[] compressed;
    using (var options = new CompressionOptions(dict))
    using (var compressor = new Compressor(options))
        compressed = compressor.Wrap(data);

    var frameHeader = compressed[4];
    // Ensure that we malform the decompressed size in the right place
    if (useDictionary)
    {
        Assert.AreEqual(0x63, frameHeader);
        compressed[9]--;
    }
    else
    {
        Assert.AreEqual(0x60, frameHeader);
        compressed[5]--;
    }

    // Thus, ZSTD_getDecompressedSize will report a size one less than the actual one
    using (var options = new DecompressionOptions(dict))
    using (var decompressor = new Decompressor(options))
        Assert.Throws<ZstdException>(() => decompressor.Unwrap(compressed));
}
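// Why offsets 5 and 9 above: per the zstd frame format (RFC 8878), a frame starts with
// a 4-byte magic number followed by a 1-byte frame header descriptor (the value checked
// above: 0x60 without a dictionary, 0x63 with one; the difference is the 2-bit
// Dictionary_ID_flag). With a dictionary, a 4-byte dictionary ID follows the descriptor,
// so the frame content size field begins at offset 9 instead of 5. Decrementing its
// first (least significant) byte understates the decompressed size by one, which the
// decompressor detects as a corrupt frame.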
public void CompressAndDecompress_workCorrectly_spans([Values(false, true)] bool useDictionary)
{
    var buffer = GenerateSample();
    var data = new ReadOnlySpan<byte>(buffer, 1, buffer.Length - 1);
    var dict = useDictionary ? BuildDictionary() : null;

    // GetCompressBound returns the worst-case compressed size, so a single Wrap call always fits
    Span<byte> compressed = stackalloc byte[Compressor.GetCompressBound(data.Length)];
    using (var options = new CompressionOptions(dict))
    using (var compressor = new Compressor(options))
    {
        var size = compressor.Wrap(data, compressed);
        compressed = compressed.Slice(0, size);
    }

    Span<byte> decompressed = stackalloc byte[data.Length + 1];
    using (var options = new DecompressionOptions(dict))
    using (var decompressor = new Decompressor(options))
    {
        var size = decompressor.Unwrap(compressed, decompressed);
        Assert.AreEqual(data.Length, size);
        decompressed = decompressed.Slice(0, size);
    }

    CollectionAssert.AreEqual(data.ToArray(), decompressed.ToArray());
}
public void RoundTrip_StreamingToStreaming_Stress([Values(true, false)] bool useDict, [Values(true, false)] bool async)
{
    long i = 0;
    var dict = useDict ? TrainDict() : null;
    var compressionOptions = new CompressionOptions(dict);
    var decompressionOptions = new DecompressionOptions(dict);
    Enumerable.Range(0, 10000)
        .AsParallel()
        .WithDegreeOfParallelism(Environment.ProcessorCount * 4)
        .ForAll(n =>
        {
            var testStream = DataGenerator.GetSmallStream(DataFill.Sequential);
            // Vary buffer sizes and offsets per iteration to shake out boundary conditions
            var cBuffer = new byte[1 + (int)(n % (testStream.Length * 11))];
            var dBuffer = new byte[1 + (int)(n % (testStream.Length * 13))];

            var tempStream = new MemoryStream();
            using (var compressionStream = new CompressionStream(tempStream, compressionOptions, 1 + (int)(n % (testStream.Length * 17))))
            {
                int bytesRead;
                int offset = n % cBuffer.Length;
                while ((bytesRead = testStream.Read(cBuffer, offset, cBuffer.Length - offset)) > 0)
                {
                    if (async)
                        compressionStream.WriteAsync(cBuffer, offset, bytesRead).GetAwaiter().GetResult();
                    else
                        compressionStream.Write(cBuffer, offset, bytesRead);
                    // Periodically force a full GC to provoke finalization/lifetime bugs in the native interop
                    if (Interlocked.Increment(ref i) % 100 == 0)
                        GC.Collect(GC.MaxGeneration, GCCollectionMode.Forced, true, true);
                }
            }

            tempStream.Seek(0, SeekOrigin.Begin);
            var resultStream = new MemoryStream();
            using (var decompressionStream = new DecompressionStream(tempStream, decompressionOptions, 1 + (int)(n % (testStream.Length * 19))))
            {
                int bytesRead;
                int offset = n % dBuffer.Length;
                while ((bytesRead = async
                           ? decompressionStream.ReadAsync(dBuffer, offset, dBuffer.Length - offset).GetAwaiter().GetResult()
                           : decompressionStream.Read(dBuffer, offset, dBuffer.Length - offset)) > 0)
                {
                    resultStream.Write(dBuffer, offset, bytesRead);
                    if (Interlocked.Increment(ref i) % 100 == 0)
                        GC.Collect(GC.MaxGeneration, GCCollectionMode.Forced, true, true);
                }
            }

            Assert.AreEqual(testStream.ToArray(), resultStream.ToArray());
        });

    // The options objects wrap native resources and are not in a using block here;
    // keep them reachable until all parallel work has completed
    GC.KeepAlive(compressionOptions);
    GC.KeepAlive(decompressionOptions);
}
public ZStandardCompressor(CompressionOptions compressionOptions = null, DecompressionOptions decompressionOptions = null)
{
    Compressor = compressionOptions != null
        ? new ZstdNet.Compressor(compressionOptions)
        : new ZstdNet.Compressor();
    Decompressor = decompressionOptions != null
        ? new Decompressor(decompressionOptions)
        : new Decompressor();
}
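// Usage sketch (hypothetical calling code; assumes the Compressor/Decompressor members
// assigned above are exposed as public properties, and `payload` is any byte array):
//   var zstd = new ZStandardCompressor(new CompressionOptions(CompressionOptions.MaxCompressionLevel));
//   byte[] packed   = zstd.Compressor.Wrap(payload);
//   byte[] restored = zstd.Decompressor.Unwrap(packed);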
public void Decompress_throwsZstdException_onInvalidData([Values(false, true)] bool useDictionary)
{
    var data = GenerateSample(); // This isn't data in compressed format
    var dict = useDictionary ? BuildDictionary() : null;
    using (var options = new DecompressionOptions(dict))
    using (var decompressor = new Decompressor(options))
        Assert.Throws<ZstdException>(() => decompressor.Unwrap(data));
}
public void Decompress_throwsArgumentOutOfRangeException_onTooBigData([Values(false, true)] bool useDictionary)
{
    var data = GenerateSample();
    var dict = useDictionary ? BuildDictionary() : null;
    byte[] compressed;
    using (var options = new CompressionOptions(dict))
    using (var compressor = new Compressor(options))
        compressed = compressor.Wrap(data);

    using (var options = new DecompressionOptions(dict))
    using (var decompressor = new Decompressor(options))
        Assert.Throws<ArgumentOutOfRangeException>(() => decompressor.Unwrap(compressed, 20));
}
private static byte[] CompressAndDecompress(byte[] data, byte[] dict, int compressionLevel = CompressionOptions.DefaultCompressionLevel)
{
    byte[] compressed;
    using (var options = new CompressionOptions(dict, compressionLevel))
    using (var compressor = new Compressor(options))
        compressed = compressor.Wrap(data);

    byte[] decompressed;
    using (var options = new DecompressionOptions(dict))
    using (var decompressor = new Decompressor(options))
        decompressed = decompressor.Unwrap(compressed);

    return decompressed;
}
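// Example round trip through the helper above, using the same fixture helpers
// (GenerateSample/BuildDictionary) as the surrounding tests:
//   var data = GenerateSample();
//   CollectionAssert.AreEqual(data, CompressAndDecompress(data, BuildDictionary()));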
public void DecompressWithAnotherDictionary_throwsZstdException()
{
    var data = GenerateSample();
    var oldDict = BuildDictionary();
    byte[] compressed;
    using (var options = new CompressionOptions(oldDict))
    using (var compressor = new Compressor(options))
        compressed = compressor.Wrap(data);

    var newDict = Encoding.ASCII.GetBytes("zstd supports raw-content dictionaries");
    using (var options = new DecompressionOptions(newDict))
    using (var decompressor = new Decompressor(options))
        Assert.Throws<ZstdException>(() => decompressor.Unwrap(compressed));
}
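// Note on the ASCII string used as newDict above: a buffer that does not start with
// zstd's dictionary magic number is loaded as a raw-content dictionary, so any byte
// sequence is accepted here; the decompression still fails because it is not the
// dictionary the frame was compressed with.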
public void CompressAndDecompress_workCorrectly_onOneByteBuffer([Values(false, true)] bool useDictionary)
{
    var data = new byte[] { 42 };
    var dict = useDictionary ? BuildDictionary() : null;
    byte[] compressed;
    using (var options = new CompressionOptions(dict))
    using (var compressor = new Compressor(options))
        compressed = compressor.Wrap(data);

    byte[] decompressed;
    using (var options = new DecompressionOptions(dict))
    using (var decompressor = new Decompressor(options))
        decompressed = decompressor.Unwrap(compressed);

    CollectionAssert.AreEqual(data, decompressed);
}
public void Decompress_throwsInsufficientMemoryException_whenDestinationBufferIsTooSmall([Values(false, true)] bool useDictionary)
{
    var data = GenerateSample();
    var dict = useDictionary ? BuildDictionary() : null;
    byte[] compressed;
    using (var options = new CompressionOptions(dict))
    using (var compressor = new Compressor(options))
        compressed = compressor.Wrap(data);

    var decompressed = new byte[20];
    const int offset = 4;
    using (var options = new DecompressionOptions(dict))
    using (var decompressor = new Decompressor(options))
        Assert.Throws<InsufficientMemoryException>(() => decompressor.Unwrap(compressed, decompressed, offset));
}
public void Decompress_throwsDstSizeTooSmall_onTooBigData([Values(false, true)] bool useDictionary)
{
    var data = GenerateSample();
    var dict = useDictionary ? BuildDictionary() : null;
    byte[] compressed;
    using (var options = new CompressionOptions(dict))
    using (var compressor = new Compressor(options))
        compressed = compressor.Wrap(data);

    using (var options = new DecompressionOptions(dict))
    using (var decompressor = new Decompressor(options))
    {
        var ex = Assert.Throws<ZstdException>(() => decompressor.Unwrap(compressed, 20));
        Assert.AreEqual(ZSTD_ErrorCode.ZSTD_error_dstSize_tooSmall, ex.Code);
    }
}
public void DecompressWithDictionary_worksCorrectly_onDataCompressedWithoutIt()
{
    var data = GenerateSample();
    byte[] compressed;
    using (var compressor = new Compressor())
        compressed = compressor.Wrap(data);

    var dict = BuildDictionary();
    byte[] decompressed;
    using (var options = new DecompressionOptions(dict))
    using (var decompressor = new Decompressor(options))
        decompressed = decompressor.Unwrap(compressed);

    CollectionAssert.AreEqual(data, decompressed);
}
public void Compress_canRead_fromArraySegment([Values(false, true)] bool useDictionary)
{
    var data = GenerateSample();
    var segment = new ArraySegment<byte>(data, 2, data.Length - 5);
    var dict = useDictionary ? BuildDictionary() : null;
    byte[] compressed;
    using (var options = new CompressionOptions(dict))
    using (var compressor = new Compressor(options))
        compressed = compressor.Wrap(segment);

    byte[] decompressed;
    using (var options = new DecompressionOptions(dict))
    using (var decompressor = new Decompressor(options))
        decompressed = decompressor.Unwrap(compressed);

    CollectionAssert.AreEqual(segment, decompressed);
}
public void CompressAndDecompress_workCorrectly([Values(false, true)] bool useDictionary, [Values(false, true)] bool bestCompression)
{
    var data = GenerateSample();
    var dict = useDictionary ? BuildDictionary() : null;
    var compressionLevel = bestCompression ? CompressionOptions.MaxCompressionLevel : CompressionOptions.DefaultCompressionLevel;

    byte[] compressed;
    using (var options = new CompressionOptions(dict, compressionLevel))
    using (var compressor = new Compressor(options))
        compressed = compressor.Wrap(data);

    byte[] decompressed;
    using (var options = new DecompressionOptions(dict))
    using (var decompressor = new Decompressor(options))
        decompressed = decompressor.Unwrap(compressed);

    CollectionAssert.AreEqual(data, decompressed);
}
public async Task RoundTrip_StreamingToStreamingAsync(
    [Values(false, true)] bool useDict,
    [Values(false, true)] bool advanced,
    [Values(1, 2, 7, 101, 1024, 65535, DataGenerator.LargeBufferSize, DataGenerator.LargeBufferSize + 1)] int zstdBufferSize,
    [Values(1, 2, 7, 101, 1024, 65535, DataGenerator.LargeBufferSize, DataGenerator.LargeBufferSize + 1)] int copyBufferSize)
{
    var dict = useDict ? TrainDict() : null;
    var testStream = DataGenerator.GetLargeStream(DataFill.Sequential);
    const int offset = 1;
    var buffer = new byte[copyBufferSize + offset + 1];

    var tempStream = new MemoryStream();
    using (var compressionOptions = new CompressionOptions(dict, advanced
        ? new Dictionary<ZSTD_cParameter, int>
        {
            { ZSTD_cParameter.ZSTD_c_windowLog, 11 },
            { ZSTD_cParameter.ZSTD_c_checksumFlag, 1 },
            { ZSTD_cParameter.ZSTD_c_nbWorkers, 4 }
        }
        : null))
    await using (var compressionStream = new CompressionStream(tempStream, compressionOptions, zstdBufferSize))
    {
        int bytesRead;
        while ((bytesRead = await testStream.ReadAsync(buffer, offset, copyBufferSize)) > 0)
            await compressionStream.WriteAsync(buffer, offset, bytesRead);
    }

    tempStream.Seek(0, SeekOrigin.Begin);
    var resultStream = new MemoryStream();
    using (var decompressionOptions = new DecompressionOptions(dict, advanced
        ? new Dictionary<ZSTD_dParameter, int> { { ZSTD_dParameter.ZSTD_d_windowLogMax, 11 } }
        : null))
    await using (var decompressionStream = new DecompressionStream(tempStream, decompressionOptions, zstdBufferSize))
    {
        int bytesRead;
        while ((bytesRead = await decompressionStream.ReadAsync(buffer, offset, copyBufferSize)) > 0)
            await resultStream.WriteAsync(buffer, offset, bytesRead);
    }

    Assert.AreEqual(testStream.ToArray(), resultStream.ToArray());
}
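// Notes on the advanced parameters used above: ZSTD_c_nbWorkers = 4 enables zstd's
// internal multithreaded compression; ZSTD_c_windowLog = 11 caps the match window
// at 2^11 bytes, and the matching ZSTD_d_windowLogMax on the decompression side must
// be at least as large, otherwise decoding is rejected with a window-size error.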
public void CompressAndDecompress_workCorrectly_onArraysOfDifferentSizes([Values(false, true)] bool useDictionary)
{
    var dict = useDictionary ? BuildDictionary() : null;
    using (var compressionOptions = new CompressionOptions(dict))
    using (var decompressionOptions = new DecompressionOptions(dict))
    using (var compressor = new Compressor(compressionOptions))
    using (var decompressor = new Decompressor(decompressionOptions))
    {
        for (var i = 2; i < 100000; i += 3000)
        {
            var data = GenerateBuffer(i);
            var decompressed = decompressor.Unwrap(compressor.Wrap(data));
            CollectionAssert.AreEqual(data, decompressed);
        }
    }
}
public void Decompress_throwsDstSizeTooSmall_whenDestinationBufferIsTooSmall([Values(false, true)] bool useDictionary)
{
    var data = GenerateSample();
    var dict = useDictionary ? BuildDictionary() : null;
    byte[] compressed;
    using (var options = new CompressionOptions(dict))
    using (var compressor = new Compressor(options))
        compressed = compressor.Wrap(data);

    var decompressed = new byte[20];
    const int offset = 4;
    using (var options = new DecompressionOptions(dict))
    using (var decompressor = new Decompressor(options))
    {
        var ex = Assert.Throws<ZstdException>(() => decompressor.Unwrap(compressed, decompressed, offset));
        Assert.AreEqual(ZSTD_ErrorCode.ZSTD_error_dstSize_tooSmall, ex.Code);
    }
}
public void Compress_canWrite_toGivenBuffer([Values(false, true)] bool useDictionary)
{
    var data = GenerateSample();
    var dict = useDictionary ? BuildDictionary() : null;
    var compressed = new byte[1000];
    const int offset = 54;
    int compressedSize;
    using (var options = new CompressionOptions(dict))
    using (var compressor = new Compressor(options))
        compressedSize = compressor.Wrap(data, compressed, offset);

    byte[] decompressed;
    using (var options = new DecompressionOptions(dict))
    using (var decompressor = new Decompressor(options))
        decompressed = decompressor.Unwrap(compressed.Skip(offset).Take(compressedSize).ToArray());

    CollectionAssert.AreEqual(data, decompressed);
}
public void CompressAndDecompress_workCorrectly_stress([Values(false, true)] bool useDictionary)
{
    long i = 0L;
    var data = GenerateBuffer(65536);
    var dict = useDictionary ? BuildDictionary() : null;
    using (var compressionOptions = new CompressionOptions(dict))
    using (var decompressionOptions = new DecompressionOptions(dict))
        Enumerable.Range(0, 10000)
            .AsParallel().WithDegreeOfParallelism(100)
            .ForAll(_ =>
            {
                using (var compressor = new Compressor(compressionOptions))
                using (var decompressor = new Decompressor(decompressionOptions))
                {
                    var decompressed = decompressor.Unwrap(compressor.Wrap(data));
                    // Periodically force a full GC to surface lifetime issues in the native interop
                    if (Interlocked.Increment(ref i) % 100 == 0)
                        GC.Collect(GC.MaxGeneration, GCCollectionMode.Forced, true, true);
                    CollectionAssert.AreEqual(data, decompressed);
                }
            });
}
public void CompressAndDecompress_workCorrectly_ifDifferentInstancesRunInDifferentThreads([Values(false, true)] bool useDictionary)
{
    var dict = useDictionary ? BuildDictionary() : null;
    using (var compressionOptions = new CompressionOptions(dict))
    using (var decompressionOptions = new DecompressionOptions(dict))
        Enumerable.Range(0, 100)
            .AsParallel().WithDegreeOfParallelism(50)
            .ForAll(_ =>
            {
                using (var compressor = new Compressor(compressionOptions))
                using (var decompressor = new Decompressor(decompressionOptions))
                {
                    for (var i = 2; i < 100000; i += 30000)
                    {
                        var data = GenerateBuffer(i);
                        var decompressed = decompressor.Unwrap(compressor.Wrap(data));
                        CollectionAssert.AreEqual(data, decompressed);
                    }
                }
            });
}