/// <summary>Construct an IFile Reader.</summary>
/// <param name="conf">Configuration File</param>
/// <param name="in">The input stream</param>
/// <param name="length">
/// Length of the data in the stream, including the checksum bytes.
/// </param>
/// <param name="codec">codec</param>
/// <param name="readsCounter">Counter for records read from disk</param>
/// <exception cref="System.IO.IOException"/>
public Reader(Configuration conf, FSDataInputStream @in, long length, CompressionCodec codec, Counters.Counter readsCounter)
{
    // Counter for records read from disk.
    readRecordsCounter = readsCounter;
    // Checksummed view over the raw stream; `length` includes the checksum bytes.
    checksumIn = new IFileInputStream(@in, length, conf);
    if (codec != null)
    {
        decompressor = CodecPool.GetDecompressor(codec);
        if (decompressor != null)
        {
            // Possibly decompressed stream that we read.
            this.@in = codec.CreateInputStream(checksumIn, decompressor);
        }
        else
        {
            // Pool gave us nothing: fall back to reading the checksummed stream as-is.
            Log.Warn("Could not obtain decompressor from CodecPool");
            this.@in = checksumIn;
        }
    }
    else
    {
        this.@in = checksumIn;
    }
    this.dataIn = new DataInputStream(this.@in);
    this.fileLength = length;
    if (conf != null)
    {
        bufferSize = conf.GetInt("io.file.buffer.size", DefaultBufferSize);
    }
}
/// <summary>
/// Corrupts the decompressed-size field of a zstd frame header and verifies
/// that unwrapping the malformed frame throws <see cref="ZstdException"/>.
/// </summary>
public void Decompress_throwsZstdException_onMalformedDecompressedSize([Values(false, true)] bool useDictionary)
{
    var data = GenerateSample();
    var dict = useDictionary ? BuildDictionary() : null;
    byte[] compressed;
    using (var options = new CompressionOptions(dict))
    using (var compressor = new Compressor(options))
        compressed = compressor.Wrap(data);
    var frameHeader = compressed[4];
    // Ensure that we malform decompressed size in the right place.
    // NUnit's classic Assert.AreEqual takes (expected, actual) — the original
    // passed the arguments swapped, which garbles the failure message.
    if (useDictionary)
    {
        Assert.AreEqual(0x63, frameHeader);
        compressed[9]--;
    }
    else
    {
        Assert.AreEqual(0x60, frameHeader);
        compressed[5]--;
    }
    // Thus, ZSTD_getDecompressedSize will return size that is one byte lesser than actual
    using (var options = new DecompressionOptions(dict))
    using (var decompressor = new Decompressor(options))
        Assert.Throws<ZstdException>(() => decompressor.Unwrap(compressed));
}
/// <summary>
/// Verifies that enabling the checksum flag grows the frame by exactly four
/// bytes and that both frames decompress back to the original sample.
/// </summary>
public void CompressAndDecompress_worksCorrectly_advanced([Values(false, true)] bool useDictionary)
{
    var sample = GenerateSample();
    var dictionary = useDictionary ? BuildDictionary() : null;

    byte[] withoutChecksum, withChecksum;
    using (var options = new CompressionOptions(dictionary, new Dictionary<ZSTD_cParameter, int> { { ZSTD_cParameter.ZSTD_c_checksumFlag, 0 } }))
    using (var compressor = new Compressor(options))
        withoutChecksum = compressor.Wrap(sample);
    using (var options = new CompressionOptions(dictionary, new Dictionary<ZSTD_cParameter, int> { { ZSTD_cParameter.ZSTD_c_checksumFlag, 1 } }))
    using (var compressor = new Compressor(options))
        withChecksum = compressor.Wrap(sample);

    // The content checksum is a 4-byte trailer on the frame.
    Assert.AreEqual(withoutChecksum.Length + 4, withChecksum.Length);

    using (var options = new DecompressionOptions(dictionary, new Dictionary<ZSTD_dParameter, int>()))
    using (var decompressor = new Decompressor(options))
    {
        CollectionAssert.AreEqual(sample, decompressor.Unwrap(withoutChecksum));
        CollectionAssert.AreEqual(sample, decompressor.Unwrap(withChecksum));
    }
}
/// <summary>
/// Decompresses <paramref name="data"/> with the requested algorithm.
/// </summary>
/// <param name="data">Compressed input bytes.</param>
/// <param name="method">Algorithm the data was compressed with.</param>
/// <returns>The decompressed bytes.</returns>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown for an unrecognized <paramref name="method"/> (the original code
/// silently returned an empty array).
/// </exception>
public static byte[] Decompress(byte[] data, CompressAlgorithm method = CompressAlgorithm.Deflate)
{
    switch (method)
    {
        case CompressAlgorithm.Deflate:
            // Dispose the streams (the original leaked both MemoryStreams).
            using (MemoryStream input = new MemoryStream(data))
            using (MemoryStream output = new MemoryStream())
            {
                using (DeflateStream dstream = new DeflateStream(input, CompressionMode.Decompress))
                {
                    dstream.CopyTo(output);
                }
                return output.ToArray();
            }
        case CompressAlgorithm.Zstd:
            using (var decompressor = new Decompressor())
            {
                return decompressor.Unwrap(data);
            }
        default:
            throw new ArgumentOutOfRangeException(nameof(method), method, "Unsupported compression algorithm");
    }
}
/// <summary>
/// Builds a static virtual channel and wires up MPPC compression engines when
/// the corresponding compression types request them.
/// </summary>
/// <param name="id">Channel ID</param>
/// <param name="name">Channel Name</param>
/// <param name="options">Channel Options</param>
/// <param name="chunkSize">Max chunk size</param>
/// <param name="compressType">Compress Type</param>
/// <param name="decompressType">Decompress Type</param>
/// <param name="sender">Method used to send packet</param>
public StaticVirtualChannel(UInt16 id, string name, Channel_Options options, uint chunkSize, CompressionType compressType, CompressionType decompressType, SendSVCData sender)
{
    channelId = id;
    channelName = name;
    channelOptions = options;
    maxChunkSize = chunkSize;
    decompressedBuffer = new List<byte>();
    Sender = sender;
    // A type of PACKET_COMPR_TYPE_NONE leaves the corresponding engine unset.
    if (compressType != CompressionType.PACKET_COMPR_TYPE_NONE)
    {
        mppcCompressor = new Compressor((SlidingWindowSize)compressType);
    }
    if (decompressType != CompressionType.PACKET_COMPR_TYPE_NONE)
    {
        mppcDecompressor = new Decompressor((SlidingWindowSize)decompressType);
    }
}
/// <summary>
/// Round-trips 100 seeded-random bytes through Compressor/Decompressor and
/// verifies the restored bytes match the original input.
/// </summary>
public void ShouldCompressAndDecompress()
{
    // Arrange
    const int sourceSize = 100;
    var source = new byte[sourceSize];
    new Random(100500).NextBytes(source);
    var compressed = new byte[sourceSize * 2];
    var restored = new byte[sourceSize * 2];
    var compressor = new Compressor();
    var decompressor = new Decompressor();

    // Act
    var compressedLength = compressor.Compress(source, 0, sourceSize, compressed, 0, compressed.Length);
    var restoredLength = decompressor.Decompress(compressed, 0, compressedLength, restored);

    // Assert
    Assert.Equal(source, restored.Take(restoredLength));
}
/// <summary>
/// Round-trips a generated data file through Compressor/Decompressor and
/// verifies the decompressed file is identical to the original.
/// </summary>
public void CompressDecompressTest()
{
    const int chunkSize = 1048576;
    const string inFile = "original.dat";
    if (!File.Exists(inFile))
    {
        DataGenerator.Generate(inFile, 1);
    }
    var compressor = new Compressor();
    const string compressed = "compressed.dat";
    // Remove stale output from a previous run. The original condition was
    // inverted (it deleted only when the file did NOT exist, a no-op).
    if (File.Exists(compressed))
    {
        File.Delete(compressed);
    }
    compressor.Compress(inFile, compressed, chunkSize);
    var decompressor = new Decompressor();
    const string decompressed = "decompressed.dat";
    if (File.Exists(decompressed))
    {
        File.Delete(decompressed);
    }
    decompressor.Decompress(compressed, decompressed, chunkSize);
    FileAssert.AreEqual(inFile, decompressed);
}
/// <summary>
/// Verifies that the checksum flag adds exactly four bytes to the frame and
/// that both variants decompress back to the original sample.
/// </summary>
public void CompressAndDecompress_worksCorrectly_advanced(bool useDictionary)
{
    var sample = GenerateSample();
    var dictionary = useDictionary ? BuildDictionary() : null;

    Span<byte> plainFrame, checksummedFrame;
    using (var compressor = new Compressor())
    {
        compressor.LoadDictionary(dictionary);
        compressor.SetParameter(ZSTD_cParameter.ZSTD_c_checksumFlag, 0);
        plainFrame = compressor.Wrap(sample);
    }
    using (var compressor = new Compressor())
    {
        compressor.LoadDictionary(dictionary);
        compressor.SetParameter(ZSTD_cParameter.ZSTD_c_checksumFlag, 1);
        checksummedFrame = compressor.Wrap(sample);
    }

    // The content checksum is a 4-byte trailer on the frame.
    Assert.Equal(plainFrame.Length + 4, checksummedFrame.Length);

    using (var decompressor = new Decompressor())
    {
        decompressor.LoadDictionary(dictionary);
        Assert.True(decompressor.Unwrap(plainFrame).SequenceEqual(sample));
        Assert.True(decompressor.Unwrap(checksummedFrame).SequenceEqual(sample));
    }
}
/// <summary>
/// Create a
/// <see cref="CompressionInputStream"/>
/// that will read from the given
/// <see cref="System.IO.InputStream"/>
/// with the given
/// <see cref="Decompressor"/>
/// , and return a stream for uncompressed data.
/// </summary>
/// <param name="in">the stream to read compressed bytes from</param>
/// <param name="decompressor">decompressor to use</param>
/// <returns>a stream to read uncompressed bytes from</returns>
/// <exception cref="System.IO.IOException"/>
public virtual CompressionInputStream CreateInputStream(InputStream @in, Decompressor decompressor)
{
    if (Bzip2Factory.IsNativeBzip2Loaded(conf))
    {
        var bufferSize = conf.GetInt("io.file.buffer.size", 4 * 1024);
        return new DecompressorStream(@in, decompressor, bufferSize);
    }
    // The pure-Java path constructs its own decompression machinery and
    // ignores the supplied decompressor.
    return new BZip2Codec.BZip2CompressionInputStream(@in);
}
/// <summary>
/// Compresses and decompresses via the span-based API, using a source span
/// that does not start at the backing array's origin.
/// </summary>
public void CompressAndDecompress_workCorrectly_spans(bool useDictionary)
{
    var sampleBuffer = GenerateSample();
    // Offset by one byte so the span exercises non-zero array offsets.
    var source = new ReadOnlySpan<byte>(sampleBuffer, 1, sampleBuffer.Length - 1);
    var dictionary = useDictionary ? BuildDictionary() : null;

    Span<byte> compressed = stackalloc byte[Compressor.GetCompressBound(source.Length)];
    using (var compressor = new Compressor())
    {
        compressor.LoadDictionary(dictionary);
        var written = compressor.Wrap(source, compressed);
        compressed = compressed.Slice(0, written);
    }

    Span<byte> restored = stackalloc byte[source.Length + 1];
    using (var decompressor = new Decompressor())
    {
        decompressor.LoadDictionary(dictionary);
        var restoredLength = decompressor.Unwrap(compressed, restored);
        Assert.Equal(source.Length, restoredLength);
        restored = restored.Slice(0, restoredLength);
    }

    Assert.True(source.ToArray().SequenceEqual(restored.ToArray()));
}
/// <summary>
/// Verifies that Unwrap can write its output into a caller-supplied buffer at
/// a non-zero offset.
/// </summary>
public void Decompress_canWrite_toGivenBuffer(bool useDictionary)
{
    var sample = GenerateSample();
    var dictionary = useDictionary ? BuildDictionary() : null;

    byte[] frame;
    using (var compressor = new Compressor())
    {
        compressor.LoadDictionary(dictionary);
        frame = compressor.Wrap(sample).ToArray();
    }

    const int offset = 54;
    var target = new byte[1000];
    int written;
    using (var decompressor = new Decompressor())
    {
        decompressor.LoadDictionary(dictionary);
        written = decompressor.Unwrap(frame, target, offset);
    }

    // Only the region starting at `offset` should hold the payload.
    Assert.True(sample.SequenceEqual(target.Skip(offset).Take(written)));
}
/// <summary>
/// Corrupts the decompressed-size field of a zstd frame header and verifies
/// that unwrapping the malformed frame throws <see cref="ZstdException"/>.
/// </summary>
public void Decompress_throwsZstdException_onMalformedDecompressedSize(bool useDictionary)
{
    var sample = GenerateSample();
    var dictionary = useDictionary ? BuildDictionary() : null;

    byte[] frame;
    using (var compressor = new Compressor())
    {
        compressor.LoadDictionary(dictionary);
        frame = compressor.Wrap(sample).ToArray();
    }

    // The position of the decompressed-size field depends on whether a
    // dictionary id is present, so check the descriptor byte first.
    var frameHeaderDescriptor = frame[4];
    if (useDictionary)
    {
        Assert.Equal(0x63, frameHeaderDescriptor);
        frame[9]--;
    }
    else
    {
        Assert.Equal(0x60, frameHeaderDescriptor);
        frame[5]--;
    }

    // ZSTD_getDecompressedSize now reports one byte less than the actual size.
    using (var decompressor = new Decompressor())
    {
        decompressor.LoadDictionary(dictionary);
        Assert.Throws<ZstdException>(() => decompressor.Unwrap(frame));
    }
}
/// <summary>
/// Looks up a cache key in Redis and prints its value (optionally
/// decompressed) and remaining time to expiry.
/// </summary>
/// <param name="options">Parsed command-line options (key name, decompress flag).</param>
private static async Task RunShowKey(ShowKey options)
{
    Console.WriteLine("Showing value of the key...");
    Console.WriteLine(@"");
    var configuration = Configuration.GetConfiguration();
    var redis = ConnectionMultiplexer.Connect(configuration["RedisConnectionString"]);
    var server = redis.GetServer(configuration["RedisInstance"]);
    var database = redis.GetDatabase();
    var hash = database.HashGetAll(options.Key);
    // Check for a missing key BEFORE dereferencing hash entries — the original
    // read `value.Value` first, on a default entry when the key was absent.
    if (hash.Length == 0)
    {
        Console.WriteLine($"Key {options.Key} does not exist.");
        return;
    }
    var value = hash.FirstOrDefault(h => h.Name == "data");
    var absexp = hash.FirstOrDefault(h => h.Name == "absexp");
    byte[] data = value.Value;
    Console.WriteLine($"Key: {options.Key}");
    Console.WriteLine(
        options.Decompress
            ? $"Value: {Encoding.UTF8.GetString(await Decompressor.Decompress(data))}"
            : $"Value: {Encoding.UTF8.GetString(data)}");
    Console.WriteLine($"Expiry: {new DateTimeOffset((long) absexp.Value, TimeSpan.Zero) - DateTimeOffset.Now}");
}
/// <summary>
/// Reads a message-type tag from the decompressor and dispatches to the
/// matching per-type deserializer; unknown tags yield null.
/// </summary>
/// <param name="decompressor">Source of the packed message bits.</param>
/// <returns>The deserialized message, or null for an unrecognized type.</returns>
public static GameMessage deserialize(Decompressor decompressor)
{
    var messageTypeCount = Enum.GetNames(typeof(MessageType)).Length;
    var messageType = (MessageType)decompressor.GetNumber(messageTypeCount);
    return messageType switch
    {
        MessageType.Ack => AckDeserialize(decompressor),
        MessageType.ClientConnect => ClientConnectDeserialize(decompressor),
        MessageType.PlayerInput => PlayerInputDeserialize(decompressor),
        MessageType.PlayerSnapshot => PlayerSnapshotDeserialize(decompressor),
        MessageType.ConnectConfirmation => ConnectedClientDeserialize(decompressor),
        MessageType.WorldSnapshot => WorldSnapshotDeserialize(decompressor),
        MessageType.Shot => ShotDeserialize(decompressor),
        MessageType.Rotation => RotationDeserialize(decompressor),
        _ => null,
    };
}
/// <summary>
/// Parses a DDS header from the reader, decodes the pixel data, and builds the
/// bitmap. Leaves the instance unchanged when the header cannot be read.
/// </summary>
/// <param name="reader">Reader positioned at the start of the DDS data.</param>
private void Parse(BinaryReader reader)
{
    var header = new DDSStruct();
    if (!ReadHeader(reader, ref header))
    {
        return;
    }
    _isValid = true;
    // Patch: some writers store 0 for depth; normalize to 1.
    if (header.depth == 0)
    {
        header.depth = 1;
    }
    uint blocksize = 0;
    var pixelFormat = GetFormat(header, ref blocksize);
    if (pixelFormat == Utils.PixelFormat.UNKNOWN)
    {
        throw new InvalidFileHeaderException();
    }
    var data = ReadData(reader, header);
    if (data == null)
    {
        return;
    }
    var rawData = Decompressor.Expand(header, data, pixelFormat);
    _bitmap = CreateBitmap((int)header.width, (int)header.height, rawData);
}
/// <summary>
/// Extracts an archive: creates the directory tree, then decompresses all
/// files into it, rolling back partial output if decompression fails.
/// </summary>
/// <param name="inputPath">Path of the archive to read.</param>
/// <param name="outputPath">Destination root for extracted content.</param>
/// <exception cref="ArchiveException">Wraps any decompression failure.</exception>
public async Task ExtractAsync(string inputPath, string outputPath)
{
    await using var stream = ArchiveStream.OpenRead(inputPath);
    var directoryHeaders = (await stream.ReadAllDirectoriesAsync(outputPath)).ToList();
    var fileHeaders = (await stream.ReadAllFileHeadersAsync(outputPath)).ToList();
    // Materialize the directory tree up front so file extraction can assume it exists.
    directoryHeaders.AsParallel().ForAll(header =>
    {
        if (!string.IsNullOrEmpty(header.FullPath) && !Directory.Exists(header.FullPath))
        {
            Directory.CreateDirectory(header.FullPath);
        }
    });
    try
    {
        await Decompressor.DecompressAsync(stream, fileHeaders);
    }
    catch (Exception ex)
    {
        // Roll back partially-extracted output before surfacing the failure.
        // NOTE(review): only ex.Message survives; consider an ArchiveException
        // overload that keeps ex as InnerException — confirm one exists.
        await DeleteFiles(fileHeaders);
        await DeleteDirectories(directoryHeaders);
        throw new ArchiveException(ex.Message);
    }
}
/// <summary>
/// Exercises the native zlib compressor/decompressor pair several times and
/// then reinitializes the compressor from configuration. Requires native zlib;
/// otherwise the test fails with an explanatory message.
/// </summary>
public virtual void TestZlibCompressorDecompressorWithConfiguration()
{
    Configuration conf = new Configuration();
    conf.SetBoolean(CommonConfigurationKeys.IoNativeLibAvailableKey, true);
    if (ZlibFactory.IsNativeZlibLoaded(conf))
    {
        byte[] rawData;
        int tryNumber = 5;
        int ByteSize = 10 * 1024;
        Compressor zlibCompressor = ZlibFactory.GetZlibCompressor(conf);
        Decompressor zlibDecompressor = ZlibFactory.GetZlibDecompressor(conf);
        rawData = Generate(ByteSize);
        try
        {
            // Repeat the round-trip to catch state leaking between runs.
            for (int i = 0; i < tryNumber; i++)
            {
                CompressDecompressZlib(rawData, (ZlibCompressor)zlibCompressor, (ZlibDecompressor)zlibDecompressor);
            }
            // Reinit after use must not throw.
            zlibCompressor.Reinit(conf);
        }
        catch (Exception ex)
        {
            NUnit.Framework.Assert.Fail("testZlibCompressorDecompressorWithConfiguration ex error " + ex);
        }
    }
    else
    {
        // Deliberately always fails here: native zlib was requested above but
        // is not loaded, so surface that with a descriptive message.
        Assert.True("ZlibFactory is using native libs against request", ZlibFactory.IsNativeZlibLoaded(conf));
    }
}
/// <summary>
/// Creates the reader/writer pipeline and one worker per processor for the
/// requested mode, then wraps them in an Orchestrator.
/// </summary>
/// <param name="compressionMode">Whether to compress or decompress.</param>
/// <param name="processorCount">Number of worker instances to create.</param>
/// <returns>An orchestrator owning the workers and the reader/writer.</returns>
/// <exception cref="InvalidOperationException">Unknown compression mode.</exception>
public Orchestrator BuildOrchestrator(CompressionMode compressionMode, int processorCount)
{
    IReaderWriter readerWriter;
    var workers = new IGzipWorker[processorCount];
    if (compressionMode == CompressionMode.Compress)
    {
        readerWriter = new CompressorReaderWriter(inputStream, outputStream, readQueueSize, writeQueueSize, blockSize);
        for (var index = 0; index < workers.Length; index++)
        {
            workers[index] = new Compressor(readerWriter.ReadQueue, readerWriter.WriteQueue);
        }
    }
    else if (compressionMode == CompressionMode.Decompress)
    {
        readerWriter = new DecompressorReaderWriter(inputStream, outputStream, readQueueSize, writeQueueSize, blockSize);
        for (var index = 0; index < workers.Length; index++)
        {
            workers[index] = new Decompressor(readerWriter.ReadQueue, readerWriter.WriteQueue);
        }
    }
    else
    {
        throw new InvalidOperationException("Unknown compression mode");
    }
    readerWriter.ReportPercentage = reportPercentage;
    return new Orchestrator(workers, readerWriter);
}
/// <summary>
/// Verifies the pure-Java gzip decompressor can inflate data produced by
/// GZIPOutputStream and that the round-trip reproduces the original bytes.
/// </summary>
public virtual void TestGzipCompatibility()
{
    // Log the seed so a failing run can be reproduced.
    Random random = new Random();
    long seed = random.NextLong();
    random.SetSeed(seed);
    Log.Info("seed: " + seed);

    // Compress a random payload (up to 128 KiB) with GZIPOutputStream.
    DataOutputBuffer buffer = new DataOutputBuffer();
    GZIPOutputStream gzout = new GZIPOutputStream(buffer);
    byte[] payload = new byte[random.Next(128 * 1024 + 1)];
    random.NextBytes(payload);
    gzout.Write(payload);
    gzout.Close();

    DataInputBuffer gzbuf = new DataInputBuffer();
    gzbuf.Reset(buffer.GetData(), buffer.GetLength());

    // Disable native libs so the built-in gzip decompressor is selected.
    Configuration conf = new Configuration();
    conf.SetBoolean(CommonConfigurationKeys.IoNativeLibAvailableKey, false);
    CompressionCodec codec = ReflectionUtils.NewInstance<GzipCodec>(conf);
    Decompressor decom = codec.CreateDecompressor();
    NUnit.Framework.Assert.IsNotNull(decom);
    Assert.Equal(typeof(BuiltInGzipDecompressor), decom.GetType());

    // Inflate through the codec (reusing `buffer` for output) and compare.
    InputStream gzin = codec.CreateInputStream(gzbuf, decom);
    buffer.Reset();
    IOUtils.CopyBytes(gzin, buffer, 4096);
    byte[] inflated = Arrays.CopyOf(buffer.GetData(), buffer.GetLength());
    Assert.AssertArrayEquals(payload, inflated);
}
/// <summary>
/// Feeds a corrupted compressed payload to the decompressor and verifies that
/// conversion fails with an ExtractionException instead of producing output.
/// </summary>
public void BadCompressedFilesThrowsException()
{
    // Hand-crafted fixture: an 8-byte header followed by what resembles a
    // zlib stream (0x78 0xDA) whose contents are malformed — TODO confirm the
    // exact container layout against the decompressor's format docs.
    byte[] data =
    {
        0x01, 0x02, 0x00, 0x00, 0x31, 0x01, 0x00, 0x00, 0x78, 0xDA, 0x4D, 0x91,
        0xCB, 0x6D, 0xC4, 0x30, 0x0C, 0x44, 0x5B, 0x99, 0x02, 0x8C, 0xED, 0x21,
        0xF7, 0x24, 0xC8, 0x25, 0x05, 0xD0, 0x32, 0xE3, 0x25, 0x40, 0x49, 0x0E,
        0x3F, 0x8B, 0x94, 0x1F, 0x7A, 0xF3, 0xBD, 0x48, 0x10, 0x35, 0x22, 0xE7,
        0x8D, 0x1E, 0xA7, 0x71, 0x87, 0x1C, 0x9E, 0x1D, 0xDB, 0xD4, 0x69, 0x70,
        0x09, 0x50, 0xE7, 0x58, 0xD0, 0xE6, 0x70, 0x6E, 0xC1, 0x91, 0x06, 0xDA,
        0xE4, 0x10, 0x6F, 0x32, 0x76, 0xB0, 0x4A, 0x5C, 0xF0, 0x34, 0x6D, 0x15,
        0x1C, 0x57, 0x32, 0x0E, 0x23, 0x64, 0xE0, 0x76, 0x5E, 0x80, 0x77, 0x0E,
        0x1C, 0xD3, 0x82, 0x2E, 0x78, 0x0D, 0xA4, 0x2A, 0xF5, 0x36, 0xED, 0x60,
        0x5B, 0x30, 0x64, 0xBD, 0x82, 0x20, 0xBD, 0x4E, 0x9B, 0x94, 0x4E, 0xA8,
        0xA5, 0x8A, 0x2F, 0xE8, 0xB4, 0x0F, 0x82, 0x89, 0xA7, 0xA3, 0xA5, 0x9D,
        0xDB, 0xE0, 0xF7, 0xE4, 0x05, 0xA5, 0x72, 0xDA, 0x25, 0x42, 0xBC, 0x54,
        0xEE, 0x54, 0xC7, 0x43, 0x78, 0x80, 0x5A, 0xF5, 0x73, 0xF9, 0x71, 0xF2,
        0x63, 0x1B, 0xEC, 0x7F, 0x2F, 0x96, 0x2F, 0x27, 0x88, 0x5A, 0x4F, 0x34,
        0xDE, 0x16, 0xF8, 0x54, 0x95, 0x26, 0x91, 0x9B, 0x0C, 0xBC, 0x95, 0x67,
        0xBF, 0xE0, 0x99, 0x7A, 0xD9, 0xF7, 0x90, 0x35, 0xB5, 0x82, 0x50, 0xD9,
        0x53, 0x09, 0x54, 0x8D, 0xF8, 0xF4, 0x8A, 0x9B, 0xDC, 0xD8, 0xAC, 0x88,
        0x5E, 0xAE, 0xE4, 0xAC, 0x7A, 0xB7, 0xD7, 0xA0, 0x54, 0xE9, 0x9D, 0xB8,
        0x69, 0x59, 0x5D, 0x9E, 0x88, 0x1B, 0x0F, 0xF2, 0x33, 0x89, 0xAF, 0x1A,
        0x64, 0x44, 0xA1, 0x66, 0x5F, 0xB0, 0x2A, 0x8D, 0xAD, 0x3C, 0xAA, 0xAC,
        0x6C, 0xB3, 0x2E, 0x16, 0xEC, 0x46, 0x37, 0xD9, 0x6A, 0x4E, 0xEE, 0xC9,
        0xD5, 0xDB, 0x66, 0x39, 0x62, 0xE5, 0xCE, 0x23, 0xCE, 0xEF, 0x48, 0x81,
        0x6C, 0xDF, 0xD1, 0x44, 0x65, 0x13, 0x52, 0x89, 0xFC, 0xB2, 0x9D, 0xF3,
        0xD2, 0xEE, 0xA9, 0x7C, 0x48, 0xAF, 0x59, 0xFF, 0x24, 0x5C, 0x03, 0x2B,
        0x9F, 0x43, 0xA9, 0xB1, 0x15, 0x86, 0xF2, 0x44, 0x14, 0x4A, 0xA9, 0x0A,
        0xE2, 0x41, 0x4B, 0x55, 0xC4, 0x55, 0xBD, 0x7C, 0x02, 0x79, 0xB7, 0xBB,
        0xBF, 0x00, 0x00, 0x00, 0x00,
    };
    using DataStream stream = DataStreamFactory.FromArray(data, 0, data.Length);
    var format = new BinaryFormat(stream);
    var decompressor = new Decompressor();
    Assert.Throws<ExtractionException>(() => decompressor.Convert(format));
}
/// <summary>
/// Decompresses the bundled "world.compressed" asset into
/// "world.decompressed" and then loads it.
/// </summary>
public async Task LoadCompressed()
{
    // The original wrapped this in `catch (Exception e) { throw e; }`, which
    // destroys the stack trace while changing nothing else — removed.
    var folder = await Windows.ApplicationModel.Package.Current.InstalledLocation.GetFolderAsync("xml");
    var file = await folder.GetFileAsync("world.compressed");
    var decompressedFilename = "world.decompressed";
    var decompressedFile = await folder.CreateFileAsync(decompressedFilename, CreationCollisionOption.ReplaceExisting);
    using (var compressedInput = await file.OpenSequentialReadAsync())
    using (var decompressor = new Decompressor(compressedInput))
    using (var decompressedOutput = await decompressedFile.OpenAsync(FileAccessMode.ReadWrite))
    {
        var bytesDecompressed = await RandomAccessStream.CopyAsync(decompressor, decompressedOutput);
    }
    await Load();
}
/// <summary>
/// Determines whether the referenced content exists in the zip cache,
/// memoizing the answer per CacheID so the archive is opened at most once.
/// </summary>
/// <param name="fileRef">Reference identifying the cached content.</param>
/// <returns>True when the content's entry is present in the cache archive.</returns>
/// <exception cref="ArgumentNullException"><paramref name="fileRef"/> is null.</exception>
public bool IsCached(ContentReference fileRef)
{
    if (fileRef is null)
    {
        throw new ArgumentNullException(nameof(fileRef));
    }
    // TryGetValue replaces the original ContainsKey + indexer double lookup;
    // the trailing ContainsKey re-check was redundant after population.
    if (!filesExist.TryGetValue(fileRef.CacheID, out var exists))
    {
        if (zipFile.Exists)
        {
            var cacheFileName = GetCacheFileName(fileRef);
            using var zip = Decompressor.Open(zipFile);
            exists = zip.GetEntry(cacheFileName) is object;
        }
        else
        {
            exists = false;
        }
        filesExist[fileRef.CacheID] = exists;
    }
    return exists;
}
/// <summary>
/// Decompresses "compressed.zip" from local storage into "decompressed.txt",
/// shows a summary dialog, and renders the decompressed text.
/// (async void is acceptable here: this is a top-level event handler.)
/// </summary>
private async void Button_Click_2(object sender, RoutedEventArgs e)
{
    var decompressed = await ApplicationData.Current.LocalFolder
        .CreateFileAsync("decompressed.txt", CreationCollisionOption.ReplaceExisting);
    var stream = await ApplicationData.Current.LocalFolder.OpenStreamForReadAsync("compressed.zip");
    var decompressor = new Decompressor(stream.AsInputStream());
    var bytes = new Byte[100000];
    var buffer = bytes.AsBuffer();
    // Request at most the buffer's capacity. The original asked for 999999
    // bytes against a 100000-byte buffer.
    var buf = await decompressor.ReadAsync(buffer, buffer.Capacity, InputStreamOptions.None);
    await FileIO.WriteBufferAsync(decompressed, buf);
    await SimpleDialog(string.Format("Decompressed {0} bytes to {1}", _compressedText.Length, buf.Length));
    BigText.Text = new String(Encoding.UTF8.GetChars(buf.ToArray()));
    DecompressButton.IsEnabled = false;
}
/// <summary>
/// Obtains a pooled decompressor for this file's codec, resetting it before
/// handing it out. Returns null when the file is not compressed.
/// </summary>
/// <returns>A reset decompressor from the pool, or null when no codec applies.</returns>
/// <exception cref="System.IO.IOException"/>
public Decompressor GetDecompressor()
{
    CompressionCodec codec = GetCodec();
    if (codec == null)
    {
        return null;
    }
    Decompressor decompressor = CodecPool.GetDecompressor(codec);
    if (decompressor != null)
    {
        if (decompressor.Finished())
        {
            // Somebody returned the decompressor to CodecPool but is still
            // using it. (Log message typo "Deompressor" fixed.)
            Log.Warn("Decompressor obtained from CodecPool already finished()");
        }
        else if (Log.IsDebugEnabled())
        {
            Log.Debug("Got a decompressor: " + decompressor.GetHashCode());
        }
        decompressor.Reset();
    }
    return decompressor;
}
/// <summary>
/// Attempts to recognize and open a game data package from the stream.
/// Known compressed extensions are decompressed first, then a "DATA"
/// signature selects the Lvl container format.
/// </summary>
public bool TryParsePackage(Stream s, string filename, FileSystem context, out IReadOnlyPackage? package)
{
    // NOTE(review): EndsWith uses culture-sensitive comparison here; consider
    // StringComparison.OrdinalIgnoreCase — confirm expected filename casing.
    if (filename.EndsWith(".lpk") || // Spritesheet container
        filename.EndsWith(".bpk") || // Image container
        filename.EndsWith(".spk") || // Sound set
        filename.EndsWith(".lps") || // Singleplayer map
        filename.EndsWith(".lpm") || // Multiplayer map
        filename.EndsWith(".mpk"))   // Matrix set (destroyable map part, tile replacements)
    {
        s = Decompressor.Decompress(s);
    }
    if (s.Position + 4 <= s.Length)
    {
        var signature = s.ReadASCII(4);
        s.Position -= 4;
        if (signature.Equals("DATA"))
        {
            // NOTE(review): the position was rewound above, so the four
            // ReadByte() calls below (evaluated after `s` and `8`) appear to
            // re-read the signature bytes, not a length field — confirm the
            // intended container layout before relying on this length value.
            package = new Lvl(
                new SegmentStream(s, 8, (s.ReadByte() << 24) | (s.ReadByte() << 16) | (s.ReadByte() << 8) | s.ReadByte()),
                filename,
                context
            );
            return true;
        }
    }
    package = null;
    return false;
}
/// <summary>
/// Takes a decompressor off the shared queue and hands it back to the
/// CodecPool, reporting whether a non-null instance was processed.
/// </summary>
/// <exception cref="System.Exception"/>
public bool Call()
{
    var decompressor = queue.Take();
    CodecPool.ReturnDecompressor(decompressor);
    return decompressor != null;
}
/// <summary>
/// Compresses and decompresses via the span-based API, using a source span
/// that does not start at the backing array's origin.
/// </summary>
public void CompressAndDecompress_workCorrectly_spans([Values(false, true)] bool useDictionary)
{
    var sampleBuffer = GenerateSample();
    // Offset by one byte so the span exercises non-zero array offsets.
    var source = new ReadOnlySpan<byte>(sampleBuffer, 1, sampleBuffer.Length - 1);
    var dictionary = useDictionary ? BuildDictionary() : null;

    Span<byte> compressed = stackalloc byte[Compressor.GetCompressBound(source.Length)];
    using (var options = new CompressionOptions(dictionary))
    using (var compressor = new Compressor(options))
    {
        var written = compressor.Wrap(source, compressed);
        compressed = compressed.Slice(0, written);
    }

    Span<byte> restored = stackalloc byte[source.Length + 1];
    using (var options = new DecompressionOptions(dictionary))
    using (var decompressor = new Decompressor(options))
    {
        var restoredLength = decompressor.Unwrap(compressed, restored);
        Assert.AreEqual(source.Length, restoredLength);
        restored = restored.Slice(0, restoredLength);
    }

    CollectionAssert.AreEqual(source.ToArray(), restored.ToArray());
}
/// <summary>
/// Obtains a decompressor for the enclosing test's codec from the CodecPool
/// and places it on the shared queue, reporting whether one was obtained.
/// </summary>
/// <exception cref="System.Exception"/>
public bool Call()
{
    var decompressor = CodecPool.GetDecompressor(this._enclosing.codec);
    queue.Put(decompressor);
    return decompressor != null;
}
/// <summary>
/// Decompresses the specified ZStandard data.
/// </summary>
/// <param name="buffer">Data to decompress.</param>
/// <returns>The decompressed bytes.</returns>
public static byte[] DecompressZStandard(byte[] buffer)
{
    using (var zstd = new Decompressor())
    {
        return zstd.Unwrap(buffer);
    }
}
/// <summary>
/// Benchmark body: unwraps the pre-compressed zstd payload; the result is
/// assigned to a local so the call is not elided.
/// </summary>
public void Zstd_Decompress()
{
    using (var decompressor = new Decompressor())
    {
        var decompressed = decompressor.Unwrap(_zstd);
    }
}