/// <summary>
/// Loads this object's content: opens a decompression stream over its location,
/// skips past the NUL-terminated header, then hands the positioned stream to the
/// supplied loader.
/// </summary>
/// <param name="contentLoader">Optional callback that consumes the positioned stream; ignored when null.</param>
public override void Load(ContentLoader contentLoader = null)
{
    using (var stream = new CompressionStream(this.Location))
    {
        // The object header ends at the first NUL byte; position just past it.
        stream.SkipUntil(c => c == '\0');
        contentLoader?.Invoke(stream);
    }
}
/// <summary>
/// Round-trips a large sequential data set through CompressionStream/DecompressionStream
/// across every combination of dictionary use, advanced parameters, zstd buffer size,
/// and copy buffer size, verifying the output matches the input byte-for-byte.
/// </summary>
public async Task RoundTrip_StreamingToStreamingAsync(
    [CombinatorialValues(false, true)] bool useDict,
    [CombinatorialValues(false, true)] bool advanced,
    [CombinatorialValues(1, 2, 7, 101, 1024, 65535, DataGenerator.LargeBufferSize, DataGenerator.LargeBufferSize + 1)] int zstdBufferSize,
    [CombinatorialValues(1, 2, 7, 101, 1024, 65535, DataGenerator.LargeBufferSize, DataGenerator.LargeBufferSize + 1)] int copyBufferSize)
{
    var dict = useDict ? TrainDict() : null;
    var testStream = DataGenerator.GetLargeStream(DataFill.Sequential);

    // A non-zero offset exercises buffer segments that don't start at index 0.
    const int offset = 1;
    var buffer = new byte[copyBufferSize + offset + 1];

    var tempStream = new MemoryStream();
    // await using guarantees the compressor is finalized (frame completed)
    // before decompression begins.
    await using (var compressionStream = new CompressionStream(tempStream, Compressor.DefaultCompressionLevel, zstdBufferSize))
    {
        compressionStream.LoadDictionary(dict);
        if (advanced)
        {
            compressionStream.SetParameter(ZSTD_cParameter.ZSTD_c_windowLog, 11);
            compressionStream.SetParameter(ZSTD_cParameter.ZSTD_c_checksumFlag, 1);
        }

        int bytesRead;
        while ((bytesRead = await testStream.ReadAsync(buffer, offset, copyBufferSize)) > 0)
        {
            await compressionStream.WriteAsync(buffer, offset, bytesRead);
        }
    }

    tempStream.Seek(0, SeekOrigin.Begin);

    var resultStream = new MemoryStream();
    await using (var decompressionStream = new DecompressionStream(tempStream, zstdBufferSize))
    {
        decompressionStream.LoadDictionary(dict);
        if (advanced)
        {
            // Must be at least the compressor's windowLog set above.
            decompressionStream.SetParameter(ZSTD_dParameter.ZSTD_d_windowLogMax, 11);
        }

        int bytesRead;
        while ((bytesRead = await decompressionStream.ReadAsync(buffer, offset, copyBufferSize)) > 0)
        {
            await resultStream.WriteAsync(buffer, offset, bytesRead);
        }
    }

    Assert.True(testStream.ToArray().SequenceEqual(resultStream.ToArray()));
}
/// <summary>
/// BackgroundWorker handler: gunzips <c>_unzipFileIn</c> into <c>_unzipFileOut</c>,
/// reporting progress (clamped to 0-100) and honoring cancellation. Errors are
/// logged and rethrown; the output stream is flushed either way.
/// </summary>
/// <param name="sender">The BackgroundWorker (unused).</param>
/// <param name="e">Work event args; receives the output path as Result, or Cancel.</param>
private void _unzipWorker_DoWork(object sender, DoWorkEventArgs e)
{
    FileInfo info = new FileInfo(_unzipFileIn);
    long length = info.Length;

    // FIX: the input file is only ever read, so request FileAccess.Read instead
    // of ReadWrite — the original demanded write rights and failed on read-only files.
    using (Stream inStream = File.Open(_unzipFileIn, FileMode.Open, FileAccess.Read, FileShare.None))
    using (Stream outStream = File.Open(_unzipFileOut, FileMode.Create, FileAccess.ReadWrite, FileShare.None))
    {
        try
        {
            using (CompressionStream bzis = CompressionFactory.Reader(CompressionFactory.Type.Gz, inStream))
            {
                byte[] buffer = new byte[4 * 1024];
                int bytesRead;
                while ((bytesRead = bzis.Read(buffer, 0, buffer.Length)) > 0)
                {
                    outStream.Write(buffer, 0, bytesRead);

                    // Progress is based on the reader's position relative to the
                    // compressed input size, clamped to [0, 100].
                    int percentage = (int)Math.Floor((double)bzis.Position * 100 / length);
                    _unzipWorker.ReportProgress(Math.Max(0, Math.Min(100, percentage)));

                    if (_unzipWorker.CancellationPending)
                    {
                        e.Cancel = true;
                        return;
                    }
                }
            }

            e.Result = _unzipFileOut;
        }
        catch (Exception ex)
        {
            log.Error(string.Format("Error while uncompressing {0} to {1}", _unzipFileIn, _unzipFileOut), ex);
            throw;
        }
        finally
        {
            outStream.Flush();
        }
    }
}
/// <summary>Writes the given nbtstructure into a file</summary>
/// <param name="Filepath">The filepath to write to</param>
/// <param name="Tag">The tag to write</param>
/// <param name="compression">The compression type to be used</param>
/// <param name="endianness">The endianness of the nbt structure</param>
public static void WriteFile(String Filepath, ITag Tag, NBTCompression compression, Endianness endianness)
{
    // FIX: using blocks guarantee both streams are disposed even if Write
    // throws — the original leaked the FileStream on any serialization failure.
    using (var writer = new FileStream(Filepath, FileMode.Create))
    using (Stream stream = CompressionStream.GetCompressionStream(writer, compression))
    {
        Write(Tag, new SerializationContext(endianness, stream));
        stream.Flush();
    }
}
/// <summary>
/// Gzip-compresses <paramref name="inputStream"/> into <paramref name="outputStream"/>.
/// </summary>
/// <param name="inputStream">Source stream to read from.</param>
/// <param name="outputStream">Destination stream that receives the gzip data.</param>
/// <exception cref="ArgumentNullException">Either stream is null.</exception>
private void GZipCompress(Stream inputStream, Stream outputStream)
{
    if (inputStream == null)
    {
        throw new ArgumentNullException(nameof(inputStream));
    }
    if (outputStream == null)
    {
        throw new ArgumentNullException(nameof(outputStream));
    }

    // FIX: a using block guarantees the writer is disposed (and the gzip
    // trailer written) even when StreamCopy throws — the original only
    // disposed it on the success path.
    using (CompressionStream gzipStream = CompressionFactory.Writer(CompressionFactory.Type.Gz, outputStream))
    {
        StreamCopy(inputStream, gzipStream);
    }
}
/// <summary>
/// Verifies that compressing the large sequential sample yields fewer bytes
/// than the original input.
/// </summary>
public void CompressionShrinksData()
{
    var sourceStream = DataGenerator.GetLargeStream(DataFill.Sequential);
    var compressedStream = new MemoryStream();

    using (var compressor = new CompressionStream(compressedStream))
    {
        sourceStream.CopyTo(compressor);
    }

    Assert.Greater(sourceStream.Length, compressedStream.Length);
}
/// <summary>
/// Gzip-decompresses <paramref name="inputStream"/> into <paramref name="outputStream"/>,
/// flushing the destination when done.
/// </summary>
/// <param name="inputStream">Gzip-compressed source stream.</param>
/// <param name="outputStream">Destination stream for the decompressed bytes.</param>
/// <exception cref="ArgumentNullException">Either stream is null.</exception>
private void GZipDecompress(Stream inputStream, Stream outputStream)
{
    if (inputStream == null)
    {
        throw new ArgumentNullException(nameof(inputStream));
    }
    if (outputStream == null)
    {
        throw new ArgumentNullException(nameof(outputStream));
    }

    // FIX: a using block guarantees the reader is disposed even when
    // StreamCopy throws — the original only disposed it on success.
    using (CompressionStream gzipStream = CompressionFactory.Reader(CompressionFactory.Type.Gz, inputStream))
    {
        StreamCopy(gzipStream, outputStream);
        outputStream.Flush();
    }
}
/// <summary>
/// Returns the per-socket compression stream, creating and caching one (plus its
/// backing buffer in <c>compStreamsData</c>) on first use.
/// </summary>
/// <param name="socket">The socket whose compression stream is requested.</param>
/// <returns>The cached or newly created compression stream for the socket.</returns>
public static CompressionStream GetCompStream(ZSteamSocket socket)
{
    CompressionStream compStream;
    if (!compStreams.TryGetValue(socket, out compStream))
    {
        var stream = new MemoryStream();
        // BUG FIX: the original wrapped a second, untracked `new MemoryStream()`
        // instead of `stream`, so the buffer registered in compStreamsData never
        // received any compressed bytes.
        compStream = new CompressionStream(stream);
        compStreams.Add(socket, compStream);
        compStreamsData.Add(socket, stream);
    }
    return compStream;
}
/// <summary>
/// Asynchronously decompresses <paramref name="stream"/> into
/// <paramref name="destination"/>, allocating a MemoryStream when none is supplied.
/// The source stream is left open.
/// </summary>
/// <returns>The destination stream; rewound only when this method allocated it.</returns>
public static async Task <T> DecompressAsync <T>([NotNull] this Stream stream, T destination, CompressionType type, CancellationToken token) where T : Stream
{
    if (stream is null)
    {
        throw new ArgumentNullException(nameof(stream));
    }

    // Remember whether we created the destination ourselves; a caller-provided
    // stream is handed back untouched, ours is rewound first.
    Boolean created = destination is null;
    destination ??= (T)(Object) new MemoryStream();

    await using CompressionStream decompressor = new CompressionStream(stream, type, CompressionMode.Decompress, true);
    await decompressor.CopyToAsync(destination, token).ConfigureAwait(false);

    return created ? destination.ResetPosition() : destination;
}
/// <summary>
/// Decompresses <paramref name="stream"/> into <paramref name="destination"/>,
/// allocating a MemoryStream when none is supplied.
/// </summary>
/// <returns>The destination stream; rewound only when this method allocated it.</returns>
public static T Decompress <T>([NotNull] this Stream stream, T destination, CompressionType type) where T : Stream
{
    if (stream is null)
    {
        throw new ArgumentNullException(nameof(stream));
    }

    // Remember whether we created the destination ourselves; a caller-provided
    // stream is handed back untouched, ours is rewound first.
    Boolean created = destination is null;
    destination ??= (T)(Object) new MemoryStream();

    // NOTE(review): unlike the async overload, this ctor omits the leaveOpen
    // flag, so the source stream is disposed here — confirm that asymmetry
    // is intentional.
    using CompressionStream decompressor = new CompressionStream(stream, type, CompressionMode.Decompress);
    decompressor.CopyTo(destination);

    return created ? destination.ResetPosition() : destination;
}
/// <summary>
/// Prepares the benchmark fixtures: a seeded random payload, its batch-compressed
/// form, and its stream-compressed form.
/// </summary>
public void GlobalSetup()
{
    var rng = new Random(0); // fixed seed keeps benchmark runs comparable
    Buffer = new byte[Math.Max(DataSize, Compressor.GetCompressBound(DataSize))];
    Data = new byte[DataSize];
    rng.NextBytes(Data);
    CompressedData = Compressor.Wrap(Data);

    // Using declarations keep both streams alive until method exit, so ToArray()
    // below runs before the compressor is disposed — same as the original.
    using var tempStream = new MemoryStream();
    using var compressionStream = new CompressionStream(tempStream);
    var source = new MemoryStream(Data);
    source.CopyTo(compressionStream);
    CompressedStreamData = tempStream.ToArray();
}
/// <summary>
/// BackgroundWorker handler: gunzips <c>m_compressedFile</c> into
/// <c>m_decompressedFile</c>, reporting progress (clamped to 0-100) and honoring
/// cancellation. The output stream is flushed even on failure.
/// </summary>
/// <param name="sender">The BackgroundWorker (unused).</param>
/// <param name="e">Work event args; Cancel is set when the user aborts.</param>
private void m_worker_DoWork(object sender, DoWorkEventArgs e)
{
    FileInfo info = new FileInfo(m_compressedFile);
    long length = info.Length;

    // FIX: the input file is only ever read, so request FileAccess.Read instead
    // of ReadWrite — the original demanded write rights and failed on read-only files.
    using (Stream inStream = File.Open(m_compressedFile, FileMode.Open, FileAccess.Read, FileShare.None))
    using (Stream outStream = File.Open(m_decompressedFile, FileMode.Create, FileAccess.ReadWrite, FileShare.None))
    {
        try
        {
            using (CompressionStream bzis = CompressionFactory.Reader(CompressionFactory.Type.Gz, inStream))
            {
                byte[] buffer = new byte[4 * 1024];
                int bytesRead;
                while ((bytesRead = bzis.Read(buffer, 0, buffer.Length)) > 0)
                {
                    outStream.Write(buffer, 0, bytesRead);

                    // Progress is based on the reader's position relative to the
                    // compressed input size, clamped to [0, 100].
                    int percentage = (int)Math.Floor((double)bzis.Position * 100 / length);
                    m_worker.ReportProgress(Math.Max(0, Math.Min(100, percentage)));

                    if (m_worker.CancellationPending)
                    {
                        e.Cancel = true;
                        return;
                    }
                }
            }
        }
        finally
        {
            outStream.Flush();
        }
    }
}
/// <summary>
/// Compresses via the streaming API, then decompresses via the one-shot batch
/// API, and checks the round trip is lossless.
/// </summary>
public void RoundTrip_StreamingToBatch()
{
    var sourceStream = DataGenerator.GetLargeStream(DataFill.Sequential);

    var compressedStream = new MemoryStream();
    using (var compressionStream = new CompressionStream(compressedStream))
    {
        sourceStream.CopyTo(compressionStream);
    }

    var resultBuffer = new byte[sourceStream.Length];
    using (var decompressor = new Decompressor())
    {
        Assert.AreEqual(sourceStream.Length, decompressor.Unwrap(compressedStream.ToArray(), resultBuffer, 0, false));
    }

    Assert.AreEqual(sourceStream.ToArray(), resultBuffer);
}
/// <summary>
/// Creates a compressed ContentLoader.
/// </summary>
/// <param name="loader">The loader to wrap; may be null.</param>
/// <returns>
/// A loader that decompresses the incoming stream before delegating to
/// <paramref name="loader"/>, or a no-op loader when none is given.
/// </returns>
public static ContentLoader CompressedContentLoader(ContentLoader loader)
{
    if (loader == null)
    {
        return _ => { };
    }

    return stream =>
    {
        // leaveOpen: the caller still owns the underlying stream.
        using (var decompressed = new CompressionStream(stream, CompressionMode.Decompress, true))
        {
            loader(decompressed);
        }
    };
}
/// <summary>
/// Opens the loose object identified by <paramref name="id"/>, reads its header
/// (type and size), and returns a reader positioned on the payload offset.
/// </summary>
/// <param name="objectsLocation">Root directory of the loose object store.</param>
/// <param name="id">Object id; the first two characters name the subdirectory.</param>
/// <returns>A LooseObjectReader for the object, or null when it does not exist.</returns>
public static LooseObjectReader GetObjectLoader(string objectsLocation, string id)
{
    var location = ObjectExists(objectsLocation, id);
    if (location == null)
    {
        return null;
    }

    var path = Path.Combine(objectsLocation, id.Substring(0, 2), id.Substring(2));
    using (var inner = new FileStream(path, System.IO.FileMode.Open, FileAccess.Read, FileShare.Read))
    // FIX: the decompression stream was never disposed, leaking its decoder state;
    // disposing it here is safe because `inner` is closed right afterwards anyway.
    using (var stream = new CompressionStream(inner))
    {
        ObjectType type;
        int size;
        ReadHeader(stream, out type, out size);

        // Record where the payload starts in the raw file so the reader can
        // seek straight to it later.
        var dataOffset = inner.Position;
        return new LooseObjectReader(location, type, size, dataOffset);
    }
}
/// <summary>
/// Wraps a compressed tar stream: selects the matching decompression reader
/// (gzip or bzip2) and layers a TarInputStream over it.
/// </summary>
/// <param name="compressedTarFile">The compressed tar data.</param>
/// <param name="compressionType">Which codec the data uses; only Gz and Bz2 are supported.</param>
/// <exception cref="NotSupportedException">Any other compression type.</exception>
public SharpZipTarArchiveIterator(Stream compressedTarFile, CompressionFactory.Type compressionType)
{
    switch (compressionType)
    {
        case CompressionFactory.Type.Gz:
            compressionStream = CompressionFactory.Reader(CompressionFactory.Type.Gz, compressedTarFile);
            break;

        case CompressionFactory.Type.Bz2:
            compressionStream = CompressionFactory.Reader(CompressionFactory.Type.Bz2, compressedTarFile);
            break;

        default:
            throw new NotSupportedException($"Type {compressionType} is not supported by ArchiveIterator");
    }

    tarStream = new TarInputStream(compressionStream);
    disposed = false;
}
/// <summary>
/// Decompresses <paramref name="compressedFile"/> with the given algorithm and
/// asserts the result matches the bytes of <paramref name="uncompressedFile"/>.
/// </summary>
/// <param name="algorithm">Codec to decode with.</param>
/// <param name="compressedFile">Path to the compressed input.</param>
/// <param name="uncompressedFile">Path to the expected plain-text reference.</param>
void DecodeRealFile(CompressionAlgorithm algorithm, string compressedFile, string uncompressedFile)
{
    // FIX: all four streams are now disposed deterministically — the original
    // leaked dataStream and backing entirely, and leaked the others on assertion
    // failure. `backing` is still created for its side effect (the temp file).
    using (var dataStream = File.OpenRead(compressedFile))
    using (var backing = File.Create(firstTempPath))
    using (var decompressing = new CompressionStream(dataStream, CompressionMode.Decompress, algorithm, true))
    using (var output = new MemoryStream())
    {
        backing.Seek(0, SeekOrigin.Begin);
        CopyStream(decompressing, output);
        output.Seek(0, SeekOrigin.Begin);

        // NOTE(review): the original also built an unused StreamReader over
        // `output` and seeked a second time; both removed as dead code.
        Assert.AreNotEqual(0, output.Length, "Stream length should not be 0,");
        Assert.IsTrue(compare_buffers(File.ReadAllBytes(uncompressedFile), output.GetBuffer(), (int)output.Length), "Streams are not equal.");
    }
}
/// <summary>
/// Checks that CompressionFactory.Writer returns the expected concrete output
/// stream type for each supported compression kind.
/// </summary>
public void TestWriterGeneration()
{
    // Expected concrete writer type per compression kind.
    var validWriters = new Dictionary <CompressionFactory.Type, Type>()
    {
        { CompressionFactory.Type.Gz, typeof(DotNetZipGZipOutputStream) },
        { CompressionFactory.Type.Bz2, typeof(DotNetZipBZip2OutputStream) }
    };

    foreach (KeyValuePair <CompressionFactory.Type, Type> entry in validWriters)
    {
        using (MemoryStream backing = new MemoryStream())
        {
            using (CompressionStream providedStream = CompressionFactory.Writer(entry.Key, backing))
            {
                Assert.AreEqual(providedStream.GetType(), entry.Value);
            }
        }
    }
}
/// <summary>
/// Round-trips a large sequential data set through CompressionStream/DecompressionStream,
/// configured via CompressionOptions/DecompressionOptions (dictionary plus advanced
/// parameter maps), across varying zstd and copy buffer sizes.
/// </summary>
public async Task RoundTrip_StreamingToStreamingAsync(
    [Values(false, true)] bool useDict,
    [Values(false, true)] bool advanced,
    [Values(1, 2, 7, 101, 1024, 65535, DataGenerator.LargeBufferSize, DataGenerator.LargeBufferSize + 1)] int zstdBufferSize,
    [Values(1, 2, 7, 101, 1024, 65535, DataGenerator.LargeBufferSize, DataGenerator.LargeBufferSize + 1)] int copyBufferSize)
{
    var dict = useDict ? TrainDict() : null;
    var testStream = DataGenerator.GetLargeStream(DataFill.Sequential);

    // A non-zero offset exercises buffer segments that don't start at index 0.
    const int offset = 1;
    var buffer = new byte[copyBufferSize + offset + 1];

    var tempStream = new MemoryStream();
    using (var compressionOptions = new CompressionOptions(dict, advanced
               ? new Dictionary <ZSTD_cParameter, int>
               {
                   { ZSTD_cParameter.ZSTD_c_windowLog, 11 },
                   { ZSTD_cParameter.ZSTD_c_checksumFlag, 1 },
                   { ZSTD_cParameter.ZSTD_c_nbWorkers, 4 }
               }
               : null))
    // await using finalizes the zstd frame before decompression starts.
    await using (var compressionStream = new CompressionStream(tempStream, compressionOptions, zstdBufferSize))
    {
        int bytesRead;
        while ((bytesRead = await testStream.ReadAsync(buffer, offset, copyBufferSize)) > 0)
        {
            await compressionStream.WriteAsync(buffer, offset, bytesRead);
        }
    }

    tempStream.Seek(0, SeekOrigin.Begin);

    var resultStream = new MemoryStream();
    using (var decompressionOptions = new DecompressionOptions(dict, advanced
               ? new Dictionary <ZSTD_dParameter, int>
               {
                   // must be at least the compressor's windowLog set above
                   { ZSTD_dParameter.ZSTD_d_windowLogMax, 11 }
               }
               : null))
    await using (var decompressionStream = new DecompressionStream(tempStream, decompressionOptions, zstdBufferSize))
    {
        int bytesRead;
        while ((bytesRead = await decompressionStream.ReadAsync(buffer, offset, copyBufferSize)) > 0)
        {
            await resultStream.WriteAsync(buffer, offset, bytesRead);
        }
    }

    Assert.AreEqual(testStream.ToArray(), resultStream.ToArray());
}
/// <summary>
/// Round-trips the slice data[offset..offset+count] through streaming
/// compression and decompression, and checks it survives unchanged.
/// </summary>
public void StreamingCompressionSimpleWrite(byte[] data, int offset, int count)
{
    var compressed = new MemoryStream();
    using (var compressionStream = new CompressionStream(compressed))
    {
        compressionStream.Write(data, offset, count);
    }

    compressed.Seek(0, SeekOrigin.Begin);

    var decompressed = new MemoryStream();
    using (var decompressionStream = new DecompressionStream(compressed))
    {
        decompressionStream.CopyTo(decompressed);
    }

    // Expected output is exactly the slice that went in.
    var expected = new byte[count];
    Array.Copy(data, offset, expected, 0, count);
    Assert.AreEqual(expected, decompressed.ToArray());
}
/// <summary>
/// Verifies that Flush alone (no stream finalization) pushes buffered input to
/// the underlying stream, and that the flushed bytes are already decompressible.
/// </summary>
public void StreamingCompressionFlushDataFromInternalBuffers()
{
    var payload = new byte[1];
    var compressed = new MemoryStream();

    // Deliberately left open until method exit: the test targets Flush,
    // not Dispose.
    using var compressionStream = new CompressionStream(compressed);
    compressionStream.Write(payload, 0, payload.Length);
    compressionStream.Flush();

    Assert.True(compressed.Length > 0);
    compressed.Seek(0, SeekOrigin.Begin);

    //NOTE: without ZSTD_endStream call on compression
    var decompressed = new MemoryStream();
    using (var decompressionStream = new DecompressionStream(compressed))
    {
        decompressionStream.CopyTo(decompressed);
    }

    Assert.True(payload.SequenceEqual(decompressed.ToArray()));
}
/// <summary>
/// Exercises zero-length and single-byte writes/reads across all four API shapes
/// (array sync/async, Span/Memory sync/async), interleaved with flushes, and
/// verifies the non-empty bytes round-trip to the matching offsets.
/// </summary>
public void StreamingCompressionZeroAndOneByte()
{
    var data = new byte[] { 0, 0, 0, 1, 2, 3, 4, 0, 0, 0 };

    var tempStream = new MemoryStream();
    using (var compressionStream = new CompressionStream(tempStream))
    {
        // Zero-length writes through every overload must be no-ops.
        // Sync-over-async is deliberate: the test pins overload behavior,
        // not scheduling.
        compressionStream.Write(data, 0, 0);
        compressionStream.Write(ReadOnlySpan <byte> .Empty);
        compressionStream.WriteAsync(data, 0, 0).GetAwaiter().GetResult();
        compressionStream.WriteAsync(ReadOnlyMemory <byte> .Empty).GetAwaiter().GetResult();

        // One byte through each overload (data[3]..data[6]), with a Flush
        // in the middle.
        compressionStream.Write(data, 3, 1);
        compressionStream.Write(new ReadOnlySpan <byte>(data, 4, 1));
        compressionStream.Flush();
        compressionStream.WriteAsync(data, 5, 1).GetAwaiter().GetResult();
        compressionStream.WriteAsync(new ReadOnlyMemory <byte>(data, 6, 1)).GetAwaiter().GetResult();
        compressionStream.FlushAsync().GetAwaiter().GetResult();
    }

    tempStream.Seek(0, SeekOrigin.Begin);

    var result = new byte[data.Length];
    using (var decompressionStream = new DecompressionStream(tempStream))
    {
        // Zero-length reads through every overload must return 0.
        Assert.AreEqual(0, decompressionStream.Read(result, 0, 0));
        Assert.AreEqual(0, decompressionStream.Read(Span <byte> .Empty));
        Assert.AreEqual(0, decompressionStream.ReadAsync(result, 0, 0).GetAwaiter().GetResult());
        Assert.AreEqual(0, decompressionStream.ReadAsync(Memory <byte> .Empty).GetAwaiter().GetResult());

        // One byte back through each overload into result[3]..result[6].
        Assert.AreEqual(1, decompressionStream.Read(result, 3, 1));
        Assert.AreEqual(1, decompressionStream.Read(new Span <byte>(result, 4, 1)));
        Assert.AreEqual(1, decompressionStream.ReadAsync(result, 5, 1).GetAwaiter().GetResult());
        Assert.AreEqual(1, decompressionStream.ReadAsync(new Memory <byte>(result, 6, 1)).GetAwaiter().GetResult());
    }

    // Indices 3-6 carry the round-tripped bytes; everything else stays 0,
    // so result must equal data exactly.
    Assert.AreEqual(data, result);
}
/// <summary>
/// Stress test: 10000 parallel round-trips with n-dependent buffer sizes and
/// offsets, optional dictionary, sync or sync-over-async I/O, and periodic forced
/// blocking GCs to surface native-handle and pinning lifetime bugs.
/// </summary>
public void RoundTrip_StreamingToStreaming_Stress([CombinatorialValues(true, false)] bool useDict, [CombinatorialValues(true, false)] bool async)
{
    long i = 0;
    var dict = useDict ? TrainDict() : null;

    Enumerable.Range(0, 10000)
        .AsParallel()
        .WithDegreeOfParallelism(Environment.ProcessorCount * 4)
        .ForAll(n =>
        {
            var testStream = DataGenerator.GetSmallStream(DataFill.Sequential);
            // Buffer sizes derive from n via distinct multipliers (11, 13, 17, 19)
            // so each iteration hits a different size combination.
            var cBuffer = new byte[1 + (int)(n % (testStream.Length * 11))];
            var dBuffer = new byte[1 + (int)(n % (testStream.Length * 13))];

            var tempStream = new MemoryStream();
            using (var compressionStream = new CompressionStream(tempStream, Compressor.DefaultCompressionLevel, 1 + (int)(n % (testStream.Length * 17))))
            {
                compressionStream.LoadDictionary(dict);
                int bytesRead;
                int offset = n % cBuffer.Length;
                while ((bytesRead = testStream.Read(cBuffer, offset, cBuffer.Length - offset)) > 0)
                {
                    if (async)
                    {
                        // Sync-over-async on purpose: ForAll offers no async context.
                        compressionStream.WriteAsync(cBuffer, offset, bytesRead).GetAwaiter().GetResult();
                    }
                    else
                    {
                        compressionStream.Write(cBuffer, offset, bytesRead);
                    }

                    // Force a full blocking GC roughly every 100 operations to
                    // shake out premature collection of native resources.
                    if (Interlocked.Increment(ref i) % 100 == 0)
                    {
                        GC.Collect(GC.MaxGeneration, GCCollectionMode.Forced, true, true);
                    }
                }
            }

            tempStream.Seek(0, SeekOrigin.Begin);

            var resultStream = new MemoryStream();
            using (var decompressionStream = new DecompressionStream(tempStream, 1 + (int)(n % (testStream.Length * 19))))
            {
                decompressionStream.LoadDictionary(dict);
                int bytesRead;
                int offset = n % dBuffer.Length;
                while ((bytesRead = async
                           ? decompressionStream.ReadAsync(dBuffer, offset, dBuffer.Length - offset).GetAwaiter()
                               .GetResult()
                           : decompressionStream.Read(dBuffer, offset, dBuffer.Length - offset)) > 0)
                {
                    resultStream.Write(dBuffer, offset, bytesRead);

                    if (Interlocked.Increment(ref i) % 100 == 0)
                    {
                        GC.Collect(GC.MaxGeneration, GCCollectionMode.Forced, true, true);
                    }
                }
            }

            Assert.True(testStream.ToArray().SequenceEqual(resultStream.ToArray()));
        });
}
/// <summary>
/// Benchmark body: pipes the sample data through a zstd compressor that writes
/// to Stream.Null, with configurable zstd and copy buffer sizes.
/// </summary>
public void CompressStream(int zstdBufferSize, int copyBufferSize)
{
    using (var compressionStream = new CompressionStream(Stream.Null, CompressionOptions.Default, zstdBufferSize))
    {
        var source = new MemoryStream(Data);
        source.CopyTo(compressionStream, copyBufferSize);
    }
}
/// <summary>
/// Reconstructs this object's bytes: loads the base object, reads the compressed
/// binary delta from the pack file at DataOffset, and applies it to the base.
/// </summary>
/// <returns>The fully reconstructed object data.</returns>
private byte[] LoadData()
{
    // Materialize the base object the delta applies to.
    var baseData = new byte[this.Base.Size];
    this.Base.Load(stream => stream.Read(baseData, 0, baseData.Length));

    using (var file = File.OpenRead(this.PackFile.Location))
    {
        file.Seek(this.DataOffset, SeekOrigin.Begin);

        // FIX: dispose the decompression stream (the original leaked it);
        // leaveOpen keeps the pack file owned by the outer using.
        using (var stream = new CompressionStream(file, leaveOpen: true))
        {
            var delta = new byte[this.RawSize];

            // FIX: Stream.Read may return fewer bytes than requested, so loop
            // until the delta buffer is full (or the stream ends early).
            int total = 0;
            while (total < delta.Length)
            {
                int read = stream.Read(delta, total, delta.Length - total);
                if (read == 0)
                {
                    break;
                }
                total += read;
            }

            return BinaryDelta.Apply(baseData, delta);
        }
    }
}
/// <summary>Writes the given nbtstructure into a file</summary>
/// <param name="stream">The stream to write to</param>
/// <param name="Tag">The tag to write</param>
/// <param name="compression">The compression type to be used</param>
/// <param name="endianness">The endianness of the nbt structure</param>
public static void WriteFile(Stream stream, ITag Tag, NBTCompression compression, Endianness endianness)
{
    stream = CompressionStream.GetCompressionStream(stream, compression);
    Write(Tag, new SerializationContext(endianness, stream));

    // FIX: flush so buffered compressed bytes reach the caller's stream — the
    // file-path overload does this, but this overload previously did not.
    // NOTE(review): gzip-style codecs only emit their trailer on Close/Dispose;
    // disposal is intentionally left to the caller here because closing the
    // wrapper would also close the caller-owned stream — confirm the codec's
    // output is complete after Flush alone.
    stream.Flush();
}
/// <summary>
/// POSTs form data (with an appended encrypted session token) to the game
/// server's /ultima.do endpoint. Binary ("bin" content type) responses are
/// zlib-inflated and lightly de-escaped; on session-expiry error codes the
/// session is refreshed and the call retried once recursively. The outcome is
/// also reported through the _ErrorCode side channel.
/// </summary>
/// <param name="strformData">URL-encoded form body, without the session field.</param>
/// <param name="bNeedResponse">False to fire-and-forget; true to read and parse the body.</param>
/// <returns>The (possibly de-escaped) response text, or null on failure or when no response was requested.</returns>
public string GetHTTPData(string strformData, bool bNeedResponse)
{
    string text = null;
    string requestUriString = "http://" + this.Server + "/ultima.do";
    // NOTE(review): Clone().ToString() on a string is a no-op round-trip.
    string s = strformData.Clone().ToString() + "&session=" + this.EncryptSession(true);
    byte[] bytes = new ASCIIEncoding().GetBytes(s);
    try
    {
        HttpWebRequest request = (HttpWebRequest)WebRequest.Create(requestUriString);
        request.Method = "POST";
        request.ContentType = "application/x-www-form-urlencoded";
        // Headers mimic the Flash game client the server expects.
        request.Accept = "text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5";
        request.UserAgent = "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1))";
        request.Referer = "http://df.36ji.uuzu.com/UltimaMain.swf";
        request.Headers.Add("x-flash-version", "10,0,32,18");
        request.CookieContainer = this._CookieContainer;
        request.AllowAutoRedirect = false;
        request.ContentLength = bytes.Length;

        Stream requestStream = request.GetRequestStream();
        requestStream.Write(bytes, 0, bytes.Length);
        requestStream.Close();

        HttpWebResponse response = (HttpWebResponse)request.GetResponse();
        if (response.StatusCode != HttpStatusCode.OK)
        {
            goto Label_029D; // non-OK status: skip straight to response cleanup
        }

        this._ErrorCode = "SUCCESS";
        if (response.Cookies.Count > 0)
        {
            this._CookieContainer.Add(response.Cookies);
        }

        if (!bNeedResponse)
        {
            goto Label_029D; // caller doesn't want the body
        }

        Stream responseStream = response.GetResponseStream();
        // A "bin" content type marks a zlib-compressed body.
        if (response.ContentType.IndexOf("bin") != -1)
        {
            ManagedZLib.Initialize();
            CompressionStream stream = new CompressionStream(responseStream, CompressionOptions.Decompress);
            try
            {
                text = new StreamReader(stream, Encoding.GetEncoding("UTF-8")).ReadToEnd();
                // Undo the server's JSON-in-string escaping.
                text = text.Replace(@"\n", "\n");
                text = text.Replace("\\\"", "\"");
                text = text.Replace("\"{", "{");
                text = text.Replace("}\"", "}");
                goto Label_01E8;
            }
            catch (ZLibException)
            {
                goto Label_01E8; // decompression failed; continue with text == null
            }
            finally
            {
                stream.Close();
                ManagedZLib.Terminate();
            }
        }

        // Plain-text body: read it directly.
        text = new StreamReader(responseStream, Encoding.GetEncoding("UTF-8")).ReadToEnd();

        Label_01E8:
        responseStream.Close();
        if (text != null)
        {
            JsonObject obj2 = (JsonObject)JsonConvert.Import(text);
            if (obj2 != null)
            {
                obj2 = obj2["body"] as JsonObject;
                if ((obj2 != null) && (obj2["errcode"] != null))
                {
                    this._ErrorCode = obj2["errcode"].ToString();
                    // Session errors: refresh the session and retry once
                    // (recursive call), unless the account has expired.
                    if ((string.Compare(this._ErrorCode, "ERR_SESSION_000001", true) == 0) || (string.Compare(this._ErrorCode, "ERR_MAIN_000007", true) == 0))
                    {
                        if (this.IsExpired())
                        {
                            this._ErrorCode = "USER_EXPIRED";
                        }
                        else
                        {
                            this.GetSession();
                            text = this.GetHTTPData(strformData, bNeedResponse);
                        }
                    }
                }
            }
        }
        else
        {
            this._ErrorCode = "NET_ERROR";
        }

        Label_029D:
        response.Close();
    }
    catch (WebException)
    {
        // Network failures are swallowed; the caller sees text == null.
    }
    return(text);
}