/// <summary>
/// Sends the resource to the client, serving the gzipped payload from cache
/// when available; otherwise reads the backing stream, rewrites CSS
/// WebResource URLs for stylesheets, gzips, caches and sends the result.
/// </summary>
/// <param name="responseType">MIME type of the response (e.g. "text/css").</param>
private void WriteFile(string responseType)
{
    this.output = this.GetCache();
    if (this.output != null)
    {
        // Cache hit: payload was built and gzipped on a previous request.
        CompressionUtils.Send(this.output, responseType);
        return;
    }

    this.sb = new StringBuilder(4096);

    // Wrap the reader in a using so it is disposed even if ReadToEnd throws;
    // disposing the reader also disposes the underlying stream (the original
    // only disposed the stream and could leak the reader on failure).
    using (StreamReader reader = new StreamReader(this.stream))
    {
        this.sb.Append(reader.ReadToEnd());
    }

    // CSS needs its embedded WebResource URLs rewritten before compression.
    byte[] gzip = responseType == "text/css"
        ? CompressionUtils.GZip(this.sm.ParseCssWebResourceUrls(this.sb.ToString()))
        : CompressionUtils.GZip(this.sb.ToString());

    this.SetCache(gzip);
    CompressionUtils.Send(gzip, responseType);
}
/// <summary>
/// Packages a resource payload: compresses it when large enough, encrypts it,
/// and frames it as [signature][hash][key][flags][unused][data].
/// NOTE(review): the hash is computed over the encrypted bytes, so any
/// verification must run before decryption — confirm against the reader side.
/// </summary>
/// <param name="buffer">Raw resource bytes to package.</param>
/// <returns>The framed blob as a byte array.</returns>
private byte[] GetResourceBytes(byte[] buffer)
{
    byte flags = 0;
    int encryptKey = _random.Next(100, int.MaxValue);

    // Compress only when the payload is big enough for GZip to pay off.
    if (buffer.Length > 30)
    {
        buffer = CompressionUtils.GZipCompress(buffer);
        flags |= 1; // bit 0 = compressed
    }

    // Encrypt (in place).
    StrongCryptoUtils.Encrypt(buffer, encryptKey);

    int pos = 0;
    var blob = new Blob(buffer.Length + 14); // 14 = header: 4+4+4+1+1 bytes
    blob.Write(ref pos, (int)ResourceSignature); // Signature
    blob.Write(ref pos, (int)StrongCryptoUtils.ComputeHash(buffer)); // Hash (of encrypted data)
    blob.Write(ref pos, (int)encryptKey); // Encrypt Key
    blob.Write(ref pos, (byte)flags); // Flags (previous comment wrongly said "Encrypt Key")
    blob.Write(ref pos, (byte)0); // Unused
    blob.Write(ref pos, (byte[])buffer); // Data
    return(blob.ToArray());
}
// Verifies that SolrConnection.Get transparently decompresses a response whose
// Content-Encoding is "deflate".
public void Get_Compressed_Deflate()
{
    var mocks = new MockRepository();
    var reqFactory = mocks.StrictMock <IHttpWebRequestFactory>();
    var request = mocks.DynamicMock <IHttpWebRequest>();
    var response = mocks.DynamicMock <IHttpWebResponse>();
    With.Mocks(mocks).Expecting(delegate {
        // The factory hands out the mocked request regardless of the URL asked for.
        Expect.Call(reqFactory.Create(new UriBuilder().Uri))
        .IgnoreArguments()
        .Repeat.Once()
        .Return(request);
        Expect.Call(request.Headers)
        .Repeat.Any()
        .Return(new WebHeaderCollection());
        // Advertise deflate so the connection picks the deflate decompressor.
        Expect.Call(response.ContentEncoding)
        .Repeat.Any()
        .Return("deflate");
        Expect.Call(response.Headers)
        .Repeat.Any()
        .Return(new WebHeaderCollection());
        Expect.Call(request.GetResponse())
        .Repeat.Once()
        .Return(response);
        // Body is served as a deflate-compressed stream of the known plaintext.
        Expect.Call(response.GetResponseStream())
        .Repeat.Once()
        .Return(CompressionUtils.DeflateCompressStream("Testing compression"));
    }).Verify(delegate {
        var conn = new SolrConnection("http://localhost") {
            HttpWebRequestFactory = reqFactory
        };
        // Get must return the decompressed payload, not the raw bytes.
        Assert.AreEqual("Testing compression", conn.Get("", new Dictionary <string, string>()));
    });
}
/// <summary>
/// Creates a file wrapper around <paramref name="source"/>, assigning a fresh
/// GUID-based id and optionally compressing the payload.
/// </summary>
/// <param name="source">Raw file bytes.</param>
/// <param name="compression">When true (default), the data is stored compressed.</param>
/// <param name="intentions">Intent flags recorded on the file.</param>
public File(byte[] source, bool compression = true, Intentions intentions = Intentions.None)
{
    FileId = Guid.NewGuid().ToString();
    Intent = intentions;
    Compressed = compression;

    if (compression)
    {
        Data = CompressionUtils.Compress(source);
    }
    else
    {
        Data = source;
    }
}
/// <summary>
/// Writes this entry's data to <paramref name="stream"/>, either copying the
/// stored bytes verbatim or decompressing them on the fly.
/// </summary>
/// <param name="stream">Destination stream.</param>
/// <param name="extractParams">Shared extraction state holding the archive reader.</param>
/// <param name="decompress">True to inflate compressed entries while writing.</param>
protected override void WriteDataToStream(Stream stream, SharedExtractParams extractParams, bool decompress)
{
    var reader = extractParams.Reader;
    // Compressed entries store the packed length in Size, raw ones in RealSize.
    uint len = this.Compressed ? this.Size : this.RealSize;

    reader.BaseStream.Seek((long)this.Offset, SeekOrigin.Begin);

    // Reset at start since value might still be in used for a bit after
    this.BytesWritten = 0;
    if (!decompress || !this.Compressed)
    {
        // Straight copy, reporting progress through BytesWritten.
        StreamUtils.WriteSectionToStream(reader.BaseStream, len,
                                         stream,
                                         bytesWritten => this.BytesWritten = bytesWritten);
    }
    else
    {
        CompressionUtils.Decompress(reader.BaseStream, len, stream,
                                    bytesWritten => this.BytesWritten = bytesWritten,
                                    extractParams);
    }
}
/// <inheritdoc />
protected override void WriteBytes(Stream stream, byte[] bytes)
{
    _trace.Info(TraceCategory, "Unzipping {0} bytes", bytes.Length.ToString());

    // The archive is expected to contain exactly one entry — its value is the payload.
    var entries = CompressionUtils.Unzip(bytes);
    byte[] payload = entries.Single().Value;
    stream.Write(payload, 0, payload.Length);
}
// Two payloads compressed back-to-back into a single target stream must each
// be independently decompressible from their recorded start offsets.
public async Task CompressedAppendToNonEmptyStreamAndDecompress()
{
    var sourceStr1 = "TEST this is TEST";
    var sourceStr2 = "THIS IS another TEST";
    var utils = new CompressionUtils();

    using (var source1 = GetStreamWithContents(sourceStr1))
    using (var source2 = GetStreamWithContents(sourceStr2))
    using (var target = new MemoryStream())
    {
        // First chunk starts at the very beginning of the target stream.
        var chunk1Position = 0;
        await utils.CopyWithCompression(source1, target);
        // Second chunk starts wherever the first compressed chunk ended.
        var chunk2Position = target.Position;
        await utils.CopyWithCompression(source2, target);

        // NOTE(review): Encoding here is presumably a fixture field (not
        // System.Text.Encoding) — confirm which encoding it wraps.
        var decompressedBytes1 = (await utils.ReadWithDecompression(target, chunk1Position)).ToArray();
        var decompressedStr1 = Encoding.GetString(decompressedBytes1);
        var decompressedBytes2 = (await utils.ReadWithDecompression(target, chunk2Position)).ToArray();
        var decompressedStr2 = Encoding.GetString(decompressedBytes2);

        sourceStr1.Should().Be(decompressedStr1);
        sourceStr2.Should().Be(decompressedStr2);
    }
}
/// <summary>
/// Writes the resource file to the response: serves the cached payload when
/// available, returns 404 when there is no backing stream, otherwise reads the
/// content, optionally minifies and compresses it, caches and sends it.
/// </summary>
/// <param name="responseType">MIME type of the response.</param>
private void WriteFile(string responseType)
{
    this._output = this.GetCache();
    if (this._output != null)
    {
        this.Send(this._output, responseType);
        return;
    }

    if (this._stream == null)
    {
        Response404(responseType);
        return;
    }

    this._sb = new StringBuilder(4096);

    // Dispose the reader (and with it the underlying stream) even if reading
    // throws; the original only disposed the stream.
    using (var reader = new StreamReader(this._stream))
    {
        this._sb.Append(reader.ReadToEnd());
    }

    var data = _sb.ToString();

    // Minify scripts/styles only outside of debug builds.
    if (IsMinify && !IsDebug)
    {
        data = ResourceHelper.MinHelper.MinJs(data);
    }

    var content = this.IsCompress ? CompressionUtils.GZip(data) : Encoding.UTF8.GetBytes(data);
    this.SetCache(content);
    this.Send(content, responseType);
}
// Verifies that the server matches a request whose body arrives compressed
// (gzip or deflate, per the parameter) against a plain-text body matcher.
public async Task WireMockServer_Should_SupportRequestGZipAndDeflate(string contentEncoding)
{
    // Arrange
    const string body = "hello wiremock";
    byte[] compressed = CompressionUtils.Compress(contentEncoding, Encoding.UTF8.GetBytes(body));

    var server = WireMockServer.Start();
    server.Given(
        Request.Create()
        .WithPath("/foo")
        .WithBody("hello wiremock")
        )
    .RespondWith(
        Response.Create().WithBody("OK")
        );

    // Post the compressed payload, advertising its encoding so the server
    // knows to decompress before matching.
    var content = new StreamContent(new MemoryStream(compressed));
    content.Headers.ContentType = new MediaTypeHeaderValue("text/plain");
    content.Headers.ContentEncoding.Add(contentEncoding);

    // Act
    var response = await new HttpClient().PostAsync($"{server.Urls[0]}/foo", content);

    // Assert — matching succeeded only if the stubbed "OK" body came back.
    Check.That(await response.Content.ReadAsStringAsync()).Contains("OK");

    server.Stop();
}
/// <summary>
/// Appends a buffer to the blob store, optionally compressing and/or
/// encrypting it, and returns the id assigned to the entry.
/// Entry layout: [flags byte][7-bit-encoded length][data].
/// NOTE(review): when encrypt is true and compression is skipped, the caller's
/// array is encrypted in place — confirm callers do not reuse the buffer.
/// </summary>
/// <param name="buffer">Data to store.</param>
/// <param name="offset">Start offset within <paramref name="buffer"/>.</param>
/// <param name="count">Number of bytes to store.</param>
/// <param name="encrypt">True to encrypt the stored bytes.</param>
/// <param name="compress">True to GZip-compress when the payload is large enough.</param>
/// <returns>The id of the newly added blob entry.</returns>
public int Add(byte[] buffer, int offset, int count, bool encrypt = false, bool compress = false)
{
    int blobId = NextBlobID;
    byte flags = 0;

    // Compress only when requested and the payload is big enough to benefit.
    if (compress && count > 30)
    {
        buffer = CompressionUtils.GZipCompress(buffer, offset, count);
        offset = 0;
        count = buffer.Length;
        flags |= 2; // bit 1 = compressed
        _hasCompress = true;
    }

    // Encrypt (in place over [offset, offset + count)).
    if (encrypt)
    {
        StrongCryptoUtils.Encrypt(buffer, _encryptKey, offset, count);
        flags |= 1; // bit 0 = encrypted
        _hasEncrypt = true;
    }

    // Write the framed entry at the current end of the blob.
    int pos = _blob.Length;
    _blob.Write(ref pos, (byte)flags);
    _blob.Write7BitEncodedInt(ref pos, count);
    _blob.Write(ref pos, buffer, offset, count);
    return(blobId);
}
/// <summary>
/// Returns the id of the last document in the file, or 0 when the file is
/// empty. The file position is restored before returning.
/// </summary>
public async Task <long> GetLastDocId()
{
    if (_file.Length == 0)
    {
        return(0);
    }

    var initialPos = _file.Position;
    try
    {
        var compression = new CompressionUtils();
        // Read only the last compressed segment, scanning from the end of the
        // file rather than decompressing everything before it.
        var segment = await compression.ReadWithDecompressionFromRightToLeft(_file);
        using (var ms = new MemoryStream(segment.Array, segment.Offset, segment.Count))
        {
            var batch = await DocumentSerializer.DeserializeBatch(ms).ConfigureAwait(false);
            // NOTE(review): assumes ids are appended in increasing order, so the
            // last document of the last batch has the highest id — confirm.
            return(batch.Last().Id);
        }
    }
    finally
    {
        // Leave the stream where we found it for subsequent readers/writers.
        _file.Position = initialPos;
    }
}
// Verifies that BodyParser decompresses a gzip/deflate-encoded body when
// DecompressGZipAndDeflate is enabled, and reports the detected compression.
public async Task BodyParser_Parse_ContentEncoding_GZip_And_DecompressGzipAndDeflate_Is_True_Should_Decompress(string compression)
{
    // Arrange
    var bytes = Encoding.ASCII.GetBytes("0");
    var compressed = CompressionUtils.Compress(compression, bytes);
    var bodyParserSettings = new BodyParserSettings
    {
        Stream = new MemoryStream(compressed),
        ContentType = "text/plain",
        DeserializeJson = false,
        // Upper-cased on purpose: header-value casing must not matter.
        ContentEncoding = compression.ToUpperInvariant(),
        DecompressGZipAndDeflate = true
    };

    // Act
    var result = await BodyParser.Parse(bodyParserSettings);

    // Assert
    result.DetectedBodyType.Should().Be(BodyType.String);
    result.DetectedBodyTypeFromContentType.Should().Be(BodyType.String);
    result.BodyAsBytes.Should().BeEquivalentTo(new byte[] { 48 }); // 48 == '0'
    result.BodyAsJson.Should().BeNull();
    result.BodyAsString.Should().Be("0");
    result.DetectedCompression.Should().Be(compression);
}
/// <summary>
/// Opens the file at <paramref name="path"/> and decompresses it into an
/// <see cref="EulerContainer"/>.
/// </summary>
private EulerContainer GetEulerContainer(string path)
{
    // Renamed the misleading local ("english") — it is just the opened file stream.
    using (var stream = File.Open(path, FileMode.Open))
    {
        return CompressionUtils.Decompress <EulerContainer>(stream);
    }
}
/// <summary>
/// Opens the file at <paramref name="path"/> and decompresses it into an
/// <see cref="EulerContainer"/>.
/// </summary>
private static EulerContainer GetEulerContainer(string path)
{
    using (var input = File.Open(path, FileMode.Open))
    {
        var container = CompressionUtils.Decompress <EulerContainer>(input);
        return container;
    }
}
/// <summary>
/// Sends the resource to the client, serving from cache when possible.
/// Otherwise reads the backing stream, rewrites CSS WebResource URLs for
/// stylesheets, then either gzips or UTF-8-encodes the content (per the
/// compress flag), caches it and sends it.
/// </summary>
/// <param name="responseType">MIME type of the response (e.g. "text/css").</param>
private void WriteFile(string responseType)
{
    this.output = this.GetCache();
    if (this.output != null)
    {
        this.Send(this.output, responseType);
        return;
    }

    this.sb = new StringBuilder(4096);

    // Wrap the reader in a using so it is disposed even if ReadToEnd throws;
    // disposing the reader also disposes the underlying stream.
    using (StreamReader reader = new StreamReader(this.stream))
    {
        this.sb.Append(reader.ReadToEnd());
    }

    // CSS needs its embedded WebResource URLs rewritten before sending.
    string data = responseType == "text/css"
        ? this.sm.ParseCssWebResourceUrls(this.sb.ToString())
        : this.sb.ToString();
    byte[] content = this.compress ? CompressionUtils.GZip(data) : Encoding.UTF8.GetBytes(data);

    this.SetCache(content);
    this.Send(content, responseType);
}
/// <summary>
/// Writes the serialized client config to the HTTP response, prepending any
/// instance script registered by the ResourceManager. Uploads are wrapped in a
/// textarea (iframe transport); everything else is gzipped and sent.
/// </summary>
/// <param name="context">The controller context whose response receives the output.</param>
public override void ExecuteResult(ControllerContext context)
{
    if (HttpContext.Current != null)
    {
        // Pick up any script the ResourceManager stashed on this request and
        // prepend it to the result's script.
        var instanceScript = HttpContext.Current.Items[ResourceManager.INSTANCESCRIPT];
        if (instanceScript != null)
        {
            this.Script = instanceScript.ToString() + (this.Script ?? "");
        }
    }

    if (!string.IsNullOrEmpty(this.Script))
    {
        // NOTE(review): "<string>" looks like a marker the client-side handler
        // strips before evaluating the script — confirm against that handler.
        this.Script = string.Concat("<string>", this.Script);
    }

    if (this.IsUpload)
    {
        // Iframe-based uploads read the response out of a textarea.
        context.HttpContext.Response.Write("<textarea>{0}</textarea>".FormatWith(new ClientConfig().Serialize(this)));
    }
    else
    {
        CompressionUtils.GZipAndSend(new ClientConfig().Serialize(this));
    }
}
/// <summary>
/// Writes this file's data chunks to <paramref name="stream"/>, decompressing
/// each one when requested. NOTE(review): the loop stops at numChunks - 1, so
/// the final chunk is NOT written here — confirm it is handled by the caller.
/// </summary>
/// <param name="stream">Destination stream.</param>
/// <param name="extractParams">Shared extraction state holding the archive reader.</param>
/// <param name="decompress">True to inflate compressed chunks while writing.</param>
private void WriteChunks(Stream stream, SharedExtractParams extractParams, bool decompress)
{
    var reader = extractParams.Reader;

    for (int i = 0; i < (numChunks - 1); i++)
    {
        reader.BaseStream.Seek((long)this.Chunks[i].offset, SeekOrigin.Begin);

        if (!decompress)
        {
            // Carry the running total so progress accumulates across chunks.
            ulong prev = this.BytesWritten;
            StreamUtils.WriteSectionToStream(reader.BaseStream,
                                             Math.Max(Chunks[i].packSz, Chunks[i].fullSz), // Lazy hack, only one should be set when not compressed
                                             stream,
                                             bytesWritten => this.BytesWritten = prev + bytesWritten);
        }
        else
        {
            ulong prev = this.BytesWritten;
            CompressionUtils.Decompress(reader.BaseStream,
                                        this.Chunks[i].packSz,
                                        stream,
                                        bytesWritten => this.BytesWritten = prev + bytesWritten,
                                        extractParams);
        }
    }
}
/// <summary>
/// Writes this entry's data to <paramref name="stream"/>: a raw copy when not
/// decompressing; otherwise the header (via WriteHeader) followed by the
/// inflated main section. In both cases the chunk data is appended afterwards.
/// </summary>
/// <param name="stream">Destination stream.</param>
/// <param name="extractParams">Shared extraction state holding the archive reader.</param>
/// <param name="decompress">True to inflate compressed data while writing.</param>
/// <exception cref="Exception">Wraps any decompression failure with size context.</exception>
protected override void WriteDataToStream(Stream stream, SharedExtractParams extractParams, bool decompress)
{
    var reader = extractParams.Reader;

    reader.BaseStream.Seek((long)this.Offset, SeekOrigin.Begin);

    // Reset at start since value might still be in used for a bit after
    this.BytesWritten = 0;
    if (!decompress)
    {
        StreamUtils.WriteSectionToStream(reader.BaseStream,
                                         Math.Max(this.Size, this.RealSize), // Lazy hack, only one should be set when not compressed
                                         stream,
                                         bytesWritten => this.BytesWritten = bytesWritten);
    }
    else
    {
        this.WriteHeader(stream);
        try
        {
            CompressionUtils.Decompress(reader.BaseStream,
                                        this.Size,
                                        stream,
                                        bytesWritten => this.BytesWritten = bytesWritten,
                                        extractParams);
        }
        catch (Exception ex)
        {
            throw new Exception($"Couldn't decompress zlib texture data. Size: {this.Size}, RealSize: {this.RealSize}", ex);
        }
    }

    this.WriteChunks(stream, extractParams, decompress);
}
/// <inheritdoc />
public IObservable <string> Get(Tile tile)
{
    // TODO tile can cross more than one srtm cell: need load more.
    var prefix = GetFileNamePrefix(tile.BoundingBox.Center());
    var filePath = Path.Combine(_elePath, prefix);

    // Fast path: the elevation file is already on disk.
    if (_fileSystemService.Exists(filePath))
    {
        return(Observable.Return(filePath));
    }

    return(Download(prefix).Select(bytes => {
        lock (_lock)
        {
            // Double-check inside the lock: a concurrent download may have
            // written the file between the Exists check above and here.
            if (!_fileSystemService.Exists(filePath))
            {
                _trace.Info(TraceCategory, "Unzipping {0} bytes", bytes.Length.ToString());
                // The downloaded archive is expected to contain a single entry
                // holding the raw HGT data.
                var hgtData = CompressionUtils.Unzip(bytes).Single().Value;
                using (var stream = _fileSystemService.WriteStream(filePath))
                    stream.Write(hgtData, 0, hgtData.Length);
            }
        }
        return filePath;
    }));
}
/// <summary>
/// Looks up a block by its hash. Returns null for the null hash or an unknown
/// hash; throws <see cref="ChainException"/> when the stored bytes do not hash
/// back to the requested value.
/// </summary>
public Block GetBlockByHash(Hash hash)
{
    if (hash == Hash.Null)
    {
        return null;
    }

    var blockMap = new StorageMap(BlockHashMapTag, this.Storage);
    if (!blockMap.ContainsKey <Hash>(hash))
    {
        return null;
    }

    // Blocks are stored compressed; inflate before deserializing.
    var raw = CompressionUtils.Decompress(blockMap.Get <Hash, byte[]>(hash));
    var block = Block.Unserialize(raw);

    // Integrity check: the deserialized block must hash to the key it was stored under.
    if (block.Hash != hash)
    {
        throw new ChainException("data corruption on block: " + hash);
    }

    return block;
}
/// <summary>
/// Unzips the precomputed nearest-postcode blob and deserializes it from
/// UTF-8 JSON into a list of <see cref="NearestPostcodeDto"/>.
/// </summary>
public static IReadOnlyList <NearestPostcodeDto> DecompressPreComputedPostcodes(PreComputedNearestPostcodesDto preComputedNearestPostcodesDto)
{
    var json = CompressionUtils.UnzipToBytes(preComputedNearestPostcodesDto.CompressedNearestPostcodes);
    return Utf8Json.JsonSerializer.Deserialize <List <NearestPostcodeDto> >(json);
}
// Gzipped data must be recognized as gzipped.
public void IsGZipped()
{
    const string input = "eyup!";

    byte[] gzipped = CompressionUtils.Gzip(input);

    Assert.IsTrue(CompressionUtils.IsGZipped(gzipped));
}
// Plain UTF-8 bytes must NOT be recognized as gzipped.
public void IsnotGZipped()
{
    byte[] plainBytes = Encoding.UTF8.GetBytes("eyup!");

    Assert.IsFalse(CompressionUtils.IsGZipped(plainBytes));
}
// End-to-end: two messages compressed into a single producer envelope must come
// back through the consumer connector as the two original messages, in order.
public void SimpleSyncProducerSends2CompressedMessagesAndConsumerConnectorGetsThemBack()
{
    var prodConfig = this.SyncProducerConfig1;
    var consumerConfig = this.ZooKeeperBasedConsumerConfig;

    // first producing
    string payload1 = "kafka 1.";
    byte[] payloadData1 = Encoding.UTF8.GetBytes(payload1);
    var msg1 = new Message(payloadData1);

    string payload2 = "kafka 2.";
    byte[] payloadData2 = Encoding.UTF8.GetBytes(payload2);
    var msg2 = new Message(payloadData2);

    // Wrap both messages into one compressed envelope message.
    Message compressedMessage = CompressionUtils.Compress(new List <Message> {
        msg1, msg2
    }, CompressionCodecs.DefaultCompressionCodec);
    var producerRequest = new ProducerRequest(CurrentTestTopic, 0, new List <Message> {
        compressedMessage
    });

    using (var producer = new SyncProducer(prodConfig))
    {
        producer.Send(producerRequest);
    }

    // now consuming
    var resultMessages = new List <Message>();

    using (IConsumerConnector consumerConnector = new ZookeeperConsumerConnector(consumerConfig, true))
    {
        var topicCount = new Dictionary <string, int> {
            { CurrentTestTopic, 1 }
        };
        var messages = consumerConnector.CreateMessageStreams(topicCount);
        var sets = messages[CurrentTestTopic];
        try
        {
            // Drain every stream until the consumer times out.
            foreach (var set in sets)
            {
                foreach (var message in set)
                {
                    resultMessages.Add(message);
                }
            }
        }
        catch (ConsumerTimeoutException)
        {
            // do nothing, this is expected
        }
    }

    // The envelope must be transparently unpacked back into both messages.
    Assert.AreEqual(2, resultMessages.Count);
    Assert.AreEqual(msg1.ToString(), resultMessages[0].ToString());
    Assert.AreEqual(msg2.ToString(), resultMessages[1].ToString());
}
// Round trip: gzip then gunzip must reproduce the input, and the compressed
// form must be smaller than the UTF-8 encoding of the input.
public void GZipRoundTripTest()
{
    byte[] compressed = CompressionUtils.GZipString(input);
    string roundTripped = CompressionUtils.GUnzipString(compressed);

    Assert.AreEqual(input, roundTripped, "Round trip failed.");
    Assert.IsTrue(Encoding.UTF8.GetBytes(input).Length > compressed.Length, "Compression generated a larger byte array than the input!");
}
// Gzip followed by UnGzipToString must reproduce the original string.
public void TestString()
{
    const string original = "eyup!";

    byte[] gzipped = CompressionUtils.Gzip(original);
    string restored = CompressionUtils.UnGzipToString(gzipped);

    Assert.AreEqual(original, restored);
}
/// <summary>
/// Writes the serialized client config to the response: wrapped in a textarea
/// for uploads (iframe transport), otherwise handed to GZipAndSend.
/// </summary>
/// <param name="context">The controller context whose response receives the output.</param>
public override void ExecuteResult(ControllerContext context)
{
    // Both branches serialize the same config; do it once up front.
    var serialized = new ClientConfig().Serialize(this);

    if (this.IsUpload)
    {
        context.HttpContext.Response.Write("<textarea>{0}</textarea>".FormatWith(serialized));
    }
    else
    {
        CompressionUtils.GZipAndSend(serialized);
    }
}
/// <summary>
/// Converts a string value to its compressed storage representation.
/// </summary>
/// <param name="value">The value to convert; expected to be a string (or null).</param>
/// <returns>Compressed UTF-8 bytes, or null when the string is null or empty.</returns>
public override object ConvertToStorageType(object value)
{
    var text = (string)value;

    // string.IsNullOrEmpty covers both cases the original tested (empty-first,
    // then null) with the same result.
    if (string.IsNullOrEmpty(text))
    {
        return null;
    }

    // Dispose the source stream once the compressed bytes have been materialized.
    using (var source = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(text)))
    {
        return CompressionUtils.Compress(source).ToArray();
    }
}
/// <summary>
/// Sends this response to the client: written verbatim for upload scenarios
/// (iframe transport), otherwise handed to GZipAndSend.
/// </summary>
public virtual void Return()
{
    // Inverted the original negated condition (De Morgan) for readability.
    if (this.IsUpload || this.NativeUpload)
    {
        HttpContext.Current.Response.Write(this.ToString());
    }
    else
    {
        CompressionUtils.GZipAndSend(this);
    }
}
/// <summary>
/// Encodes and compresses this container into a buffer laid out as:
/// [type:1][compressed length:4][uncompressed length:4, only when compressed]
/// [compressed data][version:2, only when versioned].
/// </summary>
/// <returns> The encoded buffer, flipped and ready for reading. </returns>
/// <exception cref="IOException"> if the compression type is not recognized. </exception>
public ByteBuffer Encode()
{
    var data = GetData(); // so we have a read only view, making this method thread safe

    /* grab the data as a byte array for compression */
    var bytes = new byte[data.limit()];
    data.mark();
    data.get(bytes);
    data.reset();

    /* compress the data according to this container's compression type */
    byte[] compressed;
    if (type == COMPRESSION_NONE)
    {
        compressed = bytes;
    }
    else if (type == COMPRESSION_GZIP)
    {
        compressed = CompressionUtils.Gzip(bytes);
    }
    else if (type == COMPRESSION_BZIP2)
    {
        compressed = CompressionUtils.Bzip2(bytes);
    }
    else
    {
        throw new IOException("Invalid compression type");
    }

    /* header = type byte + 4-byte compressed length, plus 4-byte uncompressed
     * length when compressed, plus 2-byte version trailer when versioned */
    var header = 5 + (type == COMPRESSION_NONE ? 0 : 4) + (IsVersioned() ? 2 : 0);
    var buf = ByteBuffer.allocate(header + compressed.Length);

    /* write the header, with the optional uncompressed length */
    buf.put((byte)type);
    buf.putInt(compressed.Length);

    if (type != COMPRESSION_NONE)
    {
        buf.putInt(data.limit());
    }

    /* write the compressed data (previous comment wrongly said "length") */
    buf.put(compressed);

    /* write the trailer with the optional version */
    if (IsVersioned())
    {
        buf.putShort((short)version);
    }

    /* flip the buffer and return it */
    return((ByteBuffer)buf.flip());
}