// Wraps raw bytes as a file record, optionally running them through
// CompressionUtils first; a fresh GUID is assigned as the file id.
public File(byte[] source, bool compression = true, Intentions intentions = Intentions.None)
{
    FileId = Guid.NewGuid().ToString();
    Intent = intentions;
    Compressed = compression;
    if (compression)
    {
        Data = CompressionUtils.Compress(source);
    }
    else
    {
        Data = source;
    }
}
// Verifies that WireMock matches a stub on the *decompressed* request body when
// the client sends a gzip/deflate-encoded payload with a Content-Encoding header.
public async Task WireMockServer_Should_SupportRequestGZipAndDeflate(string contentEncoding)
{
    // Arrange: compress the body and stand up a server that matches on plain text.
    const string body = "hello wiremock";
    byte[] compressed = CompressionUtils.Compress(contentEncoding, Encoding.UTF8.GetBytes(body));

    var server = WireMockServer.Start();
    server
        .Given(Request.Create().WithPath("/foo").WithBody("hello wiremock"))
        .RespondWith(Response.Create().WithBody("OK"));

    var content = new StreamContent(new MemoryStream(compressed));
    content.Headers.ContentType = new MediaTypeHeaderValue("text/plain");
    content.Headers.ContentEncoding.Add(contentEncoding);

    // Act: post the compressed payload.
    var client = new HttpClient();
    var response = await client.PostAsync($"{server.Urls[0]}/foo", content);

    // Assert: the stub matched, so the canned "OK" body comes back.
    var responseBody = await response.Content.ReadAsStringAsync();
    Check.That(responseBody).Contains("OK");

    server.Stop();
}
// Verifies BodyParser.Parse decompresses a gzip/deflate-encoded body when
// DecompressGZipAndDeflate is enabled, reporting the detected compression.
public async Task BodyParser_Parse_ContentEncoding_GZip_And_DecompressGzipAndDeflate_Is_True_Should_Decompress(string compression)
{
    // Arrange: compress the single ASCII character "0".
    byte[] raw = Encoding.ASCII.GetBytes("0");
    byte[] compressedBody = CompressionUtils.Compress(compression, raw);
    var settings = new BodyParserSettings
    {
        Stream = new MemoryStream(compressedBody),
        ContentType = "text/plain",
        DeserializeJson = false,
        ContentEncoding = compression.ToUpperInvariant(),
        DecompressGZipAndDeflate = true
    };

    // Act
    var result = await BodyParser.Parse(settings);

    // Assert: decompression yields the original "0" (byte 48) as a string body.
    result.DetectedBodyType.Should().Be(BodyType.String);
    result.DetectedBodyTypeFromContentType.Should().Be(BodyType.String);
    result.BodyAsBytes.Should().BeEquivalentTo(new byte[] { 48 });
    result.BodyAsJson.Should().BeNull();
    result.BodyAsString.Should().Be("0");
    result.DetectedCompression.Should().Be(compression);
}
// Integration test: two plain messages are folded into a single compressed carrier
// message, sent via SyncProducer, and the ZooKeeper-based consumer connector is
// expected to transparently unwrap them and deliver both originals in order.
public void SimpleSyncProducerSends2CompressedMessagesAndConsumerConnectorGetsThemBack()
{
    var prodConfig = this.SyncProducerConfig1;
    var consumerConfig = this.ZooKeeperBasedConsumerConfig;

    // first producing
    string payload1 = "kafka 1.";
    byte[] payloadData1 = Encoding.UTF8.GetBytes(payload1);
    var msg1 = new Message(payloadData1);
    string payload2 = "kafka 2.";
    byte[] payloadData2 = Encoding.UTF8.GetBytes(payload2);
    var msg2 = new Message(payloadData2);
    // Both messages are packed into one carrier using the default codec.
    Message compressedMessage = CompressionUtils.Compress(new List <Message> { msg1, msg2 }, CompressionCodecs.DefaultCompressionCodec);
    var producerRequest = new ProducerRequest(CurrentTestTopic, 0, new List <Message> { compressedMessage });
    using (var producer = new SyncProducer(prodConfig))
    {
        producer.Send(producerRequest);
    }

    // now consuming
    var resultMessages = new List <Message>();
    using (IConsumerConnector consumerConnector = new ZookeeperConsumerConnector(consumerConfig, true))
    {
        var topicCount = new Dictionary <string, int> { { CurrentTestTopic, 1 } };
        var messages = consumerConnector.CreateMessageStreams(topicCount);
        var sets = messages[CurrentTestTopic];
        try
        {
            foreach (var set in sets)
            {
                foreach (var message in set)
                {
                    resultMessages.Add(message);
                }
            }
        }
        catch (ConsumerTimeoutException)
        {
            // do nothing, this is expected
        }
    }

    // Exactly the two original (decompressed) messages must come back, in order.
    Assert.AreEqual(2, resultMessages.Count);
    Assert.AreEqual(msg1.ToString(), resultMessages[0].ToString());
    Assert.AreEqual(msg2.ToString(), resultMessages[1].ToString());
}
// Converts a string value into its storage representation: null/empty strings are
// stored as null; anything else is UTF-8 encoded and compressed via CompressionUtils.
// Note: like the original, a non-string value causes an InvalidCastException.
public override object ConvertToStorageType(object value)
{
    var text = (string)value;

    // string.IsNullOrEmpty replaces the original two-step check, which compared
    // against string.Empty *before* testing for null (same result, clearer intent).
    if (string.IsNullOrEmpty(text))
    {
        return null;
    }

    // Bug fix: the intermediate MemoryStream was never disposed; the using block
    // releases it once the compressed bytes have been materialized.
    using (var ms = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(text)))
    {
        return CompressionUtils.Compress(ms).ToArray();
    }
}
// Round-trips a small object through Compress/Decompress and verifies the
// properties survive intact.
public void CompressTest()
{
    var original = new TestClass
    {
        IntProperty = 100,
        StringProperty = "Test"
    };

    var compressed = CompressionUtils.Compress(original);
    var roundTripped = CompressionUtils.Decompress <TestClass>(compressed);

    Assert.AreEqual(original.IntProperty, roundTripped.IntProperty);
    Assert.AreEqual(original.StringProperty, roundTripped.StringProperty);
}
// Integration test: one message is wrapped into a GZip-compressed carrier and sent
// through AsyncProducer; the consumer then polls Fetch in 100 ms steps (bounded by
// MaxTestWaitTimeInMiliseconds) until the carrier arrives at the offset captured
// before the send. The fetched message is compared against the carrier itself.
public void AsyncProducerSendsCompressedAndConsumerReceivesSingleSimpleMessage()
{
    var prodConfig = this.AsyncProducerConfig1;
    var consumerConfig = this.ConsumerConfig1;

    var sourceMessage = new Message(Encoding.UTF8.GetBytes("test message"));
    var compressedMessage = CompressionUtils.Compress(new List <Message> { sourceMessage }, CompressionCodecs.GZIPCompressionCodec);

    // Capture the offset before producing so the fetch below starts at our message.
    long currentOffset = TestHelper.GetCurrentKafkaOffset(CurrentTestTopic, consumerConfig);
    using (var producer = new AsyncProducer(prodConfig))
    {
        var producerRequest = new ProducerRequest(
            CurrentTestTopic, 0, new List <Message> { compressedMessage });
        producer.Send(producerRequest);
    }

    IConsumer consumer = new Consumer(consumerConfig);
    var request = new FetchRequest(CurrentTestTopic, 0, currentOffset);
    BufferedMessageSet response;
    int totalWaitTimeInMiliseconds = 0;
    int waitSingle = 100;
    // Poll until the message shows up or the overall deadline is exceeded.
    while (true)
    {
        Thread.Sleep(waitSingle);
        response = consumer.Fetch(request);
        if (response != null && response.Messages.Count() > 0)
        {
            break;
        }

        totalWaitTimeInMiliseconds += waitSingle;
        if (totalWaitTimeInMiliseconds >= MaxTestWaitTimeInMiliseconds)
        {
            break;
        }
    }

    // The raw fetch returns the compressed carrier (no decompression at this layer).
    Assert.NotNull(response);
    Assert.AreEqual(1, response.Messages.Count());
    Message resultMessage = response.Messages.First();
    Assert.AreEqual(compressedMessage.ToString(), resultMessage.ToString());
}
// Compressing with NoCompressionCodec is an invalid request for this overload and
// must raise UnknownCodecException.
public void CompressionUtilsTryToCompressWithNoCompresssionCodec()
{
    var first = new Message(Encoding.UTF8.GetBytes("kafka 1."));
    var second = new Message(Encoding.UTF8.GetBytes("kafka 2."));

    Assert.Throws <Kafka.Client.Exceptions.UnknownCodecException>(
        () => CompressionUtils.Compress(new List <Message>() { first, second }, CompressionCodecs.NoCompressionCodec));
}
// Smoke test for the Compress overload that takes an extra integer argument (0):
// called with NoCompressionCodec and no assertion, so presumably it only needs to
// complete without throwing — confirm against the overload's contract.
public void CompressionUtilsTryToCompressWithNoCompresssionCodec()
{
    var first = new Message(Encoding.UTF8.GetBytes("kafka 1."));
    var second = new Message(Encoding.UTF8.GetBytes("kafka 2."));

    CompressionUtils.Compress(new List <Message>() { first, second }, CompressionCodecs.NoCompressionCodec, 0);
}
// Converts an Image to its storage representation: the image is serialized as JPEG
// and the resulting stream is compressed via CompressionUtils. Null passes through;
// any other non-Image value is rejected.
public override object ConvertToStorageType(object value)
{
    if (value != null && !(value is Image))
    {
        // Same exception type as before, now with a diagnostic message.
        throw new ArgumentException("value must be an Image", nameof(value));
    }

    if (value == null)
    {
        return value;
    }

    // Bug fix: the intermediate MemoryStream was never disposed; the using block
    // releases it once the compressed bytes have been materialized.
    using (var ms = new MemoryStream())
    {
        ((Image)value).Save(ms, ImageFormat.Jpeg);
        return CompressionUtils.Compress(ms).ToArray();
    }
}
// A single message compressed with the default codec must decompress back to
// exactly one message carrying an identical payload.
public void CompressAndDecompressMessageUsingDefaultCompressionCodec()
{
    var payload = new byte[] { 1, 2, 3, 4, 5 };
    var original = new Message(payload, CompressionCodecs.DefaultCompressionCodec);

    Message wrapper = CompressionUtils.Compress(new List <Message>() { original });
    var unpacked = CompressionUtils.Decompress(wrapper);

    int seen = 0;
    foreach (var candidate in unpacked.Messages)
    {
        seen++;
        Assert.AreEqual(original.Payload, candidate.Payload);
    }

    Assert.AreEqual(1, seen);
}
// Builds a representative EulerContainer (one language with a populated symbol and
// one special char) and verifies it survives Compress/Decompress without throwing.
public void ActualDataSerializationTest()
{
    var symbol = new Symbol
    {
        Chr = 'Б',
        Codes = new HashSet <SymbolCode>(new[] { new SymbolCode(10, new EulerMonomap2D(EulerValue)) })
    };

    var language = new Language
    {
        LocalizationName = "RU-ru",
        FontFamilyNames = new List <string>() { "SomeFont" },
        Chars = new List <Symbol> { symbol }
    };

    var container = new EulerContainer()
    {
        Languages = new List <Language>() { language },
        SpecialChars = new List <Symbol>() { new Symbol() { Chr = 'Ж' } }
    };

    using (var compressed = CompressionUtils.Compress(container))
    {
        CompressionUtils.Decompress <EulerContainer>(compressed);
    }
}
// Snappy round trip: one message in, one message out, identical payload bytes.
public void CompressAndDecompressMessageUsingSnappyCompressionCodec()
{
    var payload = new byte[] { 1, 2, 3, 4, 5 };
    var original = new Message(payload, CompressionCodecs.SnappyCompressionCodec);

    Message wrapper = CompressionUtils.Compress(new List <Message>() { original }, CompressionCodecs.SnappyCompressionCodec, 0);
    var unpacked = CompressionUtils.Decompress(wrapper, 0);

    int seen = 0;
    foreach (var candidate in unpacked.Messages)
    {
        seen++;
        original.Payload.SequenceEqual(candidate.Payload).Should().BeTrue();
    }

    Assert.AreEqual(1, seen);
}
// Persists the security list to the on-disk cache as a compressed JSON line.
// Best-effort: any failure is logged rather than propagated.
private void SaveToCache(List <Security> list)
{
    if (list == null)
    {
        return;
    }

    try
    {
        // Overwrite (append: false) the cache file with the compressed payload.
        using (StreamWriter writer = new StreamWriter(SecuritiesCachePath, false))
        {
            writer.WriteLine(CompressionUtils.Compress(list.ToJson()));
        }
    }
    catch (Exception e)
    {
        SendLogMessage(e.ToString(), LogMessageType.Error);
    }
}
// Three messages packed with the default codec must decompress back to exactly
// three messages, each carrying the shared payload.
public void CompressAndDecompress3MessagesUsingDefaultCompressionCodec()
{
    byte[] payload = { 1, 2, 3, 4, 5 };
    var first = new Message(payload, CompressionCodecs.DefaultCompressionCodec);
    var second = new Message(payload, CompressionCodecs.DefaultCompressionCodec);
    var third = new Message(payload, CompressionCodecs.DefaultCompressionCodec);

    Message wrapper = CompressionUtils.Compress(new List <Message>() { first, second, third }, 0);
    var unpacked = CompressionUtils.Decompress(wrapper, 0);

    int seen = 0;
    foreach (var candidate in unpacked.Messages)
    {
        seen++;
        first.Payload.SequenceEqual(candidate.Payload).Should().BeTrue();
    }

    Assert.AreEqual(3, seen);
}
// Integration test for nested (twice-applied) compression: each outgoing message is
// a GZip-compressed package of 5 GZip-compressed inner packages of 5 plain messages.
// The consumer connector must unwrap both levels, yielding 500 * 5 * 5 messages.
public void SimpleSyncProducerSendsLotsOfTwiceCompressedMessagesAndConsumerConnectorGetsThemBack()
{
    var prodConfig = this.SyncProducerConfig1;
    var consumerConfig = this.ZooKeeperBasedConsumerConfig;
    int numberOfMessages = 500;
    int messagesPerPackage = 5;
    int messageSize = 0; // NOTE(review): assigned but never used in this test
    int messagesPerInnerPackage = 5;
    using (var producer = new SyncProducer(prodConfig))
    {
        for (int i = 0; i < numberOfMessages; i++)
        {
            var messagePackageList = new List <Message>();
            for (int messageInPackageNr = 0; messageInPackageNr < messagesPerPackage; messageInPackageNr++)
            {
                var innerMessagePackageList = new List <Message>();
                for (int inner = 0; inner < messagesPerInnerPackage; inner++)
                {
                    string payload1 = "kafka 1.";
                    byte[] payloadData1 = Encoding.UTF8.GetBytes(payload1);
                    var msg = new Message(payloadData1);
                    innerMessagePackageList.Add(msg);
                }

                // First compression level: 5 plain messages -> one carrier message.
                var innerPackageMessage = CompressionUtils.Compress(innerMessagePackageList, CompressionCodecs.GZIPCompressionCodec);
                messagePackageList.Add(innerPackageMessage);
            }

            // Second compression level: 5 carriers -> one outer carrier per send.
            var packageMessage = CompressionUtils.Compress(messagePackageList, CompressionCodecs.GZIPCompressionCodec);
            producer.Send(CurrentTestTopic, 0, new List <Message>() { packageMessage });
        }
    }

    // Give the broker time to make the produced messages visible to the consumer.
    Thread.Sleep(2000);

    // now consuming
    int resultCount = 0;
    using (IConsumerConnector consumerConnector = new ZookeeperConsumerConnector(consumerConfig, true))
    {
        var topicCount = new Dictionary <string, int> { { CurrentTestTopic, 1 } };
        var messages = consumerConnector.CreateMessageStreams(topicCount);
        var sets = messages[CurrentTestTopic];
        try
        {
            foreach (var set in sets)
            {
                foreach (var message in set)
                {
                    resultCount++;
                }
            }
        }
        catch (ConsumerTimeoutException)
        {
            // do nothing, this is expected
        }
    }

    Assert.AreEqual(numberOfMessages * messagesPerPackage * messagesPerInnerPackage, resultCount);
}
// Appends a signed block (and its transactions) to the chain's storage.
// Validation: the block must be signed, its signature must verify against the
// validator over the unsigned serialization, and its height must be exactly
// hashList.Count() + 1. After that the change set is committed and the block,
// its transactions, tx->block links and per-address tx indexes are persisted
// (block/tx bytes are compressed before storage). Finally the block plugin hook
// fires. NOTE(review): the minimumFee parameter is not used in this method.
public void AddBlock(Block block, IEnumerable <Transaction> transactions, BigInteger minimumFee, StorageChangeSetContext changeSet)
{
    if (!block.IsSigned)
    {
        throw new BlockGenerationException($"block must be signed");
    }

    // Verify the signature over the serialization *without* the signature itself.
    var unsignedBytes = block.ToByteArray(false);
    if (!block.Signature.Verify(unsignedBytes, block.Validator))
    {
        throw new BlockGenerationException($"block signature does not match validator {block.Validator.Text}");
    }

    var hashList = new StorageList(BlockHeightListTag, this.Storage);
    var expectedBlockHeight = hashList.Count() + 1;
    if (expectedBlockHeight != block.Height)
    {
        throw new ChainException("unexpected block height");
    }

    // from here on, the block is accepted

    // Commit the accumulated state changes for this block.
    using (var m = new ProfileMarker("changeSet.Execute"))
        changeSet.Execute();

    hashList.Add <Hash>(block.Hash);

    // Persist the block and each transaction in compressed form, plus a
    // tx-hash -> block-hash reverse index.
    using (var m = new ProfileMarker("Compress"))
    {
        var blockMap = new StorageMap(BlockHashMapTag, this.Storage);
        var blockBytes = block.ToByteArray(true);
        blockBytes = CompressionUtils.Compress(blockBytes);
        blockMap.Set <Hash, byte[]>(block.Hash, blockBytes);

        var txMap = new StorageMap(TransactionHashMapTag, this.Storage);
        var txBlockMap = new StorageMap(TxBlockHashMapTag, this.Storage);
        foreach (Transaction tx in transactions)
        {
            var txBytes = tx.ToByteArray(true);
            txBytes = CompressionUtils.Compress(txBytes);
            txMap.Set <Hash, byte[]>(tx.Hash, txBytes);
            txBlockMap.Set <Hash, Hash>(tx.Hash, block.Hash);
        }
    }

    // Index each transaction under every non-system address that appears in its
    // events, so per-address transaction history can be queried later.
    using (var m = new ProfileMarker("AddressBlockHashMapTag"))
        foreach (var transaction in transactions)
        {
            var addresses = new HashSet <Address>();
            var events = block.GetEventsForTransaction(transaction.Hash);
            foreach (var evt in events)
            {
                if (evt.Address.IsSystem)
                {
                    continue;
                }
                addresses.Add(evt.Address);
            }

            var addressTxMap = new StorageMap(AddressBlockHashMapTag, this.Storage);
            foreach (var address in addresses)
            {
                var addressList = addressTxMap.Get <Address, StorageList>(address);
                addressList.Add <Hash>(transaction.Hash);
            }
        }

    // Notify plugins after everything has been persisted.
    using (var m = new ProfileMarker("Nexus.PluginTriggerBlock"))
        Nexus.PluginTriggerBlock(this, block);
}
// Click handler: generates recognition data for the selected language plus a fixed
// set of special characters, compresses the resulting container and writes it to
// "../../Qocr.Dics/<language>.bin". async void is acceptable only because this is
// a top-level UI event handler.
private async void OnStartClick(object sender, RoutedEventArgs e)
{
    const string DefaultDictionaries = "../../Qocr.Dics";
    if (!Directory.Exists(DefaultDictionaries))
    {
        MessageBox.Show("Check \"Qocr.Dics\" Folder existence");
        return;
    }

    this.IsEnabled = false;
    try
    {
        _genImageNumber = 0;
        if (IsPrintDebug.IsChecked.GetValueOrDefault())
        {
            RecreateTestDir();
        }

        DateTime dNow = DateTime.Now;
        var container = new EulerContainer();
        var fontFamilies = GetFileFonts().ToArray();

        // NOTE(review): this subscribes a new handler on every click with no
        // matching unsubscribe, so repeated clicks stack handlers — confirm intent.
        if (IsPrintDebug.IsChecked.GetValueOrDefault())
        {
            _generator.BitmapCreated += GeneratorOnBitmapCreated;
        }

        var lang = await GenerateLanguage(
            SelectedLanguage.Name,
            int.Parse(SelectedLanguage.MinFont),
            int.Parse(SelectedLanguage.MaxFont),
            SelectedLanguage.MinChar,
            SelectedLanguage.MaxChar,
            fontFamilies);

        var specialChars = new[] { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '@', '$', '#', '&', '(', ')', '*', '/', '\\' };
        var specialCharsResult = await _generator.GenerateSpecialChars(
            specialChars,
            int.Parse(SelectedLanguage.MinFont),
            int.Parse(SelectedLanguage.MaxFont),
            fontFamilies);

        lang.FontFamilyNames = fontFamilies.Select(font => font.Name).ToList();
        container.Languages.Add(lang);
        container.SpecialChars = specialCharsResult;

        // Serialize + compress the container and write it to the dictionary file.
        var compression = CompressionUtils.Compress(container);
        using (FileStream fileStream = new FileStream(Path.Combine(DefaultDictionaries, $"{SelectedLanguage.Name}.bin"), FileMode.Create))
        {
            compression.Position = 0;
            compression.CopyTo(fileStream);
        }

        MessageBox.Show($"Время создания {DateTime.Now - dNow}");
    }
    finally
    {
        // Bug fix: previously any exception above left the window disabled forever;
        // the finally guarantees the UI is re-enabled.
        IsEnabled = true;
    }
}
// Integration test at scale: 2000 GZip-compressed packages of 5 messages each, with
// payload content derived from a running message number. Verifies the consumer gets
// every message back in produce order and that produced and consumed byte totals match.
public void SimpleSyncProducerSendsLotsOfCompressedMessagesWithIncreasedSizeAndConsumerConnectorGetsThemBack()
{
    var prodConfig = this.SyncProducerConfig1;
    var consumerConfig = this.ZooKeeperBasedConsumerConfig;
    consumerConfig.AutoCommit = true;
    consumerConfig.AutoCommitInterval = 100;
    int numberOfMessages = 2000;
    int messagesPerPackage = 5;
    string topic = CurrentTestTopic;

    var multipleBrokersHelper = new TestMultipleBrokersHelper(CurrentTestTopic);
    multipleBrokersHelper.GetCurrentOffsets(
        new[] { prodConfig });

    int msgNr = 0;
    long totalSize = 0;
    using (var producer = new SyncProducer(prodConfig))
    {
        for (int i = 0; i < numberOfMessages; i++)
        {
            var messagePackageList = new List <Message>();
            for (int messageInPackageNr = 0; messageInPackageNr < messagesPerPackage; messageInPackageNr++)
            {
                // Payload encodes the running message number so ordering can be
                // asserted on the consumer side.
                string payload1 = CreatePayloadByNumber(msgNr);
                byte[] payloadData1 = Encoding.UTF8.GetBytes(payload1);
                var msg = new Message(payloadData1);
                totalSize += msg.Size;
                messagePackageList.Add(msg);
                msgNr++;
            }

            var packageMessage = CompressionUtils.Compress(messagePackageList, CompressionCodecs.GZIPCompressionCodec);
            producer.Send(topic, 0, new List <Message>() { packageMessage });
        }
    }

    // now consuming
    int resultCount = 0;
    long resultSize = 0;
    using (IConsumerConnector consumerConnector = new ZookeeperConsumerConnector(consumerConfig, true))
    {
        var topicCount = new Dictionary <string, int> { { topic, 1 } };
        var messages = consumerConnector.CreateMessageStreams(topicCount);
        var sets = messages[topic];
        try
        {
            foreach (var set in sets)
            {
                foreach (var message in set)
                {
                    // Messages must arrive in exactly the order they were produced.
                    Assert.AreEqual(CreatePayloadByNumber(resultCount), Encoding.UTF8.GetString(message.Payload));
                    resultCount++;
                    resultSize += message.Size;
                }
            }
        }
        catch (ConsumerTimeoutException)
        {
            // do nothing, this is expected
        }
    }

    Assert.AreEqual(numberOfMessages * messagesPerPackage, resultCount);
    Assert.AreEqual(totalSize, resultSize);
}
// Integration test: two topics each receive one GZip-compressed single-message
// carrier; one consumer connector subscribed to both topics must decompress and
// deliver exactly one matching message per topic.
public void ConsumerConnectorConsumesTwoDifferentCompressedTopics()
{
    var prodConfig = this.SyncProducerConfig1;
    var consumerConfig = this.ZooKeeperBasedConsumerConfig;
    string topic1 = CurrentTestTopic + "1";
    string topic2 = CurrentTestTopic + "2";

    // first producing
    string payload1 = "kafka 1.";
    byte[] payloadData1 = Encoding.UTF8.GetBytes(payload1);
    var msg1 = new Message(payloadData1);
    Message compressedMessage1 = CompressionUtils.Compress(new List <Message> { msg1 }, CompressionCodecs.GZIPCompressionCodec);
    string payload2 = "kafka 2.";
    byte[] payloadData2 = Encoding.UTF8.GetBytes(payload2);
    var msg2 = new Message(payloadData2);
    Message compressedMessage2 = CompressionUtils.Compress(new List <Message> { msg2 }, CompressionCodecs.GZIPCompressionCodec);

    var producerRequest1 = new ProducerRequest(topic1, 0, new List <Message> { compressedMessage1 });
    var producerRequest2 = new ProducerRequest(topic2, 0, new List <Message> { compressedMessage2 });
    using (var producer = new SyncProducer(prodConfig))
    {
        producer.Send(producerRequest1);
        producer.Send(producerRequest2);
    }

    // now consuming
    var resultMessages1 = new List <Message>();
    var resultMessages2 = new List <Message>();
    using (IConsumerConnector consumerConnector = new ZookeeperConsumerConnector(consumerConfig, true))
    {
        var topicCount = new Dictionary <string, int> { { topic1, 1 }, { topic2, 1 } };
        var messages = consumerConnector.CreateMessageStreams(topicCount);
        Assert.IsTrue(messages.ContainsKey(topic1));
        Assert.IsTrue(messages.ContainsKey(topic2));

        var sets1 = messages[topic1];
        try
        {
            foreach (var set in sets1)
            {
                foreach (var message in set)
                {
                    resultMessages1.Add(message);
                }
            }
        }
        catch (ConsumerTimeoutException)
        {
            // do nothing, this is expected
        }

        var sets2 = messages[topic2];
        try
        {
            foreach (var set in sets2)
            {
                foreach (var message in set)
                {
                    resultMessages2.Add(message);
                }
            }
        }
        catch (ConsumerTimeoutException)
        {
            // do nothing, this is expected
        }
    }

    // One decompressed message per topic, each matching its original.
    Assert.AreEqual(1, resultMessages1.Count);
    Assert.AreEqual(msg1.ToString(), resultMessages1[0].ToString());
    Assert.AreEqual(1, resultMessages2.Count);
    Assert.AreEqual(msg2.ToString(), resultMessages2[0].ToString());
}