/// <summary>
/// Publishes <paramref name="count"/> keyed UTF-8 messages to <paramref name="topic"/>
/// and returns the set of keys that were sent.
/// </summary>
private ISet<string> PublishMessages(string topic, int count, bool enableBatch)
{
    var sentKeys = new HashSet<string>();
    var producerConfig = new ProducerConfigBuilder<byte[]>()
        .Topic(topic)
        .MessageRoutingMode(Common.MessageRoutingMode.RoundRobinMode)
        .MaxPendingMessages(count)
        .BatchingMaxPublishDelay(TimeSpan.FromMilliseconds(80000));
    // Batching is either sized to hold the whole run in one batch, or off entirely.
    producerConfig.EnableBatching(enableBatch);
    if (enableBatch)
    {
        producerConfig.BatchingMaxMessages(count);
    }
    var producer = _client.NewProducer(producerConfig);
    for (var n = 0; n < count; n++)
    {
        var messageKey = "key" + n;
        producer.NewMessage()
            .Key(messageKey)
            .Value(Encoding.UTF8.GetBytes("my-message-" + n))
            .Send();
        sentKeys.Add(messageKey);
    }
    // Force any buffered batch out before handing keys back to the caller.
    producer.Flush();
    return sentKeys;
}
/// <summary>
/// Publishes <paramref name="count"/> keyed DataOp events to <paramref name="topic"/>,
/// attaching a "twitter" property to every even-indexed message, and returns the
/// message ids assigned on send.
/// </summary>
private ISet<MessageId> PublishMessages(string topic, int count)
{
    var messageIds = new HashSet<MessageId>();
    var producerConfig = new ProducerConfigBuilder<DataOp>().Topic(topic);
    var producer = _client.NewProducer(AvroSchema<DataOp>.Of(typeof(DataOp)), producerConfig);
    for (var n = 0; n < count; n++)
    {
        var messageKey = "key" + n;
        var message = producer.NewMessage().Key(messageKey);
        if (n % 2 == 0)
        {
            // Tag alternating messages so property-based filtering can be exercised downstream.
            message = message.Property("twitter", "mestical");
        }
        var payload = new DataOp
        {
            Text = "my-event-message-" + n,
            EventTime = DateTimeHelper.CurrentUnixTimeMillis()
        };
        messageIds.Add(message.Value(payload).Send());
    }
    return messageIds;
}
/// <summary>
/// Verifies schema upgrade on one topic: produce/consume with SimpleRecord,
/// unsubscribe, then produce/consume on the same topic with the wider SimpleRecord2.
/// </summary>
public void ProduceAndConsume()
{
    // Round 1: original schema.
    var schemaV1 = AvroSchema<SimpleRecord>.Of(typeof(SimpleRecord));
    var consumerV1 = _client.NewConsumer(schemaV1, new ConsumerConfigBuilder<SimpleRecord>()
        .Topic(_topic)
        .ConsumerName("avroUpgradeSchema1")
        .SubscriptionName("test-sub"));
    var producerV1 = _client.NewProducer(schemaV1, new ProducerConfigBuilder<SimpleRecord>()
        .Topic(_topic)
        .ProducerName("avroUpgradeSchema1"));
    producerV1.NewMessage()
        .Value(new SimpleRecord { Name = "Ebere Abanonu", Age = int.MaxValue })
        .Send();
    // Give the broker time to dispatch before the blocking receive.
    Thread.Sleep(TimeSpan.FromSeconds(10));
    var first = consumerV1.Receive();
    Assert.NotNull(first);
    consumerV1.Acknowledge(first);
    consumerV1.Unsubscribe();

    // Round 2: upgraded schema carrying the extra Surname field.
    var schemaV2 = AvroSchema<SimpleRecord2>.Of(typeof(SimpleRecord2));
    var consumerV2 = _client.NewConsumer(schemaV2, new ConsumerConfigBuilder<SimpleRecord2>()
        .Topic(_topic)
        .ConsumerName("avroUpgradeSchema2")
        .SubscriptionName("test-sub"));
    var producerV2 = _client.NewProducer(schemaV2, new ProducerConfigBuilder<SimpleRecord2>()
        .Topic(_topic)
        .ProducerName("avroUpgradeSchema2"));
    producerV2.NewMessage()
        .Value(new SimpleRecord2 { Name = "Ebere", Age = int.MaxValue, Surname = "Abanonu" })
        .Send();
    Thread.Sleep(TimeSpan.FromSeconds(10));
    var second = consumerV2.Receive();
    Assert.NotNull(second);
    consumerV2.Acknowledge(second);
    consumerV2.Unsubscribe();

    producerV1.Close();
    producerV2.Close();
    consumerV1.Close();
    consumerV2.Close();
}
/// <summary>
/// Transactional produce/commit: messages sent inside a transaction must be invisible
/// to the consumer until Commit, after which exactly the produced messages (and no
/// more) become receivable.
/// </summary>
public void ProduceCommitBatchedTest()
{
    var txn = Txn;
    // Fresh topic per run so prior test data cannot interfere.
    var topic = $"{_topicOutput}-{Guid.NewGuid()}";
    var consumerBuilder = new ConsumerConfigBuilder<byte[]>()
        .Topic(topic)
        .ForceTopicCreation(true)
        .SubscriptionName($"test2")
        .EnableBatchIndexAcknowledgment(true)
        .SubscriptionInitialPosition(SubscriptionInitialPosition.Earliest);
    var consumer = _client.NewConsumer(consumerBuilder);
    var producerBuilder = new ProducerConfigBuilder<byte[]>()
        .Topic(topic)
        //.EnableBatching(true)
        .SendTimeout(0); // transactional sends may be pending until commit: no timeout
    var producer = _client.NewProducer(producerBuilder);
    var txnMessageCnt = 0;
    var messageCnt = 40;
    for (var i = 0; i < messageCnt; i++)
    {
        producer.NewMessage(txn).Value(Encoding.UTF8.GetBytes("Hello Txn - " + i)).Send();
        txnMessageCnt++;
    }
    // Can't receive transaction messages before commit.
    var message = consumer.Receive();
    Assert.Null(message);
    txn.Commit();
    // txn1 messages could be received after txn1 committed
    var receiveCnt = 0;
    Thread.Sleep(TimeSpan.FromSeconds(5)); // let the broker materialize the committed messages
    for (var i = 0; i < txnMessageCnt; i++)
    {
        message = consumer.Receive();
        Assert.NotNull(message);
        receiveCnt++;
        _output.WriteLine($"message receive count: {receiveCnt}");
    }
    Assert.Equal(txnMessageCnt, receiveCnt);
    // Nothing beyond the committed messages should be pending.
    message = consumer.Receive();
    Assert.Null(message);
    _output.WriteLine($"message commit test enableBatch {true}");
}
/// <summary>
/// Smoke test: a string-schema producer can be created from a minimal configuration.
/// </summary>
public virtual void ProducerInstantiation()
{
    var config = new ProducerConfigBuilder<string>();
    config.Topic("ProducerInstantiation");
    var stringProducer = _client.NewProducer(new StringSchema(), config);
    Assert.NotNull(stringProducer);
    stringProducer.Close();
}
/// <summary>
/// Messages received via Receive() but never acknowledged must be redelivered after
/// the 8s ack timeout: the same 10 messages are received twice in total.
/// Fixes: the two identical receive loops are factored into a local function, and the
/// hard-coded 10/20 assertions are tied to <c>messageCount</c> so they cannot drift.
/// </summary>
public void TestUnAckMessageRedeliveryWithReceive()
{
    var topic = $"persistent://public/default/async-unack-redelivery-{Guid.NewGuid()}";
    var builder = new ConsumerConfigBuilder<byte[]>();
    builder.Topic(topic);
    builder.SubscriptionName("sub-TestUnAckMessageRedeliveryWithReceive");
    builder.AckTimeout(TimeSpan.FromMilliseconds(8000)); // the redelivery trigger under test
    builder.ForceTopicCreation(true);
    builder.AcknowledgmentGroupTime(0);
    builder.SubscriptionType(Protocol.Proto.CommandSubscribe.SubType.Shared);
    var consumer = _client.NewConsumer(builder);
    var pBuilder = new ProducerConfigBuilder<byte[]>();
    pBuilder.Topic(topic);
    var producer = _client.NewProducer(pBuilder);
    const int messageCount = 10;
    for (var i = 0; i < messageCount; i++)
    {
        var receipt = producer.Send(Encoding.UTF8.GetBytes("my-message-" + i));
        _output.WriteLine(JsonSerializer.Serialize(receipt, new JsonSerializerOptions { WriteIndented = true }));
    }

    // Receives messageCount messages without acknowledging them; returns how many arrived.
    int ReceiveBatch()
    {
        var count = 0;
        for (var i = 0; i < messageCount; i++)
        {
            var m = consumer.Receive();
            var receivedMessage = Encoding.UTF8.GetString(m.Data);
            _output.WriteLine($"Received message: [{receivedMessage}]");
            Assert.NotNull(receivedMessage);
            count++;
        }
        return count;
    }

    var messageReceived = 0;
    Thread.Sleep(TimeSpan.FromSeconds(5));
    messageReceived += ReceiveBatch();
    Assert.Equal(messageCount, messageReceived);

    // Wait past the 8s ack timeout so every unacked message is redelivered.
    Thread.Sleep(TimeSpan.FromSeconds(10));
    messageReceived += ReceiveBatch();
    Assert.Equal(2 * messageCount, messageReceived);
    producer.Close();
    consumer.Close();
}
/// <summary>
/// Chunked-message round trip: with a 5-byte max message size and chunking enabled,
/// large payloads must be split by the producer and reassembled by the consumer.
/// NOTE(review): the publish loop starts at i = 1, so only totalMessages - 1 (= 2)
/// messages are actually published and received — confirm this is intentional.
/// </summary>
public void TestLargeMessage()
{
    //this.conf.MaxMessageSize = 5;
    const int totalMessages = 3;
    var topicName = $"persistent://public/default/my-topic1-{DateTimeHelper.CurrentUnixTimeMillis()}";
    var builder = new ConsumerConfigBuilder<byte[]>()
        .Topic(topicName)
        .SubscriptionName("my-subscriber-name")
        .AckTimeout(TimeSpan.FromMilliseconds(20000))
        .ForceTopicCreation(true)
        .AcknowledgmentGroupTime(0);
    var consumer = _client.NewConsumer(builder);
    var pBuilder = new ProducerConfigBuilder<byte[]>()
        .Topic(topicName)
        .EnableChunking(true) // payloads above MaxMessageSize get split into chunks
        .MaxMessageSize(5);
    var producer = _client.NewProducer(pBuilder);
    IList<string> publishedMessages = new List<string>();
    for (var i = 1; i < totalMessages; i++)
    {
        // Growing payload sizes (10, 20, ...) so every message requires chunking.
        var message = CreateMessagePayload(i * 10);
        publishedMessages.Add(message);
        producer.Send(Encoding.UTF8.GetBytes(message));
    }
    IMessage<byte[]> msg = null;
    ISet<string> messageSet = new HashSet<string>();
    IList<IMessage<byte[]>> msgIds = new List<IMessage<byte[]>>();
    Thread.Sleep(TimeSpan.FromSeconds(5));
    for (var i = 0; i < totalMessages - 1; i++)
    {
        msg = consumer.Receive();
        var receivedMessage = Encoding.UTF8.GetString(msg.Data);
        _output.WriteLine($"[{i}] - Published [{publishedMessages[i]}] Received message: [{receivedMessage}]");
        var expectedMessage = publishedMessages[i];
        // Checks ordering and rejects duplicates across the reassembled payloads.
        TestMessageOrderAndDuplicates(messageSet, receivedMessage, expectedMessage);
        msgIds.Add(msg);
    }
    // Acknowledge only after all messages arrived so redelivery can't reorder mid-test.
    foreach (var msgId in msgIds)
    {
        consumer.Acknowledge(msgId);
    }
    producer.Close();
    consumer.Close();
}
/// <summary>
/// Builds a byte[] producer for <paramref name="topic"/>, optionally configured for
/// key-based batching with the given batch size and a 5s flush delay.
/// </summary>
private Producer<byte[]> CreateProducer(string topic, bool enableBatch, int batchSize = 500)
{
    var config = new ProducerConfigBuilder<byte[]>().Topic(topic);
    if (enableBatch)
    {
        // Key-based batching keeps messages sharing a key inside the same batch.
        config.EnableBatching(true);
        config.BatchBuilder(IBatcherBuilder.KeyBased(_client.ActorSystem));
        config.BatchingMaxMessages(batchSize);
        config.BatchingMaxPublishDelay(TimeSpan.FromMilliseconds(5000));
    }
    return _client.NewProducer(config);
}
/// <summary>
/// Publishes <paramref name="count"/> keyed messages of the form "{message}-{i}" to
/// <paramref name="topic"/> and returns the broker-assigned ids in send order.
/// </summary>
private static List<MessageId> PublishMessages(string topic, int count, string message, PulsarClient client)
{
    var producer = client.NewProducer(new ProducerConfigBuilder<byte[]>().Topic(topic));
    var ids = new List<MessageId>(count);
    for (var n = 0; n < count; n++)
    {
        var id = producer.NewMessage()
            .Key("key" + n)
            .Value(Encoding.UTF8.GetBytes($"{message}-{n}"))
            .Send();
        ids.Add(id);
    }
    return ids;
}
/// <summary>
/// Publishes five messages sharing a 1000-byte binary key through a batching producer
/// and verifies each arrives with the same key (base64-encoded) and payload, in order.
/// </summary>
public void ProduceAndConsumeBatch()
{
    var rnd = new Random(0);
    var binaryKey = new byte[1000];
    rnd.NextBytes(binaryKey);

    var consumer = _client.NewConsumer(new ConsumerConfigBuilder<byte[]>()
        .Topic(_topic)
        .ForceTopicCreation(true)
        .SubscriptionName($"Batch-subscriber-{Guid.NewGuid()}"));

    // Batch holds exactly the five test messages before flushing.
    var producerConfig = new ProducerConfigBuilder<byte[]>();
    producerConfig.Topic(_topic);
    producerConfig.EnableBatching(true);
    producerConfig.BatchingMaxPublishDelay(TimeSpan.FromMilliseconds(10000));
    producerConfig.BatchingMaxMessages(5);
    var producer = _client.NewProducer(producerConfig);

    for (var i = 0; i < 5; i++)
    {
        producer.NewMessage()
            .KeyBytes(binaryKey)
            .Properties(new Dictionary<string, string> { { "KeyBytes", Encoding.UTF8.GetString(binaryKey) } })
            .Value(Encoding.UTF8.GetBytes($"TestMessage-{i}"))
            .Send();
    }

    // Allow the batch to flush and be dispatched before receiving.
    Thread.Sleep(TimeSpan.FromSeconds(5));
    for (var i = 0; i < 5; i++)
    {
        var received = consumer.Receive();
        Assert.Equal(binaryKey, received.KeyBytes);
        Assert.True(received.HasBase64EncodedKey());
        var text = Encoding.UTF8.GetString(received.Data);
        _output.WriteLine($"Received message: [{text}]");
        Assert.Equal($"TestMessage-{i}", text);
    }
    producer.Close();
    consumer.Close();
}
/// <summary>
/// With ReceiverQueueSize(0) the consumer must never prefetch: the local queue is
/// empty before and after every Receive, and messages arrive in publish order.
/// </summary>
public void ZeroQueueSizeNormalConsumer()
{
    const string key = "nonZeroQueueSizeNormalConsumer";
    // 1. Config
    var topicName = "topic-" + key;
    var subscriptionName = "my-ex-subscription-" + key;
    var messagePredicate = "my-message-" + key + "-";
    // 2. Producer publishes without batching so every message is delivered one by one.
    var producer = _client.NewProducer(new ProducerConfigBuilder<sbyte[]>()
        .Topic(topicName)
        .EnableBatching(false));
    // 3. Consumer with zero prefetch — the property under test.
    var consumer = _client.NewConsumer(new ConsumerConfigBuilder<sbyte[]>()
        .Topic(topicName)
        .SubscriptionName(subscriptionName)
        .ReceiverQueueSize(0));
    for (var i = 0; i < _totalMessages; i++)
    {
        var payload = messagePredicate + i;
        _output.WriteLine("Producer produced: " + payload);
        producer.Send(Encoding.UTF8.GetBytes(payload).ToSBytes());
    }
    // 4. The local queue must stay empty around every blocking receive.
    for (var i = 0; i < _totalMessages; i++)
    {
        Assert.Equal(0, consumer.NumMessagesInQueue());
        var message = consumer.Receive();
        var text = Encoding.UTF8.GetString(message.Data.ToBytes());
        Assert.Equal(text, messagePredicate + i);
        Assert.Equal(0, consumer.NumMessagesInQueue());
        _output.WriteLine("Consumer received : " + text);
    }
}
/// <summary>
/// Publishes <paramref name="count"/> keyed DataOp messages to <paramref name="topic"/>
/// and returns the keys that were used.
/// </summary>
private ISet<string> PublishMessages(string topic, int count)
{
    var producer = _client.NewProducer(
        AvroSchema<DataOp>.Of(typeof(DataOp)),
        new ProducerConfigBuilder<DataOp>().Topic(topic));
    var usedKeys = new HashSet<string>();
    for (var n = 0; n < count; n++)
    {
        var messageKey = "key" + n;
        producer.NewMessage()
            .Key(messageKey)
            .Value(new DataOp { Text = "my-sql-message-" + n })
            .Send();
        usedKeys.Add(messageKey);
    }
    return usedKeys;
}
/// <summary>
/// Pause/Resume on a zero-queue listener consumer: while paused the listener should
/// not be invoked; after Resume() the pending messages are delivered and acked.
/// NOTE(review): the paused-phase assertions are commented out, so this currently only
/// verifies that Resume() lets delivery proceed without error.
/// </summary>
public void TestPauseAndResume()
{
    const string topicName = "zero-queue-pause-and-resume";
    const string subName = "sub";
    // The latch is intended to be swapped between phases; the AtomicReference lets the
    // listener closure always signal the current instance.
    AtomicReference<CountdownEvent> latch = new AtomicReference<CountdownEvent>(new CountdownEvent(1));
    AtomicInteger received = new AtomicInteger();
    var config = new ConsumerConfigBuilder<sbyte[]>()
        .Topic(topicName)
        .SubscriptionName(subName)
        .ReceiverQueueSize(0) // zero prefetch: delivery is driven one message at a time
        .MessageListener(new MessageListener<sbyte[]>((consumer, msg) =>
        {
            Assert.NotNull(msg);
            consumer.Tell(new AcknowledgeMessage<sbyte[]>(msg));
            received.GetAndIncrement();
            latch.Value.AddCount();
        }, null));
    var consumer = _client.NewConsumer(config);
    // Pause before producing so nothing should reach the listener yet.
    consumer.Pause();
    var pBuilder = new ProducerConfigBuilder<sbyte[]>()
        .Topic(topicName)
        .EnableBatching(false);
    Producer<sbyte[]> producer = _client.NewProducer(pBuilder);
    for (int i = 0; i < 2; i++)
    {
        producer.Send(Encoding.UTF8.GetBytes("my-message-" + i).ToSBytes());
    }
    // Paused consumer receives only one message
    //Assert.True(latch.Value.Wait(TimeSpan.FromSeconds(2)));
    //Thread.Sleep(2000);
    //Assert.Equal(1, received.GetValue());
    //latch.GetAndSet(new CountdownEvent(1));
    consumer.Resume();
    Thread.Sleep(10000); // allow both messages to be delivered after resuming
    //Assert.True(latch.Value.Wait(TimeSpan.FromSeconds(2)), "Timed out waiting for message listener acks");
    consumer.Unsubscribe();
    producer.Close();
}
/// <summary>
/// Round-trips ten KeyValue&lt;string, string&gt; messages through a key/value-schema
/// producer and consumer on <paramref name="topic"/>, logging message ids and the
/// received key/value pairs.
/// </summary>
private void PlainKeyValueProducer(string topic)
{
    //var jsonSchem = AvroSchema<JournalEntry>.Of(typeof(JournalEntry));
    // Key/value schema with both components encoded as plain strings.
    var jsonSchem = KeyValueSchema<string, string>.Of(ISchema<string>.String, ISchema<string>.String);
    var builder = new ConsumerConfigBuilder<KeyValue<string, string>>()
        .Topic(topic)
        .SubscriptionName($"subscriber-name-{DateTimeHelper.CurrentUnixTimeMillis()}")
        .AckTimeout(TimeSpan.FromMilliseconds(20000))
        .ForceTopicCreation(true)
        .AcknowledgmentGroupTime(0);
    var consumer = _client.NewConsumer(jsonSchem, builder);
    var producerConfig = new ProducerConfigBuilder<KeyValue<string, string>>()
        .ProducerName(topic.Split("/").Last()) // producer named after the topic's last path segment
        .Topic(topic)
        .Schema(jsonSchem)
        .SendTimeout(10000);
    var producer = _client.NewProducer(jsonSchem, producerConfig);
    for (var i = 0; i < 10; i++)
    {
        var metadata = new Dictionary<string, string>
        {
            ["Key"] = "Single",
            ["Properties"] = JsonSerializer.Serialize(new Dictionary<string, string> { { "Tick", DateTime.Now.Ticks.ToString() } }, new JsonSerializerOptions { WriteIndented = true })
        };
        var id = producer.NewMessage().Properties(metadata).Value<string, string>(new KeyValue<string, string>("Ebere", $"[{i}]Ebere")).Send();
        _output.WriteLine(id.ToString());
    }
    Thread.Sleep(TimeSpan.FromSeconds(5)); // let the broker dispatch before receiving
    for (var i = 0; i < 10; i++)
    {
        var msg = consumer.Receive();
        if (msg != null)
        {
            var kv = msg.Value;
            _output.WriteLine($"key:{kv.Key}, value:{kv.Value}");
        }
    }
}
/// <summary>
/// Creates a chunking-enabled producer actor for the configured topic and returns its
/// actor reference.
/// </summary>
private IActorRef CreateProducer()
{
    var topic = _pulsarSettings.Topic;
    // No-op listener: this producer does not react to producer events.
    var listener = new DefaultProducerListener((o) => { }, s => { });
    var builder = new ProducerConfigBuilder();
    builder.ProducerName($"Web-{topic}-{Guid.NewGuid()}");
    builder.Topic(topic);
    builder.Schema(_schema);
    builder.EnableChunking(true);
    builder.EventListener(listener);
    var config = builder.ProducerConfigurationData;
    return _pulsarSystem.PulsarProducer(new CreateProducer(_schema, config)).Producer;
}
/// <summary>
/// Zero receiver queue + shared subscription + message listener: messages that are
/// never acked must be redelivered after the 1s ack timeout, so the listener fires
/// 2 * messages times while only `messages` distinct values are observed.
/// Fixes: Assert.Equal arguments were swapped (xUnit expects the expected value
/// first), and the unbounded latch.Wait() could hang the whole test run.
/// </summary>
public void TestZeroQueueSizeMessageRedeliveryForListener()
{
    string topic = $"testZeroQueueSizeMessageRedeliveryForListener-{DateTime.Now.Ticks}";
    const int messages = 10;
    // One count per delivery: 10 initial deliveries + 10 ack-timeout redeliveries.
    CountdownEvent latch = new CountdownEvent(messages * 2);
    ISet<int> receivedMessages = new HashSet<int>();
    var config = new ConsumerConfigBuilder<int>()
        .Topic(topic)
        .SubscriptionName("sub")
        .ReceiverQueueSize(0)
        .SubscriptionType(SubType.Shared)
        .AckTimeout(1, TimeUnit.SECONDS)
        .MessageListener(new MessageListener<int>((consumer, msg) =>
        {
            try
            {
                // Deliberately never acked, so the broker redelivers after the timeout.
                receivedMessages.Add(msg.Value);
            }
            finally
            {
                latch.Signal();
            }
        }, null));
    var consumer = _client.NewConsumer(ISchema<object>.Int32, config);
    var pBuilder = new ProducerConfigBuilder<int>()
        .Topic(topic)
        .EnableBatching(false);
    var producer = _client.NewProducer(ISchema<object>.Int32, pBuilder);
    for (int i = 0; i < messages; i++)
    {
        producer.Send(i);
    }
    // Bounded wait: a broken redelivery path fails the test instead of hanging it.
    Assert.True(latch.Wait(TimeSpan.FromSeconds(30)), "Timed out waiting for listener deliveries");
    // xUnit convention: expected value first.
    Assert.Equal(messages, receivedMessages.Count);
    consumer.Close();
    producer.Close();
}
/// <summary>
/// Publishes one message carrying a 1000-byte binary key and verifies the consumer
/// receives it with the same key (base64-encoded) and payload.
/// </summary>
public void ProduceAndConsume()
{
    var topic = $"persistent://public/default/my-topic-{Guid.NewGuid()}";
    var rnd = new Random(0);
    var binaryKey = new byte[1000];
    rnd.NextBytes(binaryKey);

    var consumer = _client.NewConsumer(new ConsumerConfigBuilder<byte[]>()
        .Topic(topic)
        .ForceTopicCreation(true)
        .SubscriptionName($"ByteKeysTest-subscriber-{Guid.NewGuid()}"));

    var producerConfig = new ProducerConfigBuilder<byte[]>();
    producerConfig.Topic(topic);
    var producer = _client.NewProducer(producerConfig);

    producer.NewMessage()
        .KeyBytes(binaryKey)
        .Properties(new Dictionary<string, string> { { "KeyBytes", Encoding.UTF8.GetString(binaryKey) } })
        .Value(Encoding.UTF8.GetBytes("TestMessage"))
        .Send();

    // Give the broker time to dispatch before the blocking receive.
    Thread.Sleep(TimeSpan.FromSeconds(10));
    var received = consumer.Receive();
    Assert.Equal(binaryKey, received.KeyBytes);
    Assert.True(received.HasBase64EncodedKey());
    var text = Encoding.UTF8.GetString(received.Data);
    _output.WriteLine($"Received message: [{text}]");
    Assert.Equal("TestMessage", text);
    producer.Close();
    consumer.Close();
}
/// <summary>
/// Transactional abort: messages produced inside a transaction must be invisible
/// before the abort and must remain invisible after the transaction aborts.
/// </summary>
public void ProduceAbortTest()
{
    var txn = Txn;
    var consumerBuilder = new ConsumerConfigBuilder<byte[]>()
        .Topic(_topicOutput)
        .SubscriptionName($"test{DateTime.Now.Ticks}")
        .ForceTopicCreation(true)
        .EnableBatchIndexAcknowledgment(true)
        .SubscriptionInitialPosition(SubscriptionInitialPosition.Earliest);
    var consumer = _client.NewConsumer(consumerBuilder);
    var producerBuilder = new ProducerConfigBuilder<byte[]>();
    producerBuilder.Topic(_topicOutput);
    producerBuilder.SendTimeout(0); // transactional sends may stay pending; no timeout
    var producer = _client.NewProducer(producerBuilder);
    var messageCnt = 10;
    for (var i = 0; i < messageCnt; i++)
    {
        producer.NewMessage(txn).Value(Encoding.UTF8.GetBytes("Hello Txn - " + i)).Send();
    }
    // Can't receive transaction messages before abort.
    var message = consumer.Receive();
    Assert.Null(message);
    txn.Abort();
    // Can't receive transaction messages after abort.
    message = consumer.Receive();
    Assert.Null(message);
}
/// <summary>
/// Zero receiver queue + shared subscription: messages never acked must be redelivered
/// after the 1s ack timeout, so 2 * messages receives still yield exactly `messages`
/// distinct values. Fixes: Assert.Equal arguments were swapped — xUnit expects the
/// expected value first, so a failure previously reported actual/expected reversed.
/// </summary>
public void TestZeroQueueSizeMessageRedelivery()
{
    const string topic = "testZeroQueueSizeMessageRedelivery";
    var config = new ConsumerConfigBuilder<int>()
        .Topic(topic)
        .SubscriptionName("sub")
        .ReceiverQueueSize(0)
        .SubscriptionType(SubType.Shared)
        .AckTimeout(1, TimeUnit.SECONDS); // the redelivery trigger under test
    var consumer = _client.NewConsumer(ISchema<object>.Int32, config);
    var pBuilder = new ProducerConfigBuilder<int>()
        .Topic(topic)
        .EnableBatching(false);
    var producer = _client.NewProducer(ISchema<object>.Int32, pBuilder);
    const int messages = 10;
    for (int i = 0; i < messages; i++)
    {
        producer.Send(i);
    }
    ISet<int> receivedMessages = new HashSet<int>();
    // Each message is received twice: initial delivery + ack-timeout redelivery.
    for (int i = 0; i < messages * 2; i++)
    {
        receivedMessages.Add(consumer.Receive().Value);
    }
    // xUnit convention: expected value first.
    Assert.Equal(messages, receivedMessages.Count);
    consumer.Close();
    producer.Close();
}
/// <summary>
/// Cumulative transactional acks over batched messages, run twice: acks made inside a
/// transaction hold messages in pending-ack state, an Abort releases them for
/// redelivery, and a subsequent Commit makes the cumulative acks permanent.
/// </summary>
public void TxnCumulativeAckTestBatched()
{
    var normalTopic = _nAMESPACE1 + $"/normal-topic-{Guid.NewGuid()}";
    var consumerBuilder = new ConsumerConfigBuilder<byte[]>()
        .Topic(normalTopic)
        .ForceTopicCreation(true)
        .SubscriptionName($"test-{Guid.NewGuid()}")
        .EnableBatchIndexAcknowledgment(true) // needed so cumulative acks can target batch indexes
        .SubscriptionType(SubType.Failover)
        .AcknowledgmentGroupTime(3000)
        .AckTimeout(TimeSpan.FromMilliseconds(10000));
    var consumer = _client.NewConsumer(consumerBuilder);
    var producerBuilder = new ProducerConfigBuilder<byte[]>()
        .Topic(normalTopic)
        .EnableBatching(true)
        .BatchingMaxMessages(50)
        .BatchingMaxPublishDelay(TimeSpan.FromMilliseconds(1000));
    var producer = _client.NewProducer(producerBuilder);
    for (var retryCnt = 0; retryCnt < 2; retryCnt++)
    {
        var abortTxn = Txn;
        var messageCnt = 100;
        // produce normal messages
        for (var i = 0; i < messageCnt; i++)
        {
            producer.NewMessage().Value(Encoding.UTF8.GetBytes("Hello")).Send();
        }
        IMessage<byte[]> message = null;
        Thread.Sleep(TimeSpan.FromSeconds(5));
        for (var i = 0; i < messageCnt; i++)
        {
            message = consumer.Receive();
            Assert.NotNull(message);
            if (i % 3 == 0)
            {
                // throws org.apache.pulsar.transaction.common.exception.TransactionCon"org.apache.pulsar.transaction.common.exception.TransactionConflictException: [persistent://public/default/normal-topic-24636acf-51a4-4309-8f68-86354383cefe][test-b1954d51-2e93-49f9-a3b7-2f76dcdedd36] Transaction:(1,42) try to cumulative batch ack position: 14960:0 within range of current currentPosition: 14960:0
                //better done outside
                consumer.AcknowledgeCumulative(message.MessageId, abortTxn);
            }
            _output.WriteLine($"receive msgId abort: {message.MessageId}, retryCount : {retryCnt}, count : {i}");
        }
        // consumer.AcknowledgeCumulative(message.MessageId, abortTxn);
        // the messages are pending ack state and can't be received
        message = consumer.Receive();
        Assert.Null(message);
        abortTxn.Abort();
        var commitTxn = Txn;
        Thread.Sleep(TimeSpan.FromSeconds(5));
        // After the abort the same messages are redelivered; ack them under a txn we commit.
        for (var i = 0; i < messageCnt; i++)
        {
            message = consumer.Receive();
            Assert.NotNull(message);
            if (i % 3 == 0)
            {
                consumer.AcknowledgeCumulative(message.MessageId, commitTxn);
            }
            _output.WriteLine($"receive msgId abort: {message.MessageId}, retryCount : {retryCnt}, count : {i}");
        }
        commitTxn.Commit();
        Thread.Sleep(TimeSpan.FromSeconds(5));
        // Committed cumulative acks: nothing should be redelivered now.
        message = consumer.Receive();
        Assert.Null(message);
    }
}
/// <summary>
/// Cumulative transactional acks on unbatched messages, run twice: acks held by an
/// aborted transaction are released for redelivery; acks made under a second
/// transaction become final on commit.
/// </summary>
public void TxnCumulativeAckTest()
{
    var normalTopic = _nAMESPACE1 + $"/normal-topic-{Guid.NewGuid()}";
    var consumerBuilder = new ConsumerConfigBuilder<byte[]>()
        .Topic(normalTopic)
        .SubscriptionName($"test-{Guid.NewGuid()}")
        .ForceTopicCreation(true)
        //.SubscriptionType(SubType.Failover)
        .EnableBatchIndexAcknowledgment(true)
        .AcknowledgmentGroupTime(3000)
        .AckTimeout(TimeSpan.FromMilliseconds(10000));
    var consumer = _client.NewConsumer(consumerBuilder);
    var producerBuilder = new ProducerConfigBuilder<byte[]>()
        .Topic(normalTopic);
    var producer = _client.NewProducer(producerBuilder);
    for (var retryCnt = 0; retryCnt < 2; retryCnt++)
    {
        var abortTxn = Txn;
        var messageCnt = 50;
        // produce normal messages
        for (var i = 0; i < messageCnt; i++)
        {
            producer.NewMessage().Value(Encoding.UTF8.GetBytes("Hello")).Send();
        }
        IMessage<byte[]> message = null;
        Thread.Sleep(TimeSpan.FromSeconds(5));
        for (var i = 0; i < messageCnt; i++)
        {
            message = consumer.Receive();
            Assert.NotNull(message);
            if (i % 3 == 0)
            {
                // Cumulative ack under the transaction that will be aborted.
                consumer.AcknowledgeCumulative(message.MessageId, abortTxn);
            }
            _output.WriteLine($"receive msgId abort: {message.MessageId}, retryCount : {retryCnt}, count : {i}");
        }
        // the messages are pending ack state and can't be received
        message = consumer.Receive();
        Assert.Null(message);
        abortTxn.Abort();
        var commitTxn = Txn;
        Thread.Sleep(TimeSpan.FromSeconds(5));
        // After the abort the messages are redelivered; ack them under a txn we commit.
        for (var i = 0; i < messageCnt; i++)
        {
            message = consumer.Receive();
            Assert.NotNull(message);
            consumer.AcknowledgeCumulative(message.MessageId, commitTxn);
            _output.WriteLine($"receive msgId abort: {message.MessageId}, retryCount : {retryCnt}, count : {i}");
        }
        commitTxn.Commit();
        Thread.Sleep(TimeSpan.FromSeconds(5));
        // Committed acks: nothing should be redelivered now.
        message = consumer.Receive();
        Assert.Null(message);
    }
}
/// <summary>
/// Synchronous wrapper over the async producer factory.
/// NOTE(review): blocks via GetAwaiter().GetResult() (sync-over-async) — safe only when
/// no synchronization context can deadlock on the awaited work; verify call sites.
/// </summary>
public Producer<T> NewProducer<T>(ISchema<T> schema, ProducerConfigBuilder<T> configBuilder)
{
    return (NewProducerAsync(schema, configBuilder).GetAwaiter().GetResult());
}
/// <summary>
/// Creates a raw byte[] producer by delegating to the schema-aware overload with the
/// built-in Bytes schema.
/// </summary>
public async ValueTask<Producer<byte[]>> NewProducerAsync(ProducerConfigBuilder<byte[]> producerConfigBuilder)
{
    return (await NewProducerAsync(ISchema<object>.Bytes, producerConfigBuilder).ConfigureAwait(false));
}
/// <summary>
/// Round-trips ten Avro-encoded JournalEntry messages (each wrapping a JSON-serialized
/// Students payload) through a producer and consumer on <paramref name="topic"/>.
/// </summary>
private void PlainAvroProducer(string topic)
{
    var jsonSchem = AvroSchema<JournalEntry>.Of(typeof(JournalEntry));
    var builder = new ConsumerConfigBuilder<JournalEntry>()
        .Topic(topic)
        .SubscriptionName($"my-subscriber-name-{DateTimeHelper.CurrentUnixTimeMillis()}")
        .AckTimeout(TimeSpan.FromMilliseconds(20000))
        .ForceTopicCreation(true)
        .AcknowledgmentGroupTime(0);
    var consumer = _client.NewConsumer(jsonSchem, builder);
    var producerConfig = new ProducerConfigBuilder<JournalEntry>()
        .ProducerName(topic.Split("/").Last()) // producer named after the topic's last path segment
        .Topic(topic)
        .Schema(jsonSchem)
        .SendTimeout(10000);
    var producer = _client.NewProducer(jsonSchem, producerConfig);
    for (var i = 0; i < 10; i++)
    {
        var student = new Students
        {
            Name = $"[{i}] Ebere: {DateTimeOffset.Now.ToUnixTimeMilliseconds()} - presto-ed {DateTime.Now.ToString(CultureInfo.InvariantCulture)}",
            Age = 202 + i,
            School = "Akka-Pulsar university"
        };
        // Student payload is embedded into the journal entry as serialized JSON bytes.
        var journal = new JournalEntry
        {
            Id = $"[{i}]Ebere: {DateTimeOffset.Now.ToUnixTimeMilliseconds()}",
            PersistenceId = "sampleActor",
            IsDeleted = false,
            Ordering = 0,
            Payload = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(student)),
            SequenceNr = 0,
            Tags = "root"
        };
        var metadata = new Dictionary<string, string>
        {
            ["Key"] = "Single",
            ["Properties"] = JsonSerializer.Serialize(new Dictionary<string, string> { { "Tick", DateTime.Now.Ticks.ToString() } }, new JsonSerializerOptions { WriteIndented = true })
        };
        // NOTE(review): the returned id is unused — kept for parity with sibling tests.
        var id = producer.NewMessage().Properties(metadata).Value(journal).Send();
    }
    Thread.Sleep(TimeSpan.FromSeconds(5)); // let the broker dispatch before receiving
    for (var i = 0; i < 10; i++)
    {
        var msg = consumer.Receive();
        if (msg != null)
        {
            var receivedMessage = msg.Value;
            _output.WriteLine(JsonSerializer.Serialize(receivedMessage, new JsonSerializerOptions { WriteIndented = true }));
        }
    }
}
/// <summary>
/// Builds a producer from the supplied schema and config builder.
/// Validates that batching and chunking are not enabled together and that a topic was
/// set, then creates the producer — wrapping any configured interceptors into a single
/// composite interceptor.
/// </summary>
/// <exception cref="ArgumentException">No topic name was set on the builder.</exception>
public async ValueTask<Producer<T>> NewProducerAsync<T>(ISchema<T> schema, ProducerConfigBuilder<T> configBuilder)
{
    // Read interceptors before Build(): they are supplied via the builder, not the conf.
    var interceptors = configBuilder.GetInterceptors;
    configBuilder.Schema(schema);
    var conf = configBuilder.Build();
    // config validation
    Condition.CheckArgument(!(conf.BatchingEnabled && conf.ChunkingEnabled), "Batching and chunking of messages can't be enabled together");
    if (conf.TopicName == null)
    {
        throw new ArgumentException("Topic name must be set on the producer builder");
    }
    if (interceptors == null || interceptors.Count == 0)
    {
        return (await CreateProducer(conf, schema).ConfigureAwait(false));
    }
    else
    {
        // Wrap once so the producer sees a single composite interceptor.
        return (await CreateProducer(conf, schema, new ProducerInterceptors<T>(_actorSystem.Log, interceptors)).ConfigureAwait(false));
    }
}
/// <summary>
/// Transactional produce plus individual acks: after commit all messages are received;
/// messages left unacked are redelivered after RedeliverUnacknowledgedMessages.
/// NOTE(review): the final receive loop runs messageCnt times yet the assertion below
/// it expects messageCnt - ackedMessageCount redeliveries — one of the two looks
/// inconsistent; verify against actual redelivery behavior.
/// </summary>
public void TxnMessageAckTest()
{
    var topic = $"{_topicMessageAckTest}-{Guid.NewGuid()}";
    var subName = $"test-{Guid.NewGuid()}";
    var consumerBuilder = new ConsumerConfigBuilder<byte[]>()
        .Topic(topic)
        .SubscriptionName(subName)
        .ForceTopicCreation(true)
        .EnableBatchIndexAcknowledgment(true)
        .AcknowledgmentGroupTime(0);
    var consumer = _client.NewConsumer(consumerBuilder);
    var producerBuilder = new ProducerConfigBuilder<byte[]>()
        .Topic(topic)
        .EnableBatching(false)
        .SendTimeout(0); // transactional sends may stay pending until commit
    var producer = _client.NewProducer(producerBuilder);
    var txn = Txn;
    var messageCnt = 10;
    for (var i = 0; i < messageCnt; i++)
    {
        producer.NewMessage(txn).Value(Encoding.UTF8.GetBytes("Hello Txn - " + i)).Send();
    }
    _output.WriteLine("produce transaction messages finished");
    // Can't receive transaction messages before commit.
    var message = consumer.Receive();
    Assert.Null(message);
    _output.WriteLine("transaction messages can't be received before transaction committed");
    txn.Commit();
    var ackedMessageCount = 0;
    var receiveCnt = 0;
    Thread.Sleep(TimeSpan.FromSeconds(5));
    for (var i = 0; i < messageCnt; i++)
    {
        message = consumer.Receive();
        Assert.NotNull(message);
        receiveCnt++;
        if (i % 2 == 0)
        {
            // Ack only even-indexed messages; the rest stay unacked for redelivery.
            consumer.Acknowledge(message);
            ackedMessageCount++;
        }
    }
    Assert.Equal(messageCnt, receiveCnt);
    message = consumer.Receive();
    Assert.Null(message);
    consumer.RedeliverUnacknowledgedMessages();
    Thread.Sleep(TimeSpan.FromSeconds(10));
    receiveCnt = 0;
    for (var i = 0; i < messageCnt; i++)
    {
        message = consumer.Receive();
        Assert.NotNull(message);
        consumer.Acknowledge(message);
        receiveCnt++;
    }
    Assert.Equal(messageCnt - ackedMessageCount, receiveCnt);
    message = consumer.Receive();
    Assert.Null(message);
    _output.WriteLine($"receive transaction messages count: {receiveCnt}");
}
/// <summary>
/// Negative-ack flow: publish n messages, negatively acknowledge each on first
/// receipt, then verify the full set is redelivered after the configured delay and
/// that nothing remains once everything is acked.
/// NOTE(review): <paramref name="usePartition"/> is only logged, never applied —
/// confirm whether a partitioned-topic variant was intended.
/// </summary>
private void TestNegativeAcks(bool batching, bool usePartition, CommandSubscribe.SubType subscriptionType, int negAcksDelayMillis, int ackTimeout)
{
    _output.WriteLine($"Test negative acks batching={batching} partitions={usePartition} subType={subscriptionType} negAckDelayMs={negAcksDelayMillis}");
    var topic = "testNegativeAcks-" + DateTime.Now.Ticks;
    var builder = new ConsumerConfigBuilder<byte[]>()
        .Topic(topic)
        .SubscriptionName($"sub1-{Guid.NewGuid()}")
        .AckTimeout(TimeSpan.FromMilliseconds(ackTimeout))
        .ForceTopicCreation(true)
        .AcknowledgmentGroupTime(0)
        .NegativeAckRedeliveryDelay(TimeSpan.FromMilliseconds(negAcksDelayMillis))
        .SubscriptionType(subscriptionType);
    var consumer = _client.NewConsumer(builder);
    var pBuilder = new ProducerConfigBuilder<byte[]>();
    pBuilder.Topic(topic);
    if (batching)
    {
        // Align the batch flush delay with the neg-ack delay so batches settle in time.
        pBuilder.EnableBatching(batching);
        pBuilder.BatchingMaxPublishDelay(TimeSpan.FromMilliseconds(negAcksDelayMillis));
        pBuilder.BatchingMaxMessages(10);
    }
    var producer = _client.NewProducer(pBuilder);
    ISet<string> sentMessages = new HashSet<string>();
    const int n = 10;
    for (var i = 0; i < n; i++)
    {
        var value = "test-" + i;
        producer.Send(Encoding.UTF8.GetBytes(value));
        sentMessages.Add(value);
    }
    Thread.Sleep(TimeSpan.FromSeconds(10));
    // First pass: negatively acknowledge everything that arrives.
    for (var i = 0; i < n; i++)
    {
        var msg = consumer.Receive();
        if (msg != null)
        {
            var ms = Encoding.UTF8.GetString(msg.Data);
            consumer.NegativeAcknowledge(msg);
            _output.WriteLine(ms);
        }
    }
    ISet<string> receivedMessages = new HashSet<string>();
    Thread.Sleep(TimeSpan.FromSeconds(10));
    // All the messages should be received again
    for (var i = 0; i < n; i++)
    {
        var msg = consumer.Receive();
        if (msg != null)
        {
            var ms = Encoding.UTF8.GetString(msg.Data);
            _output.WriteLine(ms);
            receivedMessages.Add(ms);
            consumer.Acknowledge(msg);
        }
    }
    Assert.Equal(sentMessages, receivedMessages);
    var nu = consumer.Receive();
    // There should be no more messages
    Assert.Null(nu);
    producer.Close();
    consumer.Close();
}
/// <summary>
/// Transactional acks on a batched failover subscription: acks registered under an
/// aborted transaction are released (messages redelivered); acks made under a second
/// transaction become final on commit.
/// </summary>
public void TxnAckTestBatchedFailoverSub()
{
    var normalTopic = _nAMESPACE1 + $"/normal-topic-{Guid.NewGuid()}";
    var consumerBuilder = new ConsumerConfigBuilder<byte[]>()
        .Topic(normalTopic)
        .SubscriptionName($"test-{Guid.NewGuid()}")
        .ForceTopicCreation(true)
        .EnableBatchIndexAcknowledgment(true) // required to txn-ack individual batch entries
        .AcknowledgmentGroupTime(5000)
        .SubscriptionType(SubType.Failover);
    var consumer = _client.NewConsumer(consumerBuilder);
    var producerBuilder = new ProducerConfigBuilder<byte[]>()
        .Topic(normalTopic)
        .EnableBatching(true)
        .BatchingMaxMessages(100); // all 100 test messages can land in one batch
    var producer = _client.NewProducer(producerBuilder);
    for (var retryCnt = 0; retryCnt < 1; retryCnt++)
    {
        var txn = Txn;
        //Thread.Sleep(TimeSpan.FromSeconds(30));
        var messageCnt = 100;
        // produce normal messages
        for (var i = 0; i < messageCnt; i++)
        {
            producer.NewMessage().Value("hello".GetBytes()).Send();
        }
        // consume and ack messages with txn
        Thread.Sleep(TimeSpan.FromSeconds(5));
        for (var i = 0; i < messageCnt; i++)
        {
            var msg = consumer.Receive();
            Assert.NotNull(msg);
            _output.WriteLine($"receive msgId: {msg.MessageId}, count : {i}");
            consumer.Acknowledge(msg.MessageId, txn);
        }
        // the messages are pending ack state and can't be received
        var message = consumer.Receive();
        Assert.Null(message);
        // 1) txn abort
        txn.Abort();
        // after transaction abort, the messages could be received
        var commitTxn = Txn;
        //Thread.Sleep(TimeSpan.FromSeconds(5));
        Thread.Sleep(TimeSpan.FromSeconds(30));
        for (var i = 0; i < messageCnt; i++)
        {
            message = consumer.Receive();
            Assert.NotNull(message);
            consumer.Acknowledge(message.MessageId, commitTxn);
            _output.WriteLine($"receive msgId: {message.MessageId}, count: {i}");
        }
        // 2) ack committed by a new txn
        commitTxn.Commit();
        // after transaction commit, the messages can't be received
        message = consumer.Receive();
        Assert.Null(message);
    }
}
/// <summary>
/// Round-trips ten GenericRecord messages built against a schema derived from
/// ComplexGenericData, consuming them with AUTO_CONSUME and checking every field
/// survives the trip (including a nested payload carried as raw bytes).
/// </summary>
public void TestGenericTopic()
{
    var schema = AvroSchema<ComplexGenericData>.Of(typeof(ComplexGenericData));
    // Generic schema derived from the POCO schema info so records can be built field-by-field.
    var genericSchema = GenericAvroSchema.Of(schema.SchemaInfo);
    _output.WriteLine(schema.SchemaInfo.SchemaDefinition);
    var pBuilder = new ProducerConfigBuilder<IGenericRecord>()
        .Topic(_topic);
    var producer = _client.NewProducer(genericSchema, pBuilder);
    const int messageCount = 10;
    for (var i = 0; i < messageCount; i++)
    {
        var dataForWriter = new GenericRecord((Avro.RecordSchema)genericSchema.AvroSchema);
        dataForWriter.Add("Feature", "Education");
        dataForWriter.Add("StringData", new Dictionary<string, string> { { "Index", i.ToString() }, { "FirstName", "Ebere" }, { "LastName", "Abanonu" } });
        // ComplexData travels as raw bytes and is decoded with FromBytes on receipt.
        dataForWriter.Add("ComplexData", ToBytes(new ComplexData { ProductId = i, Point = i * 2, Sales = i * 2 * 5 }));
        var record = new GenericAvroRecord(null, genericSchema.AvroSchema, genericSchema.Fields, dataForWriter);
        var receipt = producer.Send(record);
        _output.WriteLine(JsonSerializer.Serialize(receipt, new JsonSerializerOptions { WriteIndented = true }));
    }
    var messageReceived = 0;
    var builder = new ConsumerConfigBuilder<IGenericRecord>()
        .Topic(_topic)
        .ForceTopicCreation(true)
        .SubscriptionName($"generic_sub");
    // AUTO_CONSUME lets the consumer pick up the topic's schema instead of pinning one.
    var consumer = _client.NewConsumer(ISchema<object>.AutoConsume(), builder);
    Thread.Sleep(TimeSpan.FromSeconds(5));
    for (var i = 0; i < messageCount; ++i)
    {
        var m = consumer.Receive();
        Assert.NotNull(m);
        var receivedMessage = m.Value;
        var feature = receivedMessage.GetField("Feature").ToString();
        var strinData = (Dictionary<string, object>)receivedMessage.GetField("StringData");
        var complexData = FromBytes<ComplexData>((byte[])receivedMessage.GetField("ComplexData"));
        _output.WriteLine(feature);
        _output.WriteLine(JsonSerializer.Serialize(strinData, new JsonSerializerOptions { WriteIndented = true }));
        _output.WriteLine(JsonSerializer.Serialize(complexData, new JsonSerializerOptions { WriteIndented = true }));
        messageReceived++;
        consumer.Acknowledge(m);
    }
    Assert.Equal(10, messageReceived);
    producer.Close();
    consumer.Close();
}
/// <summary>
/// Two transactions publish interleaved messages to one topic; nothing is visible
/// until both commit, after which exactly txnMessageCnt messages are receivable.
/// Fixes: the second, duplicated receive loop doubled receiveCnt and made the
/// Assert.Equal(txnMessageCnt, receiveCnt) unsatisfiable (only txnMessageCnt messages
/// exist), and the log after the second commit wrongly said "Committed 1".
/// </summary>
public void ProduceCommitTest()
{
    var txn1 = Txn;
    var txn2 = Txn;
    var topic = $"{_topicOutput}";
    var consumerBuilder = new ConsumerConfigBuilder<byte[]>()
        .Topic(topic)
        .ForceTopicCreation(true)
        .SubscriptionName($"test-{Guid.NewGuid()}");
    var consumer = _client.NewConsumer(consumerBuilder);
    var producerBuilder = new ProducerConfigBuilder<byte[]>()
        .Topic(topic)
        .SendTimeout(0); // transactional sends may stay pending until commit
    var producer = _client.NewProducer(producerBuilder);
    var txnMessageCnt = 0;
    var messageCnt = 10;
    for (var i = 0; i < messageCnt; i++)
    {
        // Every 5th message goes through txn1, the rest through txn2.
        var txn = i % 5 == 0 ? txn1 : txn2;
        producer.NewMessage(txn).Value(Encoding.UTF8.GetBytes("Hello Txn - " + i)).Send();
        txnMessageCnt++;
    }
    // Can't receive transaction messages before commit.
    var message = consumer.Receive();
    Assert.Null(message);
    txn1.Commit();
    _output.WriteLine($"Committed 1");
    txn2.Commit();
    _output.WriteLine($"Committed 2");
    // All messages from both transactions become visible after the commits.
    var receiveCnt = 0;
    Thread.Sleep(TimeSpan.FromSeconds(5));
    for (var i = 0; i < txnMessageCnt; i++)
    {
        message = consumer.Receive();
        Assert.NotNull(message);
        _output.WriteLine(Encoding.UTF8.GetString(message.Value));
        receiveCnt++;
    }
    Assert.Equal(txnMessageCnt, receiveCnt);
    // Nothing beyond the committed messages should be pending.
    message = consumer.Receive();
    Assert.Null(message);
    _output.WriteLine($"message commit test enableBatch {true}");
}