/// <summary>
/// Entry point for the producer/consumer channels demo: wires a char producer to an
/// unbounded channel, runs the interactive consumer loop, then shuts down cleanly.
/// </summary>
public static void Main(string[] args)
{
    // BUG FIX: ILoggerFactory and CancellationTokenSource are IDisposable and were
    // never disposed; `using` declarations release them when Main returns.
    using ILoggerFactory loggerFactory = LoggerFactory.Create(builder => builder.AddConsole());
    ILogger logger = loggerFactory.CreateLogger("ChannelsDemo");
    using var cts = new CancellationTokenSource();

    logger.LogInformation("Starting Producer / Consumer demo");

    var producerFactory = new ProducerFactory<char>(
        cts.Token,
        logger,
        GetWorkingDirectory(),
        (char c) => BitConverter.GetBytes(c));
    var channel = new UnboundedChannelFacade<char>();
    var producer = new ProducerWrapper<char>(producerFactory, cts.Token, channel);

    Task producerTask = producer.RunAsync();

    // Blocks until the user ends the consumer loop.
    RunLoop(channel, logger);

    // Signal the producer to stop and wait for it to drain.
    cts.Cancel();
    Console.WriteLine();
    logger.LogInformation("Waiting for producer to shutdown");
    producerTask.Wait();
    logger.LogInformation("Finished. Exiting.");
}
/// <summary>
/// Interactive RabbitMQ producer console: reads "Topic,Message" lines from stdin
/// and publishes each with a fresh message id until the user enters Q (or EOF).
/// </summary>
static void Main(string[] args)
{
    Console.Title = "Producer";

    RabbitMqConfig config = new RabbitMqConfig();
    config.ExchangeName = "test";
    config.HostName = "localhost";
    config.Password = "******";
    config.UserName = "******";
    config.VirtualHost = "/";

    ProducerFactory factory = new ProducerFactory(config);
    var producer = factory.CreateProducer("Producer1");

    Console.WriteLine("准备就绪");
    Console.WriteLine("请输入要发送的消息,格式: Topic,Message。按 Q 退出");

    while (true)
    {
        var input = Console.ReadLine();

        // BUG FIX: Console.ReadLine() returns null on end-of-stream (e.g. redirected
        // input); the original threw NullReferenceException at input.Split below.
        // Treat EOF like Q and exit the loop.
        if (input is null || "Q".Equals(input, StringComparison.OrdinalIgnoreCase))
        {
            break;
        }

        var array = input.Split(new char[] { ',' }, StringSplitOptions.RemoveEmptyEntries);
        if (array.Length != 2)
        {
            Console.WriteLine("消息格式错误,请重新输入");
            continue;
        }

        // Publish(topic, messageId, body) — a new GUID per message as the id.
        producer.Publish(array[0], Guid.NewGuid().ToString("N"), array[1]);
    }
}
/// <summary>
/// Appends a write step to the pipeline that publishes the stream's output
/// to <paramref name="topic"/>, keying each message via <paramref name="resolveKey"/>.
/// </summary>
/// <returns>The same builder, for fluent chaining.</returns>
public INetStreamBuilder<TKey, TMessage> ToTopic<TResponseKey, TResponseMessage>(string topic, Func<TResponseMessage, TResponseKey> resolveKey)
{
    var topicProducer = new ProducerFactory().Create<TResponseKey, TResponseMessage>(topic, Configuration);
    var topicWriter = new KafkaTopicWriter<TResponseKey, TResponseMessage>(topicProducer, resolveKey);

    var writeStep = new WriteStreamStep<TKey, TMessage>(topicWriter);
    _pipeline.AppendStep(writeStep);

    return this;
}
/// <summary>
/// RocketMQ demo entry point: subscribes a consumer to the first tag, publishes a
/// batch of test messages, then waits for a key press before tearing down.
/// </summary>
static void Main(string[] args)
{
    // Both factory and container point at the same name server.
    const string nameServer = "172.17.20.6:9876";

    using var producerFactory = new ProducerFactory(nameServer);
    using var consumerContainer = new ConsumerContainer(nameServer);

    TestConsumeMsg(consumerContainer, MsgTags[0]);
    TestPublishMsg(producerFactory);

    // Keep the process alive so the consumer can receive the published messages.
    Console.ReadKey();
}
/// <summary>
/// Shutdown hook: unsubscribes the consumer and disposes the producer before
/// delegating to the base implementation. Teardown failures are logged, not
/// rethrown, so shutdown always completes.
/// </summary>
protected override void OnStopped()
{
    try
    {
        // Stop receiving first, then release producer resources.
        ConsumerFactory.Unsubscribe();
        ProducerFactory.Dispose();
    }
    catch (Exception ex)
    {
        Logger.Error("Stopped Error", ex);
    }

    base.OnStopped();
}
/// <summary>
/// Runs one producer task and one consumer task concurrently over a freshly
/// created channel and returns the consumer's result count.
/// </summary>
public async Task<int> ChannelPerf()
{
    var channel = CreateChannel();

    // Each producer is responsible for an equal share of the total capacity.
    var perProducerItems = Capacity / ProducerCount;

    var producerFactory = new ProducerFactory(channel.Writer, perProducerItems);
    var consumerFactory = new ConsumerFactory(channel.Reader);

    var producing = producerFactory.Start();
    var consuming = consumerFactory.Start();

    // Wait for both sides to finish before reading the consumer's result.
    await Task.WhenAll(producing, consuming);
    return consuming.Result;
}
/// <summary>
/// Publishes ten delayed test messages to the "delay_msg_test" topic, cycling
/// through the three configured tags, and prints each send result.
/// </summary>
static void TestPublishMsg(ProducerFactory producerFactory)
{
    var producer = producerFactory.GetTopicProducer("delay_msg_test");

    for (var index = 0; index < 10; index++)
    {
        var body = $"order_test_{DateTime.UtcNow.Ticks}";

        var message = new Message
        {
            Body = body.GetBytes(),
            // Rotate across the three tags so each is exercised.
            Tags = MsgTags[index % 3],
            DelayTimeLevel = 2
        };

        var sendResult = producer.Publish(message);
        Console.WriteLine($"发送消息: {body}, Tag: {message.Tags}, 发送结果: {sendResult.Status}, 队列: {sendResult.Queue}");
    }
}
/// <summary>
/// Returns the ProducerFactory singleton instance already registered in
/// <paramref name="services"/>, or creates and registers a new one.
/// SingleOrDefault guarantees at most one registration exists.
/// </summary>
private static ProducerFactory AddOrGetRegisteredProducerFactory(IServiceCollection services)
{
    var existing = services
        .Where(descriptor => descriptor.ServiceType == typeof(ProducerFactory))
        .Select(descriptor => descriptor.ImplementationInstance)
        .Cast<ProducerFactory>()
        .SingleOrDefault();

    if (existing != null)
    {
        return existing;
    }

    var created = new ProducerFactory();
    services.AddSingleton(created);
    return created;
}
/// <summary>
/// Verifies that registering a producer against an abstraction resolves the
/// concrete sender through the interface with a correctly-named producer attached.
/// </summary>
public void resolves_a_typed_producer_for_an_abstract_service()
{
    // Arrange
    var services = new ServiceCollection();
    services.AddLogging();
    services.AddTransient<MessageSenderOne.AnotherDependency>();
    services.AddProducerFor<IMessageSenderOne, MessageSenderOne>(options => options.WithBootstrapServers("dummy"));

    // Act
    var provider = services.BuildServiceProvider();
    var messageSender = provider.GetRequiredService<IMessageSenderOne>();

    // Assert
    Assert.NotNull(messageSender);
    Assert.NotNull(messageSender.Producer);
    Assert.Equal(ProducerFactory.GetKeyNameOf<MessageSenderOne>(), messageSender.Producer.Name);
}
/// <summary>
/// Runs a producer/consumer strategy pairing for <paramref name="duration"/> and
/// prints the observed min/max latency for that pairing.
/// </summary>
private async Task StartTest(TimeSpan duration, int producerId, StrategyKind producerStrategy, StrategyKind consumerStrategy)
{
    Console.Write($"Producer: {producerStrategy,-10} - Consumer: {consumerStrategy,-10} ");

    var producerFactory = new ProducerFactory();
    using (var producer = producerFactory.Create(producerId, producerStrategy))
    {
        // Attach a consumer built for the requested strategy and let the
        // pair run for the configured duration.
        var consumer = new ConsumerFactory().Create(consumerStrategy);
        consumer.Consume(producer);

        await Task.Delay(duration);

        Console.WriteLine($"Min:{consumer.Min.TotalMilliseconds,6:0.00}ms - Max:{consumer.Max.TotalMilliseconds,6:0.00}ms");
    }
}
/// <summary>
/// Verifies that AddProducerFor registers a concrete typed sender whose producer
/// and transitive dependency are both resolved and correctly configured.
/// </summary>
public void registers_a_typed_producer()
{
    // Arrange
    var services = new ServiceCollection();
    services.AddTransient<MessageSenderOne.AnotherDependency>();
    services.AddProducerFor<MessageSenderOne>(options => options.WithBootstrapServers("dummy"));

    // Act
    var provider = services.BuildServiceProvider();
    var messageSender = provider.GetRequiredService<MessageSenderOne>();

    // Assert
    Assert.NotNull(messageSender);
    Assert.NotNull(messageSender.Producer);
    Assert.NotNull(messageSender.ADependency);
    Assert.Equal("hello one", messageSender.ADependency.Message);
    Assert.Equal(ProducerFactory.GetKeyNameOf<MessageSenderOne>(), messageSender.Producer.Name);
}
/// <summary>
/// Builds the cluster runtime: produce/consume routers (defaulted when no factory
/// is supplied), statistics event plumbing, the node factory, the internal message
/// actor, and the initial node set from the configured seeds.
/// </summary>
/// <param name="nodeFactory">Optional override for node creation; a default TCP-backed factory is used when null.</param>
/// <param name="producerFactory">Optional override for the produce router; defaulted when null.</param>
/// <param name="consumerFactory">Optional override for the consume router; defaulted when null.</param>
/// <param name="statistics">Optional statistics sink; a fresh <c>Statistics</c> is used when null.</param>
/// <exception cref="ArgumentException">Thrown when the seeds (or seeds getter) yield no nodes.</exception>
public Cluster(Configuration configuration, ILogger logger, NodeFactory nodeFactory, ProducerFactory producerFactory, ConsumerFactory consumerFactory, IStatistics statistics = null)
{
    _configuration = configuration;
    _seeds = configuration.Seeds;
    _seedsGetter = configuration.SeedsGetter;
    Logger = logger;
    Statistics = statistics ?? new Statistics();
    // Scheduler granularity is half the request timeout so timeouts fire promptly.
    _timeoutScheduler = new TimeoutScheduler(configuration.ClientRequestTimeoutMs / 2);
    _pools = InitPools(Statistics, configuration, logger);

    // Producer init — statistics hooks must be attached before the router is
    // exposed via RoutingTableChange so no event is missed.
    ProduceRouter = producerFactory != null ? producerFactory() : new ProduceRouter(this, configuration, _pools.MessageBuffersPool);
    ProduceRouter.MessageExpired += (t, m) => { Statistics.UpdateExpired(); UpdateExited(1); };
    ProduceRouter.MessagesAcknowledged += (t, c) => { Statistics.UpdateSuccessfulSent(c); UpdateExited(c); SignalSeenTopic(t); };
    ProduceRouter.MessageDiscarded += (t, m) => { Statistics.UpdateDiscarded(); UpdateExited(1); };
    RoutingTableChange += ProduceRouter.ChangeRoutingTable;
    ProduceRouter.BrokerTimeoutError += Statistics.UpdateBrokerTimeoutError;
    ProduceRouter.MessageReEnqueued += Statistics.UpdateMessageRetry;
    ProduceRouter.MessagePostponed += Statistics.UpdateMessagePostponed;

    // Consumer init
    ConsumeRouter = consumerFactory != null ? consumerFactory() : new ConsumeRouter(this, configuration);
    ConsumeRouter.MessageReceived += UpdateConsumerMessageStatistics;
    // NOTE(review): this type test is only meaningful if the ConsumeRouter property
    // is declared as an interface — a custom consumerFactory may return a different
    // implementation that lacks InternalError. Presumably intentional; verify.
    if (ConsumeRouter is ConsumeRouter)
    {
        (ConsumeRouter as ConsumeRouter).InternalError += ex => Logger.LogError("An unexpected error occured in the consumer: " + ex);
    }
    RoutingTableChange += ConsumeRouter.ChangeRoutingTable;

    // Node factory — the serializer is shared by all nodes created by the factory.
    var clientId = Encoding.UTF8.GetBytes(configuration.ClientId);
    var serializer = new Node.Serialization(configuration.SerializationConfig, configuration.Compatibility, _pools.RequestsBuffersPool, clientId, configuration.RequiredAcks, configuration.RequestTimeoutMs, configuration.CompressionCodec, configuration.FetchMinBytes, configuration.FetchMaxWaitTime, configuration.FetchMaxBytes);
    _nodeFactory = nodeFactory ?? ((h, p) => new Node(string.Format("[{0}:{1}]", h, p), () => new Connection(h, p, ep => new RealSocket(ep), _pools.SocketBuffersPool, _pools.RequestsBuffersPool, configuration.SendBufferSize, configuration.ReceiveBufferSize), serializer, configuration, _timeoutScheduler, _resolution));
    // Decoration wraps whichever factory was chosen (supplied or default).
    _nodeFactory = DecorateFactory(_nodeFactory);

    // Inner actor — serializes all cluster messages onto the configured scheduler.
    _agent = new ActionBlock<ClusterMessage>(r => ProcessMessage(r), new ExecutionDataflowBlockOptions { TaskScheduler = configuration.TaskScheduler });

    // Bootstrap — fail fast if the seeds produced no usable nodes.
    BuildNodesFromSeeds();
    if (_nodes.Count == 0)
    {
        var message = _seedsGetter != null ? "Invalid seeds Getter" : "Invalid seeds: " + _seeds;
        throw new ArgumentException(message);
    }
}
/// <summary>
/// Full-load export of one table to Kafka: resumes from a stored offset when one
/// exists, otherwise starts from the first row, then publishes key-ordered batches
/// until the table is exhausted or cancellation is requested. Progress is printed
/// at multiples of <paramref name="printPercentProgressMod"/> percent.
/// </summary>
private async Task StreamTableAsync(CancellationToken token, string executionId, TableSchema tableSchema, SerializationMode serializationMode, bool sendWithKey, int batchSize, int printPercentProgressMod)
{
    string topicName = _kafkaTopicPrefix + tableSchema.TableName.ToLower();
    var rowCount = await _cdcReaderClient.GetRowCountAsync(tableSchema);
    Console.WriteLine($"Table {tableSchema.Schema}.{tableSchema.TableName} has {rowCount} rows to export");

    int progress = 0;
    using (var producer = ProducerFactory.GetProducer(topicName, tableSchema, serializationMode, sendWithKey, _kafkaBootstrapServers, _schemaRegistryUrl))
    {
        long ctr = 0;
        PrimaryKeyValue lastRetrievedKey = null;
        var existingOffset = await _cdcReaderClient.GetLastFullLoadOffsetAsync(executionId, tableSchema.TableName);
        if (existingOffset.Result == CdcReader.State.Result.NoStoredState)
        {
            Console.WriteLine($"Table {tableSchema.TableName} - No previous stored offset. Starting from first row");
            var firstBatch = await _cdcReaderClient.GetFirstBatchAsync(tableSchema, batchSize);
            ctr = await PublishAsync(producer, token, firstBatch, ctr);
            lastRetrievedKey = firstBatch.LastRowKey;
            await _cdcReaderClient.StoreFullLoadOffsetAsync(executionId, tableSchema.TableName, firstBatch.LastRowKey);
        }
        else
        {
            // BUG FIX: this branch resumes a partially completed export, but the
            // original printed "No data to export", which misdescribed what happens.
            Console.WriteLine($"Table {tableSchema.TableName} - Resuming from previously stored offset");
            lastRetrievedKey = existingOffset.State;
        }

        bool finished = false;
        while (!token.IsCancellationRequested && !finished)
        {
            // (Removed an unused List<RowChange> that the original allocated on
            // every iteration and never read or wrote.)
            var batch = await _cdcReaderClient.GetBatchAsync(tableSchema, lastRetrievedKey, batchSize);
            ctr = await PublishAsync(producer, token, batch, ctr);

            // Only print when progress moved AND landed on the reporting modulus.
            int latestProgress = (int)(((double)ctr / (double)rowCount) * 100);
            if (progress != latestProgress && latestProgress % printPercentProgressMod == 0)
            {
                Console.WriteLine($"Table {tableSchema.Schema}.{tableSchema.TableName} - Progress at {latestProgress}% ({ctr} records)");
            }
            progress = latestProgress;

            lastRetrievedKey = batch.LastRowKey;
            await _cdcReaderClient.StoreFullLoadOffsetAsync(executionId, tableSchema.TableName, lastRetrievedKey);

            // A short or empty batch means the end of the table was reached.
            if (!batch.Records.Any() || batch.Records.Count < batchSize)
            {
                finished = true;
            }
        }

        if (token.IsCancellationRequested)
        {
            Console.WriteLine($"Table {tableSchema.Schema}.{tableSchema.TableName} - cancelled at progress at {progress}% ({ctr} records)");
        }
        else
        {
            Console.WriteLine($"Table {tableSchema.Schema}.{tableSchema.TableName} - complete ({ctr} records)");
        }
    }
}
/// <summary>
/// Produces ten Person and ten Address records (protobuf-serialized via the schema
/// registry) to the person topic, one synchronous send per message.
/// </summary>
private static void StartProducing(ILogger logger, IConfiguration config, SchemaRegistryConfig schemaRegistryConfig)
{
    var schemaRegistryClient = new CachedSchemaRegistryClient(schemaRegistryConfig);
    var producerFactory = new ProducerFactory<string, Record>(
        logger,
        new ProtobufSerializer<Record>(schemaRegistryClient),
        KafkaOptions.ForProducer(config));
    var kafkaProducer = producerFactory.CreateProducer();

    // Local helper: wrap a record in a GUID-keyed message, log it, send it, block
    // until the broker acknowledges.
    void Send(Record record)
    {
        var message = new Confluent.Kafka.Message<string, Record>
        {
            Key = Guid.NewGuid().ToString(),
            Value = record
        };
        logger.Information(
            "Sending message => Topic: {Topic} Key: {Key} Value: {Value}",
            PERSON_TOPIC, message.Key, message.Value);
        kafkaProducer.ProduceAsync(PERSON_TOPIC, message)
            .GetAwaiter()
            .GetResult();
    }

    for (int i = 0; i < 10; i++)
    {
        var person = new Person
        {
            Id = i,
            Name = $"{nameof(Person.Name)} {i}",
            Email = $"{nameof(Person.Email)} {i}",
            Age = 20 + i,
            PhoneNumber = $"{nameof(Person.PhoneNumber)} {i}"
        };
        var address = new Address
        {
            PersonId = i,
            State = $"{nameof(Address.State)} {i}",
            Street = $"{nameof(Address.Street)} {i}",
            ZipCode = $"{nameof(Address.ZipCode)} {i}",
        };

        // Each payload is packed into a Record envelope with its own id/timestamp.
        var personRecord = new Record
        {
            CreatedDate = Timestamp.FromDateTime(DateTime.UtcNow),
            Id = Guid.NewGuid().ToString(),
            Payload = Any.Pack(person)
        };
        var addressRecord = new Record
        {
            CreatedDate = Timestamp.FromDateTime(DateTime.UtcNow),
            Id = Guid.NewGuid().ToString(),
            Payload = Any.Pack(address)
        };

        Send(personRecord);
        Send(addressRecord);
    }
}
/// <summary>
/// Continuously exports CDC changes for one table to its Kafka topic. Each outer
/// iteration snapshots the current max LSN, drains all change batches up to it
/// (persisting the offset after each batch), then sleeps out the remainder of
/// <paramref name="maxInterval"/> before polling again.
/// </summary>
private async Task StartPublishingChanges(CancellationToken token, string executionId, string tableName, TimeSpan maxInterval, int batchSize, bool sendWithKey, SerializationMode serializationMode)
{
    var tableTopic = _tableTopicPrefix + tableName.ToLower();
    var tableSchema = await _cdcReaderClient.GetTableSchemaAsync(tableName);
    using (var producer = ProducerFactory.GetProducer(tableTopic, tableSchema, serializationMode, sendWithKey, _kafkaBootstrapServers, _schemaRegistryUrl))
    {
        var cdcState = await SetInitialStateAsync(token, producer, executionId, tableSchema, maxInterval);
        var sw = new Stopwatch();
        while (!token.IsCancellationRequested)
        {
            // Snapshot the upper bound for this polling cycle.
            cdcState.ToLsn = await _cdcReaderClient.GetMaxLsnAsync();
            sw.Start();
            Console.WriteLine($"Table {tableName} - Starting to export LSN range {GetBigInteger(cdcState.FromLsn)} to {GetBigInteger(cdcState.ToLsn)}");
            bool more = true;
            int blockCounter = 0;
            while (!token.IsCancellationRequested && more)
            {
                if (GetBigInteger(cdcState.FromLsn) <= GetBigInteger(cdcState.ToLsn))
                {
                    blockCounter++;
                    ChangeBatch batch = null;
                    // When the previous batch ended mid-transaction, resume from the
                    // stored sequence value inside the same LSN; otherwise from the LSN.
                    if (cdcState.UnfinishedLsn)
                    {
                        batch = await _cdcReaderClient.GetChangeBatchAsync(tableSchema, cdcState.FromLsn, cdcState.FromSeqVal, cdcState.ToLsn, batchSize);
                    }
                    else
                    {
                        batch = await _cdcReaderClient.GetChangeBatchAsync(tableSchema, cdcState.FromLsn, cdcState.FromSeqVal, cdcState.ToLsn, batchSize);
                    }
                    if (batch.Changes.Any())
                    {
                        Console.WriteLine($"Table {tableName} - Retrieved block #{blockCounter} with {batch.Changes.Count} changes");
                        foreach (var change in batch.Changes)
                        {
                            // Track position after every send so a crash loses at
                            // most the changes since the last stored offset.
                            await producer.SendAsync(token, change);
                            cdcState.FromLsn = change.Lsn;
                            cdcState.FromSeqVal = change.SeqVal;
                        }
                        more = batch.MoreChanges;
                        cdcState.UnfinishedLsn = batch.MoreOfLastTransaction;
                        // Advance past the last published change: by sequence value
                        // when still inside a transaction, else by LSN.
                        if (cdcState.UnfinishedLsn)
                        {
                            cdcState.FromSeqVal = Increment(cdcState.FromSeqVal);
                        }
                        else
                        {
                            cdcState.FromLsn = Increment(cdcState.FromLsn);
                        }
                        var offset = GetOffset(cdcState);
                        await BlockingStoreCdcOffsetAsync(token, executionId, tableName, offset);
                    }
                    else
                    {
                        more = false;
                        cdcState.UnfinishedLsn = false;
                        Console.WriteLine($"Table {tableName} - No changes");
                    }
                }
                else
                {
                    // FromLsn already past the snapshot upper bound — nothing to do.
                    more = false;
                    cdcState.UnfinishedLsn = false;
                    Console.WriteLine($"Table {tableName} - No changes");
                }
            }
            // Sleep only for the portion of maxInterval not consumed by the export.
            var remainingMs = maxInterval.TotalMilliseconds - sw.Elapsed.TotalMilliseconds;
            if (remainingMs > 0)
            {
                await Task.Delay((int)remainingMs);
            }
            sw.Reset();
        }
    }
}
/// <summary>
/// Materializes <typeparamref name="TImplementation"/> through the DI container,
/// explicitly supplying the producer registered for that type; remaining
/// constructor arguments are resolved from <paramref name="provider"/>.
/// </summary>
private static TImplementation CreateInstance<TImplementation>(IServiceProvider provider, ProducerFactory factory)
{
    var producer = factory.GetFor<TImplementation>(provider.GetRequiredService<ILoggerFactory>());
    return ActivatorUtilities.CreateInstance<TImplementation>(provider, producer);
}
/// <summary>
/// Creates a dispatcher bound to the given producer factory.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="producerFactory"/> is null.</exception>
public MessageDispatcher(ProducerFactory producerFactory)
{
    // Fail fast on a null dependency rather than deferring the
    // NullReferenceException to the first dispatch call.
    this.producerFactory = producerFactory ?? throw new ArgumentNullException(nameof(producerFactory));
}
/// <summary>
/// Produces ten Person and ten Address records (JSON-serialized) to the person
/// topic, keyed by the person id, one synchronous send per message.
/// </summary>
private static void StartProducing(ILogger logger, IConfiguration config)
{
    var producerFactory = new ProducerFactory<string, Record>(
        logger,
        new StringSerializer<Record>(e => Encode.FromString(JsonConvert.SerializeObject(e))),
        KafkaOptions.ForProducer(config));
    var kafkaProducer = producerFactory.CreateProducer();

    // Local helper: log and synchronously publish a single keyed record.
    void Send(string key, Record record)
    {
        var message = new Message<string, Record> { Key = key, Value = record };
        logger.Information(
            "Sending message => Topic: {Topic} Key: {Key} Value: {Value}",
            PERSON_TOPIC, message.Key, message.Value);
        kafkaProducer.ProduceAsync(PERSON_TOPIC, message)
            .GetAwaiter()
            .GetResult();
    }

    for (int i = 0; i < 10; i++)
    {
        var person = new Person
        {
            Id = i,
            Name = $"{nameof(Person.Name)} {i}",
            Email = $"{nameof(Person.Email)} {i}",
            Age = 20 + i,
            PhoneNumber = $"{nameof(Person.PhoneNumber)} {i}"
        };
        var address = new Address
        {
            PersonId = i,
            State = $"{nameof(Address.State)} {i}",
            Street = $"{nameof(Address.Street)} {i}",
            ZipCode = $"{nameof(Address.ZipCode)} {i}",
        };

        var personRecord = Record.Create(person, typeof(Person));
        var addressRecord = Record.Create(address, typeof(Address));

        // Both messages share the person id as the partition key.
        Send(person.Id.ToString(), personRecord);
        Send(person.Id.ToString(), addressRecord);
    }
}