public void ClientNameVersion(string bootstrapServers)
        {
            LogToFile("start ClientNameVersion");

            var producerConfig = new ProducerConfig
            {
                BootstrapServers = bootstrapServers
            };

            producerConfig.Set("client.software.name", "test");
            producerConfig.Set("client.software.version", "1.0");

            var consumerConfig = new ConsumerConfig
            {
                GroupId          = Guid.NewGuid().ToString(),
                BootstrapServers = bootstrapServers,
                SessionTimeoutMs = 6000
            };

            consumerConfig.Set("client.software.name", "test");
            consumerConfig.Set("client.software.version", "1.0");


            using (var producer = new ProducerBuilder<Null, string>(producerConfig).Build())
            using (var consumer = new ConsumerBuilder<byte[], byte[]>(consumerConfig).Build())
            { }

            Assert.Equal(0, Library.HandleCount);
            LogToFile("end   ClientNameVersion");
        }
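
The test relies on applying arbitrary librdkafka properties through Config.Set. A minimal sketch of the same mechanism, assuming xUnit's Assert and Confluent.Kafka's Config.Get accessor ("localhost:9092" is a placeholder broker):

        // Sketch: values applied via Set can be read back via Get before Build() is called.
        var cfg = new ProducerConfig { BootstrapServers = "localhost:9092" };
        cfg.Set("client.software.name", "test");
        cfg.Set("client.software.version", "1.0");
        Assert.Equal("test", cfg.Get("client.software.name"));
        Assert.Equal("1.0", cfg.Get("client.software.version"));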
        internal KafkaSubscriptionStorage(IRebusLoggerFactory rebusLoggerFactory, IAsyncTaskFactory asyncTaskFactory
                                          , string brokerList, string inputQueueName, string groupId = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            if (string.IsNullOrWhiteSpace(brokerList))
            {
                throw new ArgumentNullException(nameof(brokerList));
            }
            var maxNameLength = 249;

            if (inputQueueName.Length > maxNameLength || !_topicRegex.IsMatch(inputQueueName))
            {
                // Reject names that are too long or contain characters that are not legal
                // in a Kafka topic name (assuming _topicRegex matches legal names).
                throw new ArgumentException("Invalid characters or invalid length of the topic name", nameof(inputQueueName));
            }
            if (inputQueueName.StartsWith(_magicSubscriptionPrefix))
            {
                throw new ArgumentException($"Sorry, but the queue name '{inputQueueName}' cannot be used because it conflicts with Rebus' internally used 'magic subscription prefix': '{_magicSubscriptionPrefix}'. ");
            }

            _config = new ConsumerConfig
            {
                BootstrapServers    = brokerList,
                ApiVersionRequest   = true,
                GroupId             = !string.IsNullOrEmpty(groupId) ? groupId : Guid.NewGuid().ToString("N"),
                EnableAutoCommit    = false,
                FetchWaitMaxMs      = 5,
                FetchErrorBackoffMs = 5,
                QueuedMinMessages   = 1000,
                SessionTimeoutMs    = 6000,
                //StatisticsIntervalMs = 5000,
#if DEBUG
                TopicMetadataRefreshIntervalMs = 20000,                 // Otherwise the default refresh interval is about five minutes
                Debug = "msg",
#endif
                AutoOffsetReset    = AutoOffsetReset.Latest,
                EnablePartitionEof = true
            };
            _config.Set("fetch.message.max.bytes", "10240");

            _asyncTaskFactory = asyncTaskFactory ?? throw new ArgumentNullException(nameof(asyncTaskFactory));
            _log = rebusLoggerFactory?.GetLogger<KafkaSubscriptionStorage>() ?? throw new ArgumentNullException(nameof(rebusLoggerFactory));
            _cancellationToken = cancellationToken;

            _subscriptions.TryAdd(inputQueueName, new[] { inputQueueName });
        }
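
The validation above references a _topicRegex field defined elsewhere in the class. A plausible definition, assuming the regex is meant to match legal Kafka topic names (letters, digits, '.', '_' and '-'):

        // Hypothetical definition; the real field lives elsewhere in KafkaSubscriptionStorage.
        // Requires: using System.Text.RegularExpressions;
        private static readonly Regex _topicRegex = new Regex(@"^[a-zA-Z0-9\._\-]+$", RegexOptions.Compiled);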
Example #3
        /// <summary>Creates a new instance of <see cref="KafkaConsumer"/>.</summary>
        /// <param name="brokerList">Initial list of brokers as a CSV list of broker host or host:port pairs.</param>
        /// <param name="groupId">Consumer group id; a random Guid is used when omitted.</param>
        /// <param name="logger">Optional logger.</param>
        public KafkaConsumer(string brokerList, string groupId = null, ILogger<KafkaConsumer> logger = null)
        {
            _logger = logger;
            if (string.IsNullOrWhiteSpace(brokerList))
            {
                throw new ArgumentNullException(nameof(brokerList));
            }
            var config = new ConsumerConfig
            {
                BootstrapServers    = brokerList,
                ApiVersionRequest   = true,
                GroupId             = !string.IsNullOrEmpty(groupId) ? groupId : Guid.NewGuid().ToString(),
                EnableAutoCommit    = false,
                FetchWaitMaxMs      = 5,
                FetchErrorBackoffMs = 5,
                QueuedMinMessages   = 1000,
                SessionTimeoutMs    = 6000,
                //StatisticsIntervalMs = 5000,
#if DEBUG
                Debug = "msg",
#endif
                AutoOffsetReset    = AutoOffsetReset.Latest,
                EnablePartitionEof = true
            };

            config.Set("fetch.message.max.bytes", "10240");

            // Note: the key and value deserializers are set explicitly below. If they were
            // not set, Confluent.Kafka would fall back to a built-in deserializer for the
            // type where one is available (e.g. UTF8 for string); the built-in deserializer
            // for Ignore returns null for all input data, including non-null data.
            _consumer = new ConsumerBuilder<Null, string>(config)
                        .SetKeyDeserializer(Deserializers.Null)
                        .SetValueDeserializer(Deserializers.Utf8)
                        .SetLogHandler(OnLog)
                        .SetErrorHandler(OnError)
                        .SetStatisticsHandler((_, json) => Console.WriteLine($"Statistics: {json}"))
                        .SetPartitionsAssignedHandler(ConsumerOnPartitionsAssigned)
                        .SetPartitionsRevokedHandler(ConsumerOnPartitionsRevoked)
                        .Build();
        }
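
The constructor only builds the consumer. A typical polling loop driving it could look like the sketch below; this method is not part of the original class and assumes Microsoft.Extensions.Logging plus the Confluent.Kafka 1.x Consume/Commit API:

        // Sketch: poll until cancelled, skipping end-of-partition events and committing manually.
        public void ConsumeLoop(string topic, CancellationToken ct)
        {
            _consumer.Subscribe(topic);
            while (!ct.IsCancellationRequested)
            {
                var result = _consumer.Consume(ct);
                if (result.IsPartitionEOF)
                {
                    continue;                               // surfaced because EnablePartitionEof = true
                }
                _logger?.LogInformation($"Received '{result.Message.Value}' at {result.TopicPartitionOffset}");
                _consumer.Commit(result);                   // EnableAutoCommit = false, so commit explicitly
            }
        }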
        public void Consumer_Exiting(string bootstrapServers)
        {
            LogToFile("start Consumer_Exiting");

            int N             = 2;
            var firstProduced = Util.ProduceNullStringMessages(bootstrapServers, singlePartitionTopic, 100, N);

            var consumerConfig = new ConsumerConfig
            {
                BootstrapServers = bootstrapServers,
                SessionTimeoutMs = 6000,
                Debug            = "all"
            };

            for (int i = 0; i < 4; ++i)
            {
                consumerConfig.Set("group.id", Guid.NewGuid().ToString());

                using (var consumer =
                           new ConsumerBuilder<byte[], byte[]>(consumerConfig)
                           .SetPartitionsAssignedHandler((c, partitions) =>
                               partitions.Select(p => new TopicPartitionOffset(p, firstProduced.Offset)))
                           .Build())
                {
                    consumer.Subscribe(singlePartitionTopic);

                    int tryCount = 10;
                    while (tryCount-- > 0)
                    {
                        var record = consumer.Consume(TimeSpan.FromSeconds(10));
                        if (record != null)
                        {
                            break;
                        }
                    }

                    Assert.True(tryCount > 0);

                    // there should be no ill effect doing any of this before disposing a consumer.
                    // (the loop runs i = 0..3, so every case below is reachable)
                    switch (i)
                    {
                    case 0:
                        LogToFile("  -- Unsubscribe");
                        consumer.Unsubscribe();
                        break;

                    case 1:
                        LogToFile("  -- Commit");
                        consumer.Commit();
                        break;

                    case 2:
                        LogToFile("  -- Close");
                        consumer.Close();
                        break;

                    case 3:
                        // Dispose only, with no explicit cleanup call.
                        break;
                    }
                }
            }

            Assert.Equal(0, Library.HandleCount);
            LogToFile("end   Consumer_Exiting");
        }
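
As the comment in the test notes, none of these calls should have ill effects before disposal. For reference, the canonical shutdown order is sketched below (same test fixtures assumed):

            // Sketch: Close() leaves the consumer group cleanly before Dispose releases the native handle.
            using (var consumer = new ConsumerBuilder<byte[], byte[]>(consumerConfig).Build())
            {
                consumer.Subscribe(singlePartitionTopic);
                try
                {
                    var record = consumer.Consume(TimeSpan.FromSeconds(10));    // consume as needed
                }
                finally
                {
                    consumer.Close();                                           // leave the group; Dispose follows via using
                }
            }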
Example #5
        static void Main(string[] args)
        {
            var producerConfig = new ProducerConfig
            {
                //BootstrapServers is set later from the shared startup parameter
                ApiVersionRequest       = true,
                QueueBufferingMaxKbytes = 10240,
#if DEBUG
                Debug = "msg",
#endif
                MessageTimeoutMs = 3000,
            };

            producerConfig.Set("request.required.acks", "-1");
            producerConfig.Set("queue.buffering.max.ms", "5");

            var consumerConfig = new ConsumerConfig
            {
                //BootstrapServers is set later from the shared startup parameter
                ApiVersionRequest = true,
                //GroupId will be assigned a random value by the transport
                EnableAutoCommit    = false,
                FetchWaitMaxMs      = 5,
                FetchErrorBackoffMs = 5,
                QueuedMinMessages   = 1000,
                SessionTimeoutMs    = 6000,
                //StatisticsIntervalMs = 5000,
#if DEBUG
                TopicMetadataRefreshIntervalMs = 20000,                 // Otherwise the default refresh interval is about five minutes
                Debug = "msg",
#endif
                AutoOffsetReset    = AutoOffsetReset.Latest,
                EnablePartitionEof = true
            };

            consumerConfig.Set("fetch.message.max.bytes", "10240");

            IContainer container;
            var        builder = new ContainerBuilder();

            builder.RegisterInstance(new Counter(ItemCount)).As<Counter>().SingleInstance();
            builder.RegisterType<ConfirmationHandler>().As<IHandleMessages<Confirmation>>();
            builder.RegisterRebus((configurer, context) => configurer
                                  .Logging(l => l.ColoredConsole(Rebus.Logging.LogLevel.Info))
                                  .Transport(t => t.UseKafka(_kafkaEndpoint
                                                             , "scaleout.producer", producerConfig, consumerConfig))
                                  .Routing(r => r.TypeBased().Map<TestMessage>("scaleout.consumers"))
                                  );

            using (container = builder.Build())
                using (IBus bus = container.Resolve<IBus>())
                {
                    bus.Subscribe<Confirmation>().Wait();

                    char key;
                    do
                    {
                        var sw         = Stopwatch.StartNew();
                        var sendAmount = 0;
                        var messages   = Enumerable.Range(1, ItemCount)
                                         .Select(i =>
                                         {
                                             sendAmount = sendAmount + i;
                                             return bus.Publish(new TestMessage { MessageNumber = i });
                                         }).ToArray();
                        Task.WaitAll(messages);
                        Console.WriteLine($"Send: {sendAmount} for {sw.ElapsedMilliseconds / 1000f:N3}c");
                        Console.WriteLine("Press any key to exit or 'r' to repeat.");
                        key = Console.ReadKey().KeyChar;
                    } while (key == 'r' || key == 'к');                 // 'к' is the same key under the Russian keyboard layout

                    bus.Unsubscribe<Confirmation>().Wait();             // for testing only
                }
        }
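
The sample assumes TestMessage, Confirmation, Counter and ConfirmationHandler types defined elsewhere. Plausible minimal shapes are sketched below; Rebus' IHandleMessages<T> contract is real, but the bodies and the Counter API are illustrative assumptions:

        // Hypothetical message contracts and handler used by the sample above.
        public class TestMessage  { public int MessageNumber { get; set; } }
        public class Confirmation { public int Amount { get; set; } }

        class ConfirmationHandler : IHandleMessages<Confirmation>
        {
            private readonly Counter _counter;
            public ConfirmationHandler(Counter counter) => _counter = counter;

            public Task Handle(Confirmation message)
            {
                _counter.Decrement();                       // hypothetical Counter method
                return Task.CompletedTask;
            }
        }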
Example #6
        /// <summary>Initializes the transport by ensuring that the input queue has been created</summary>
        public void Initialize()
        {
            // ToDo: Allow configuring transport options via Rebus
            var producerConfig = new ProducerConfig
            {
                BootstrapServers        = _brokerList,
                ApiVersionRequest       = true,
                QueueBufferingMaxKbytes = 10240,
                //{ "socket.blocking.max.ms", 1 }, // **DEPRECATED * *No longer used.
#if DEBUG
                Debug = "msg",
#endif
                MessageTimeoutMs = 3000,
            };

            producerConfig.Set("request.required.acks", "-1");
            producerConfig.Set("queue.buffering.max.ms", "5");

            var builder = new ProducerBuilder<Ignore, TransportMessage>(producerConfig)
                          .SetKeySerializer(new IgnoreSerializer())
                          .SetValueSerializer(new TransportMessageSerializer())
                          .SetLogHandler(ProducerOnLog)
                          .SetStatisticsHandler(ProducerOnStatistics)
                          .SetErrorHandler(ProducerOnError);

            try
            {
                _producer = builder.Build();
            }
            catch (DllNotFoundException)
            {
                // librdkafka.dll could not be found next to the application; try loading it explicitly.
                if (!Library.IsLoaded)
                {
                    // Substring(8) strips the "file:///" prefix from the assembly CodeBase URI.
                    string directory   = System.IO.Path.GetDirectoryName(System.Reflection.Assembly.GetEntryAssembly().GetName().CodeBase.Substring(8));
                    // Pick the bitness of the current process (not the OS) to select the native DLL.
                    var    pathToLibrd = System.IO.Path.Combine(directory, $"librdkafka/{(Environment.Is64BitProcess ? "x64" : "x86")}/librdkafka.dll");
                    _log.Info($"librdkafka is not loaded. Trying to load {pathToLibrd}");
                    Confluent.Kafka.Library.Load(pathToLibrd);
                    _log.Info($"Using librdkafka version: {Library.Version}");
                }
                _producer = builder.Build();
            }
            // ToDo: Allow configuring transport options
            var config = new ConsumerConfig
            {
                BootstrapServers    = _brokerList,
                ApiVersionRequest   = true,
                GroupId             = !string.IsNullOrEmpty(_groupId) ? _groupId : Guid.NewGuid().ToString("N"),
                EnableAutoCommit    = false,
                FetchWaitMaxMs      = 5,
                FetchErrorBackoffMs = 5,
                QueuedMinMessages   = 1000,
                SessionTimeoutMs    = 6000,
                //StatisticsIntervalMs = 5000,
#if DEBUG
                Debug = "msg",
#endif
                AutoOffsetReset    = AutoOffsetReset.Latest,
                EnablePartitionEof = true
            };

            config.Set("fetch.message.max.bytes", "10240");

            // Note: the key and value deserializers are set explicitly below. If they were
            // not set, Confluent.Kafka would fall back to a built-in deserializer for the
            // type where one is available; the built-in deserializer for Ignore returns
            // null for all input data, including non-null data.
            _consumer = new ConsumerBuilder<Ignore, TransportMessage>(config)
                        .SetKeyDeserializer(Deserializers.Ignore)
                        .SetValueDeserializer(new TransportMessageDeserializer())
                        .SetLogHandler(ConsumerOnLog)
                        .SetErrorHandler(ConsumerOnError)
                        .SetStatisticsHandler(ConsumerOnStatistics)
                        .SetRebalanceHandler(ConsumerOnRebalance)
                        .Build();
            _consumer.Subscribe(_knownRoutes.Values);
        }
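
The builders above use TransportMessageSerializer/TransportMessageDeserializer implementations defined elsewhere in the transport. A sketch against Confluent.Kafka's ISerializer<T>/IDeserializer<T> contracts follows; the JSON encoding is an assumption, not necessarily the transport's actual wire format:

        // Hypothetical (de)serializer pair; the real wire format may differ.
        class TransportMessageSerializer : ISerializer<TransportMessage>
        {
            public byte[] Serialize(TransportMessage data, SerializationContext context)
                => System.Text.Encoding.UTF8.GetBytes(Newtonsoft.Json.JsonConvert.SerializeObject(data));
        }

        class TransportMessageDeserializer : IDeserializer<TransportMessage>
        {
            public TransportMessage Deserialize(ReadOnlySpan<byte> data, bool isNull, SerializationContext context)
                => isNull
                    ? null
                    : Newtonsoft.Json.JsonConvert.DeserializeObject<TransportMessage>(
                          System.Text.Encoding.UTF8.GetString(data.ToArray()));
        }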