public async Task EnviarPedido(CriarPedidoDto pedidoDto)
{
    var config = new ProducerConfig { BootstrapServers = _options.Value.BootstrapServers };

    // Safer producer settings
    config.Acks = _options.Value.Acks;
    config.EnableIdempotence = _options.Value.EnableIdempotence;
    config.MessageSendMaxRetries = _options.Value.MessageSendMaxRetries;
    config.MaxInFlight = _options.Value.MaxInFlight;

    // Improve throughput
    config.CompressionType = _options.Value.CompressionType;
    config.LingerMs = _options.Value.LingerMs;
    config.BatchSize = _options.Value.BatchSizeKB * 1024;

    using var producer = new ProducerBuilder<int, string>(config).Build();

    try
    {
        var value = JsonConvert.SerializeObject(pedidoDto);

        await producer.ProduceAsync(
            _options.Value.Topic,
            new Message<int, string>
            {
                Key = new Random().Next(0, 2), // random key: 0 or 1
                Value = value
            });
    }
    catch (ProduceException<int, string> e)
    {
        Console.WriteLine($"Falha ao entregar a mensagem: {e.Message} [{e.Error.Code}]");
    }
}
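The snippet above reads every producer setting from an injected options instance. A minimal sketch of what that options class might look like, assuming only the property names actually used above; the class name `KafkaProducerOptions` and the default values are illustrative, not from the source:

// Hypothetical options class backing _options above; property names mirror the snippet,
// defaults are illustrative only.
public class KafkaProducerOptions
{
    public string BootstrapServers { get; set; }
    public string Topic { get; set; }

    // Reliability
    public Acks Acks { get; set; } = Acks.All;
    public bool EnableIdempotence { get; set; } = true;
    public int MessageSendMaxRetries { get; set; } = 3;
    public int MaxInFlight { get; set; } = 5;

    // Throughput
    public CompressionType CompressionType { get; set; } = CompressionType.Snappy;
    public double LingerMs { get; set; } = 5;
    public int BatchSizeKB { get; set; } = 32;
}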
public async Task<object> AskSinglePeer<T>(string clusterCode, string peerCode, T message)
{
    var questionId = Guid.NewGuid().ToString();
    var question = new object();
    PendingQuestions.Add(questionId, question);

    var conf = _configs[clusterCode];

    using (var p = new ProducerBuilder<Null, MessageWrapper<T>>(conf).Build())
    {
        await p.ProduceAsync(message.GetType().FullName, new Message<Null, MessageWrapper<T>>()
        {
            Value = new MessageWrapper<T>()
            {
                Message = message,
                DestClusterCode = clusterCode,
                DestPeerCode = peerCode,
                QuestionId = questionId
            }
        });
    }

    // Block until the answer for this question arrives and the waiting object is pulsed.
    lock (question)
    {
        Monitor.Wait(question);
    }

    var answer = ReceivedAnswers[questionId];
    ReceivedAnswers.Remove(questionId);
    return answer;
}
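Nothing in the snippet shows how the blocked question is released. A hedged sketch of the missing counterpart on the consumer side, assuming `PendingQuestions` and `ReceivedAnswers` are dictionaries keyed by question id; the method name `OnAnswerReceived` and the removal from `PendingQuestions` are assumptions, not from the source:

// Hypothetical consumer-side handler that completes the request/reply handshake above.
private void OnAnswerReceived(string questionId, object answer)
{
    if (!PendingQuestions.TryGetValue(questionId, out var question))
    {
        return; // nobody is waiting for this answer
    }

    ReceivedAnswers[questionId] = answer;   // make the answer visible to AskSinglePeer
    PendingQuestions.Remove(questionId);

    lock (question)
    {
        Monitor.Pulse(question);            // wake the thread blocked in Monitor.Wait
    }
}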
public static async Task Main(string[] args)
{
    var config = new ProducerConfig { BootstrapServers = "localhost:9092" };

    // If serializers are not specified, default serializers from
    // `Confluent.Kafka.Serializers` will be automatically used where
    // available. Note: by default strings are encoded as UTF8.
    using (var producer = new ProducerBuilder<Null, string>(config).Build())
    {
        var message = new Message<Null, string> { Value = "test" };

        try
        {
            var result = await producer.ProduceAsync(Topic, message);
            Console.WriteLine($"Delivered '{result.Value}' to: topic={result.Topic}, partition={result.Partition.Value}, offset={result.Offset.Value}");
        }
        catch (ProduceException<Null, string> e)
        {
            Console.WriteLine($"Delivery failed: {e.Error.Reason}");
        }
    }
}
/// <inheritdoc cref="IProducerAsync.ProduceManyAsync"/>
public async Task ProduceManyAsync(IList<Event> events, CancellationToken token = default)
{
    using var schemaRegistry = new CachedSchemaRegistryClient(_schemaRegistryConfig);

    // .Build() returns the producer itself, so name the variable accordingly.
    using var producer = new ProducerBuilder<string, Event>(_producerConfig)
        .SetKeySerializer(new AvroSerializer<string>(schemaRegistry))
        .SetValueSerializer(new AvroSerializer<Event>(schemaRegistry))
        .SetErrorHandler((_, error) => _logger.LogError("Kafka encountered an error: {@Error}", error))
        .Build();

    foreach (var @event in events)
    {
        var message = new Message<string, Event> { Key = @event.AggregateName, Value = @event };

        try
        {
            await producer.ProduceAsync(@event.AggregateName, message, token);
        }
        catch (Exception e)
        {
            throw new ProducerException(e.Message, e);
        }
    }
}
public async Task<string> Index(string something)
{
    var config = new ProducerConfig { BootstrapServers = "localhost:9092" };

    // If serializers are not specified, default serializers from
    // `Confluent.Kafka.Serializers` will be automatically used where
    // available. Note: by default strings are encoded as UTF8.
    using (var p = new ProducerBuilder<Null, string>(config).Build())
    {
        try
        {
            var dr = await p.ProduceAsync("test_topic", new Message<Null, string> { Value = something ?? "Missing value" });
            Console.WriteLine($"Delivered '{dr.Value}' to '{dr.TopicPartitionOffset}'");
        }
        catch (ProduceException<Null, string> e)
        {
            Console.WriteLine($"Delivery failed: {e.Error.Reason}");
        }
    }

    return something ?? "Missing value";
}
static async Task ProducerDemoAsync()
{
    IProducer<string, string> producer = new ProducerBuilder<string, string>(new ProducerConfig
    {
        BootstrapServers = BootstrapServers,
        Acks = Acks.Leader,
        MessageSendMaxRetries = 5,
        BatchSize = 20,
        LingerMs = 3000,
    }).Build();

    try
    {
        while (true)
        {
            DeliveryResult<string, string> deliveryResult = await producer.ProduceAsync(Topic, new Message<string, string>
            {
                Key = Guid.NewGuid().ToString(),
                Value = DateTime.Now.ToString()
            });

            Console.WriteLine($"Producer::{deliveryResult.Key}::{deliveryResult.Value}::{deliveryResult.Partition.Value}::{deliveryResult.Offset.Value}::{Thread.CurrentThread.ManagedThreadId}");
            // await Task.Delay(100);
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex);
    }
    finally
    {
        producer.Flush();
        producer.Dispose();
    }
}
static void Main(string[] args)
{
    var address = args[0];
    var topic = args[1];
    var count = (args.Length == 3) ? Convert.ToInt32(args[2]) : 5;

    var generator = new Generator();
    var ads = generator.GenerateAds(count);

    var config = new ProducerConfig
    {
        BootstrapServers = address,
        BrokerAddressFamily = BrokerAddressFamily.V4
    };

    using (var p = new ProducerBuilder<Null, string>(config).Build())
    {
        try
        {
            foreach (var ad in ads)
            {
                // GetAwaiter().GetResult() surfaces ProduceException directly
                // (unlike .Result, which wraps it in an AggregateException and
                // would bypass the catch below).
                var result = p.ProduceAsync(topic, new Message<Null, string> { Value = ad })
                    .GetAwaiter()
                    .GetResult();
            }
        }
        catch (ProduceException<Null, string> e)
        {
            Console.WriteLine($"Delivery failed: {e.Error.Reason}");
        }
    }
}
public static async Task Producer(string brokerList, string connStr, string topic, string cacertlocation)
{
    try
    {
        var config = new ProducerConfig
        {
            BootstrapServers = brokerList,
            SecurityProtocol = SecurityProtocol.SaslSsl,
            SaslMechanism = SaslMechanism.Plain,
            SaslUsername = "******",
            SaslPassword = connStr,
            SslCaLocation = cacertlocation,
            //Debug = "security,broker,protocol" // Uncomment for librdkafka debugging information
        };

        using (var producer = new ProducerBuilder<long, string>(config)
            .SetKeySerializer(Serializers.Int64)
            .SetValueSerializer(Serializers.Utf8)
            .Build())
        {
            Console.WriteLine("Sending 10 messages to topic: " + topic + ", broker(s): " + brokerList);

            for (int x = 0; x < 10; x++)
            {
                var msg = string.Format("Sample message #{0} sent at {1}", x, DateTime.Now.ToString("yyyy-MM-dd_HH:mm:ss.ffff"));
                var deliveryReport = await producer.ProduceAsync(topic, new Message<long, string> { Key = DateTime.UtcNow.Ticks, Value = msg });
                Console.WriteLine(string.Format("Message {0} sent (value: '{1}')", x, msg));
            }
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(string.Format("Exception Occurred - {0}", e.Message));
    }
}
public void Produce(Message message, string topicName)
{
    Task.Run(() =>
    {
        var messageObject = new
        {
            messageType = message.GetType().Name,
            occuredAt = DateTime.Now,
            payload = message
        };

        string messageJson = JsonConvert.SerializeObject(messageObject);
        Thread.Sleep(2000);

        using (var producer = new ProducerBuilder<Null, string>(_producerConfig).Build())
        {
            var t = producer.ProduceAsync(topicName, new Message<Null, string> { Value = messageJson });
            t.Wait();
        }
    });
}
static void Main(string[] args)
{
    var config = new ProducerConfig()
    {
        BootstrapServers = "omnibus-01.srvs.cloudkafka.com:9094,omnibus-02.srvs.cloudkafka.com:9094,omnibus-03.srvs.cloudkafka.com:9094",
        SaslUsername = "******",
        SaslPassword = "******",
        SaslMechanism = SaslMechanism.ScramSha256,
        SecurityProtocol = SecurityProtocol.SaslSsl,
        EnableSslCertificateVerification = false
    };

    using (var producer = new ProducerBuilder<Null, string>(config).Build())
    {
        int count = 0;

        while (true)
        {
            var t = producer.ProduceAsync("p5yt75io-test", new Message<Null, string> { Value = $"message {count++}" });

            // The continuation parameter must not shadow the outer variable `t`.
            t.ContinueWith(task =>
            {
                if (!task.IsFaulted)
                {
                    System.Console.WriteLine($"Delivered: {task.Result.Value} to {task.Result.TopicPartitionOffset}");
                }
                else
                {
                    // Reading task.Result on a faulted task would rethrow; report the exception instead.
                    System.Console.WriteLine($"Delivery failed: {task.Exception?.GetBaseException().Message}");
                }
            });
        }
    }
}
public void PublishMessage(Message_Customer message)
{
    var config = KafkaConfigManagement.Instance;
    var msg = JsonConvert.SerializeObject(message);

    using (var producer = new ProducerBuilder<Null, string>(config.GetProducerConfig()).Build())
    {
        producer.ProduceAsync(config.GetTopic, new Message<Null, string> { Value = msg })
            .GetAwaiter()
            .GetResult();

        producer.Flush(config.TimeOut);
    }
}
private string SendMessageKafka(string message)
{
    var config = new ProducerConfig { BootstrapServers = "localhost:9092" };

    using (var producer = new ProducerBuilder<Null, string>(config).Build())
    {
        try
        {
            var sendResult = producer
                .ProduceAsync("fila_pedido", new Message<Null, string> { Value = message })
                .GetAwaiter()
                .GetResult();
        }
        catch (ProduceException<Null, string> ex)
        {
            Console.WriteLine($"Delivery failed: {ex.Error.Reason}");
        }
    }

    return string.Empty;
}
public async Task<StatusMessageProduced> SendDataAsync(T message)
{
    var result = new StatusMessageProduced();
    var bootstrapServers = _kafkaConnection;
    var nomeTopic = QueueName;

    var config = new ProducerConfig { BootstrapServers = bootstrapServers };

    try
    {
        using var producer = new ProducerBuilder<Null, T>(config).Build();
        await producer.ProduceAsync(nomeTopic, new Message<Null, T> { Value = message });
    }
    catch (Exception ex)
    {
        result.Reason = ex.Message;
    }

    return result;
}
static void Main(string[] args)
{
    var config = new ProducerConfig { BootstrapServers = "localhost:9092" };

    Action<DeliveryReport<Null, string>> handler = r =>
        Console.WriteLine(!r.Error.IsError
            ? $"Delivered message to {r.TopicPartitionOffset}"
            : $"Delivery Error: {r.Error.Reason}");

    using (var producer = new ProducerBuilder<Null, string>(config).Build())
    {
        var stringValue = "";

        for (int i = 0; i < 5; ++i)
        {
            stringValue += "👮🙉 lol" + DateTime.Now.ToLongTimeString();

            // Fire-and-forget produce; the delivery handler declared above reports the result.
            producer.Produce("banana-topic", new Message<Null, string> { Value = stringValue }, handler);
        }

        producer.Flush(TimeSpan.FromSeconds(10));
    }
}
static async Task Main(string[] args)
{
    var config = new ProducerConfig { BootstrapServers = "localhost:9092" };

    while (true)
    {
        var message = Console.ReadLine();

        using (var p = new ProducerBuilder<Null, string>(config).Build())
        {
            try
            {
                var dr = await p.ProduceAsync("test-topic", new Message<Null, string> { Value = message });
                Console.WriteLine($"Delivered '{dr.Value}' to '{dr.TopicPartitionOffset}'");
            }
            catch (ProduceException<Null, string> e)
            {
                Console.WriteLine($"Delivery failed: {e.Error.Reason}");
            }
        }
    }
}
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
    var pconfig = new ProducerConfig()
    {
        BootstrapServers = "kafka:29092",
    };

    var producer = new ProducerBuilder<int, string>(pconfig).Build();
    var id = 1;

    // Stop producing when the host signals shutdown.
    while (!stoppingToken.IsCancellationRequested)
    {
        for (var i = 1; i <= 1000; ++i)
        {
            var value = new Random().Next(0, 100);
            var json = JsonConvert.SerializeObject(new { Id = id, Temperature = value });
            Console.WriteLine("Produced value: " + value);

            await producer.ProduceAsync("test", new Message<int, string>() { Key = id, Value = json }, stoppingToken);
        }

        id++;
    }
}
public async Task<bool> AddAsync(DataLog data)
{
    // TODO: move these to environment variables or the API settings
    string kafkaEndpoint = "127.0.0.1:9092";
    string kafkaTopic = "registro-log";

    var config = new ProducerConfig { BootstrapServers = kafkaEndpoint };

    using (var producer = new ProducerBuilder<Null, string>(config).Build())
    {
        await producer.ProduceAsync(kafkaTopic, new Message<Null, string> { Value = data.Mensaje });
        producer.Flush(TimeSpan.FromSeconds(10));
    }

    return true;
}
/// <summary>
/// Publishes an event to its topic and reports whether the broker persisted it.
/// </summary>
/// <typeparam name="TEvent">Type of the event being published.</typeparam>
/// <param name="event">The event instance to publish.</param>
/// <param name="cancellationToken">Token used to cancel the produce operation.</param>
/// <returns>True if the message was persisted; otherwise false.</returns>
public async Task<bool> PublishEventAsync<TEvent>(TEvent @event, CancellationToken cancellationToken = default)
{
    using var producer = new ProducerBuilder<Guid, string>(settings.ProducerConfig).Build();

    var result = await policy.ExecuteAsync(async () =>
        await producer.ProduceAsync(GetTopic<TEvent>(), new Message<Guid, string>
        {
            Key = Guid.NewGuid(),
            Value = JsonSerializer.Serialize(@event),
        }, cancellationToken));

    if (result.Status == PersistenceStatus.Persisted)
    {
        logger.LogInformation($"Publish event success,status:{result.Status},offset:{result.Offset}");

        if (task == null)
        {
            DoConsume();
        }

        return true;
    }
    else
    {
        logger.LogError($"Publish event failed,status:{result.Status},message:{result.Message}");
        return false;
    }
}
public async Task<string> Send<T>(T message, string topicName)
{
    var config = new ProducerConfig { BootstrapServers = _plaintext };

    using (var producer = new ProducerBuilder<Null, string>(config).Build())
    {
        try
        {
            // The method is async, so await the produce rather than blocking on it.
            var sendResult = await producer.ProduceAsync(topicName, new Message<Null, string>
            {
                Value = JsonSerializer.Serialize(message)
            });

            Console.WriteLine($"Mensagem '{sendResult.Value}' de '{sendResult.TopicPartitionOffset}'");
            return sendResult.Value;
        }
        catch (ProduceException<Null, string> e)
        {
            Console.WriteLine($"Delivery failed: {e.Error.Reason}");
        }
    }

    return string.Empty;
}
static async Task ProduceSpecific(string bootstrapServers, string schemaRegistryUrl)
{
    using (var schemaRegistry = new CachedSchemaRegistryClient(new SchemaRegistryConfig { SchemaRegistryUrl = schemaRegistryUrl }))
    using (var producer = new ProducerBuilder<Null, BadgeEvent>(new ProducerConfig { BootstrapServers = bootstrapServers })
        .SetValueSerializer(new AvroSerializer<BadgeEvent>(schemaRegistry))
        .Build())
    {
        await producer.ProduceAsync("badgeevent", new Message<Null, BadgeEvent>
        {
            Value = new BadgeEvent
            {
                id = "9",
                name = "Teacher",
                userId = "16",
                displayName = "dragonmantank",
                reputation = "7636",
                upVotes = 56,
                downVotes = 3,
                processedDate = DateTime.UtcNow.ToString()
            }
        });

        producer.Flush(TimeSpan.FromSeconds(30));
    }
}
public async Task<DeliveryResult<Null, string>> ProduceAsync<TValue>(MessageProducerConfiguration messageConfiguration, TValue value)
{
    System.Console.WriteLine($"Producing to {messageConfiguration.Topic}");

    using (var producer = new ProducerBuilder<Null, string>(_producerConfig).Build())
    {
        try
        {
            var deliveryResult = await producer.ProduceAsync(
                messageConfiguration.Topic,
                new Message<Null, string>() { Value = JsonConvert.SerializeObject(value) });

            return deliveryResult;
        }
        catch (Exception ex)
        {
            throw new MessageProducerException(messageConfiguration.Topic, ex);
        }
    }
}
public void GarbageCollect(string bootstrapServers)
{
    LogToFile("start GarbageCollect");

    var producerConfig = new ProducerConfig { BootstrapServers = bootstrapServers };
    var consumerConfig = new ConsumerConfig { GroupId = Guid.NewGuid().ToString(), BootstrapServers = bootstrapServers };

    using (var producer = new ProducerBuilder<byte[], byte[]>(producerConfig).Build())
    {
        producer.ProduceAsync(singlePartitionTopic, new Message<byte[], byte[]>
        {
            Value = Serializers.Utf8.Serialize("test string", SerializationContext.Empty)
        }).Wait();
    }

    using (var consumer = new ConsumerBuilder<byte[], byte[]>(consumerConfig).Build())
    {
        consumer.Subscribe(singlePartitionTopic);
        consumer.Consume(TimeSpan.FromSeconds(10));
        consumer.Close();
    }

    // The process running the tests has probably had many created / destroyed clients by now.
    // This is an arbitrarily chosen test to put this check in.
    Assert.Equal(0, Library.HandleCount);

    GC.Collect();
    // If an attempt is made to free an unmanaged resource a second time
    // in an object finalizer, the call to .Collect() will likely segfault.

    LogToFile("end GarbageCollect");
}
private async Task SendMessageAvro(string message)
{
    var config = new ProducerConfig
    {
        BootstrapServers = "kafka:29092",
        ClientId = Dns.GetHostName()
    };

    string userSchema = "{\"type\":\"record\"," +
                        "\"name\":\"myrecord\"," +
                        "\"fields\":[{\"name\":\"f1\",\"type\":\"string\"}]}";

    var schema = (RecordSchema)RecordSchema.Parse(userSchema);
    GenericRecord avroRecord = new GenericRecord(schema);
    avroRecord.Add("f1", "value");

    var schemaRegistryUrl = "http://schema-registry:8085";

    using (var schemaRegistry = new CachedSchemaRegistryClient(new SchemaRegistryConfig { Url = schemaRegistryUrl }))
    using (var producer = new ProducerBuilder<string, GenericRecord>(config)
        .SetKeySerializer(new AvroSerializer<string>(schemaRegistry))
        .SetValueSerializer(new AvroSerializer<GenericRecord>(schemaRegistry))
        .Build())
    {
        await producer.ProduceAsync("api-methods-requested-avro", new Message<string, GenericRecord>
        {
            Key = Guid.NewGuid().ToString("N"),
            Value = avroRecord
        });
    }
}
public async Task<IActionResult> ConsultarHotelesAsync([FromBody] ConsultarHoteles model)
{
    try
    {
        DateTime dateTimeInicio;
        DateTime dateTimeFinal;

        if (!DateTime.TryParseExact(model.FechaInicio, "yyyy'-'MM'-'dd", CultureInfo.InvariantCulture, DateTimeStyles.None, out dateTimeInicio))
        {
            return BadRequest("Formato de fecha invalido, formato permitido yyyy-MM-dd");
        }

        if (!DateTime.TryParseExact(model.FechaFinal, "yyyy'-'MM'-'dd", CultureInfo.InvariantCulture, DateTimeStyles.None, out dateTimeFinal))
        {
            return BadRequest("Formato de fecha invalido, formato permitido yyyy-MM-dd");
        }

        var destino = _db.Aeropuertos.FirstOrDefault(c => c.CiudadUbicacin == model.CiudadDestino);

        if (destino == null)
        {
            return NotFound("No se encontro la ciudad de destino");
        }

        ParametrosDTO parametros = new ParametrosDTO();
        parametros.processType = "CATALOG";
        parametros.Uuid = model.Uuid;
        parametros.Tipo_proveedor = "HOTEL";
        parametros.Tipo_proceso = "catalogue";

        Consulta2 consultaHotel = new Consulta2
        {
            City = destino.CiudadUbicacin,
            Country = "Colombia",
            QuantityRooms = "1",
            RoomType = "Bar",
            EndDate = model.FechaFinal,
            StartDate = model.FechaInicio
        };

        parametros.Parametros.hotel.consulta = consultaHotel;

        string parametrosSerializados = JsonConvert.SerializeObject(parametros);

        using (var producer = new ProducerBuilder<Null, string>(_config).Build())
        {
            await producer.ProduceAsync("topic-info-reader", new Message<Null, string> { Value = parametrosSerializados });
            producer.Flush(TimeSpan.FromSeconds(10));
            return Ok();
        }
    }
    catch (Exception ex)
    {
        Logger.LogError("Excepcion generada en ConsultarHoteles: " + ex.Message);
        return StatusCode(500, "Ocurrio un error");
    }
}
public async Task<HealthCheckResult> CheckHealthAsync(HealthCheckContext context, CancellationToken cancellationToken = default)
{
    try
    {
        using (var producer = new ProducerBuilder<string, string>(_configuration).Build())
        {
            var message = new Message<string, string>()
            {
                Key = "healthcheck-key",
                Value = $"Check Kafka healthy on {DateTime.UtcNow}"
            };

            var result = await producer.ProduceAsync(_topic, message);

            if (result.Status == PersistenceStatus.NotPersisted)
            {
                return new HealthCheckResult(
                    context.Registration.FailureStatus,
                    description: "Message is not persisted or a failure is raised on health check for kafka.");
            }

            return HealthCheckResult.Healthy();
        }
    }
    catch (Exception ex)
    {
        return new HealthCheckResult(context.Registration.FailureStatus, exception: ex);
    }
}
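The method above implements IHealthCheck.CheckHealthAsync, so the containing class can be plugged into the ASP.NET Core health check pipeline. A minimal registration sketch, assuming the class is named KafkaHealthCheck and the /health endpoint path; both names are assumptions, not from the source:

// Hypothetical registration; KafkaHealthCheck and the endpoint path are assumed.
services.AddHealthChecks()
        .AddCheck<KafkaHealthCheck>("kafka", failureStatus: HealthStatus.Unhealthy);

// Expose the aggregated result, e.g. at /health:
app.MapHealthChecks("/health");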
public async Task<ProduceResult> Publish(MessageValue mv)
{
    try
    {
        using (var p = new ProducerBuilder<Null, string>(_config).Build())
        {
            Console.WriteLine($"Produce message {mv.Value}");

            var random = new Random();
            TopicPartition tp = new TopicPartition(this._topicName, new Partition(random.Next(0, 5)));

            var dr = await p.ProduceAsync(tp, new Message<Null, string> { Value = mv.Value });

            return new ProduceResult()
            {
                TopicPartitionOffset = dr.TopicPartitionOffset.Offset.Value,
                TopicPartition = dr.TopicPartition.Partition.Value
            };
        }
    }
    catch (System.Exception e)
    {
        Console.Write($"Error: {e.Message}");
    }

    return new ProduceResult();
}
async static Task ProduceSpecific(string bootstrapServers, string schemaRegistryUrl)
{
    using (var schemaRegistry = new CachedSchemaRegistryClient(new SchemaRegistryConfig { Url = schemaRegistryUrl }))
    using (var producer = new ProducerBuilder<Null, MessageTypes.LogMessage>(new ProducerConfig { BootstrapServers = bootstrapServers })
        .SetValueSerializer(new AvroSerializer<MessageTypes.LogMessage>(schemaRegistry))
        .Build())
    {
        await producer.ProduceAsync("log-messages", new Message<Null, MessageTypes.LogMessage>
        {
            Value = new MessageTypes.LogMessage
            {
                IP = "192.168.0.1",
                Message = "a test message 2",
                Severity = MessageTypes.LogLevel.Info,
                Tags = new Dictionary<string, string> { { "location", "CA" } }
            }
        });

        producer.Flush(TimeSpan.FromSeconds(30));
    }
}
private static string SendMessageByKafka(string message)
{
    var config = new ProducerConfig { BootstrapServers = "localhost:9092" };

    using (var producer = new ProducerBuilder<Null, string>(config).Build())
    {
        try
        {
            var sendResult = producer
                .ProduceAsync("fila_pedido", new Message<Null, string> { Value = message })
                .GetAwaiter()
                .GetResult();

            return $"Mensagem {sendResult.Value} de {sendResult.TopicPartitionOffset}";
        }
        catch (ProduceException<Null, string> e)
        {
            throw new Exception($"Delivery failed: {e.Error.Reason}");
        }
    }
}
public void Producer_ProduceAsync_Await_Serializing(string bootstrapServers)
{
    LogToFile("start Producer_ProduceAsync_Await_Serializing");

    Func<Task> mthd = async () =>
    {
        using (var producer = new ProducerBuilder<Null, string>(new ProducerConfig { BootstrapServers = bootstrapServers }).Build())
        {
            var dr = await producer.ProduceAsync(
                singlePartitionTopic,
                new Message<Null, string> { Value = "test string" });

            Assert.Equal(0, producer.Flush(TimeSpan.FromSeconds(10)));
            Assert.NotEqual(Offset.Unset, dr.Offset);
        }
    };

    mthd().Wait();

    Assert.Equal(0, Library.HandleCount);
    LogToFile("end Producer_ProduceAsync_Await_Serializing");
}
public string SendMessage(string msg)
{
    var config = new ProducerConfig { BootstrapServers = "localhost:9092" };

    using (var producer = new ProducerBuilder<Null, string>(config).Build())
    {
        try
        {
            var sendResult = producer
                .ProduceAsync("test", new Message<Null, string> { Value = msg })
                .GetAwaiter()
                .GetResult();

            return $"Mensagem {sendResult.Value} de {sendResult.TopicPartitionOffset}";
        }
        catch (ProduceException<Null, string> e)
        {
            Console.WriteLine($"Erro ao enviar mensagem {e.Error.Reason}");
        }

        return default;
    }
}