/// <summary>
/// Builds a view model from a domain <see cref="Producer"/>, wrapping its
/// nested address and contact objects in their own view models.
/// </summary>
/// <param name="producer">Source producer; assumed non-null with non-null Address/Contact.</param>
public ProducerViewModel(Producer producer)
{
    // Wrap nested entities in their dedicated view models.
    Address = new AddressViewModel(producer.Address);
    Contact = new ContactViewModel(producer.Contact);

    // Copy scalar fields straight across.
    AreMultiple = producer.AreMultiple;
    BusinessName = producer.BusinessName;
}
// Verifies that messages published with High priority are delivered ahead of
// Low-priority messages even though both producers run concurrently.
// Assumes the broker honors NMS message priority ordering on delivery.
public void TestPriorityConsumption()
{
    IConnection conn = createConnection(true);
    ISession receiverSession = conn.CreateSession();
    ISession senderSession = conn.CreateSession();
    IDestination queue = receiverSession.GetQueue(DESTINATION_NAME);

    // Start from a clean queue so leftovers from earlier runs don't skew ordering.
    PurgeQueue(conn, queue);

    IMessageConsumer consumer = receiverSession.CreateConsumer(queue);

    // Two competing producers: MSG_COUNT high-priority and MSG_COUNT low-priority messages.
    Producer producer1 = new Producer(senderSession, queue, MSG_COUNT, MsgPriority.High);
    Producer producer2 = new Producer(senderSession, queue, MSG_COUNT, MsgPriority.Low);
    producer1.Start();
    producer2.Start();
    // Wait for both producers to finish publishing before consuming.
    producer1.Join();
    producer2.Join();

    // Expect every high-priority message first, then the low-priority batch.
    for(int i = 0; i < MSG_COUNT * 2; i++)
    {
        IMessage msg = consumer.Receive(TimeSpan.FromMilliseconds(1000));
        Assert.IsNotNull(msg, "Message {0} was null", i);
        Assert.AreEqual(i < MSG_COUNT ? MsgPriority.High : MsgPriority.Low, msg.NMSPriority,
            "Message {0} priority was wrong", i);
    }
}
// Form constructor: wires one producer and two competing consumers to the same
// queue, demonstrating round-robin work distribution between consumers.
public _2_Work_Queues()
{
    InitializeComponent();

    // Create the producer.
    producer = new Producer(HOST_NAME, QUEUE_NAME);

    // ** CONSUMER 1 **
    consumer = new Consumer(HOST_NAME, QUEUE_NAME);
    // This form will handle received messages.
    consumer.onMessageReceived += handleMessage;
    // Start consuming.
    consumer.StartConsuming();

    // ** CONSUMER 2 ** — second consumer on the same queue; the broker will
    // distribute messages between the two.
    consumer2 = new Consumer(HOST_NAME, QUEUE_NAME);
    consumer2.onMessageReceived += handleMessage2;
    consumer2.StartConsuming();
}
// Drives a producer/consumer throughput benchmark: starts consumerCount
// consumers and producerCount producers, stops them all, and prints elapsed time.
// NOTE(review): 'remaining' is read by the plain closure (remaining == 0) while
// other threads write it via Interlocked.Decrement — visibility of the final
// value is not guaranteed without a volatile read; confirm Consumer's polling
// tolerates a stale read.
// NOTE(review): Stop() is called immediately after starting the producers —
// presumably Stop() blocks until the worker drains; TODO confirm.
void Loop(Action<string> producer, Action<Action<string>, Action> consumer, int producerCount, int consumerCount, int iterations)
{
    var producers = new Producer[producerCount];
    var consumers = new Consumer[consumerCount];

    // Total number of items expected across all producers.
    int remaining = iterations*producerCount;
    Stopwatch timer = Stopwatch.StartNew();

    // Start consumers first so they are ready before production begins.
    // Each consumer is given a completion predicate and a per-item countdown action.
    for (int i = 0; i < consumerCount; i++)
    {
        consumers[i] = new Consumer(consumer, () => remaining == 0, () => Interlocked.Decrement(ref remaining));
        consumers[i].Start();
    }
    for (int i = 0; i < producerCount; i++)
    {
        producers[i] = new Producer(producer, iterations);
        producers[i].Start();
    }

    for (int i = 0; i < producerCount; i++)
        producers[i].Stop();
    for (int i = 0; i < consumerCount; i++)
        consumers[i].Stop();

    timer.Stop();
    Console.WriteLine("Total Time: " + timer.ElapsedMilliseconds + "ms");
}
// Integration test: publishes msgCount messages plus one sentinel
// ("bad_test_case") to a fresh topic, then verifies the expected messages are
// readable. The topic is deleted in the finally block regardless of outcome.
public void TestProducerPublish()
{
    var topicName = "publish" + DateTime.Now.Unix();   // unique topic per run
    const int msgCount = 10;
    var config = new Config();
    var w = new Producer("127.0.0.1:4150", new ConsoleLogger(LogLevel.Debug), config);
    try
    {
        for (int i = 0; i < msgCount; i++)
        {
            w.Publish(topicName, "publish_test_case");
        }
        // Sentinel message; readMessages only expects msgCount good messages.
        w.Publish(topicName, "bad_test_case");
        readMessages(topicName, msgCount);
    }
    finally
    {
        w.Stop();
        // Clean up the topic on both nsqd and nsqlookupd.
        _nsqdHttpClient.DeleteTopic(topicName);
        _nsqLookupdHttpClient.DeleteTopic(topicName);
    }
}
// End-to-end test: sends one message through a ZooKeeper-aware producer (forced
// to partition 0 by a mock partitioner), waits until some broker's offset
// advances, then fetches from that broker and verifies the round-tripped
// message matches the original.
//
// FIX: the fetch-poll condition previously used the non-short-circuiting '&'
// ("response != null & response.Messages.Count() > 0"), which evaluates
// response.Messages even when response is null and throws
// NullReferenceException. Changed to '&&'.
public void ZKAwareProducerSends1Message()
{
    int totalWaitTimeInMiliseconds = 0;
    int waitSingle = 100;   // polling interval in ms
    var originalMessage = new Message(Encoding.UTF8.GetBytes("TestData"));

    // Snapshot broker offsets so we can detect which broker received the message.
    var multipleBrokersHelper = new TestMultipleBrokersHelper(CurrentTestTopic);
    multipleBrokersHelper.GetCurrentOffsets();

    var producerConfig = new ProducerConfig(clientConfig);
    var mockPartitioner = new MockAlwaysZeroPartitioner();
    using (var producer = new Producer<string, Message>(producerConfig, mockPartitioner, new DefaultEncoder()))
    {
        var producerData = new ProducerData<string, Message>(
            CurrentTestTopic, "somekey", new List<Message>() { originalMessage });
        producer.Send(producerData);

        // Poll until some broker's offset changes, or fail after the max wait time.
        while (!multipleBrokersHelper.CheckIfAnyBrokerHasChanged())
        {
            totalWaitTimeInMiliseconds += waitSingle;
            Thread.Sleep(waitSingle);
            if (totalWaitTimeInMiliseconds > MaxTestWaitTimeInMiliseconds)
            {
                Assert.Fail("None of the brokers changed their offset after sending a message");
            }
        }

        totalWaitTimeInMiliseconds = 0;

        // Consume from the broker that took the message, starting at the offset
        // recorded before the send.
        var consumerConfig = new ConsumerConfig(clientConfig)
        {
            Host = multipleBrokersHelper.BrokerThatHasChanged.Address,
            Port = multipleBrokersHelper.BrokerThatHasChanged.Port
        };
        IConsumer consumer = new Consumers.Consumer(consumerConfig);
        var request = new FetchRequest(CurrentTestTopic, 0, multipleBrokersHelper.OffsetFromBeforeTheChange);
        BufferedMessageSet response;
        while (true)
        {
            Thread.Sleep(waitSingle);
            response = consumer.Fetch(request);
            // '&&' short-circuits so Messages is only read when response is non-null.
            if (response != null && response.Messages.Count() > 0)
            {
                break;
            }
            totalWaitTimeInMiliseconds += waitSingle;
            if (totalWaitTimeInMiliseconds >= MaxTestWaitTimeInMiliseconds)
            {
                break;   // give up; the assertions below report the failure
            }
        }

        Assert.NotNull(response);
        Assert.AreEqual(1, response.Messages.Count());
        Assert.AreEqual(originalMessage.ToString(), response.Messages.First().ToString());
    }
}
// End-to-end test: sends a single message through the sync producer, waits
// until one of the three configured brokers advances its offset, then fetches
// from that broker/partition and verifies the round-tripped message matches.
public void ProducerSends1Message()
{
    var prodConfig = this.ConfigBasedSyncProdConfig;
    int totalWaitTimeInMiliseconds = 0;
    int waitSingle = 100;   // polling interval in ms
    var originalMessage = new Message(Encoding.UTF8.GetBytes("TestData"));

    // Snapshot broker offsets so we can detect which broker received the message.
    var multipleBrokersHelper = new TestMultipleBrokersHelper(CurrentTestTopic);
    multipleBrokersHelper.GetCurrentOffsets(
        new[] { this.SyncProducerConfig1, this.SyncProducerConfig2, this.SyncProducerConfig3 });

    using (var producer = new Producer(prodConfig))
    {
        var producerData = new ProducerData<string, Message>(
            CurrentTestTopic, new List<Message> { originalMessage });
        producer.Send(producerData);
        Thread.Sleep(waitSingle);
    }

    // Poll until some broker's offset changes, or fail after the max wait time.
    while (
        !multipleBrokersHelper.CheckIfAnyBrokerHasChanged(
            new[] { this.SyncProducerConfig1, this.SyncProducerConfig2, this.SyncProducerConfig3 }))
    {
        totalWaitTimeInMiliseconds += waitSingle;
        Thread.Sleep(waitSingle);
        if (totalWaitTimeInMiliseconds > this.maxTestWaitTimeInMiliseconds)
        {
            Assert.Fail("None of the brokers changed their offset after sending a message");
        }
    }

    totalWaitTimeInMiliseconds = 0;

    // Consume from the broker/partition that actually took the message,
    // starting at the offset recorded before the send.
    var consumerConfig = new ConsumerConfiguration(
        multipleBrokersHelper.BrokerThatHasChanged.Host,
        multipleBrokersHelper.BrokerThatHasChanged.Port);
    IConsumer consumer = new Consumer(consumerConfig);
    var request1 = new FetchRequest(CurrentTestTopic, multipleBrokersHelper.PartitionThatHasChanged, multipleBrokersHelper.OffsetFromBeforeTheChange);
    BufferedMessageSet response;
    while (true)
    {
        Thread.Sleep(waitSingle);
        response = consumer.Fetch(request1);
        if (response != null && response.Messages.Count() > 0)
        {
            break;
        }
        totalWaitTimeInMiliseconds += waitSingle;
        if (totalWaitTimeInMiliseconds >= this.maxTestWaitTimeInMiliseconds)
        {
            break;   // give up; the assertions below report the failure
        }
    }

    Assert.NotNull(response);
    Assert.AreEqual(1, response.Messages.Count());
    Assert.AreEqual(originalMessage.ToString(), response.Messages.First().ToString());
}
/// <summary>
/// Runs one producer (10 items) and one consumer concurrently against a
/// shared bounded buffer of capacity 4.
/// </summary>
public void TestMethod1()
{
    BoundedBuffer sharedBuffer = new BoundedBuffer(4);
    Producer itemProducer = new Producer(sharedBuffer, 10);
    Consumer itemConsumer = new Consumer(sharedBuffer);

    // Invoke runs both delegates in parallel and blocks until both complete.
    Parallel.Invoke(itemProducer.Run, itemConsumer.Run);
}
/// <summary>
/// Combined-event demo: two producers raise events at different rates while a
/// single consumer correlates them and increments a counter per combination.
/// </summary>
static void ClassicCombineTest()
{
    var fastProducer = new Producer<ExampleEventArgs1>(15, () => new ExampleEventArgs1());
    var slowProducer = new Producer<ExampleEventArgs2>(40, () => new ExampleEventArgs2());

    Consumer combiningConsumer = new Consumer(fastProducer, slowProducer);
    combiningConsumer.OnCombinedEvents += IncreaseCounter;

    // Drive both producers concurrently; Invoke returns when both finish.
    Parallel.Invoke(() => fastProducer.Run(1000), () => slowProducer.Run(500));
}
/// <summary>
/// Projects the Producer1 navigation entity of a product onto a flat
/// <see cref="Producer"/> DTO.
/// </summary>
/// <param name="mProduct">Product whose Producer1 is mapped; assumed non-null.</param>
/// <returns>A new Producer carrying the same scalar fields.</returns>
private static Producer MapProducer(Models.Product mProduct)
{
    // Read the navigation property once, then copy fields via an initializer.
    var source = mProduct.Producer1;
    return new Producer
    {
        Id = source.Id,
        Name = source.Name,
        Address = source.Address,
        Telephone = source.Telephone,
        Email = source.Email,
        Website = source.Website
    };
}
/// <summary>
/// Initializes a new instance of the NsqSharp.Bus.Configuration.BuiltIn.NsqdTcpPublisher class.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when one or more required arguments are null.</exception>
/// <param name="nsqdAddress">The nsqd address.</param>
/// <param name="logger">The logger.</param>
/// <param name="config">The configuration.</param>
public NsqdTcpPublisher(string nsqdAddress, ILogger logger, Config config)
{
    // nameof keeps the parameter-name strings refactor-safe; the exception
    // messages produced are byte-identical to the previous string literals.
    if (string.IsNullOrEmpty(nsqdAddress))
        throw new ArgumentNullException(nameof(nsqdAddress));
    if (logger == null)
        throw new ArgumentNullException(nameof(logger));
    if (config == null)
        throw new ArgumentNullException(nameof(config));

    _producer = new Producer(nsqdAddress, logger, config);
}
// Test fixture setup: creates a producer collection with a fixed id and two
// producers built from empty/default factory objects, then overwrites their
// entity ids with well-known GUIDs so tests can reference them deterministically.
public NotificationProducerTests()
{
    producerCollection = new ProducerCollection(new Guid("784C0850-7FBA-401D-BA0F-64600B36583C"));
    anyProducer1 = producerCollection.AddProducer(ObjectFactory.CreateEmptyProducerBusiness(), ObjectFactory.CreateDefaultAddress(), ObjectFactory.CreateEmptyContact());
    anyProducer2 = producerCollection.AddProducer(ObjectFactory.CreateEmptyProducerBusiness(), ObjectFactory.CreateDefaultAddress(), ObjectFactory.CreateEmptyContact());
    // Force deterministic ids in place of whatever AddProducer generated.
    EntityHelper.SetEntityId(anyProducer1, new Guid("18C7F135-AE7F-4F1E-B6F9-076C12224292"));
    EntityHelper.SetEntityId(anyProducer2, new Guid("58318441-922C-4453-8A96-D5AA2E3D9B5A"));
}
// Runs a producer and a consumer on dedicated threads and waits for the
// consumer to finish.
// NOTE(review): producerThread is never joined — if Consumer.Run can return
// before Producer.Run, the producer may still be running when this method
// exits; confirm this is intentional.
public void RunSoakTest()
{
    var producer = new Producer();
    var consumer = new Consumer();
    var producerThread = new Thread(producer.Run);
    var consumerThread = new Thread(consumer.Run);
    producerThread.Start();
    consumerThread.Start();
    // Block until the consumer side completes.
    consumerThread.Join();
}
/// <summary>
/// Sends two UTF-8 text messages ("kafka 1." and "kafka 2.") to partition 0
/// of the "test" topic in a single batch.
/// </summary>
public void ProducerSendsMessage()
{
    // Build both payload messages up front.
    Message firstMessage = new Message(Encoding.UTF8.GetBytes("kafka 1."));
    Message secondMessage = new Message(Encoding.UTF8.GetBytes("kafka 2."));

    Producer producer = new Producer(KafkaServer, KafkaPort);
    producer.Send("test", 0, new List<Message> { firstMessage, secondMessage });
}
/// <summary>
/// Bundles four timestamped messages — two for topic "test", two for topic
/// "testa" — into one multi-producer request and sends it.
/// </summary>
public void ProducerSendMultiRequest()
{
    // Each request carries a single message whose payload embeds its sequence
    // number and the UTC time at construction.
    var requests = new List<ProducerRequest>
    {
        new ProducerRequest("test", 0, new List<Message> { new Message(Encoding.UTF8.GetBytes("1: " + DateTime.UtcNow)) }),
        new ProducerRequest("test", 0, new List<Message> { new Message(Encoding.UTF8.GetBytes("2: " + DateTime.UtcNow)) }),
        new ProducerRequest("testa", 0, new List<Message> { new Message(Encoding.UTF8.GetBytes("3: " + DateTime.UtcNow)) }),
        new ProducerRequest("testa", 0, new List<Message> { new Message(Encoding.UTF8.GetBytes("4: " + DateTime.UtcNow)) })
    };

    var multiRequest = new MultiProducerRequest(requests);
    var producer = new Producer(KafkaServer, KafkaPort);
    producer.Send(multiRequest);
}
// Form constructor: creates a fan-out producer and connects to the broker,
// surfacing a message box if the connection fails.
public PubSub_Producer()
{
    InitializeComponent();
    // Declare the producer against a fan-out exchange (every bound queue gets a copy).
    producer = new Producer(HOST_NAME, EXCHANGE_NAME, ExchangeType.Fanout);
    // Connect to RabbitMQ.
    if(!producer.ConnectToRabbitMQ())
    {
        // Show a basic error if we fail.
        MessageBox.Show("Could not connect to Broker");
    }
}
// Form constructor: wires one producer and one consumer to the same queue and
// starts consuming immediately.
public Form1()
{
    InitializeComponent();
    // Create the producer.
    producer = new Producer(HOST_NAME, QUEUE_NAME);
    // Create the consumer.
    consumer = new Consumer(HOST_NAME, QUEUE_NAME);
    // This form will handle received messages.
    consumer.onMessageReceived += handleMessage;
    // Start consuming.
    consumer.StartConsuming();
}
// Entry point for the MLT demo: loads the media resource named by the first
// command-line argument and, if valid, plays it through an SDL consumer until
// playback stops.
//
// FIX: args[0] was previously read unconditionally, throwing
// IndexOutOfRangeException when the program was launched without arguments.
public static void Main(String[] args)
{
    Console.WriteLine("Welcome to MLT.");

    // Guard against a missing resource argument.
    if (args == null || args.Length == 0)
    {
        Console.WriteLine("Usage: supply a media resource path as the first argument.");
        return;
    }

    Factory.init();
    Profile profile = new Profile("");
    Producer p = new Producer(profile, args[0], null);
    if (p.is_valid())
    {
        Consumer c = new Consumer(profile, "sdl", null);
        c.set("rescale", "none");   // keep original dimensions
        c.connect(p);
        c.start();
        // Poll until the consumer reports playback has stopped.
        while (!c.is_stopped())
            Thread.Sleep(300);
        c.stop();
    }
}
// Builds the producer summary view model used on the annex/notification pages,
// flattening business, address, and contact details for display.
// NOTE(review): IsSiteOfGeneration is populated from producer.IsSiteOfExport —
// the names disagree; confirm against the domain model before relying on it.
public ProducerViewModel(Producer producer, int countOfProducers, string processText, bool? isIsProcessAnnexAttachedAttached)
{
    Name = producer.Business.Name;
    address = new AddressViewModel(producer.Address);
    ContactPerson = producer.Contact.FullName;
    // Format contact numbers for display.
    Telephone = producer.Contact.Telephone.ToFormattedContact();
    Fax = producer.Contact.Fax.ToFormattedContact();
    Email = producer.Contact.Email;
    RegistrationNumber = producer.Business.RegistrationNumber;
    IsSiteOfGeneration = producer.IsSiteOfExport;
    AnnexMessage = string.Empty;
    // Derive site-of-generation display state from how many producers exist.
    SetSiteOfGeneration(countOfProducers);
    CountOfProducers = countOfProducers;
    // Null process text displays as empty rather than null.
    ProcessOfGeneration = processText ?? string.Empty;
    IsProcessAnnexAttached = isIsProcessAnnexAttachedAttached;
    DescriptionTitle = "Process of generation";
}
// Creates a producer with the posted name and returns 201 Created.
// NOTE(review): 'Savechanges' (lower-case 'c') — if the data layer follows the
// usual EF naming this should be SaveChanges; confirm against the repository API.
public IHttpActionResult Post([FromBody]ProducerDataModel model)
{
    if (!this.ModelState.IsValid)
    {
        return this.BadRequest(this.ModelState);
    }
    var producer = new Producer { Name = model.Name };
    this.data.Producers.Add(producer);
    this.data.Savechanges();
    // Location header points at the current URL (no per-entity route used here).
    return this.Created(this.Url.ToString(), producer);
}
// POST handler: persists the submitted producer details as draft data for the
// given notification id, then advances the user to the Facility step.
// Invalid model state redisplays the form with validation messages.
public async Task<ActionResult> Index(Guid id, ProducerViewModel model)
{
    if (!ModelState.IsValid)
    {
        return View(model);
    }
    // Map the view model back onto a domain Producer keyed by the notification id.
    var producer = new Producer(id)
    {
        Address = model.Address.AsAddress(),
        AreMultiple = model.AreMultiple,
        BusinessName = model.BusinessName,
        Contact = model.Contact.AsContact()
    };
    await mediator.SendAsync(new SetDraftData<Producer>(id, producer));
    return RedirectToAction("Index", "Facility");
}
// Produces one string message with the default encoder and fetches it back,
// asserting the fetch response's correlation id and the payload round-trip.
public void TestDefaultEncoderProducerAndFetch()
{
    var topic = "test-topic";
    var config = Producer.Config;
    var stringProducer1 = new Producer<string, string>(config);
    stringProducer1.Send(new KeyedMessage<string, string>(topic, "test-message"));

    // note we can't validate high watermark here
    var request = new FetchRequestBuilder().ClientId("test-client").AddFetch(topic, 0, 0, 10000).Build();
    var fetched = Consumer.Fetch(request);
    Assert.Equal(0, fetched.CorrelationId);

    // Exactly one message should come back, with the original payload.
    var messageSet = fetched.MessageSet(topic, 0);
    Assert.True(messageSet.Iterator().HasNext());
    var fetchedMessageAndOffset = messageSet.Iterator().Next();
    Assert.Equal("test-message", Util.ReadString(fetchedMessageAndOffset.Message.Payload));
}
// Sends 50 random messages asynchronously and blocks until the send callback
// fires, printing progress while waiting.
//
// FIX: the original polled a plain 'bool waiting' flag that the callback thread
// flipped — with no memory barrier the polling loop is not guaranteed to ever
// observe the write. A ManualResetEventSlim provides the necessary visibility
// and wakes the waiter promptly.
public void ProducerSendsMessageAsynchronously()
{
    using (var completed = new ManualResetEventSlim(false))
    {
        List<Message> messages = GenerateRandomMessages(50);

        Producer producer = new Producer(KafkaServer, KafkaPort);
        producer.SendAsync(
            "test",
            0,
            messages,
            (requestContext) => { completed.Set(); });

        // Poll with a short timeout so progress is still printed while waiting.
        while (!completed.Wait(10))
        {
            Console.WriteLine("Keep going...");
        }
    }
}
// Adds a producer built from the validated input model, then redirects home.
// NOTE(review): int.Parse throws FormatException when CountryId is not numeric
// — presumably model validation guarantees this; confirm.
// NOTE(review): invalid ModelState silently redirects with no user feedback.
public ActionResult AddProducer(AddProducerInputModel model)
{
    if (ModelState.IsValid)
    {
        var newProducer = new Producer
        {
            Name = model.Name,
            Address = model.Address,
            DateAdded = DateTime.UtcNow,
            Email = model.Email,
            Telephone = model.Telephone,
            Website = model.Website,
            CountryId = int.Parse(model.Country.CountryId)
        };
        this.producers.Add(newProducer);
    }
    return Redirect("/");
}
/// <summary>
/// Creates a producer from the request model, assigns the first country on
/// record as a default, persists it, and reports the number of affected rows.
/// </summary>
public IHttpActionResult Post(ProducerRequestModel request)
{
    // Guard: a missing body is a client error.
    if (request == null)
    {
        return this.BadRequest(GlobalMessages.EntityMustNotBeNullMessage);
    }

    var producer = new Producer
    {
        FirstName = request.FirstName,
        LastName = request.LastName,
        Age = request.Age
    };

    // Every new producer defaults to the first country on record (may be null).
    producer.Country = this.data.CountriesRepository.All().FirstOrDefault();

    this.data.ProducersRepository.Add(producer);
    int result = this.data.SaveChanges();

    return this.Ok($"{GlobalMessages.EntitySuccessfullyAddedMessage} - {result}");
}
// Verifies that a producer publishes successfully while connected and that,
// once stopped, further publishes are rejected with ErrStopped.
public void TestProducerConnection()
{
    string topicName = "write_test" + DateTime.Now.Unix();   // unique topic per run
    try
    {
        var config = new Config();
        var w = new Producer("127.0.0.1:4150", new ConsoleLogger(LogLevel.Debug), config);
        w.Publish(topicName, "test");
        w.Stop();
        // After Stop, the producer must refuse to publish.
        Assert.Throws<ErrStopped>(() => w.Publish(topicName, "fail test"));
    }
    finally
    {
        // Clean up the topic on both nsqd and nsqlookupd.
        _nsqdHttpClient.DeleteTopic(topicName);
        _nsqLookupdHttpClient.DeleteTopic(topicName);
    }
}
// Two-phase heartbeat test: first asserts that a 100ms heartbeat interval is
// rejected by nsqd during IDENTIFY; then re-creates the producer with a valid
// 1000ms interval and verifies publishing still works across heartbeats.
public void TestProducerHeartbeat()
{
    var topicName = "heartbeat" + DateTime.Now.Unix();   // unique topic per run

    // Phase 1: an interval below nsqd's minimum must fail IDENTIFY.
    var config = new Config();
    config.HeartbeatInterval = TimeSpan.FromMilliseconds(100);
    var w = new Producer("127.0.0.1:4150", new ConsoleLogger(LogLevel.Debug), config);
    try
    {
        ErrIdentify errIdentify = Assert.Throws<ErrIdentify>(() => w.Publish(topicName, "publish_test_case"));
        Assert.AreEqual("E_BAD_BODY IDENTIFY heartbeat interval (100) is invalid", errIdentify.Reason);
    }
    finally
    {
        w.Stop();
        // note: if test successful, topic will not be created - don't need to delete
    }

    // Phase 2: a valid interval; sleep past a heartbeat, then publish a batch.
    try
    {
        config = new Config();
        config.HeartbeatInterval = TimeSpan.FromMilliseconds(1000);
        w = new Producer("127.0.0.1:4150", new ConsoleLogger(LogLevel.Debug), config);
        w.Publish(topicName, "publish_test_case");

        // TODO: what are we testing here?
        Thread.Sleep(1100);

        const int msgCount = 10;
        for (int i = 0; i < msgCount; i++)
        {
            w.Publish(topicName, "publish_test_case");
        }
        // Sentinel message; readMessages expects msgCount + 1 including the first publish.
        w.Publish(topicName, "bad_test_case");
        readMessages(topicName, msgCount + 1);
    }
    finally
    {
        w.Stop();
        _nsqdHttpClient.DeleteTopic(topicName);
        _nsqLookupdHttpClient.DeleteTopic(topicName);
    }
}
// Round-trip test: produces 100 Avro-serialized User records keyed by name to
// a fresh topic, then consumes them back in order, asserting every field,
// until the partition EOF event fires.
public static void ProduceConsume(string bootstrapServers, string schemaRegistryServers)
{
    string topic = Guid.NewGuid().ToString();   // fresh topic per run
    var producerConfig = new ProducerConfig { BootstrapServers = bootstrapServers };
    var consumerConfig = new ConsumerConfig
    {
        BootstrapServers = bootstrapServers,
        GroupId = Guid.NewGuid().ToString(),   // fresh group so we read from the start
        SessionTimeoutMs = 6000,
        AutoOffsetReset = AutoOffsetResetType.Earliest
    };
    var serdeProviderConfig = new AvroSerdeProviderConfig { SchemaRegistryUrl = schemaRegistryServers };

    // Produce 100 users; Flush must report 0 messages still in flight.
    using (var serdeProvider = new AvroSerdeProvider(serdeProviderConfig))
    using (var producer = new Producer <string, User>(producerConfig, serdeProvider.GetSerializerGenerator <string>(), serdeProvider.GetSerializerGenerator <User>()))
    {
        for (int i = 0; i < 100; ++i)
        {
            var user = new User { name = i.ToString(), favorite_number = i, favorite_color = "blue" };
            // Fire-and-forget; delivery is confirmed collectively by the Flush below.
            producer.ProduceAsync(topic, new Message <string, User> { Key = user.name, Value = user });
        }
        Assert.Equal(0, producer.Flush(TimeSpan.FromSeconds(10)));
    }

    // Consume until end-of-partition and verify every record arrives in order.
    using (var serdeProvider = new AvroSerdeProvider(serdeProviderConfig))
    using (var consumer = new Consumer <string, User>(consumerConfig, serdeProvider.GetDeserializerGenerator <string>(), serdeProvider.GetDeserializerGenerator <User>()))
    {
        bool consuming = true;
        consumer.OnPartitionEOF += (_, topicPartitionOffset) => consuming = false;
        // Any consumer error fails the test immediately.
        consumer.OnError += (_, e) => Assert.True(false, e.Reason);
        consumer.Subscribe(topic);
        int i = 0;
        while (consuming)
        {
            var record = consumer.Consume(TimeSpan.FromMilliseconds(100));
            if (record != null)
            {
                Assert.Equal(i.ToString(), record.Message.Key);
                Assert.Equal(i.ToString(), record.Message.Value.name);
                Assert.Equal(i, record.Message.Value.favorite_number);
                Assert.Equal("blue", record.Message.Value.favorite_color);
                i += 1;
            }
        }
        Assert.Equal(100, i);
        consumer.Close();
    }
}
// Async-publish test: fires msgCount PublishAsync calls with per-call args,
// waits on each task, and asserts the response echoes the supplied args back.
// A sentinel message is published last; topic cleanup happens in finally.
public void TestProducerPublishAsync()
{
    var topicName = "async_publish" + DateTime.Now.Unix();   // unique topic per run
    const int msgCount = 10;
    var config = new Config();
    var w = new Producer("127.0.0.1:4150", new ConsoleLogger(LogLevel.Debug), config);
    try
    {
        // Kick off all publishes; each carries ("test", i) as caller args.
        var tasks = new List<Task<ProducerResponse>>();
        for (int i = 0; i < msgCount; i++)
        {
            var task = w.PublishAsync(topicName, "publish_test_case", "test", i);
            tasks.Add(task);
        }
        // Await each response and verify it succeeded with the original args.
        for (int i = 0; i < msgCount; i++)
        {
            tasks[i].Wait();
            var trans = tasks[i].Result;
            Assert.IsNull(trans.Error);
            Assert.IsNotNull(trans.Args);
            Assert.AreEqual(2, trans.Args.Length);
            Assert.AreEqual("test", trans.Args[0]);
            Assert.AreEqual(i, trans.Args[1]);
        }
        // Sentinel message; readMessages only expects msgCount good messages.
        w.Publish(topicName, "bad_test_case");
        readMessages(topicName, msgCount);
    }
    finally
    {
        w.Stop();
        _nsqdHttpClient.DeleteTopic(topicName);
        _nsqLookupdHttpClient.DeleteTopic(topicName);
    }
}
// Advances the simulation by one turn:
//  1. gathers producers/improvements/stores from every corporation,
//  2. producers buy their needed resource from the cheapest improvements,
//  3. stores buy each needed resource from producers,
//  4. costs are settled, corporations are ranked by wealth, and richer
//     corporations absorb much poorer ones.
//
// FIX: the buyout phase removed items from 'corporations' while foreach-ing
// over that same list, which throws InvalidOperationException on the next
// iteration whenever a buyout occurs. The outer loop now iterates a snapshot,
// and corporations bought out earlier in the phase are skipped as buyers.
//
// NOTE(review): 'qouta' / 'recieveResources' are the existing spellings on the
// domain types and are kept as-is.
// NOTE(review): both while(qouta > 0) loops assume a seller with remaining
// stock is always found — if spendResource can return 0 these loops never
// terminate; confirm.
public void startTurn()
{
    List <Producer> producers = new List <Producer>();
    List <Improvement> improvements = new List <Improvement>();
    List <Store> stores = new List <Store>();

    // Collect this turn's actors from every corporation.
    foreach (Corporation c in corporations)
    {
        c.startTurn();
        producers.AddRange(c.producersWithNeed);
        improvements.AddRange(c.improvements);
        stores.AddRange(c.stores);
    }

    // Producers purchase their needed resource from improvements, cheapest first.
    foreach (Producer p in producers)
    {
        Improvement.P = p;   // static context used by the price comparison
        improvements.Sort(Improvement.priceCompare);
        while (p.qouta > 0)
        {
            Improvement I = searchImprovementsForUnsold(improvements, p.neededResource);
            double amountSold = I.resource.spendResource(p.qouta);
            double cost = I.getHarvestCost(amountSold);
            I.receiveMoney(cost);
            p.qouta -= amountSold;
            p.totalCost += cost;
            p.recieveResources(amountSold);
        }
    }

    // Stores purchase each needed resource from producers.
    foreach (Store s in stores)
    {
        Producer.S = s;   // static context used by the price comparison
        improvements.Sort(Improvement.priceCompare);
        while (s.qouta > 0)
        {
            foreach (PlayerResource p in s.neededResources)
            {
                Producer P = searchProducersForUnsold(producers, p);
                double amountSold = P.producedResource.spendResource(s.qouta);
                double cost = P.getHarvestCost(amountSold);
                P.receiveMoney(cost);
                s.cost = cost;
                s.qouta -= amountSold;
                s.recieveResources(p.resourceName, amountSold);
            }
        }
    }

    // Settle the books, then rank corporations by wealth.
    foreach (Corporation c in corporations)
    {
        c.totalCosts();
    }
    corporations.Sort(Corporation.wealthComparison);

    // Buyout phase: each corporation may absorb at most one corporation whose
    // money is below wealthShareToBuy of its own. Iterate a snapshot so the
    // live list can be mutated safely.
    foreach (Corporation c in new List<Corporation>(corporations))
    {
        if (!corporations.Contains(c))
        {
            continue;   // this corporation was itself bought out earlier this phase
        }
        for (int i = 0; i < corporations.Count; i++)
        {
            if (corporations[i].money < c.money * wealthShareToBuy)
            {
                Corporation sold = corporations[i];
                corporations.Remove(sold);
                c.improvements.AddRange(sold.improvements);
                c.producers.AddRange(sold.producers);
                c.stores.AddRange(sold.stores);
                sold.sellTo(c);
                break;
            }
        }
    }
}
// Produces two messages per topic across four topics (added in non-ordered
// sequence), then exercises multi-fetch three ways: valid offsets (content
// verified), invalid offsets (expects OffsetOutOfRangeException), and invalid
// partitions (expects UnknownTopicOrPartitionException).
public void TestProduceAndMultiFetch()
{
    this.CreateSimpleTopicsAndAwaitLeader(this.ZkClient, new List <string> { "test1", "test2", "test3", "test4" }, Configs.First().BrokerId);

    // send some messages, with non-ordered topics
    var topics = new List <Tuple <string, int> > { Tuple.Create("test4", 0), Tuple.Create("test1", 0), Tuple.Create("test2", 0), Tuple.Create("test3", 0) };
    {
        var messages = new Dictionary <string, List <string> >();
        var builder = new FetchRequestBuilder();
        foreach (var topicAndOffset in topics)
        {
            var topic = topicAndOffset.Item1;
            var partition = topicAndOffset.Item2;
            // Two keyed messages per topic, remembered for later comparison.
            var messageList = new List <string> { "a_" + topic, "b_" + topic };
            var producerData = messageList.Select(m => new KeyedMessage <string, string>(topic, topic, m)).ToArray();
            messages[topic] = messageList;
            Producer.Send(producerData);
            builder.AddFetch(topic, partition, 0, 10000);
        }

        // wait a bit for produced message to be available
        var request = builder.Build();
        var response = Consumer.Fetch(request);
        // Each topic's fetched payloads must match what was produced, in order.
        foreach (var topicAndPartition in topics)
        {
            var fetched = response.MessageSet(topicAndPartition.Item1, topicAndPartition.Item2);
            Assert.Equal(messages[topicAndPartition.Item1], fetched.Select(m => Util.ReadString(m.Message.Payload)).ToList());
        }
    }

    {
        // send some invalid offsets — every fetch at offset -1 must surface an error
        var builder = new FetchRequestBuilder();
        foreach (var topicAndPartition in topics)
        {
            builder.AddFetch(topicAndPartition.Item1, topicAndPartition.Item2, -1, 10000);
        }
        try
        {
            var request = builder.Build();
            var response = Consumer.Fetch(request);
            foreach (var pdata in response.Data.Values)
            {
                ErrorMapping.MaybeThrowException(pdata.Error);
            }
            Assert.True(false, "Expected exception when fetching message with invalid offset");
        }
        catch (OffsetOutOfRangeException)
        {
            // ok
        }
    }

    {
        // send some invalid partitions — partition -1 must surface an error
        var builder = new FetchRequestBuilder();
        foreach (var topicAndPartition in topics)
        {
            builder.AddFetch(topicAndPartition.Item1, -1, 0, 10000);
        }
        try
        {
            var request = builder.Build();
            var response = Consumer.Fetch(request);
            foreach (var pdata in response.Data.Values)
            {
                ErrorMapping.MaybeThrowException(pdata.Error);
            }
            Assert.True(false, "Expected exception when fetching message with invalid partition");
        }
        catch (UnknownTopicOrPartitionException)
        {
            // ok
        }
    }
}
/// <summary>
/// Convenience overload that expands <paramref name="fields"/> via getFields()
/// and forwards to the list-based AddTo overload.
/// </summary>
/// <remarks>
/// FIX: this was declared 'async void' with no await in the body (compiler
/// warning CS1998) — the method is fully synchronous, so the async modifier
/// has been dropped. If the forwarded overload is itself asynchronous, the
/// call remains fire-and-forget, exactly as before.
/// </remarks>
public static void AddTo(Producer prod, Fields fields, Loadable l, params object[] obj)
{
    AddTo(prod, fields.getFields(), l, obj);
}
/// <summary>
/// Adds the producer to the context, commits immediately, and returns the
/// database-generated identifier.
/// </summary>
/// <param name="entity">Producer to persist; assumed non-null.</param>
/// <returns>The store-assigned ID of the inserted row.</returns>
public long Insert(Producer entity)
{
    db.Producers.Add(entity);
    // Flush now so the store populates entity.ID before we read it.
    db.SaveChanges();
    return entity.ID;
}
// Deserializes producers (with nested albums) from JSON, validates each DTO,
// persists the valid ones, and returns a per-producer status report.
// Invalid producers (or any invalid album) append ErrorMessage and are skipped.
public static string ImportProducersAlbums(MusicHubDbContext context, string jsonString)
{
    var producerDtos = JsonConvert.DeserializeObject <ImportProducerDto[]>(jsonString);
    var producers = new List <Producer>();
    var sb = new StringBuilder();

    foreach (var producerDto in producerDtos)
    {
        // A producer is only imported when it AND all of its albums validate.
        var isValidProducer = IsValid(producerDto);
        var areValidAlbums = producerDto.Albums.All(s => IsValid(s));
        if (!isValidProducer || !areValidAlbums)
        {
            sb.AppendLine(ErrorMessage);
            continue;
        }

        var producer = new Producer
        {
            Name = producerDto.Name,
            PhoneNumber = producerDto.PhoneNumber,
            Pseudonym = producerDto.Pseudonym
        };

        // Attach every album, parsing its culture-invariant dd/MM/yyyy release date.
        foreach (var albumDto in producerDto.Albums)
        {
            var album = new Album
            {
                Name = albumDto.Name,
                ReleaseDate = DateTime.ParseExact(albumDto.ReleaseDate, "dd/MM/yyyy", CultureInfo.InvariantCulture)
            };
            producer.Albums.Add(album);
        }
        producers.Add(producer);

        // Report with or without phone number depending on availability.
        if (producer.PhoneNumber == null)
        {
            sb.AppendLine(string.Format(SuccessfullyImportedProducerWithNoPhone, producer.Name, producer.Albums.Count));
        }
        else
        {
            sb.AppendLine(string.Format(SuccessfullyImportedProducerWithPhone, producer.Name, producer.PhoneNumber, producer.Albums.Count));
        }
    }

    // Persist all valid producers in one batch.
    context.Producers.AddRange(producers);
    context.SaveChanges();
    return(sb.ToString().TrimEnd());
}
// Private constructor: instances are created elsewhere (presumably a factory
// method — TODO confirm) that supplies the underlying string/string producer.
private KafkaEventProducer(Producer <string, string> publisher)
{
    this.publisher = publisher;
}
// Upserts the Producer and Transporter referenced by the inspection model
// (matched by Name among non-removed rows), then links their ids onto the
// trash inspection and saves it.
private async Task UpdateProducerAndTransporter(TrashInspection trashInspection, TrashInspectionModel createModel)
{
    // --- Producer: find by name; create if missing, otherwise refresh fields.
    var producer = _dbContext.Producers
        .Where(x => x.WorkflowState != Constants.WorkflowStates.Removed)
        .SingleOrDefault(x => x.Name == createModel.Producer);
    if (producer == null)
    {
        producer = new Producer
        {
            Name = createModel.Producer,
            Address = createModel.ProducerAddress,
            City = createModel.ProducerCity,
            ContactPerson = createModel.ProducerContact,
            Phone = createModel.ProducerPhone,
            ZipCode = createModel.ProducerZip,
            ForeignId = createModel.ProducerForeignId,
            UpdatedByUserId = _userService.UserId,
            CreatedByUserId = _userService.UserId,
        };
        await producer.Create(_dbContext);
    }
    else
    {
        producer.Address = createModel.ProducerAddress;
        producer.City = createModel.ProducerCity;
        producer.ContactPerson = createModel.ProducerContact;
        producer.Phone = createModel.ProducerPhone;
        producer.ZipCode = createModel.ProducerZip;
        producer.ForeignId = createModel.ProducerForeignId;
        producer.UpdatedByUserId = _userService.UserId;
        await producer.Update(_dbContext);
    }
    trashInspection.ProducerId = producer.Id;

    // --- Transporter: same upsert pattern as the producer above.
    var transporter = _dbContext.Transporters
        .Where(x => x.WorkflowState != Constants.WorkflowStates.Removed)
        .SingleOrDefault(x => x.Name == createModel.Transporter);
    if (transporter == null)
    {
        transporter = new Transporter
        {
            Name = createModel.Transporter,
            Address = createModel.TransporterAddress,
            City = createModel.TransporterCity,
            ZipCode = createModel.TransporterZip,
            Phone = createModel.TransporterPhone,
            ContactPerson = createModel.TransporterContact,
            ForeignId = createModel.TransporterForeignId,
            UpdatedByUserId = _userService.UserId,
            CreatedByUserId = _userService.UserId,
        };
        await transporter.Create(_dbContext);
    }
    else
    {
        transporter.Address = createModel.TransporterAddress;
        transporter.City = createModel.TransporterCity;
        transporter.ZipCode = createModel.TransporterZip;
        transporter.Phone = createModel.TransporterPhone;
        transporter.ContactPerson = createModel.TransporterContact;
        transporter.ForeignId = createModel.TransporterForeignId;
        transporter.UpdatedByUserId = _userService.UserId;
        await transporter.Update(_dbContext);
    }
    trashInspection.TransporterId = transporter.Id;
    // NOTE(review): UpdatedByUserId is assigned again after Update — appears
    // redundant; confirm intended.
    transporter.UpdatedByUserId = _userService.UserId;
    await trashInspection.Update(_dbContext);
}
// Interactive Kafka demo: seeds one OrderCreated inventory event (quantity 10),
// then reads "key value" lines from the console, publishing an order event per
// line until Ctrl-C or end-of-input.
// NOTE(review): the key/val parsed from the console are computed but the
// produced message always uses products[0] and a fixed OrderCreated payload —
// confirm whether key/val were meant to be published.
public static async Task Main(string[] args)
{
    List <string> products = new List <string>() { "Nintendo Switch" };
    var config = new ProducerConfig { BootstrapServers = "127.0.0.1:9092" };
    using (var producer = new Producer <string, string>(config))
    {
        // Banner explaining the console input format.
        Console.WriteLine("\n-----------------------------------------------------------------------");
        Console.WriteLine("-----------------------------------------------------------------------");
        Console.WriteLine("To create a kafka message with UTF-8 encoded key and value:");
        Console.WriteLine("> key value<Enter>");
        Console.WriteLine("To create a kafka message with a null key and UTF-8 encoded value:");
        Console.WriteLine("> value<enter>");
        Console.WriteLine("Ctrl-C to quit.\n");

        var cancelled = false;
        Console.CancelKeyPress += (_, e) =>
        {
            e.Cancel = true; // prevent the process from terminating.
            cancelled = true;
        };

        //create 10 switchs
        var deliveryReport = await producer.ProduceAsync(TOPICS.INVENTORYEVENTS, new Message <string, string>
        {
            Key = products[0],
            Value = JsonConvert.SerializeObject(
                new OrderCreated() { ProductName = products[0], User = "******", Quantity = 10 })
        });

        while (!cancelled)
        {
            Console.Write("> ");
            string text;
            try
            {
                text = Console.ReadLine();
            }
            catch (IOException)
            {
                // IO exception is thrown when ConsoleCancelEventArgs.Cancel == true.
                break;
            }
            if (text == null)
            {
                // Console returned null before
                // the CancelKeyPress was treated
                break;
            }

            string key = null;
            string val = text;
            // split line if both key and value specified.
            int index = text.IndexOf(" ");
            if (index != -1)
            {
                key = text.Substring(0, index);
                val = text.Substring(index + 1);
            }

            try
            {
                // Awaiting the asynchronous produce request below prevents flow of execution
                // from proceeding until the acknowledgement from the broker is received.
                deliveryReport = await producer.ProduceAsync(TOPICS.ORDERSEVENTS, new Message <string, string>
                {
                    Key = products[0],
                    Value = JsonConvert.SerializeObject(
                        new OrderCreated() { ProductName = products[0], User = "******", Quantity = 1 })
                });
                Console.WriteLine($"delivered to: {deliveryReport.TopicPartitionOffset}");
            }
            catch (KafkaException e)
            {
                Console.WriteLine($"failed to deliver message: {e.Message} [{e.Error.Code}]");
            }
        }
        // Since we are producing synchronously, at this point there will be no messages
        // in-flight and no delivery reports waiting to be acknowledged, so there is no
        // need to call producer.Flush before disposing the producer.
    }
}
// Verifies consumer behavior when assigned an offset past the end of a
// partition: with auto.offset.reset=latest nothing is consumed; with
// earliest, an out-of-range offset falls back to the start of the partition
// and the previously produced message is consumed.
public static void AssignPastEnd(string bootstrapServers, string topic)
{
    var consumerConfig = new Dictionary <string, object>
    {
        { "group.id", "test-consumer-group" },
        { "bootstrap.servers", bootstrapServers },
        { "session.timeout.ms", 6000 }
    };
    var producerConfig = new Dictionary <string, object> { { "bootstrap.servers", bootstrapServers } };

    // Produce a single message and capture its delivery report (offset used below).
    var testString = "hello world";
    Message <Null, string> dr;
    using (var producer = new Producer <Null, string>(producerConfig, null, new StringSerializer(Encoding.UTF8)))
    {
        dr = producer.ProduceAsync(topic, null, testString).Result;
        Assert.True(dr.Offset >= 0);
        producer.Flush();
    }

    // --- latest: offsets past the end should yield nothing.
    consumerConfig["default.topic.config"] = new Dictionary <string, object>() { { "auto.offset.reset", "latest" } };
    using (var consumer = new Consumer(consumerConfig))
    {
        Message msg;
        // Consume API
        consumer.Assign(new List <TopicPartitionOffset>() { new TopicPartitionOffset(dr.TopicPartition, dr.Offset + 1) });
        Assert.False(consumer.Consume(out msg, TimeSpan.FromSeconds(10)));
        consumer.Assign(new List <TopicPartitionOffset>() { new TopicPartitionOffset(dr.TopicPartition, dr.Offset + 2) });
        Assert.False(consumer.Consume(out msg, TimeSpan.FromSeconds(10)));

        // Poll API — OnMessage firing at all is a failure here.
        consumer.Assign(new List <TopicPartitionOffset>() { new TopicPartitionOffset(dr.TopicPartition, dr.Offset + 1) });
        consumer.OnMessage += (_, message) => { Assert.True(false); };
        consumer.Poll(TimeSpan.FromSeconds(10));
        consumer.Assign(new List <TopicPartitionOffset>() { new TopicPartitionOffset(dr.TopicPartition, dr.Offset + 2) });
        consumer.Poll(TimeSpan.FromSeconds(10));
    }

    // --- earliest: an invalid offset falls back to the start of the partition.
    consumerConfig["default.topic.config"] = new Dictionary <string, object>() { { "auto.offset.reset", "earliest" } };
    using (var consumer = new Consumer(consumerConfig))
    {
        Message msg;
        consumer.Assign(new List <TopicPartitionOffset>() { new TopicPartitionOffset(dr.TopicPartition, dr.Offset + 1) });
        Assert.False(consumer.Consume(out msg, TimeSpan.FromSeconds(10)));
        // Note: dr.Offset+2 is an invalid (c.f. dr.Offset+1 which is valid), so auto.offset.reset will come
        // into play here to determine which offset to start from (earliest). Due to the the produce call above,
        // there is guarenteed to be a message on the topic, so consumer.Consume will return true.
        consumer.Assign(new List <TopicPartitionOffset>() { new TopicPartitionOffset(dr.TopicPartition, dr.Offset + 2) });
        Assert.True(consumer.Consume(out msg, TimeSpan.FromSeconds(10)));
    }
}
// Confluent Cloud demo: produces one test message over SASL_SSL, then
// subscribes with a fresh consumer group and polls forever, logging consumed
// messages, consume errors, and partition-EOF events.
// Placeholders (<ccloud key> etc.) must be replaced with real credentials.
static void Main(string[] args)
{
    var pConfig = new Dictionary <string, object>
    {
        { "bootstrap.servers", "<ccloud bootstrap servers>" },
        { "broker.version.fallback", "0.10.0.0" },
        { "api.version.fallback.ms", 0 },
        { "sasl.mechanisms", "PLAIN" },
        { "security.protocol", "SASL_SSL" },
        // On Windows, default trusted root CA certificates are stored in the Windows Registry.
        // They are not automatically discovered by Confluent.Kafka and it's not possible to
        // reference them using the `ssl.ca.location` property. You will need to obtain these
        // from somewhere else, for example use the cacert.pem file distributed with curl:
        // https://curl.haxx.se/ca/cacert.pem and reference that file in the `ssl.ca.location`
        // property:
        { "ssl.ca.location", "/usr/local/etc/openssl/cert.pem" }, // suitable configuration for linux, osx.
        // { "ssl.ca.location", "c:\\path\\to\\cacert.pem" }, // windows
        { "sasl.username", "<ccloud key>" },
        { "sasl.password", "<ccloud secret>" }
    };

    using (var producer = new Producer <Null, string>(pConfig, null, new StringSerializer(Encoding.UTF8)))
    {
        // Log delivery outcome from the continuation, then flush before disposing.
        producer.ProduceAsync("dotnet-test-topic", null, "test value")
            .ContinueWith(result =>
            {
                var msg = result.Result;
                if (msg.Error.Code != ErrorCode.NoError)
                {
                    Console.WriteLine($"failed to deliver message: {msg.Error.Reason}");
                }
                else
                {
                    Console.WriteLine($"delivered to: {result.Result.TopicPartitionOffset}");
                }
            });
        producer.Flush(TimeSpan.FromSeconds(10));
    }

    var cConfig = new Dictionary <string, object>
    {
        { "bootstrap.servers", "<confluent cloud bootstrap servers>" },
        { "broker.version.fallback", "0.10.0.0" },
        { "api.version.fallback.ms", 0 },
        { "sasl.mechanisms", "PLAIN" },
        { "security.protocol", "SASL_SSL" },
        { "ssl.ca.location", "/usr/local/etc/openssl/cert.pem" }, // suitable configuration for linux, osx.
        // { "ssl.ca.location", "c:\\path\\to\\cacert.pem" }, // windows
        { "sasl.username", "<confluent cloud key>" },
        { "sasl.password", "<confluent cloud secret>" },
        // Fresh group id each run so the consumer starts from the reset offset.
        { "group.id", Guid.NewGuid().ToString() },
        { "auto.offset.reset", "smallest" }
    };

    using (var consumer = new Consumer <Null, string>(cConfig, null, new StringDeserializer(Encoding.UTF8)))
    {
        consumer.Subscribe("dotnet-test-topic");
        consumer.OnConsumeError += (_, err) => Console.WriteLine($"consume error: {err.Error.Reason}");
        consumer.OnMessage += (_, msg) => Console.WriteLine($"consumed: {msg.Value}");
        consumer.OnPartitionEOF += (_, tpo) => Console.WriteLine($"end of partition: {tpo}");
        // Poll loop runs until the process is killed.
        while (true)
        {
            consumer.Poll(TimeSpan.FromMilliseconds(100));
        }
    }
}
public void AddProducer(Producer producer) { _movieDbContext.Producers.Add(producer); }
private void BenchmarkTcp(int parallel) { string topicName = "test_benchmark_" + DateTime.Now.UnixNano(); try { const int benchmarkNum = 30000; byte[] body = new byte[512]; var p = new Producer("127.0.0.1:4150"); p.Connect(); var startCh = new Chan <bool>(); var wg = new WaitGroup(); for (int j = 0; j < parallel; j++) { wg.Add(1); //int localj = j; GoFunc.Run(() => { startCh.Receive(); for (int i = 0; i < benchmarkNum / parallel; i++) { //if (i%10 == 0) //{ // Debug.WriteLine(string.Format("{0}: {1}/{2}", localj, i, benchmarkNum/parallel)); //} p.Publish(topicName, body); } wg.Done(); }, "ProducerBenchmarkTcpTest: sendLoop"); } var stopwatch = Stopwatch.StartNew(); startCh.Close(); var done = new Chan <bool>(); GoFunc.Run(() => { wg.Wait(); done.Send(true); }, "waiter and done sender"); bool finished = false; Select .CaseReceive(done, b => finished = b) .CaseReceive(Time.After(TimeSpan.FromSeconds(10)), b => finished = false) .NoDefault(); stopwatch.Stop(); if (!finished) { Assert.Fail("timeout"); } Console.WriteLine(string.Format("{0:#,0} sent in {1:mm\\:ss\\.fff}; Avg: {2:#,0} msgs/s; Threads: {3}", benchmarkNum, stopwatch.Elapsed, benchmarkNum / stopwatch.Elapsed.TotalSeconds, parallel)); } finally { _nsqdHttpClient.DeleteTopic(topicName); _nsqLookupdHttpClient.DeleteTopic(topicName); } }
public void Flush() { publisher?.Flush(1000); publisher?.Dispose(); publisher = null; }
static void Main(string[] args) { var topicName = Environment.GetEnvironmentVariable("TOPIC_NAME"); var kafkaUrl = Environment.GetEnvironmentVariable("KAFKA_URL"); Console.WriteLine($"Broker address : {kafkaUrl}"); Console.WriteLine($"Topic name: {topicName}"); // https://kafka.apache.org/0110/documentation.html#producerconfigs var config = new ProducerConfig() { MessageMaxBytes = 3000000, BootstrapServers = kafkaUrl, MessageTimeoutMs = 1000 }; using (var producer = new Producer <string, string>(config)) { string text = null; while (text != "exit") { Console.Write("Enter message: "); text = Console.ReadLine(); if (string.IsNullOrEmpty(text)) { continue; } try { var values = text.Split('-'); var value = values[0]; string key = default; if (values.Count() > 1) { key = values[1]; } Message <string, string> message; Task <DeliveryReport <string, string> > dr; if (!string.IsNullOrEmpty(key)) { message = new Message <string, string> { Key = key, Value = value }; dr = producer.ProduceAsync(topicName, message); } else { message = new Message <string, string> { Value = value }; dr = producer.ProduceAsync(topicName, message); } var result = dr.GetAwaiter().GetResult(); Console.WriteLine($"{DateTime.Now} Delivered to '{result.TopicPartitionOffset}'"); } catch (KafkaException e) { Console.WriteLine($"Delivery failed: {e.Error.Reason}"); } } producer.Flush(TimeSpan.FromSeconds(10)); } }
// Seeds the movie database with sample actors, movies, and producers.
// No-op if the database already contains data.
public static void Initialize(MovieContext context)
{
    context.Database.EnsureCreated();

    // Look for any actors — if present, the DB has already been seeded.
    if (context.Actors.Any())
    {
        return; // DB has been seeded
    }

    var actors = new Actor[]
    {
        new Actor { Id = 1, Name = "Salman Khan", sex = "M", Dob = DateTime.Parse("1975-09-01"), Bio = "Superstar" },
        new Actor { Id = 2, Name = "Amir Khan", sex = "M", Dob = DateTime.Parse("1978-09-01"), Bio = "Superstar" },
        new Actor { Id = 3, Name = "Depika Padukon", sex = "F", Dob = DateTime.Parse("1988-09-01"), Bio = "Superstar" },
        new Actor { Id = 4, Name = "Kartik Aryan", sex = "M", Dob = DateTime.Parse("1987-09-01"), Bio = "star" }
    };
    foreach (Actor s in actors)
    {
        context.Actors.Add(s);
    }
    context.SaveChanges();

    // FIX: the original seeded two movies with Id = 1 (duplicate primary key —
    // SaveChanges would fail) and parsed bare year strings ("2019"), which
    // DateTime.Parse rejects with a FormatException. Ids are now unique and
    // release dates are full ISO dates.
    var movies = new Movie[]
    {
        new Movie { Id = 1, MovieId = 1050, producerId = 1211, Name = "Wanted", plot = "Mumbai", yearofRealse = DateTime.Parse("2019-01-01"), poster = "circle" },
        new Movie { Id = 2, MovieId = 1051, producerId = 1211, Name = "pk", plot = "Delhi", yearofRealse = DateTime.Parse("2017-01-01"), poster = "square" },
        new Movie { Id = 3, MovieId = 1052, producerId = 1213, Name = "piku", plot = "Mumbai", yearofRealse = DateTime.Parse("2016-01-01"), poster = "circle" },
        new Movie { Id = 4, MovieId = 1053, producerId = 1214, Name = "Pyar ka punchnama", plot = "Mumbai", yearofRealse = DateTime.Parse("2018-01-01"), poster = "circle" }
    };
    foreach (Movie c in movies)
    {
        context.Movies.Add(c);
    }
    context.SaveChanges();

    var producers = new Producer[]
    {
        new Producer { producerId = 1211, Name = "Boney Kappor", sex = "M", Dob = DateTime.Parse("1975-09-01"), Bio = "Superstar" },
        new Producer { producerId = 1212, MovieId = 1052, Name = "Vidhu Vinod chopra", sex = "M", Dob = DateTime.Parse("1978-09-01"), Bio = "Superstar" },
        new Producer { producerId = 1213, Name = "karan johar", sex = "M", Dob = DateTime.Parse("1988-09-01"), Bio = "Superstar" },
        new Producer { producerId = 1214, Name = "Sajid Khan", sex = "M", Dob = DateTime.Parse("1987-09-01"), Bio = "star" }
    };
    foreach (Producer e in producers)
    {
        context.producers.Add(e);
    }
    context.SaveChanges();
}
public static void ProduceIncompatibleTypes(string bootstrapServers, string schemaRegistryServers) { var producerConfig = new ProducerConfig { BootstrapServers = bootstrapServers }; var consumerConfig = new ConsumerConfig { BootstrapServers = bootstrapServers, GroupId = Guid.NewGuid().ToString(), SessionTimeoutMs = 6000, AutoOffsetReset = AutoOffsetResetType.Earliest, }; var schemaRegistryConfig = new SchemaRegistryConfig { SchemaRegistryUrl = schemaRegistryServers }; var topic = Guid.NewGuid().ToString(); using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig)) using (var producer = new Producer <string, string>(producerConfig, new AvroSerializer <string>(schemaRegistry), new AvroSerializer <string>(schemaRegistry))) { producer .ProduceAsync(topic, new Message <string, string> { Key = "hello", Value = "world" }) .Wait(); Assert.Equal(0, producer.Flush(TimeSpan.FromSeconds(10))); } using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig)) using (var producer = new Producer <int, string>(producerConfig, new AvroSerializer <int>(schemaRegistry), new AvroSerializer <string>(schemaRegistry))) { Assert.Throws <SerializationException>(() => { try { producer .ProduceAsync(topic, new Message <int, string> { Key = 42, Value = "world" }) .Wait(); } catch (AggregateException e) { throw e.InnerException; } }); } using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig)) using (var producer = new Producer <string, int>(producerConfig, new AvroSerializer <string>(schemaRegistry), new AvroSerializer <int>(schemaRegistry))) { Assert.Throws <SerializationException>(() => { try { producer .ProduceAsync(topic, new Message <string, int> { Key = "world", Value = 42 }) .Wait(); } catch (AggregateException e) { throw e.InnerException; } }); } }
public static void AutoRegisterSchemaDisabled(string bootstrapServers, string schemaRegistryServers) { string topic = Guid.NewGuid().ToString(); var producerConfig = new Dictionary <string, object> { { "bootstrap.servers", bootstrapServers }, { "avro.serializer.auto.register.schemas", false } }; var consumerConfig = new Dictionary <string, object> { { "bootstrap.servers", bootstrapServers }, { "group.id", Guid.NewGuid().ToString() }, { "session.timeout.ms", 6000 }, { "auto.offset.reset", "smallest" } }; var schemaRegistryConfig = new Dictionary <string, object> { { "schema.registry.url", schemaRegistryServers } }; using (var schemaRegistryClient = new CachedSchemaRegistryClient(schemaRegistryConfig)) using (var producer = new Producer <string, int>(producerConfig, new AvroSerializer <string>(schemaRegistryClient), new AvroSerializer <int>(schemaRegistryClient))) { Assert.Throws <SchemaRegistryException>(() => { try { producer.ProduceAsync(topic, "test", 112).Wait(); } catch (AggregateException e) { throw e.InnerException; } }); } var producerConfig2 = new Dictionary <string, object> { { "bootstrap.servers", bootstrapServers }, { "schema.registry.url", schemaRegistryServers } }; using (var producer = new Producer <string, int>(producerConfig2, new AvroSerializer <string>(), new AvroSerializer <int>())) { producer.ProduceAsync(topic, "test", 112).Wait(); } var producerConfig3 = new Dictionary <string, object> { { "bootstrap.servers", bootstrapServers }, { "schema.registry.url", schemaRegistryServers }, { "avro.serializer.auto.register.schemas", false } }; // config with avro.serializer.auto.register.schemas == false should work now. using (var producer = new Producer <string, int>(producerConfig3, new AvroSerializer <string>(), new AvroSerializer <int>())) { producer.ProduceAsync(topic, "test", 112).Wait(); } }
public ActionResult Edit(int id) { Producer producer = _db.Producer.Find(id); return(View(producer)); }
public static string ImportProducersAlbums(MusicHubDbContext context, string jsonString) { StringBuilder sb = new StringBuilder(); List <ProducerImportDto> producerDtos = JsonConvert.DeserializeObject <List <ProducerImportDto> >(jsonString); List <Producer> producers = new List <Producer>(); foreach (var producerDto in producerDtos) { if (!IsValid(producerDto) || !producerDto.Albums.All(IsValid)) { sb.AppendLine(ErrorMessage); continue; } Producer producer = new Producer() { Name = producerDto.Name, Pseudonym = producerDto.Pseudonym, PhoneNumber = producerDto.PhoneNumber }; bool isAllAlbumsValid = true; foreach (var albumDto in producerDto.Albums) { if (!IsValid(albumDto)) { isAllAlbumsValid = false; break; } bool isValidReleaseDate = DateTime.TryParseExact(albumDto.ReleaseDate, "dd/MM/yyyy", CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime releaseDate); if (!isValidReleaseDate) { isAllAlbumsValid = false; break; } Album album = new Album() { Name = albumDto.Name, ReleaseDate = releaseDate }; producer.Albums.Add(album); } if (!isAllAlbumsValid) { sb.AppendLine(ErrorMessage); continue; } producers.Add(producer); string message = producer.PhoneNumber != null ? $"Imported {producer.Name} with phone: {producer.PhoneNumber} produces {producer.Albums.Count} albums" : $"Imported {producer.Name} with no phone number produces {producer.Albums.Count} albums"; sb.AppendLine(message); } context.Producers.AddRange(producers); context.SaveChanges(); return(sb.ToString().TrimEnd()); }
// Integration test: with EnableDeliveryReports = false, delivery callbacks must
// never fire and ProduceAsync tasks must complete immediately with placeholder
// (Invalid/Any) offset and partition values.
public static void Producer_DisableDeliveryReports(string bootstrapServers, string singlePartitionTopic, string partitionedTopic)
{
    LogToFile("start Producer_DisableDeliveryReports");

    byte[] TestKey = new byte[] { 1, 2, 3, 4 };
    byte[] TestValue = new byte[] { 5, 6, 7, 8 };

    var producerConfig = new ProducerConfig
    {
        BootstrapServers = bootstrapServers,
        EnableDeliveryReports = false,
        // the below are just a few extra tests that the property is recognized (all
        // set to defaults). the functionality is not tested.
        EnableBackgroundPoll = true,
        DeliveryReportFields = "all"
    };

    // If delivery reports are disabled:
    //   1. callback functions should never called, even if specified.
    //   2. specifying no delivery report handlers is valid.
    //   3. tasks should complete immediately.
    int count = 0;
    using (var producer = new Producer <byte[], byte[]>(producerConfig))
    {
        // This callback must never run — count stays 0 (asserted at the end).
        producer.BeginProduce(singlePartitionTopic, new Message <byte[], byte[]> { Key = TestKey, Value = TestValue },
            (DeliveryReportResult <byte[], byte[]> dr) => count += 1);
        producer.BeginProduce(new TopicPartition(singlePartitionTopic, 0), new Message <byte[], byte[]> { Key = TestKey, Value = TestValue });
        producer.BeginProduce(singlePartitionTopic, new Message <byte[], byte[]> { Key = TestKey, Value = TestValue });
        producer.BeginProduce(new TopicPartition(singlePartitionTopic, 0), new Message <byte[], byte[]> { Key = TestKey, Value = TestValue });

        // Task completes immediately with placeholder offset/partition values.
        var drTask = producer.ProduceAsync(singlePartitionTopic, new Message <byte[], byte[]> { Key = TestKey, Value = TestValue });
        Assert.True(drTask.IsCompleted);
        Assert.Equal(Offset.Invalid, drTask.Result.Offset);
        Assert.Equal(Partition.Any, drTask.Result.Partition);
        Assert.Equal(singlePartitionTopic, drTask.Result.Topic);
        Assert.Equal(TestKey, drTask.Result.Message.Key);
        Assert.Equal(TestValue, drTask.Result.Message.Value);

        // When a specific partition was requested, it is echoed back in the result.
        drTask = producer.ProduceAsync(new TopicPartition(singlePartitionTopic, 0), new Message <byte[], byte[]> { Key = TestKey, Value = TestValue });
        Assert.True(drTask.IsCompleted);
        Assert.Equal(Offset.Invalid, drTask.Result.Offset);
        Assert.Equal(0, (int)drTask.Result.Partition);
        Assert.Equal(singlePartitionTopic, drTask.Result.Topic);
        Assert.Equal(TestKey, drTask.Result.Message.Key);
        Assert.Equal(TestValue, drTask.Result.Message.Value);

        Assert.Equal(0, producer.Flush(TimeSpan.FromSeconds(10)));
    }

    // The callback registered above must never have been invoked.
    Assert.Equal(0, count);

    Assert.Equal(0, Library.HandleCount);
    LogToFile("end Producer_DisableDeliveryReports");
}
// Parameterized test: a producer with a Wednesday Farm delivery and a Friday Market
// delivery should yield two upcoming deliveries each, whose expected delivery days
// match the parameterized expectations for the given 'current' date and lock window.
public async Task Should_Return_Multiple_Deliveries(int orderLockInHours, int year, int month, int day, int hour, int minute, int second,
    int expectedFarmDelivery_FirstDay, int expectedFarmDelivery_SecondDay, int expectedMarketDelivery_FirstDay, int expectedMarketDelivery_SecondDay)
{
    var token = CancellationToken.None;
    var currentDate = new DateTime(year, month, day, hour, minute, second);
    var producerId = Guid.NewGuid();

    _currentUserService.Setup(c => c.GetCurrentUserInfo()).Returns(Result <RequestUser> .Success(new RequestUser()));

    var producer = new Producer(producerId, "prod1", "fa", "la", "*****@*****.**",
        new UserAddress("x", null, "x", "x", CountryIsoCode.FR, null));
    await _context.AddAsync(producer, token);

    // Farm delivery: Wednesdays, 08:00-12:00.
    var entity1 = new DeliveryMode(
        Guid.NewGuid(),
        DeliveryKind.Farm,
        producer,
        true,
        new DeliveryAddress("x", null, "x", "x", CountryIsoCode.FR, null, null),
        new List <DeliveryHours> { new DeliveryHours(DayOfWeek.Wednesday, TimeSpan.FromHours(8), TimeSpan.FromHours(12)) },
        "delivery1");
    entity1.SetLockOrderHoursBeforeDelivery(orderLockInHours);
    await _context.AddAsync(entity1, token);

    // Market delivery: Fridays, 16:00-18:00.
    var entity2 = new DeliveryMode(
        Guid.NewGuid(),
        DeliveryKind.Market,
        producer,
        true,
        new DeliveryAddress("x", null, "x", "x", CountryIsoCode.FR, null, null),
        new List <DeliveryHours> { new DeliveryHours(DayOfWeek.Friday, TimeSpan.FromHours(16), TimeSpan.FromHours(18)) },
        "delivery2");
    entity2.SetLockOrderHoursBeforeDelivery(orderLockInHours);
    await _context.AddAsync(entity2, token);

    await _context.SaveChangesAsync(token);

    //test
    var results = await _queries.GetNextDeliveries(
        new List <Guid> { producerId },
        new List <DeliveryKind> { DeliveryKind.Farm, DeliveryKind.Market },
        _context,
        token,
        currentDate);

    //assert
    // Exactly one producer entry with two delivery modes.
    var deliveriesResults = results
        .Should().NotBeNull().And.ContainSingle()
        .And.Subject.First().Deliveries.Should().HaveCount(2);

    var marketDelivery = deliveriesResults.And.Subject.First(c => c.Kind == DeliveryKind.Market);
    var farmDelivery = deliveriesResults.And.Subject.First(c => c.Kind == DeliveryKind.Farm);

    // Market: both occurrences on Fridays, on the expected calendar days.
    marketDelivery.DeliveryHours.Should().OnlyContain(c => c.Day == DayOfWeek.Friday);
    marketDelivery.DeliveryHours.ElementAt(0).ExpectedDeliveryDate.Day.Should().Be(expectedMarketDelivery_FirstDay);
    marketDelivery.DeliveryHours.ElementAt(1).ExpectedDeliveryDate.Day.Should().Be(expectedMarketDelivery_SecondDay);

    // Farm: both occurrences on Wednesdays, on the expected calendar days.
    farmDelivery.DeliveryHours.Should().OnlyContain(c => c.Day == DayOfWeek.Wednesday);
    farmDelivery.DeliveryHours.ElementAt(0).ExpectedDeliveryDate.Day.Should().Be(expectedFarmDelivery_FirstDay);
    farmDelivery.DeliveryHours.ElementAt(1).ExpectedDeliveryDate.Day.Should().Be(expectedFarmDelivery_SecondDay);
}
// Avro specific-class example: a background consumer prints incoming User records
// while the foreground loop produces a User per console line until 'q' is entered.
static async Task Main(string[] args)
{
    if (args.Length != 3)
    {
        Console.WriteLine("Usage: .. bootstrapServers schemaRegistryUrl topicName");
        return;
    }

    string bootstrapServers = args[0];
    string schemaRegistryUrl = args[1];
    string topicName = args[2];

    var producerConfig = new ProducerConfig { BootstrapServers = bootstrapServers };

    var schemaRegistryConfig = new SchemaRegistryConfig
    {
        // Note: you can specify more than one schema registry url using the
        // schema.registry.url property for redundancy (comma separated list).
        // The property name is not plural to follow the convention set by
        // the Java implementation.
        SchemaRegistryUrl = schemaRegistryUrl,
        // optional schema registry client properties:
        SchemaRegistryRequestTimeoutMs = 5000,
        SchemaRegistryMaxCachedSchemas = 10
    };

    var consumerConfig = new ConsumerConfig
    {
        BootstrapServers = bootstrapServers,
        GroupId = "avro-specific-example-group"
    };

    var avroSerializerConfig = new AvroSerializerConfig
    {
        // optional Avro serializer properties:
        BufferBytes = 100,
        AutoRegisterSchemas = true
    };

    // Note: The User class in this project was generated using the Confluent fork of the avrogen.exe tool
    // (available from: https://github.com/confluentinc/avro/tree/confluent-fork) which includes modifications
    // that prevent namespace clashes with user namespaces that include the identifier 'Avro'. AvroSerializer
    // and AvroDeserializer are also compatible with classes generated by the official avrogen.exe tool
    // (available from: https://github.com/apache/avro), with the above limitation.
    CancellationTokenSource cts = new CancellationTokenSource();
    var consumeTask = Task.Run(() =>
    {
        using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig))
        using (var consumer = new Consumer <string, User>(consumerConfig,
            new AvroDeserializer <string>(schemaRegistry), new AvroDeserializer <User>(schemaRegistry)))
        {
            consumer.OnError += (_, e) => Console.WriteLine($"Error: {e.Reason}");
            consumer.Subscribe(topicName);

            while (!cts.Token.IsCancellationRequested)
            {
                try
                {
                    var consumeResult = consumer.Consume(cts.Token);
                    Console.WriteLine($"user key name: {consumeResult.Message.Key}, user value favorite color: {consumeResult.Value.favorite_color}");
                }
                catch (ConsumeException e)
                {
                    Console.WriteLine("Consume error: " + e.Error.Reason);
                }
                catch (OperationCanceledException)
                {
                    // FIX: Consume(cts.Token) throws OperationCanceledException when the
                    // token is cancelled; without this catch the exception escaped the
                    // loop and consumer.Close() never ran.
                    break;
                }
            }

            consumer.Close();
        }
    }, cts.Token);

    using (var schemaRegistry = new CachedSchemaRegistryClient(schemaRegistryConfig))
    using (var producer = new Producer <string, User>(producerConfig,
        new AvroSerializer <string>(schemaRegistry), new AvroSerializer <User>(schemaRegistry)))
    {
        Console.WriteLine($"{producer.Name} producing on {topicName}. Enter user names, q to exit.");

        int i = 0;
        string text;
        while ((text = Console.ReadLine()) != "q")
        {
            User user = new User { name = text, favorite_color = "green", favorite_number = i++ };
            // FIX: the original computed the status string in ContinueWith and then
            // discarded it; it is now written to the console.
            await producer
                .ProduceAsync(topicName, new Message <string, User> { Key = text, Value = user })
                .ContinueWith(task => Console.WriteLine(task.IsFaulted
                    ? $"error producing message: {task.Exception.Message}"
                    : $"produced to: {task.Result.TopicPartitionOffset}"));
        }
    }

    cts.Cancel();
    try
    {
        // FIX: observe the background consume task so its shutdown (and any
        // failure) is not silently dropped.
        await consumeTask;
    }
    catch (OperationCanceledException)
    {
        // Expected if the task was cancelled before it started running.
    }
}
public static string ImportProducersAlbums(MusicHubDbContext context, string jsonString) { var sb = new StringBuilder(); var serializer = JsonConvert.DeserializeObject <List <ImportProducerDto> >(jsonString); var producersToAdd = new List <Producer>(); foreach (var producerDto in serializer) { if (!IsValid(producerDto)) { sb.AppendLine(ErrorMessage); continue; } var producer = new Producer() { Name = producerDto.Name, Pseudonym = producerDto.Pseudonym, PhoneNumber = producerDto.PhoneNumber }; bool flag = false; foreach (var albumDto in producerDto.Albums) { if (!IsValid(albumDto)) { sb.AppendLine(ErrorMessage); flag = true; break; } DateTime releaseDate; var isReleaseDateValid = DateTime.TryParseExact(albumDto.ReleaseDate, "dd/MM/yyyy", CultureInfo.InvariantCulture, DateTimeStyles.None, out releaseDate); if (!isReleaseDateValid) { sb.AppendLine(ErrorMessage); continue; } var album = new Album() { Producer = producer, Name = albumDto.Name, ReleaseDate = releaseDate, ProducerId = producer.Id }; producer.Albums.Add(album); } if (!flag) { producersToAdd.Add(producer); if (producer.PhoneNumber == null) { sb.AppendLine(string.Format(SuccessfullyImportedProducerWithNoPhone, producer.Name, producer.Albums.Count)); } else { sb.AppendLine(string.Format(SuccessfullyImportedProducerWithPhone, producer.Name, producer.PhoneNumber, producer.Albums.Count)); } } } context.Producers.AddRange(producersToAdd); context.SaveChanges(); return(sb.ToString().Trim()); }
protected override void OnStop() { Producer.ClearLastIndex(); }
public void AddProducerData(Producer producer) { _producers.Add(producer); }
public void TestProducerMultiPublishAsync() { var topicName = "multi_publish" + DateTime.Now.Unix(); const int msgCount = 10; var config = new Config(); var w = new Producer("127.0.0.1:4150", new ConsoleLogger(LogLevel.Debug), config); try { var testData = new List<byte[]>(); for (int i = 0; i < msgCount; i++) { testData.Add(Encoding.UTF8.GetBytes("multipublish_test_case")); } var responseTask = w.MultiPublishAsync(topicName, testData, "test0", 1); responseTask.Wait(); var trans = responseTask.Result; Assert.IsNull(trans.Error); Assert.IsNotNull(trans.Args); Assert.AreEqual(2, trans.Args.Length); Assert.AreEqual("test0", trans.Args[0]); Assert.AreEqual(1, trans.Args[1]); w.Publish(topicName, "bad_test_case"); readMessages(topicName, msgCount); } finally { w.Stop(); _nsqdHttpClient.DeleteTopic(topicName); _nsqLookupdHttpClient.DeleteTopic(topicName); } }
/// <summary> /// 关闭生产者 /// </summary> public void Close() { _producer?.shutdown(); _producer = null; }
public void UpdateProducer(Producer producer) { _movieDbContext.Entry(producer).State = EntityState.Modified; }
// Builds a per-message send action for the configured mode
// ("Oneway" | "Sync" | "Async" | "Callback"), then pumps messageCount sends on a
// background task, recording per-message latency via _performanceService.
static void SendMessages(Producer producer, string mode, int batchSize, long messageCount, string topic, byte[] payload)
{
    _logger.Info("----Send message starting----");

    // NOTE(review): the 'mode' parameter is never read; every branch below keys
    // off the static _mode field instead — confirm this is intentional.
    var sendAction = default(Action <long>);

    if (_mode == "Oneway")
    {
        // Fire-and-forget: no broker acknowledgement is awaited.
        sendAction = index =>
        {
            if (batchSize == 1)
            {
                var message = new Message(topic, 100, payload);
                producer.SendOneway(message, index.ToString());
                _performanceService.IncrementKeyCount(_mode, (DateTime.Now - message.CreatedTime).TotalMilliseconds);
            }
            else
            {
                var messages = new List <Message>();
                for (var i = 0; i < batchSize; i++)
                {
                    messages.Add(new Message(topic, 100, payload));
                }
                producer.BatchSendOneway(messages, index.ToString());
                var currentTime = DateTime.Now;
                foreach (var message in messages)
                {
                    _performanceService.IncrementKeyCount(_mode, (currentTime - message.CreatedTime).TotalMilliseconds);
                }
            }
        };
    }
    else if (_mode == "Sync")
    {
        // Blocking send: throws when the broker reports a non-success status.
        sendAction = index =>
        {
            if (batchSize == 1)
            {
                var message = new Message(topic, 100, payload);
                var result = producer.Send(message, index.ToString());
                if (result.SendStatus != SendStatus.Success)
                {
                    throw new Exception(result.ErrorMessage);
                }
                _performanceService.IncrementKeyCount(_mode, (DateTime.Now - message.CreatedTime).TotalMilliseconds);
            }
            else
            {
                var messages = new List <Message>();
                for (var i = 0; i < batchSize; i++)
                {
                    messages.Add(new Message(topic, 100, payload));
                }
                var result = producer.BatchSend(messages, index.ToString());
                if (result.SendStatus != SendStatus.Success)
                {
                    throw new Exception(result.ErrorMessage);
                }
                var currentTime = DateTime.Now;
                foreach (var message in messages)
                {
                    _performanceService.IncrementKeyCount(_mode, (currentTime - message.CreatedTime).TotalMilliseconds);
                }
            }
        };
    }
    else if (_mode == "Async")
    {
        // Task-based send: failures flip _hasError so the pump loop below backs off.
        sendAction = index =>
        {
            if (batchSize == 1)
            {
                var message = new Message(topic, 100, payload);
                producer.SendAsync(message, index.ToString()).ContinueWith(t =>
                {
                    if (t.Exception != null)
                    {
                        _hasError = true;
                        _logger.ErrorFormat("Send message has exception, errorMessage: {0}", t.Exception.GetBaseException().Message);
                        return;
                    }
                    if (t.Result == null)
                    {
                        _hasError = true;
                        _logger.Error("Send message timeout.");
                        return;
                    }
                    if (t.Result.SendStatus != SendStatus.Success)
                    {
                        _hasError = true;
                        _logger.ErrorFormat("Send message failed, errorMessage: {0}", t.Result.ErrorMessage);
                        return;
                    }
                    _performanceService.IncrementKeyCount(_mode, (DateTime.Now - message.CreatedTime).TotalMilliseconds);
                });
            }
            else
            {
                var messages = new List <Message>();
                for (var i = 0; i < batchSize; i++)
                {
                    messages.Add(new Message(topic, 100, payload));
                }
                producer.BatchSendAsync(messages, index.ToString()).ContinueWith(t =>
                {
                    if (t.Exception != null)
                    {
                        _hasError = true;
                        _logger.ErrorFormat("Send message has exception, errorMessage: {0}", t.Exception.GetBaseException().Message);
                        return;
                    }
                    if (t.Result == null)
                    {
                        _hasError = true;
                        _logger.Error("Send message timeout.");
                        return;
                    }
                    if (t.Result.SendStatus != SendStatus.Success)
                    {
                        _hasError = true;
                        _logger.ErrorFormat("Send message failed, errorMessage: {0}", t.Result.ErrorMessage);
                        return;
                    }
                    var currentTime = DateTime.Now;
                    foreach (var message in messages)
                    {
                        _performanceService.IncrementKeyCount(_mode, (currentTime - message.CreatedTime).TotalMilliseconds);
                    }
                });
            }
        };
    }
    else if (_mode == "Callback")
    {
        // Callback-based send: latency accounting happens in the registered callback.
        sendAction = index =>
        {
            if (batchSize == 1)
            {
                var message = new Message(topic, 100, payload);
                producer.SendWithCallback(message, index.ToString());
            }
            else
            {
                var messages = new List <Message>();
                for (var i = 0; i < batchSize; i++)
                {
                    messages.Add(new Message(topic, 100, payload));
                }
                producer.BatchSendWithCallback(messages, index.ToString());
            }
        };
    }

    // Pump loop: issue messageCount sends, pausing 3s after any observed error
    // before clearing the flag and continuing.
    Task.Factory.StartNew(() =>
    {
        for (var i = 0L; i < messageCount; i++)
        {
            try
            {
                sendAction(i);
            }
            catch (Exception ex)
            {
                _hasError = true;
                _logger.ErrorFormat("Send message failed, errorMsg:{0}", ex.Message);
            }
            if (_hasError)
            {
                Thread.Sleep(3000);
                _hasError = false;
            }
        }
    });
}
public void TestProducerMultiPublish() { var topicName = "multi_publish" + DateTime.Now.Unix(); const int msgCount = 10; var config = new Config(); var w = new Producer("127.0.0.1:4150", new ConsoleLogger(LogLevel.Debug), config); try { var testData = new List<byte[]>(); for (int i = 0; i < msgCount; i++) { testData.Add(Encoding.UTF8.GetBytes("multipublish_test_case")); } w.MultiPublish(topicName, testData); w.Publish(topicName, "bad_test_case"); readMessages(topicName, msgCount); } finally { w.Stop(); _nsqdHttpClient.DeleteTopic(topicName); _nsqLookupdHttpClient.DeleteTopic(topicName); } }
public void RemoveProducer(Producer producer) { _movieDbContext.Producers.Remove(producer ?? throw new Exception()); }