Example #1
 public Authorization(string twitterConsumerKey, string twitterConsumerSecret, string twitterID, string twitterTokenValue, string twitterTokenSecret)
 {
     TwitterConsumer = new Consumer(twitterConsumerKey, twitterConsumerSecret);
     TwitterID = twitterID;
     TwitterTokenValue = twitterTokenValue;
     TwitterTokenSecret = twitterTokenSecret;
 }
Example #2
        public bool Open(Oscillo os, Consumer gc)
        {
            m_Oscillo = os;
            m_GraphControl = gc;

            return m_Oscillo.Ping();
        }
        public static void Main()
        {
            var configuration = new CatalystConfiguration(
                new Uri(DEFAULT_ENGINE_URI),
                new IEventConsumerFactory[]
                    {
                        new RabbitMqEventConsumerFactory("localhost"),
                        new MsmqEventConsumerFactory()
                    },
                new IDataPublisherFactory[]
                    {
                        new RabbitMqDataPublisherFactory(),
                        new MsmqDataPublisherFactory()
                    });

            // create a catalyst adapter
            var adapter = new Catalyst(configuration);
            // attach to the default instance - i.e. the default database
            var instance = adapter.GetDefaultInstance();
            // create an injector ... the purpose of the injector is to ensure that
            // events exist and are flowing through the system.
            var injector = new InjectSynthetic(instance);
            injector.Start();
            injector.WaitOne();
            // create a consumer ... the purpose of the consumer is to demonstrate
            // how to setup statements and consume event flow from the engine.
            var consumer = new Consumer(instance, "SyntheticEvent");
            consumer.Start();
        }
Example #4
        static void Main(string[] args)
        {
            XmlConfigurator.Configure();

            IApiPushServiceConfiguration configuration = new ApiPushServiceConfiguration();
            ISubscriptionStorage subscriptionStorage = new ReadonlyJsonSubscriptionStorage();
            IPushSender pushSender = new PushSender();
            Consumer consumer = new Consumer(subscriptionStorage, pushSender, configuration);

            IAutoSubscriberMessageDispatcher dispatcher = new MessageDispatcher(consumer);

            HostFactory.Run(x =>
            {
                x.Service<ApiPushService>(s =>
                {
                    s.ConstructUsing(() => new ApiPushService(configuration, dispatcher));
                    s.WhenStarted(push => push.Start());
                    s.WhenStopped(push => push.Stop());
                });

                x.RunAsLocalSystem();
                x.SetDescription("Service that consumes events over RabbitMQ and pushes notifications to HTTP endpoints");
                x.SetDisplayName("ApiPush");
                x.SetServiceName("ApiPush");

            });
        }
Example #5
        public void Delegate_Leak_RemoveOnlyDelegate()
        {
            freeAll();

            var memBegin = Process.GetCurrentProcess().PrivateMemorySize64;
            Debug.WriteLine("Begin: " + (memBegin / 1000000));

            var provider = new Provider();
            var consumer = new Consumer(provider.Notify);

            freeAll();

            var afterCreation = Process.GetCurrentProcess().PrivateMemorySize64 - memBegin;
            Debug.WriteLine("After creation (should be around 200MB): " + (afterCreation / 1000000));

            provider = null;
            freeAll();

            var afterDispose = Process.GetCurrentProcess().PrivateMemorySize64 - memBegin;
            Debug.WriteLine("After dispose (should be around 200MB): " + (afterDispose / 1000000));

            consumer.RemoveDelegate();
            freeAll();

            var afterDispose2 = Process.GetCurrentProcess().PrivateMemorySize64 - memBegin;
            Debug.WriteLine("After consumer dispose (should be around 100MB): " + (afterDispose2 / 1000000));
        }
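The Provider and Consumer classes behind this leak test are not shown. A minimal sketch of shapes that would reproduce the memory numbers logged above (an assumption, not the project's actual code) is:

        // Hypothetical shapes (assumption): each instance pins roughly 100 MB, and the
        // Consumer keeps the Provider reachable through the delegate to provider.Notify
        // until RemoveDelegate() drops that reference.
        public class Provider
        {
            private readonly byte[] _payload = new byte[100 * 1000 * 1000]; // ~100 MB

            public void Notify()
            {
                // handle the notification
            }
        }

        public class Consumer
        {
            private readonly byte[] _payload = new byte[100 * 1000 * 1000]; // ~100 MB
            private Action _callback;

            public Consumer(Action callback)
            {
                _callback = callback;
            }

            public void RemoveDelegate()
            {
                _callback = null; // after this, a GC pass can reclaim the Provider
            }
        }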
 public ActionResult Create(Consumer consumer)
 {
     if (ModelState.IsValid)
     {
         // Make sure the user did not create a non-unique key
         var match = ProviderContext.Consumers.SingleOrDefault(
             c => c.Key == consumer.Key);
         if (match != null)
         {
             ModelState.AddModelError("Key", UniqueKeyErrorMessage);
         }
         else
         {
             ProviderContext.Consumers.Add(consumer);
             ProviderContext.SaveChanges();
             if (string.IsNullOrEmpty(Request["ReturnURL"]))
             {
                 return RedirectToAction("Index");
             }
             var uri = new UriBuilder(Request["ReturnURL"]);
             // Append the key, handling a return URL that already carries a query string
             var query = uri.Query.TrimStart('?');
             uri.Query = (query.Length > 0 ? query + "&" : string.Empty) + "ConsumerId=" + consumer.ConsumerId;
             return Redirect(uri.ToString());
         }
     }
     return View(consumer);
 }
 //
 // GET: /Consumer/Create
 public ActionResult Create()
 {
     Consumer consumer = new Consumer();
     consumer.Key = Guid.NewGuid().ToString("N").Substring(0, 16);
     consumer.Secret = Guid.NewGuid().ToString("N").Substring(0, 16);
     return View(consumer);
 }
Example #8
        public static Consumer GetConsumer(string weiboType)
        {
            Consumer consumer = null;
            switch (weiboType)
            {
                case "����΢��":
                    consumer = new Consumer
                    {
                        Key = "��������΢��APP_KEY",
                        Secret = "��������΢��APP_SECRET_KEY",
                        RequestTokenUri = "http://api.t.sina.com.cn/oauth/request_token",
                        AuthorizeUri = "http://api.t.sina.com.cn/oauth/authorize",
                        AccessTokenUri = "http://api.t.sina.com.cn/oauth/access_token"
                    };
                    break;
                case "��Ѷ΢��":
                    consumer = new Consumer
                    {
                        Key = "������Ѷ΢��APP_KEY",
                        Secret = "������Ѷ΢��APP_SECRET_KEY",
                        RequestTokenUri = "https://open.t.qq.com/cgi-bin/request_token",
                        AuthorizeUri = "https://open.t.qq.com/cgi-bin/authorize",
                        AccessTokenUri = "https://open.t.qq.com/cgi-bin/access_token",
                        Callback = "null"
                    };
                    break;
                default:
                    break;
            }

            return consumer;
        }
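A hedged usage sketch for GetConsumer (the caller and what is done with the endpoints are assumptions, not shown in the source): select a consumer by weibo type and read its key and OAuth endpoint URIs.

        // Hypothetical caller (assumption): pick the Sina Weibo consumer and
        // inspect the registered OAuth endpoints.
        Consumer sina = GetConsumer("新浪微博"); // "Sina Weibo"
        if (sina != null)
        {
            Console.WriteLine("Key: " + sina.Key);
            Console.WriteLine("Request token endpoint: " + sina.RequestTokenUri);
            Console.WriteLine("Authorize endpoint:     " + sina.AuthorizeUri);
            Console.WriteLine("Access token endpoint:  " + sina.AccessTokenUri);
        }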
        public _2_Work_Queues()
        {
            InitializeComponent();

            //create the producer
            producer = new Producer(HOST_NAME, QUEUE_NAME);

            //**CONSUMER 1 **

            //create the consumer
            consumer = new Consumer(HOST_NAME, QUEUE_NAME);

            //this form will handle messages
            consumer.onMessageReceived += handleMessage;

            //start consuming
            consumer.StartConsuming();

            //**CONSUMER 2 **

            //create the second consumer
            consumer2 = new Consumer(HOST_NAME, QUEUE_NAME);

            //this form will handle messages
            consumer2.onMessageReceived += handleMessage2;

            //start consuming
            consumer2.StartConsuming();
        }
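The handleMessage and handleMessage2 handlers are not shown. A minimal sketch, assuming the wrapper's onMessageReceived event delivers the message body as a string (the delegate signature and the list-box control names are hypothetical):

        // Hypothetical handlers (assumption about the onMessageReceived signature;
        // listBoxConsumer1/listBoxConsumer2 are placeholder control names).
        private void handleMessage(string message)
        {
            // Marshal back to the UI thread before touching any controls.
            Invoke(new Action(() => listBoxConsumer1.Items.Add(message)));
        }

        private void handleMessage2(string message)
        {
            Invoke(new Action(() => listBoxConsumer2.Items.Add(message)));
        }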
Example #10
        public void ConsumerGetsMessage()
        {
            ProducerSendsMessage();

            Consumer consumer = new Consumer(KafkaServer, KafkaPort);
            consumer.Consume("test", 0, 0);
        }
Example #11
        void Loop(Action<string> producer, Action<Action<string>, Action> consumer, int producerCount, int consumerCount,
            int iterations)
        {
            var producers = new Producer[producerCount];
            var consumers = new Consumer[consumerCount];

            int remaining = iterations*producerCount;

            Stopwatch timer = Stopwatch.StartNew();

            for (int i = 0; i < consumerCount; i++)
            {
                consumers[i] = new Consumer(consumer, () => remaining == 0, () => Interlocked.Decrement(ref remaining));
                consumers[i].Start();
            }

            for (int i = 0; i < producerCount; i++)
            {
                producers[i] = new Producer(producer, iterations);
                producers[i].Start();
            }

            for (int i = 0; i < producerCount; i++)
                producers[i].Stop();

            for (int i = 0; i < consumerCount; i++)
                consumers[i].Stop();

            timer.Stop();

            Console.WriteLine("Total Time: " + timer.ElapsedMilliseconds + "ms");
        }
        public void ZkAwareProducerSends1Message()
        {
            var prodConfig = this.ZooKeeperBasedSyncProdConfig;

            int totalWaitTimeInMiliseconds = 0;
            int waitSingle = 100;
            var originalMessage = new Message(Encoding.UTF8.GetBytes("TestData"));

            var multipleBrokersHelper = new TestMultipleBrokersHelper(CurrentTestTopic);
            multipleBrokersHelper.GetCurrentOffsets(new[] { this.SyncProducerConfig1, this.SyncProducerConfig2, this.SyncProducerConfig3 });

            var mockPartitioner = new MockAlwaysZeroPartitioner();
            using (var producer = new Producer<string, Message>(prodConfig, mockPartitioner, new DefaultEncoder()))
            {
                var producerData = new ProducerData<string, Message>(
                    CurrentTestTopic, "somekey", new List<Message> { originalMessage });
                producer.Send(producerData);

                while (!multipleBrokersHelper.CheckIfAnyBrokerHasChanged(new[] { this.SyncProducerConfig1, this.SyncProducerConfig2, this.SyncProducerConfig3 }))
                {
                    totalWaitTimeInMiliseconds += waitSingle;
                    Thread.Sleep(waitSingle);
                    if (totalWaitTimeInMiliseconds > this.maxTestWaitTimeInMiliseconds)
                    {
                        Assert.Fail("None of the brokers changed their offset after sending a message");
                    }
                }

                totalWaitTimeInMiliseconds = 0;

                var consumerConfig = new ConsumerConfiguration(
                    multipleBrokersHelper.BrokerThatHasChanged.Host,
                    multipleBrokersHelper.BrokerThatHasChanged.Port);
                IConsumer consumer = new Consumer(consumerConfig);
                var request = new FetchRequest(CurrentTestTopic, multipleBrokersHelper.PartitionThatHasChanged, multipleBrokersHelper.OffsetFromBeforeTheChange);

                BufferedMessageSet response;

                while (true)
                {
                    Thread.Sleep(waitSingle);
                    response = consumer.Fetch(request);
                    if (response != null && response.Messages.Count() > 0)
                    {
                        break;
                    }

                    totalWaitTimeInMiliseconds += waitSingle;
                    if (totalWaitTimeInMiliseconds >= this.maxTestWaitTimeInMiliseconds)
                    {
                        break;
                    }
                }

                Assert.NotNull(response);
                Assert.AreEqual(1, response.Messages.Count());
                Assert.AreEqual(originalMessage.ToString(), response.Messages.First().ToString());
            }
        }
 public void Setup()
 {
     _apiPushServiceConfiguration = Substitute.For<IApiPushServiceConfiguration>();
     _pushSender = Substitute.For<IPushSender>();
     _subscriptionStorage = Substitute.For<ISubscriptionStorage>();
     Consumer consumer = new Consumer(_subscriptionStorage, _pushSender, _apiPushServiceConfiguration);
     dispatcher = new MessageDispatcher(consumer);
 }
Example #14
        /// <summary>
        /// Timer callback that runs the weekly monitoring activity.
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="args"></param>
        public void WeeklyTimer(object sender, System.Timers.ElapsedEventArgs args)
        {
            eventLogHealthService.WriteEntry("Monitoring the Weekly System", EventLogEntryType.Information);

            Consumer consumer = new Consumer();
            consumer.StartWeeklyActivity();

        }
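How this handler gets attached is not shown. A plausible wiring sketch using System.Timers.Timer (the interval, field name, and placement in the service's OnStart are assumptions):

        // Hypothetical wiring (assumption): typically done when the service starts.
        private System.Timers.Timer weeklyTimer;

        private void StartWeeklyTimer()
        {
            weeklyTimer = new System.Timers.Timer(TimeSpan.FromDays(7).TotalMilliseconds);
            weeklyTimer.Elapsed += WeeklyTimer;   // the handler shown above
            weeklyTimer.AutoReset = true;
            weeklyTimer.Start();
        }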
Example #15
 public static long GetCurrentKafkaOffset(string topic, string address, int port, int partition)
 {
     var request = new OffsetRequest(topic, partition, DateTime.Now.AddDays(-5).Ticks, 10);
     var consumerConfig = new ConsumerConfiguration(address, port);
     IConsumer consumer = new Consumer(consumerConfig, address, port);
     IList<long> list = consumer.GetOffsetsBefore(request);
     return list.Sum();
 }
Example #16
        public void TestMethod1()
        {
            BoundedBuffer buf = new BoundedBuffer(4);

            Producer prod = new Producer(buf, 10);
            Consumer con = new Consumer(buf);

            Parallel.Invoke(prod.Run, con.Run);
        }
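BoundedBuffer itself is not listed here. A minimal sketch, assuming it wraps a fixed-capacity blocking queue (the Put/Take member names and element type are hypothetical):

        // Hypothetical BoundedBuffer (assumption): a fixed-capacity producer/consumer
        // queue built on BlockingCollection<T>.
        // requires: using System.Collections.Concurrent;
        public class BoundedBuffer
        {
            private readonly BlockingCollection<int> _items;

            public BoundedBuffer(int capacity)
            {
                _items = new BlockingCollection<int>(capacity);
            }

            public void Put(int item) => _items.Add(item);           // blocks while the buffer is full
            public int Take() => _items.Take();                      // blocks while the buffer is empty
            public void CompleteAdding() => _items.CompleteAdding(); // lets the consumer drain and exit
        }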
        static void ClassicCombineTest()
        {
            var p1 = new Producer<ExampleEventArgs1>(15, () => new ExampleEventArgs1());
            var p2 = new Producer<ExampleEventArgs2>(40, () => new ExampleEventArgs2());
            Consumer c = new Consumer(p1, p2);

            c.OnCombinedEvents += IncreaseCounter;

            Parallel.Invoke(() => p1.Run(1000), () => p2.Run(500));
        }
Example #18
        public void CtorInputValidation()
        {
            Assert.Throws(Is.TypeOf<ArgumentNullException>().And.Property("ParamName").EqualTo("connectionSettings"),
                () => new Consumer(null, factory));
            Assert.Throws(Is.TypeOf<ArgumentNullException>().And.Property("ParamName").EqualTo("factory"),
                () => new Consumer(new ConnectionSettings("consumertest"), null));

            consumer = new Consumer(new ConnectionSettings("consumertest"), factory, null);
            Assert.That(((IServiceProvider)consumer).GetService<ConsumerSettings>(), Is.Not.Null);
        }
Example #19
        public void FunctionsFieldsAndProperties2()
        {
            Consumer c;
            var container = ContainerFactory.Create();

            CompositionBatch batch = new CompositionBatch();
            batch.AddPart(new SubtractProvider());
            batch.AddPart(c = new Consumer());
            container.Compose(batch);

            Assert.AreEqual(-1, c.op(c.a, c.b), "1 - 2 == -1");
        }
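The composed MEF parts are not shown. Hypothetical shapes that would satisfy the assertion (the contract names and member kinds are assumptions; only the result 1 - 2 == -1 is given by the test):

        // Hypothetical parts (assumption): SubtractProvider exports the operands and the
        // operation; Consumer imports them, so c.op(c.a, c.b) == 1 - 2 == -1.
        // requires: using System.ComponentModel.Composition;
        public class SubtractProvider
        {
            [Export("a")] public int A { get { return 1; } }
            [Export("b")] public int B { get { return 2; } }
            [Export("op")] public Func<int, int, int> Op { get { return (x, y) => x - y; } }
        }

        public class Consumer
        {
            [Import("a")] public int a;
            [Import("b")] public int b;
            [Import("op")] public Func<int, int, int> op;
        }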
Example #20
        public void ConsumerGetsOffsets()
        {
            OffsetRequest request = new OffsetRequest("test", 0, DateTime.Now.AddHours(-24).Ticks, 10);

            Consumer consumer = new Consumer(KafkaServer, KafkaPort);
            IList<long> list = consumer.GetOffsetsBefore(request);

            foreach (long l in list)
            {
                Console.Out.WriteLine(l);
            }
        }
Example #21
            public TradeAssortment Assure( Vendible vendible, Consumer consumer)
            {
                TradeAssortment assortment = Find(vendible, consumer);

                if (assortment == null)
                {
                    assortment = new TradeAssortment();
                    assortment.SetVendible(vendible);
                    assortment.SetAssortmentParts(consumer);
                }

                return assortment;
            }
Example #22
			public QueryContent(int id, Consumer<MyContent> callback)
			{
				Id = id;

				var queue = new ThreadPoolFiber();
				Channel<MyContent> channel = new ConsumerChannel<MyContent>(new SynchronousFiber(), callback);

				if (SynchronizationContext.Current != null)
				{
					channel = new SynchronizedChannel<MyContent>(queue, channel, SynchronizationContext.Current);
				}
				ResponseChannel = channel;
			}
        public void RunSoakTest()
        {
            var producer = new Producer();
            var consumer = new Consumer();

            var producerThread = new Thread(producer.Run);
            var consumerThread = new Thread(consumer.Run);

            producerThread.Start();
            consumerThread.Start();

            consumerThread.Join();
        }
Example #24
		public static string Sign(Consumer consumer,
		                          string url,
		                          IDictionary<string,string> postParams,
		                          Token token,
		                          string method,
		                          string realm) {
			RNGCryptoServiceProvider random = new RNGCryptoServiceProvider();
			byte[] nonce = new byte[4];
			random.GetBytes (nonce);
			UInt64 timestamp = Convert.ToUInt64((DateTime.UtcNow - new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc)).TotalSeconds);
			return Sign (consumer, url, postParams, token, method, realm,
			             timestamp.ToString(),
			             String.Format ("{0:X}{1:X}{2:X}{3:X}", nonce[0], nonce[1], nonce[2], nonce[3]));
		}
Example #25
File: play.cs Project: amongll/AVFX
 public static void Main(String[] args)
 {
     Console.WriteLine("Welcome to MLT.");
     Factory.init();
     Profile profile = new Profile("");
     Producer p = new Producer(profile, args[0], null);
     if (p.is_valid()) {
         Consumer c = new Consumer(profile, "sdl", null);
         c.set("rescale", "none");
         c.connect(p);
         c.start();
         while (!c.is_stopped())
             Thread.Sleep(300);
         c.stop();
     }
 }
        public Form1()
        {
            InitializeComponent();

            //create the producer
            producer = new Producer(HOST_NAME, QUEUE_NAME);

            //create the consumer
            consumer = new Consumer(HOST_NAME, QUEUE_NAME);

            //this form will handle messages
            consumer.onMessageReceived += handleMessage;

            //start consuming
            consumer.StartConsuming();
        }
Example #27
        public void Setup()
        {
            _apiPushServiceConfiguration = Substitute.For<IApiPushServiceConfiguration>();
            _apiPushServiceConfiguration.RetryDelayInSeconds.Returns(30);
            _apiPushServiceConfiguration.RetryAttempts.Returns(3);

            _pushSender = Substitute.For<IPushSender>();
            _subscriptionStorage = Substitute.For<ISubscriptionStorage>();
            _consumer = new Consumer(_subscriptionStorage, _pushSender, _apiPushServiceConfiguration);

            _itemUpdated = new ItemUpdated
            {
                ItemId = 1,
                PartnerId = 1,
                UpdatedAt = DateTime.Now
            };
        }
Example #28
        static void Main()
        {
            var dealer = new CarDealer();

            var michael = new Consumer("Michael");
            dealer.NewCarInfo += michael.NewCarIsHere;

            dealer.NewCar("Mercedes");

            var nick = new Consumer("Nick");
            dealer.NewCarInfo += nick.NewCarIsHere;

            dealer.NewCar("Ferrari");

            dealer.NewCarInfo -= michael.NewCarIsHere;

            dealer.NewCar("Toyota");
        }
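The CarDealer and Consumer types used by this and the following two examples are not listed. A minimal sketch of the usual shape behind this event pattern (the member names match the calls above; the bodies are assumptions):

        // Hypothetical definitions (assumption): CarDealer raises NewCarInfo, and
        // Consumer.NewCarIsHere is the matching handler.
        public class CarInfoEventArgs : EventArgs
        {
            public CarInfoEventArgs(string car) { Car = car; }
            public string Car { get; }
        }

        public class CarDealer
        {
            public event EventHandler<CarInfoEventArgs> NewCarInfo;

            public void NewCar(string car)
            {
                Console.WriteLine($"CarDealer, new car {car}");
                NewCarInfo?.Invoke(this, new CarInfoEventArgs(car));
            }
        }

        public class Consumer
        {
            private readonly string _name;

            public Consumer(string name) { _name = name; }

            public void NewCarIsHere(object sender, CarInfoEventArgs e)
            {
                Console.WriteLine($"{_name}: car {e.Car} is new");
            }
        }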
Example #29
        static void Main()
        {
            var dealer = new CarDealer();

              var michael = new Consumer("Michael");
              WeakCarInfoEventManager.AddListener(dealer, michael);

              dealer.NewCar("Mercedes");

              var sebastian = new Consumer("Sebastian");
              WeakCarInfoEventManager.AddListener(dealer, sebastian);

              dealer.NewCar("Ferrari");

              WeakCarInfoEventManager.RemoveListener(dealer, michael);

              dealer.NewCar("Red Bull Racing");
        }
Example #30
        static void Main()
        {
            var dealer = new CarDealer();

            var michael = new Consumer("Michael");
            WeakCarInfoEventManager.AddListener(dealer, michael);

            dealer.NewCar("Mercedes");

            var nick = new Consumer("Nick");
            WeakCarInfoEventManager.AddListener(dealer, nick);

            dealer.NewCar("Ferrari");

            WeakCarInfoEventManager.RemoveListener(dealer, michael);

            dealer.NewCar("Toyota");
        }
 public JT808_UnificationPushToWebSocket_Consumer(Dictionary <string, object> config, ILoggerFactory loggerFactory) : base(config, loggerFactory)
 {
     Logger   = loggerFactory.CreateLogger <JT808_UnificationPushToWebSocket_Consumer>();
     consumer = new Consumer <string, byte[]>(Config, new StringDeserializer(Encoding.UTF8), new ByteArrayDeserializer());
     RegisterEvent();
 }
Example #32
 public SatisfactionLinkConsumer(Consumer provider, float amount, float efficiency)
 {
     this.provider   = provider;
     this.amount     = amount;
     this.efficiency = efficiency;
 }
Example #33
        public async Task <IActionResult> Edit(EditConsumerViewModel editConsumerVM)
        {
            if (ModelState.IsValid)
            {
                //Get consumer
                Consumer consumerToEdit = _context.Consumers.Single(c => c.ID == editConsumerVM.ID);
                // Get associated files
                consumerToEdit.Files = _context.Files.Where(f => f.ConsumerID == consumerToEdit.ID).ToList();

                consumerToEdit.LastName  = editConsumerVM.LastName;
                consumerToEdit.FirstName = editConsumerVM.FirstName;
                consumerToEdit.DOB       = editConsumerVM.DOB;

                // ONLY when a consumer's active state is changed (so it won't affect file statuses otherwise):
                if (editConsumerVM.Active != consumerToEdit.Active)
                {
                    // If consumer becomes inactive, add EndDate
                    if (editConsumerVM.Active == false)
                    {
                        consumerToEdit.Active  = false;
                        consumerToEdit.EndDate = editConsumerVM.EndDate;
                        if (consumerToEdit.Files.Count != 0)
                        {
                            // Change status of files to "Inactive" and set file ShredDate
                            foreach (File file in consumerToEdit.Files)
                            {
                                file.SetShredDate(editConsumerVM);
                                _context.Update(file);
                            }
                        }
                    }

                    // If inactive consumer becomes active, wipe EndDate and change status of files to "OK"
                    if (editConsumerVM.Active == true)
                    {
                        consumerToEdit.Active  = true;
                        consumerToEdit.EndDate = null;
                        editConsumerVM.EndDate = null;
                        if (consumerToEdit.Files.Count > 0)
                        {
                            foreach (File file in consumerToEdit.Files)
                            {
                                file.Status = Status.OK;
                                file.SetShredDate(editConsumerVM);
                                _context.Update(file);
                            }
                        }
                    }
                }

                // If the active state remains unchanged, but an inactive consumer's EndDate is changed:
                if (editConsumerVM.EndDate != consumerToEdit.EndDate)
                {
                    // NOT that we'll allow it to be wiped...
                    if (editConsumerVM.EndDate == null && editConsumerVM.Active == false)
                    {
                        consumerToEdit.EndDate = DateTime.Now;
                    }
                    else   // Otherwise, set the change to the consumer
                    {
                        consumerToEdit.EndDate = editConsumerVM.EndDate;
                    }
                    // And update their files' ShredDate
                    if (consumerToEdit.Files != null)
                    {
                        foreach (File file in consumerToEdit.Files)
                        {
                            file.SetShredDate(editConsumerVM);
                            _context.Update(file);
                        }
                    }
                }

                _context.Update(consumerToEdit);
                await _context.SaveChangesAsync();

                return Redirect("/Consumers/Index");
            }

            return View(editConsumerVM);
        }
Example #34
 public void Dispose()
 {
     Consumer.Dispose();
 }
 public override void Dispose()
 {
     GC.SuppressFinalize(this);
     Consumer.Close(); // Commit offsets and leave the group cleanly.
     Consumer.Dispose();
 }
        static void Main(string[] args)
        {
            var builder = new ConfigurationBuilder()
                          .SetBasePath(Directory.GetCurrentDirectory())
                          .AddJsonFile("appsettings.json", optional: true, reloadOnChange: true)
                          .AddEnvironmentVariables();

            Configuration = builder.Build();

            Config = new Dictionary <string, object>
            {
                { "group.id", Configuration["GROUP_ID"] },
                { "bootstrap.servers", Configuration["KAFKA_END_POINT"] }
            };

            // Create the consumer
            using (var consumer = new Consumer <string, string>(Config, new StringDeserializer(Encoding.UTF8), new StringDeserializer(Encoding.UTF8)))
            {
                // Subscribe to the OnMessage event
                consumer.OnMessage += async(obj, msg) => {
                    Console.WriteLine($"Received Raw Consumption: {msg.Value}");
                    var data = JsonConvert.DeserializeObject <Consumption>(msg.Value);

                    // Parse the MPAN to get the MarketParticipantId and LLF
                    var mpan = MpanParser.Split(data.Mpan);

                    if (mpan.Complete)
                    {
                        var transformed =
                            new Calculate(data,
                                          new GetFactors(MpanHelper.GetMarketParticipantId(mpan.Result.DistributionId), mpan.Result.LLF, data.Date, data.StartTime).Results)
                            .Transform();

                        Publish(transformed);
                    }
                };

                consumer.OnLog += (obj, e) => {
                };
                // Subscribe to the Kafka topic
                consumer.Subscribe(new List <string> {
                    Configuration["CONSUMER_TOPIC"]
                });

                // Handle Cancel Keypress
                var cancelled = false;
                Console.CancelKeyPress += (_, e) =>
                {
                    e.Cancel  = true; // Prevent the process from terminating
                    cancelled = true;
                };

                Console.WriteLine("Ctrl-C to exit");

                // Poll for messages
                while (!cancelled)
                {
                    consumer.Poll(-1);
                }
            }
        }
        public async Task TestE2E()
        {
            var cancel = new CancellationTokenSource();

            var directoryEndpoint = "http://localhost:8080";

            var producer1Endpoint          = "tcp://localhost:8181";
            var producer1HeartbeatEndpoint = "tcp://localhost:8282";

            var consumerEndpoint          = "tcp://localhost:8383";
            var consumerHeartbeatEndpoint = "tcp://localhost:8484";

            var producer2Endpoint          = "tcp://localhost:8585";
            var producer2HeartbeatEndpoint = "tcp://localhost:8686";


            //create directory
            IWebHost host = null;

            new Task(() =>
            {
                host = new WebHostBuilder()
                       .UseKestrel()
                       .UseUrls(directoryEndpoint)
                       .UseStartup <DirectoryStartup>()
                       .Build();

                host.Run();
            }, cancel.Token).Start();


            await Task.Delay(500);

            var directory = RestService.For <IDirectory>(directoryEndpoint);

            //create producers
            var configurationProducer1 = new ProducerConfiguration()
            {
                IsTest           = true,
                Endpoint         = producer1Endpoint,
                HeartbeatEnpoint = producer1HeartbeatEndpoint,
                Id = Guid.NewGuid()
            };
            var configurationProducer2 = new ProducerConfiguration()
            {
                IsTest           = true,
                Endpoint         = producer2Endpoint,
                HeartbeatEnpoint = producer2HeartbeatEndpoint,
                Id = Guid.NewGuid()
            };


            var producer1 = new AccidentProducer(configurationProducer1, directory, new JsonSerializerSettings());
            var producer2 = new AccidentProducer(configurationProducer2, directory, new JsonSerializerSettings());

            //start only one producer
            producer1.Start();

            await Task.Delay(500);

            var configurationConsumer1 = new ConsumerConfiguration <AccidentEvent>()
            {
                Topic             = "Paris.Business",
                Id                = Guid.NewGuid(),
                Endpoint          = consumerEndpoint,
                HeartbeatEndpoint = consumerHeartbeatEndpoint
            };

            var consumedEvents = new List <AccidentEvent>();

            var consumer = new Consumer <AccidentEvent>(configurationConsumer1, directory, new JsonSerializerSettings());

            consumer.GetSubscription()
            .Subscribe(ev =>
            {
                consumedEvents.Add(ev);
            });

            //start consumer
            consumer.Start();

            await Task.Delay(1000);

            //the consumer should have fetched and subscribed to a producer
            var stateOfTheWorld = await directory.GetStateOfTheWorld();

            var currentEventCount = consumedEvents.Count;

            //the producer should have registered with the registry
            var producer = stateOfTheWorld.First();

            Assert.AreEqual(ProducerState.Alive, producer.State);

            //at least one event should have matched the filter
            Assert.Greater(currentEventCount, 0);
            Assert.AreEqual(1, stateOfTheWorld.Count());

            //memorize the current event count
            var eventCount = consumedEvents.Count;

            //kill the producer
            producer1.Stop();

            await Task.Delay(1000);

            //the directory heartbeat should have detected the dead producer; the consumer should not have consumed any more events
            stateOfTheWorld = await directory.GetStateOfTheWorld();

            producer = stateOfTheWorld.First();

            Assert.AreEqual(ProducerState.NotResponding, producer.State);
            Assert.AreEqual(eventCount, consumedEvents.Count);

            //start the second producer
            producer2.Start();

            await Task.Delay(1000);

            //the directory should have registered the new producer, and the consumer should have subscribed to the new producer
            stateOfTheWorld = await directory.GetStateOfTheWorld();

            Assert.AreEqual(2, stateOfTheWorld.Count());
            Assert.Greater(consumedEvents.Count, currentEventCount);

            cancel.Cancel();

            producer2.Stop();
            await host.StopAsync();

            consumer.Stop();
        }
        static void Main(string[] args)
        {
            // The Kafka endpoint address
            string kafkaEndpoint = ConfigurationManager.AppSettings["broaker"].ToString(); 
            //"wn0-corpka.f3l4t1p4pmae3jfkg24ryvt0xa.px.internal.cloudapp.net:9092,wn1-corpka.f3l4t1p4pmae3jfkg24ryvt0xa.px.internal.cloudapp.net:9092";

            // The Kafka topic we'll be using
            string kafkaTopic = ConfigurationManager.AppSettings["kafkatopic"].ToString();
            //"iottopic";


            // Create the consumer configuration
            var consumerConfig = new Dictionary<string, object>
            {
                { "group.id", "myconsumer" },
                { "bootstrap.servers", kafkaEndpoint },
                {"auto.offset.reset" ,"earliest" }
            };

            // Create the consumer
            using (var consumer = new Consumer<Null, string>(consumerConfig, null, new StringDeserializer(Encoding.UTF8)))
            {
                // Subscribe to the OnMessage event
                consumer.OnMessage += (obj, msg) =>
                {
                    Console.WriteLine($"Received: {msg.Value}");
                    try
                    {
                        using (var client = new WebClient())
                        {
                            if (!string.IsNullOrEmpty(msg.Value))
                            {
                                string message =msg.Value.Replace("Event ", "");
                                string url= ConfigurationManager.AppSettings["uiurl"].ToString();
                                client.DownloadString($"{url}/{message}");
                            }
                        }
                    }
                    catch (Exception ex)
                    {

                        Console.WriteLine($"Error: {ex.Message}");
                    }
                };
                // Subscribe to the Kafka topic
                consumer.Subscribe(new List<string>() { kafkaTopic });
                // Handle Cancel Keypress 
                var cancelled = false;
                Console.CancelKeyPress += (_, e) =>
                {
                    e.Cancel = true; // prevent the process from terminating.
                    cancelled = true;
                };
                Console.WriteLine("Ctrl-C to exit.");
                // Poll for messages
                while (!cancelled)
                {
                    consumer.Poll(TimeSpan.FromMinutes(1));
                }
            }
        }
Example #39
        protected internal static CodegenMethod CreateSortPropertiesCodegen(
            OrderByProcessorForgeImpl forge,
            CodegenClassScope classScope,
            CodegenNamedMethods namedMethods)
        {
            Consumer<CodegenMethod> code = method => {
                string[] expressions = null;
                bool[] descending = null;
                if (classScope.IsInstrumented) {
                    expressions = forge.ExpressionTexts;
                    descending = forge.DescendingFlags;
                }

                method.Block.DeclareVar<object[]>(
                    "sortProperties",
                    NewArrayByLength(typeof(object), ArrayLength(REF_GENERATINGEVENTS)));

                var elements = forge.OrderBy;
                var forEach = method.Block.DeclareVar<int>("count", Constant(0))
                    .ForEach(typeof(EventBean[]), "eventsPerStream", REF_GENERATINGEVENTS);

                if (forge.IsNeedsGroupByKeys) {
                    forEach.ExprDotMethod(
                        MEMBER_AGGREGATIONSVC,
                        "SetCurrentAccess",
                        ArrayAtIndex(Ref("groupByKeys"), Ref("count")),
                        ExprDotName(REF_EXPREVALCONTEXT, "AgentInstanceId"),
                        ConstantNull());
                }

                forEach.Apply(
                    Instblock(
                        classScope,
                        "qOrderBy",
                        Ref("eventsPerStream"),
                        Constant(expressions),
                        Constant(descending)));
                if (elements.Length == 1) {
                    forEach.AssignArrayElement(
                        "sortProperties",
                        Ref("count"),
                        LocalMethod(
                            CodegenLegoMethodExpression.CodegenExpression(
                                elements[0].ExprNode.Forge,
                                method,
                                classScope,
                                true),
                            Ref("eventsPerStream"),
                            REF_ISNEWDATA,
                            REF_EXPREVALCONTEXT));
                }
                else {
                    forEach.DeclareVar<object[]>(
                        "values",
                        NewArrayByLength(typeof(object), Constant(forge.OrderBy.Length)));
                    for (var i = 0; i < forge.OrderBy.Length; i++) {
                        forEach.AssignArrayElement(
                            "values",
                            Constant(i),
                            LocalMethod(
                                CodegenLegoMethodExpression.CodegenExpression(
                                    elements[i].ExprNode.Forge,
                                    method,
                                    classScope,
                                    true),
                                Ref("eventsPerStream"),
                                REF_ISNEWDATA,
                                REF_EXPREVALCONTEXT));
                    }

                    forEach.AssignArrayElement(
                        "sortProperties",
                        Ref("count"),
                        NewInstance<HashableMultiKey>(Ref("values")));
                }

                forEach.Apply(Instblock(classScope, "aOrderBy", Ref("sortProperties")))
                    .IncrementRef("count");
                method.Block.MethodReturn(StaticMethod(typeof(CompatExtensions), "AsList", Ref("sortProperties")));
            };
            return namedMethods.AddMethod(
                typeof(IList<object>),
                "CreateSortProperties",
                CodegenNamedParam.From(
                    typeof(EventBean[][]), REF_GENERATINGEVENTS.Ref,
                    typeof(object[]), "groupByKeys",
                    typeof(bool), REF_ISNEWDATA.Ref,
                    typeof(ExprEvaluatorContext), REF_EXPREVALCONTEXT.Ref,
                    typeof(AggregationService), MEMBER_AGGREGATIONSVC.Ref),
                typeof(OrderByProcessorImpl),
                classScope,
                code);
        }
Example #40
        public static CodegenMethod DetermineLocalMinMaxCodegen(
            OrderByProcessorForgeImpl forge,
            CodegenClassScope classScope,
            CodegenNamedMethods namedMethods)
        {
            var elements = forge.OrderBy;
            CodegenExpression comparator = classScope.AddOrGetDefaultFieldSharable(forge.IComparer);

            Consumer<CodegenMethod> code = method => {
                method.Block.DeclareVar<object>("localMinMax", ConstantNull())
                    .DeclareVar<EventBean>("outgoingMinMaxBean", ConstantNull())
                    .DeclareVar<int>("count", Constant(0));

                if (elements.Length == 1) {
                    var forEach = method.Block.ForEach(typeof(EventBean[]), "eventsPerStream", REF_GENERATINGEVENTS);

                    forEach.DeclareVar<object>(
                            "sortKey",
                            LocalMethod(
                                CodegenLegoMethodExpression.CodegenExpression(
                                    elements[0].ExprNode.Forge,
                                    method,
                                    classScope,
                                    true),
                                Ref("eventsPerStream"),
                                REF_ISNEWDATA,
                                REF_EXPREVALCONTEXT))
                        .IfCondition(
                            Or(
                                EqualsNull(Ref("localMinMax")),
                                Relational(
                                    ExprDotMethod(comparator, "Compare", Ref("localMinMax"), Ref("sortKey")),
                                    GT,
                                    Constant(0))))
                        .AssignRef("localMinMax", Ref("sortKey"))
                        .AssignRef("outgoingMinMaxBean", ArrayAtIndex(REF_OUTGOINGEVENTS, Ref("count")))
                        .BlockEnd()
                        .IncrementRef("count");
                }
                else {
                    method.Block.DeclareVar<object[]>(
                            "values",
                            NewArrayByLength(typeof(object), Constant(elements.Length)))
                        .DeclareVar<HashableMultiKey>(
                            "valuesMk",
                            NewInstance<HashableMultiKey>(Ref("values")));

                    var forEach = method.Block.ForEach(typeof(EventBean[]), "eventsPerStream", REF_GENERATINGEVENTS);

                    if (forge.IsNeedsGroupByKeys) {
                        forEach.ExprDotMethod(
                            MEMBER_AGGREGATIONSVC,
                            "SetCurrentAccess",
                            ArrayAtIndex(Ref("groupByKeys"), Ref("count")),
                            ExprDotMethod(REF_EXPREVALCONTEXT, "GetAgentInstanceId", ConstantNull()));
                    }

                    for (var i = 0; i < elements.Length; i++) {
                        forEach.AssignArrayElement(
                            "values",
                            Constant(i),
                            LocalMethod(
                                CodegenLegoMethodExpression.CodegenExpression(
                                    elements[i].ExprNode.Forge,
                                    method,
                                    classScope,
                                    true),
                                Ref("eventsPerStream"),
                                REF_ISNEWDATA,
                                REF_EXPREVALCONTEXT));
                    }

                    forEach.IfCondition(
                            Or(
                                EqualsNull(Ref("localMinMax")),
                                Relational(
                                    ExprDotMethod(comparator, "Compare", Ref("localMinMax"), Ref("valuesMk")),
                                    GT,
                                    Constant(0))))
                        .AssignRef("localMinMax", Ref("valuesMk"))
                        .AssignRef("values", NewArrayByLength(typeof(object), Constant(elements.Length)))
                        .AssignRef("valuesMk", NewInstance<HashableMultiKey>(Ref("values")))
                        .AssignRef("outgoingMinMaxBean", ArrayAtIndex(REF_OUTGOINGEVENTS, Ref("count")))
                        .BlockEnd()
                        .IncrementRef("count");
                }

                method.Block.MethodReturn(Ref("outgoingMinMaxBean"));
            };

            return namedMethods.AddMethod(
                typeof(EventBean),
                "DetermineLocalMinMax",
                CodegenNamedParam.From(
                    typeof(EventBean[]), REF_OUTGOINGEVENTS.Ref,
                    typeof(EventBean[][]), REF_GENERATINGEVENTS.Ref,
                    typeof(bool), NAME_ISNEWDATA,
                    typeof(ExprEvaluatorContext), NAME_EXPREVALCONTEXT,
                    typeof(AggregationService), MEMBER_AGGREGATIONSVC.Ref),
                typeof(OrderByProcessorImpl),
                classScope,
                code);
        }
Example #41
 public async Task DeleteAsync(Consumer consumer)
 {
     throw new NotImplementedException();
 }
 public OneToOneBlockingCollectionThroughputTest()
 {
     _queue        = new BlockingCollection <PerfEvent>(_bufferSize);
     _eventHandler = new AdditionEventHandler();
     _consumer     = new Consumer(_queue, _eventHandler);
 }
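The Consumer used by this throughput test is not shown. A sketch, assuming it drains the BlockingCollection on a background task and feeds each event to the handler (the Start/OnEvent member names are hypothetical):

 // Hypothetical consumer loop (assumption): drain the queue and pass every event
 // to the handler until the producer calls CompleteAdding().
 public class Consumer
 {
     private readonly BlockingCollection<PerfEvent> _queue;
     private readonly AdditionEventHandler _eventHandler;

     public Consumer(BlockingCollection<PerfEvent> queue, AdditionEventHandler eventHandler)
     {
         _queue = queue;
         _eventHandler = eventHandler;
     }

     public Task Start() => Task.Run(() =>
     {
         // GetConsumingEnumerable blocks until items arrive and ends once
         // CompleteAdding() has been called and the queue is empty.
         foreach (var perfEvent in _queue.GetConsumingEnumerable())
         {
             _eventHandler.OnEvent(perfEvent); // hypothetical handler method
         }
     });
 }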
Example #43
            public async Task <ConsumerDTO> Handle(Command request,
                                                   CancellationToken cancellationToken)
            {
                if (await _context.Users.Where(x => x.Email == request.Email).AnyAsync())
                {
                    throw new RestException(HttpStatusCode.BadRequest, new { Email = "Email already in use." });
                }

                if (await _context.Users.Where(x => x.UserName == request.Username).AnyAsync())
                {
                    throw new RestException(HttpStatusCode.BadRequest, new { Username = "******" });
                }

                if (await _context.tblConsumers.Where(x => x.MeterId == request.MeterId).AnyAsync())
                {
                    throw new RestException(HttpStatusCode.BadRequest, new { MeterId = "Meter Id already in use." });
                }

                if (await _context.tblConsumers.Where(x => x.ConsumerId == request.ConsumerId).AnyAsync())
                {
                    throw new RestException(HttpStatusCode.BadRequest, new { ConsumerId = "Consumer Id already in use." });
                }

                // Creating Basic User

                var user = new AppUser
                {
                    DisplayName = request.DisplayName,
                    Email       = request.Email,
                    UserName    = request.Username
                };

                var UserCreationResult = await _userManager.CreateAsync(user, request.Password);

                var AssignRoleToUser = await _userManager.AddToRoleAsync(user, "Consumer");

                if (UserCreationResult.Succeeded && AssignRoleToUser.Succeeded)
                {
                    //Adding Consumer Information to User

                    var consumer = new Consumer()
                    {
                        ConsumerId     = request.ConsumerId,
                        NationalId     = request.NationalId,
                        MeterCapacity  = request.MeterCapacity,
                        MeterId        = request.MeterId,
                        TypeOfConsumer = request.TypeOfConsumer,
                        AppIdFK        = await _userManager.GetUserIdAsync(user)
                    };

                    await _context.tblConsumers.AddAsync(consumer);
                }

                var ConsumerCreationResult = await _context.SaveChangesAsync() > 0;

                if (ConsumerCreationResult)
                {
                    try
                    {
                        return(new ConsumerDTO
                        {
                            DisplayName = user.DisplayName,
                            Token = _jwtGenerator.CreateToken(user),
                            Username = user.UserName,
                            Image = null,
                            ConsumerId = user.Consumer.ConsumerId,
                            MeterCapacity = user.Consumer.MeterCapacity,
                            MeterId = user.Consumer.MeterId,
                            TypeOfConsumer = user.Consumer.TypeOfConsumer,
                            Roles = await _userManager.GetRolesAsync(user)
                        });
                    }
                    catch (Exception exp)
                    {
                        throw new RestException(HttpStatusCode.BadRequest, "Problem Returning ConsumerDTO");
                    }
                }

                throw new RestException(HttpStatusCode.InternalServerError, "Problem Creating Consumer");
            }
Example #44
//JAVA TO C# CONVERTER TODO TASK: Most Java annotations will not have direct .NET equivalent attributes:
//ORIGINAL LINE: @Override public void bulk(@Nonnull Consumer<Log> consumer)
        public override void Bulk(Consumer <Log> consumer)
        {
            consumer.accept(this);
        }
        public async Task TestProducerConsumer()
        {
            var cancel = new CancellationTokenSource();

            var directoryEndpoint          = "http://localhost:8080";
            var producer1Endpoint          = "tcp://localhost:8181";
            var producer1HeartbeatEndpoint = "tcp://localhost:8282";

            IWebHost host = null;

            new Task(() =>
            {
                host = new WebHostBuilder()
                       .UseKestrel()
                       .UseUrls(directoryEndpoint)
                       .UseStartup <DirectoryStartup>()
                       .Build();

                host.Run();
            }, cancel.Token).Start();


            await Task.Delay(500);

            var directory = RestService.For <IDirectory>(directoryEndpoint);

            var configurationProducer1 = new ProducerConfiguration()
            {
                IsTest           = true,
                Endpoint         = producer1Endpoint,
                HeartbeatEnpoint = producer1HeartbeatEndpoint,
                Id = Guid.NewGuid()
            };

            var producer1 = new AccidentProducer(configurationProducer1, directory, new JsonSerializerSettings());

            producer1.Start();

            await Task.Delay(500);

            var configurationConsumer1 = new ConsumerConfiguration <AccidentEvent>()
            {
                Topic = "Paris.Business",
                Id    = Guid.NewGuid()
            };

            var consumedEvents = new List <AccidentEvent>();

            var consumer = new Consumer <AccidentEvent>(configurationConsumer1, directory, new JsonSerializerSettings());

            consumer.GetSubscription()
            .Subscribe(ev =>
            {
                consumedEvents.Add(ev);
            });


            consumer.Start();

            await Task.Delay(500);

            cancel.Cancel();

            await host.StopAsync();

            producer1.Stop();
            consumer.Stop();

            Assert.IsTrue(consumedEvents.Count > 0);
        }
Example #46
 public void Cleanup()
 {
     Consumer.Dispose();
 }
Example #47
    static void Main(string[] args)
    {
        var consumer = new Consumer(() => new EFEntityRepository());

        consumer.OutputData();
    }
Example #48
 public void TraverseExpressions(Consumer<CodegenExpression> consumer)
 {
 }
 public UserRemoteDbManager(Consumer consumer, MySqlConnection connection, SQLiteConnection localConn) : base(consumer, connection, localConn)
 {
 }
Example #50
        public static void Timestamps(string bootstrapServers, string singlePartitionTopic, string partitionedTopic)
        {
            LogToFile("start Timestamps");

            var producerConfig = new ProducerConfig
            {
                BootstrapServers = bootstrapServers
            };

            var consumerConfig = new ConsumerConfig
            {
                GroupId          = Guid.NewGuid().ToString(),
                BootstrapServers = bootstrapServers,
                SessionTimeoutMs = 6000
            };

            var drs_beginProduce = new List <DeliveryReport <Null, string> >();
            var drs_task         = new List <DeliveryResult <Null, string> >();

            using (var producer = new Producer <Null, string>(producerConfig))
            {
                // --- ProduceAsync, serializer case.

                drs_task.Add(producer.ProduceAsync(
                                 singlePartitionTopic,
                                 new Message <Null, string> {
                    Value = "testvalue"
                }).Result);

                // TimestampType: CreateTime
                drs_task.Add(producer.ProduceAsync(
                                 new TopicPartition(singlePartitionTopic, 0),
                                 new Message <Null, string>
                {
                    Value     = "test-value",
                    Timestamp = new Timestamp(new DateTime(2008, 11, 12, 0, 0, 0, DateTimeKind.Utc))
                }).Result);

                // TimestampType: CreateTime (default)
                drs_task.Add(producer.ProduceAsync(
                                 new TopicPartition(singlePartitionTopic, 0),
                                 new Message <Null, string> {
                    Value = "test-value"
                }).Result);

                // TimestampType: LogAppendTime
                Assert.Throws <ArgumentException>(() =>
                                                  producer.ProduceAsync(
                                                      new TopicPartition(singlePartitionTopic, 0),
                                                      new Message <Null, string>
                {
                    Value     = "test-value",
                    Timestamp = new Timestamp(DateTime.Now, TimestampType.LogAppendTime)
                }).Result);

                // TimestampType: NotAvailable
                Assert.Throws <ArgumentException>(() =>
                                                  producer.ProduceAsync(
                                                      new TopicPartition(singlePartitionTopic, 0),
                                                      new Message <Null, string>
                {
                    Value     = "test-value",
                    Timestamp = new Timestamp(10, TimestampType.NotAvailable)
                }).Result);

                Action <DeliveryReport <Null, string> > dh
                    = (DeliveryReport <Null, string> dr) => drs_beginProduce.Add(dr);


                // --- begin produce, serializer case.

                producer.BeginProduce(
                    singlePartitionTopic,
                    new Message <Null, string> {
                    Value = "testvalue"
                }, dh);

                // TimestampType: CreateTime
                producer.BeginProduce(
                    new TopicPartition(singlePartitionTopic, 0),
                    new Message <Null, string>
                {
                    Value     = "test-value",
                    Timestamp = new Timestamp(new DateTime(2008, 11, 12, 0, 0, 0, DateTimeKind.Utc))
                },
                    dh);

                // TimestampType: CreateTime (default)
                producer.BeginProduce(
                    new TopicPartition(singlePartitionTopic, 0),
                    new Message <Null, string> {
                    Value = "test-value"
                },
                    dh);

                // TimestampType: LogAppendTime
                Assert.Throws <ArgumentException>(() => producer.BeginProduce(
                                                      new TopicPartition(singlePartitionTopic, 0),
                                                      new Message <Null, string>
                {
                    Value     = "test-value",
                    Timestamp = new Timestamp(DateTime.Now, TimestampType.LogAppendTime)
                },
                                                      dh));

                // TimestampType: NotAvailable
                Assert.Throws <ArgumentException>(() => producer.BeginProduce(
                                                      new TopicPartition(singlePartitionTopic, 0),
                                                      new Message <Null, string>
                {
                    Value     = "test-value",
                    Timestamp = new Timestamp(10, TimestampType.NotAvailable)
                },
                                                      dh));

                Assert.Equal(0, producer.Flush(TimeSpan.FromSeconds(10)));
            }

            var drs2_beginProduce = new List <DeliveryReport>();
            var drs2_task         = new List <DeliveryResult>();

            using (var producer = new Producer(producerConfig))
            {
                // --- ProduceAsync, byte[] case.

                drs2_task.Add(producer.ProduceAsync(
                    singlePartitionTopic,
                    new Message { Timestamp = Timestamp.Default }).Result);

                // TimestampType: CreateTime
                drs2_task.Add(producer.ProduceAsync(
                    singlePartitionTopic,
                    new Message { Timestamp = new Timestamp(new DateTime(2008, 11, 12, 0, 0, 0, DateTimeKind.Utc)) }).Result);

                // TimestampType: CreateTime (default)
                drs2_task.Add(producer.ProduceAsync(
                    singlePartitionTopic,
                    new Message { Timestamp = Timestamp.Default }).Result);

                // TimestampType: LogAppendTime
                Assert.Throws <ArgumentException>(() =>
                    producer.ProduceAsync(
                        singlePartitionTopic,
                        new Message { Timestamp = new Timestamp(DateTime.Now, TimestampType.LogAppendTime) }).Result);

                // TimestampType: NotAvailable
                Assert.Throws <ArgumentException>(() =>
                    producer.ProduceAsync(
                        singlePartitionTopic,
                        new Message { Timestamp = new Timestamp(10, TimestampType.NotAvailable) }).Result);

                // --- begin produce, byte[] case.

                Action <DeliveryReport> dh = (DeliveryReport dr) => drs2_beginProduce.Add(dr);

                producer.BeginProduce(
                    singlePartitionTopic,
                    new Message { Timestamp = Timestamp.Default },
                    dh);

                // TimestampType: CreateTime
                producer.BeginProduce(
                    singlePartitionTopic,
                    new Message { Timestamp = new Timestamp(new DateTime(2008, 11, 12, 0, 0, 0, DateTimeKind.Utc)) },
                    dh);

                // TimestampType: CreateTime (default)
                producer.BeginProduce(
                    singlePartitionTopic,
                    new Message { Timestamp = Timestamp.Default },
                    dh);

                // TimestampType: LogAppendTime
                Assert.Throws <ArgumentException>(() =>
                    producer.BeginProduce(
                        singlePartitionTopic,
                        new Message { Timestamp = new Timestamp(DateTime.Now, TimestampType.LogAppendTime) },
                        dh));

                // TimestampType: NotAvailable
                Assert.Throws <ArgumentException>(() =>
                    producer.BeginProduce(
                        singlePartitionTopic,
                        new Message { Timestamp = new Timestamp(10, TimestampType.NotAvailable) },
                        dh));

                Assert.Equal(0, producer.Flush(TimeSpan.FromSeconds(10)));
            }

            using (var consumer = new Consumer <Null, string>(consumerConfig))
            {
                // serializing async

                assertCloseToNow(consumer, drs_task[0].TopicPartitionOffset);

                consumer.Assign(new List <TopicPartitionOffset>()
                {
                    drs_task[1].TopicPartitionOffset
                });
                var record = consumer.Consume(TimeSpan.FromSeconds(10));
                Assert.NotNull(record.Message);
                Assert.Equal(TimestampType.CreateTime, record.Message.Timestamp.Type);
                Assert.Equal(record.Message.Timestamp, new Timestamp(new DateTime(2008, 11, 12, 0, 0, 0, DateTimeKind.Utc)));

                assertCloseToNow(consumer, drs_task[2].TopicPartitionOffset);

                // serializing deliveryhandler

                assertCloseToNow(consumer, drs_beginProduce[0].TopicPartitionOffset);

                consumer.Assign(new List <TopicPartitionOffset>()
                {
                    drs_beginProduce[1].TopicPartitionOffset
                });
                record = consumer.Consume(TimeSpan.FromSeconds(10));
                Assert.NotNull(record.Message);
                Assert.Equal(TimestampType.CreateTime, record.Message.Timestamp.Type);
                Assert.Equal(record.Message.Timestamp, new Timestamp(new DateTime(2008, 11, 12, 0, 0, 0, DateTimeKind.Utc)));

                assertCloseToNow(consumer, drs_beginProduce[2].TopicPartitionOffset);
            }

            using (var consumer = new Consumer(consumerConfig))
            {
                ConsumeResult record;

                // non-serializing async

                assertCloseToNow_byte(consumer, drs2_task[0].TopicPartitionOffset);

                consumer.Assign(new List <TopicPartitionOffset>()
                {
                    drs2_task[1].TopicPartitionOffset
                });
                record = consumer.Consume(TimeSpan.FromSeconds(10));
                Assert.NotNull(record.Message);
                Assert.Equal(TimestampType.CreateTime, record.Message.Timestamp.Type);
                Assert.Equal(record.Message.Timestamp, new Timestamp(new DateTime(2008, 11, 12, 0, 0, 0, DateTimeKind.Utc)));

                assertCloseToNow_byte(consumer, drs2_task[2].TopicPartitionOffset);

                // non-serializing deliveryhandler

                assertCloseToNow_byte(consumer, drs2_beginProduce[0].TopicPartitionOffset);

                consumer.Assign(new List <TopicPartitionOffset>()
                {
                    drs2_beginProduce[1].TopicPartitionOffset
                });
                record = consumer.Consume(TimeSpan.FromSeconds(10));
                Assert.NotNull(record.Message);
                Assert.Equal(TimestampType.CreateTime, record.Message.Timestamp.Type);
                Assert.Equal(record.Message.Timestamp, new Timestamp(new DateTime(2008, 11, 12, 0, 0, 0, DateTimeKind.Utc)));

                assertCloseToNow_byte(consumer, drs2_beginProduce[2].TopicPartitionOffset);
            }

            Assert.Equal(0, Library.HandleCount);
            LogToFile("end   Timestamps");
        }
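The Timestamps test above delegates several checks to assertCloseToNow / assertCloseToNow_byte, which are not part of this excerpt. Below is a minimal sketch of what the serializing variant might look like against the same pre-1.0 Confluent.Kafka API; the helper name matches the calls above, but the body and the one-minute tolerance are assumptions.

        private static void assertCloseToNow(Consumer <Null, string> consumer, TopicPartitionOffset tpo)
        {
            // Read the record back from the reported offset ...
            consumer.Assign(new List <TopicPartitionOffset>() { tpo });
            var record = consumer.Consume(TimeSpan.FromSeconds(10));
            Assert.NotNull(record.Message);

            // ... and check that its CreateTime timestamp (set when the message was produced)
            // is within a small tolerance of the current time.
            Assert.Equal(TimestampType.CreateTime, record.Message.Timestamp.Type);
            var nowMs = (long)(DateTime.UtcNow - Timestamp.UnixTimeEpoch).TotalMilliseconds;
            Assert.True(Math.Abs(nowMs - record.Message.Timestamp.UnixTimestampMs) < 60000);
        }

assertCloseToNow_byte would presumably be the same logic written against the non-generic Consumer / ConsumeResult types used in the byte[] half of the test.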
Example #51
0
 public abstract void TraverseExpressions(
     Consumer<CodegenExpression> consumer);
Example #52
0
        public void SimpleSyncProducerSends2MessagesAndConsumerConnectorGetsThemBack()
        {
            var prodConfig     = this.SyncProducerConfig1;
            var consumerConfig = this.ZooKeeperBasedConsumerConfig;
            var consConf       = this.ConsumerConfig1;

            // first producing
            string payload1 = "kafka 1.";

            byte[] payloadData1 = Encoding.UTF8.GetBytes(payload1);
            var    msg1         = new Message(payloadData1);

            string payload2 = "kafka 2.";

            byte[] payloadData2 = Encoding.UTF8.GetBytes(payload2);
            var    msg2         = new Message(payloadData2);

            var producerRequest = new ProducerRequest(CurrentTestTopic, 0, new List <Message> {
                msg1, msg2
            });

            using (var producer = new SyncProducer(prodConfig))
            {
                producer.Send(producerRequest);
            }

            var  consumer = new Consumer(consConf);
            long offset   = 0;
            var  result   = consumer.Fetch(
                new FetchRequest(CurrentTestTopic, 0, offset, 400));

            foreach (var resultItem in result)
            {
                offset += resultItem.Offset;
            }

            // now consuming
            var resultMessages = new List <Message>();

            using (IConsumerConnector consumerConnector = new ZookeeperConsumerConnector(consumerConfig, true))
            {
                var topicCount = new Dictionary <string, int> {
                    { CurrentTestTopic, 1 }
                };
                var messages = consumerConnector.CreateMessageStreams(topicCount);
                var sets     = messages[CurrentTestTopic];
                try
                {
                    foreach (var set in sets)
                    {
                        foreach (var message in set)
                        {
                            resultMessages.Add(message);
                        }
                    }
                }
                catch (ConsumerTimeoutException)
                {
                    // do nothing, this is expected
                }
            }

            Assert.AreEqual(2, resultMessages.Count);
            Assert.AreEqual(msg1.ToString(), resultMessages[0].ToString());
            Assert.AreEqual(msg2.ToString(), resultMessages[1].ToString());
        }
Example #53
0
        private void Consume(CancellationToken token, BlockingCollection <MessageProxy <RowChange> > accumulatedChanges, string topic, string table)
        {
            var conf = new Dictionary <string, object>
            {
                { "group.id", $"{table}-consumer-group" },
                { "statistics.interval.ms", 60000 },
                { "bootstrap.servers", _kafkaBootstrapServers }
            };

            foreach (var confPair in conf)
            {
                Console.WriteLine(topic + " - " + confPair.Key + ": " + confPair.Value);
            }

            using (var consumer = new Consumer <Null, string>(conf, null, new StringDeserializer(Encoding.UTF8)))
            {
                consumer.OnError += (_, msg)
                                    => Console.WriteLine($"{topic} - Error: {msg.Reason}");

                consumer.OnConsumeError += (_, msg)
                                           => Console.WriteLine($"{topic} - Consume error: {msg.Error.Reason}");

                consumer.OnPartitionsAssigned += (_, partitions) =>
                {
                    Console.WriteLine($"{topic} - Assigned partitions: [{string.Join(", ", partitions)}], member id: {consumer.MemberId}");
                    consumer.Assign(partitions);
                };

                consumer.OnPartitionsRevoked += (_, partitions) =>
                {
                    Console.WriteLine($"{topic} - Revoked partitions: [{string.Join(", ", partitions)}]");
                    consumer.Unassign();
                };

                Console.WriteLine($"Subscribing to topic {topic}");
                consumer.Subscribe(topic);
                int secondsWithoutMessage = 0;

                while (!token.IsCancellationRequested)
                {
                    Message <Null, string> msg = null;
                    if (consumer.Consume(out msg, TimeSpan.FromSeconds(1)))
                    {
                        AddToBuffer(consumer, msg, accumulatedChanges);
                        secondsWithoutMessage = 0;
                    }
                    else
                    {
                        secondsWithoutMessage++;
                        if (secondsWithoutMessage % 30 == 0)
                        {
                            Console.WriteLine($"{topic}: No messages in last {secondsWithoutMessage} seconds");
                        }

                        Task.Delay(100).Wait();
                    }
                }
            }

            accumulatedChanges.CompleteAdding(); // notifies consumers that no more messages will come
        }
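Example #53's poll loop depends on surrounding pieces that are not shown here (the _kafkaBootstrapServers field, the AddToBuffer helper and the MessageProxy<RowChange> type). The following is a hedged sketch of how a caller might drive Consume and drain the buffer; the StartConsuming name, the bounded capacity and the empty processing body are illustrative only.

        private void StartConsuming(string topic, string table, CancellationToken token)
        {
            // Bounded buffer so a slow downstream stage applies back-pressure to the Kafka poll loop.
            var accumulatedChanges = new BlockingCollection<MessageProxy<RowChange>>(10000);

            // Run the poll loop from Example #53 on a background task.
            var pollTask = Task.Run(() => Consume(token, accumulatedChanges, topic, table));

            // Drain until Consume calls CompleteAdding (i.e. until cancellation is requested).
            foreach (var change in accumulatedChanges.GetConsumingEnumerable())
            {
                // apply the row change to the target table here
            }

            pollTask.Wait();
        }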
Example #54
0
 public void TraverseExpressions(Consumer<CodegenExpression> consumer) {
     foreach (var statement in _statements) {
         statement.TraverseExpressions(consumer);
     }
 }
Example #55
0
        /// <summary>
        ///     In this example
        ///         - offsets are manually committed.
        ///         - no extra thread is created for the Poll (Consume) loop.
        /// </summary>
        public static void Run_Consume(string brokerList, List <string> topics, CancellationToken cancellationToken)
        {
            var config = new ConsumerConfig
            {
                BootstrapServers     = brokerList,
                GroupId              = "csharp-consumer",
                EnableAutoCommit     = false,
                StatisticsIntervalMs = 5000,
                SessionTimeoutMs     = 6000,
                AutoOffsetReset      = AutoOffsetResetType.Earliest,
                EnablePartitionEof   = true
            };

            const int commitPeriod = 5;

            using (var consumer = new Consumer <Ignore, string>(config))
            {
                // Note: All event handlers are called on the main .Consume thread.

                // Raised when the consumer has been notified of a new assignment set.
                // You can use this event to perform actions such as retrieving offsets
                // from an external source / manually setting start offsets using
                // the Assign method. You can even call Assign with a different set of
                // partitions than those in the assignment. If you do not call Assign
                // in a handler of this event, the consumer will be automatically
                // assigned to the partitions of the assignment set and consumption
                // will start from last committed offsets or in accordance with
                // the auto.offset.reset configuration parameter for partitions where
                // there is no committed offset.
                consumer.OnPartitionsAssigned += (_, partitions)
                                                 => Console.WriteLine($"Assigned partitions: [{string.Join(", ", partitions)}], member id: {consumer.MemberId}");

                // Raised when the consumer's current assignment set has been revoked.
                consumer.OnPartitionsRevoked += (_, partitions)
                                                => Console.WriteLine($"Revoked partitions: [{string.Join(", ", partitions)}]");

                consumer.OnError += (_, e)
                                    => Console.WriteLine($"Error: {e.Reason}");

                consumer.OnStatistics += (_, json)
                                         => Console.WriteLine($"Statistics: {json}");

                consumer.Subscribe(topics);

                while (!cancellationToken.IsCancellationRequested)
                {
                    try
                    {
                        var consumeResult = consumer.Consume(cancellationToken);

                        if (consumeResult.IsPartitionEOF)
                        {
                            Console.WriteLine(
                                $"Reached end of topic {consumeResult.Topic}, partition {consumeResult.Partition}.");

                            continue;
                        }

                        Console.WriteLine($"Received message at {consumeResult.TopicPartitionOffset}: {consumeResult.Value}");

                        if (consumeResult.Offset % commitPeriod == 0)
                        {
                            // The Commit method sends a "commit offsets" request to the Kafka
                            // cluster and synchronously waits for the response. This is very
                            // slow compared to the rate at which the consumer is capable of
                            // consuming messages. A high performance application will typically
                            // commit offsets relatively infrequently and be designed to handle
                            // duplicate messages in the event of failure.
                            var committedOffsets = consumer.Commit(consumeResult);
                            Console.WriteLine($"Committed offset: {committedOffsets}");
                        }
                    }
                    catch (ConsumeException e)
                    {
                        Console.WriteLine($"Consume error: {e.Error}");
                    }
                }

                consumer.Close();
            }
        }
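A typical way to drive Run_Consume is to cancel on Ctrl+C so that the consumer can commit and close cleanly. A short driver sketch; the broker address and topic name are placeholders.

        public static void Main(string[] args)
        {
            var cts = new CancellationTokenSource();
            Console.CancelKeyPress += (_, e) =>
            {
                e.Cancel = true; // keep the process alive long enough for consumer.Close() to run
                cts.Cancel();
            };

            // "localhost:9092" and "my-topic" are placeholder values.
            Run_Consume("localhost:9092", new List<string> { "my-topic" }, cts.Token);
        }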
Example #56
0
 public GuestHandler(GuestVM guestVm)
 {
     _guestVm  = guestVm;
     _consumer = _guestVm.ConsumerGuest;
 }
        public static async Task Consumer_OffsetsForTimes(string bootstrapServers, string topic, string partitionedTopic)
        {
            const int N         = 10;
            const int Partition = 0;

            var messages = await ProduceMessages(bootstrapServers, topic, Partition, N);

            var consumerConfig = new Dictionary <string, object>
            {
                { "group.id", Guid.NewGuid().ToString() },
                { "bootstrap.servers", bootstrapServers },
                { "api.version.request", true }
            };

            var firstMessage = messages[0];
            var lastMessage  = messages[N - 1];

            using (var consumer = new Consumer <string, string>(consumerConfig, new StringDeserializer(Encoding.UTF8), new StringDeserializer(Encoding.UTF8)))
            {
                // NOTE: When calling OffsetsForTimes, a suitable timeout must be set.
                // If it is too short, we'll get an exception here or an incorrect result.
                // See librdkafka implementation for details https://github.com/edenhill/librdkafka/blob/master/src/rdkafka.c#L2475
                var timeout = TimeSpan.FromSeconds(10);

                // Getting the offset for the first produced message timestamp
                var result = consumer.OffsetsForTimes(
                    new[] { new TopicPartitionTimestamp(firstMessage.TopicPartition, firstMessage.Timestamp) },
                    timeout)
                             .ToList();

                Assert.Equal(result.Count, 1);
                Assert.Equal(result[0].Offset, firstMessage.Offset);
                Assert.False(result[0].Error.HasError);

                // Getting the offset for the last produced message timestamp
                result = consumer.OffsetsForTimes(
                    new[] { new TopicPartitionTimestamp(lastMessage.TopicPartition, lastMessage.Timestamp) },
                    timeout)
                         .ToList();

                Assert.Equal(result.Count, 1);
                Assert.Equal(result[0].Offset, lastMessage.Offset);
                Assert.False(result[0].Error.HasError);

                // Getting the offset for a timestamp far in the past (the Unix epoch)
                var unixTimeEpoch = Timestamp.UnixTimeEpoch;
                result = consumer.OffsetsForTimes(
                    new[] { new TopicPartitionTimestamp(new TopicPartition(topic, Partition), new Timestamp(unixTimeEpoch, TimestampType.CreateTime)) },
                    timeout)
                         .ToList();

                Assert.Equal(result.Count, 1);
                Assert.Equal(result[0].Offset, 0);
                Assert.False(result[0].Error.HasError);

                // Getting the offset for a much larger timestamp (int.MaxValue ms after the Unix epoch
                // is still January 1970, so this also precedes every produced message)
                result = consumer.OffsetsForTimes(
                    new[] { new TopicPartitionTimestamp(new TopicPartition(topic, Partition), new Timestamp(int.MaxValue, TimestampType.CreateTime)) },
                    timeout)
                         .ToList();

                Assert.Equal(result.Count, 1);
                Assert.Equal(result[0].Offset, 0);
                Assert.False(result[0].Error.HasError);
            }
        }
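The test above awaits a ProduceMessages helper that is not included in this excerpt. A plausible sketch against the same API generation is given below; it assumes a ProduceAsync(topic, key, value, partition) overload, and the key/value contents and the delay between messages are arbitrary.

        private static async Task<List<Message<string, string>>> ProduceMessages(
            string bootstrapServers, string topic, int partition, int count)
        {
            var producerConfig = new Dictionary<string, object> { { "bootstrap.servers", bootstrapServers } };
            var messages = new List<Message<string, string>>();

            using (var producer = new Producer<string, string>(
                producerConfig, new StringSerializer(Encoding.UTF8), new StringSerializer(Encoding.UTF8)))
            {
                for (var i = 0; i < count; i++)
                {
                    // Space the timestamps out a little so OffsetsForTimes can tell the messages apart.
                    messages.Add(await producer.ProduceAsync(topic, $"key-{i}", $"value-{i}", partition));
                    await Task.Delay(100);
                }

                producer.Flush(TimeSpan.FromSeconds(10));
            }

            return messages;
        }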
        public static void WatermarkOffsets(string bootstrapServers, string topic, string partitionedTopic)
        {
            var producerConfig = new Dictionary <string, object>
            {
                { "bootstrap.servers", bootstrapServers }
            };

            var testString = "hello world";

            Message <Null, string> dr;

            using (var producer = new Producer <Null, string>(producerConfig, null, new StringSerializer(Encoding.UTF8)))
            {
                dr = producer.ProduceAsync(topic, null, testString).Result;
                producer.Flush();

                var getOffsets = producer.GetWatermarkOffsets(new TopicPartition(topic, 0));

                // statistics.interval.ms is not set, so this should always be invalid.
                Assert.Equal(getOffsets.Low, Offset.Invalid);

                // no message has been consumed from broker (this is a producer), so this should always be invalid.
                Assert.Equal(getOffsets.High, Offset.Invalid);

                var queryOffsets = producer.QueryWatermarkOffsets(new TopicPartition(topic, 0));
                Assert.NotEqual(queryOffsets.Low, Offset.Invalid);
                Assert.NotEqual(queryOffsets.High, Offset.Invalid);

                // TODO: can anything be said about the high watermark offset c.f. dr.Offset?
                //       I have seen queryOffsets.High < dr.Offset and also queryOffsets.High = dr.Offset + 1.
                //       The former only once (or was I in error?). request.required.acks has a default value
                //       of 1, so with only one broker, I assume the former should never happen.
                Console.WriteLine($"Query Offsets: [{queryOffsets.Low} {queryOffsets.High}]. DR Offset: {dr.Offset}");
                Assert.True(queryOffsets.Low < queryOffsets.High);
            }

            var consumerConfig = new Dictionary <string, object>
            {
                { "group.id", "watermark-offset-cg" },
                { "bootstrap.servers", bootstrapServers },
                { "session.timeout.ms", 6000 }
            };

            using (var consumer = new Consumer(consumerConfig))
            {
                consumer.Assign(new List <TopicPartitionOffset>()
                {
                    dr.TopicPartitionOffset
                });
                Message msg;
                Assert.True(consumer.Consume(out msg, TimeSpan.FromSeconds(10)));

                var getOffsets = consumer.GetWatermarkOffsets(dr.TopicPartition);
                Assert.Equal(getOffsets.Low, Offset.Invalid);
                // the offset of the next message to be read.
                Assert.Equal((long)getOffsets.High, dr.Offset + 1);

                var queryOffsets = consumer.QueryWatermarkOffsets(dr.TopicPartition);
                Assert.NotEqual(queryOffsets.Low, Offset.Invalid);
                Assert.Equal(getOffsets.High, queryOffsets.High);
            }
        }
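Watermark offsets are mostly useful for estimating how far behind a consumer is. A minimal lag sketch against the same API as the example above; the helper name is illustrative.

        private static long EstimateLag(Consumer consumer, Message lastConsumed)
        {
            // The high watermark is the offset that the next produced message will receive,
            // so the lag is everything between the last consumed offset and that point.
            var watermarks = consumer.QueryWatermarkOffsets(lastConsumed.TopicPartition);
            return (long)watermarks.High - ((long)lastConsumed.Offset + 1);
        }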
Example #59
0
        /// <summary>
        ///  Gets the earliest and latest offsets for a topic partition, using cached values unless a refresh is forced.
        /// </summary>
        public void RefreshAndGetOffset(short versionId, string clientId, int correlationId, string topic, int partitionId, bool forceRefreshOffsetCache, out long earliestOffset, out long latestOffset)
        {
            earliestOffset = -1;
            latestOffset   = -1;
            if (!forceRefreshOffsetCache && this.TopicOffsetEarliest.ContainsKey(topic) && this.TopicOffsetEarliest[topic].ContainsKey(partitionId))
            {
                earliestOffset = this.TopicOffsetEarliest[topic][partitionId];
            }
            if (!forceRefreshOffsetCache && this.TopicOffsetLatest.ContainsKey(topic) && this.TopicOffsetLatest[topic].ContainsKey(partitionId))
            {
                latestOffset = this.TopicOffsetLatest[topic][partitionId];
            }
            if (!forceRefreshOffsetCache && earliestOffset != -1 && latestOffset != -1)
            {
                return;
            }
            // Cache miss (or forced refresh): query the broker for the offsets.
            using (Consumer consumer = this.GetConsumer(topic, partitionId))
            {
                Dictionary <string, List <PartitionOffsetRequestInfo> > offsetRequestInfoEarliest = new Dictionary <string, List <PartitionOffsetRequestInfo> >();
                List <PartitionOffsetRequestInfo> offsetRequestInfoForPartitionsEarliest          = new List <PartitionOffsetRequestInfo>();
                offsetRequestInfoForPartitionsEarliest.Add(new PartitionOffsetRequestInfo(partitionId, OffsetRequest.EarliestTime, 1));
                offsetRequestInfoEarliest.Add(topic, offsetRequestInfoForPartitionsEarliest);
                OffsetRequest offsetRequestEarliest = new OffsetRequest(offsetRequestInfoEarliest);
                // Earliest available offset
                OffsetResponse offsetResponseEarliest = consumer.GetOffsetsBefore(offsetRequestEarliest);
                List <PartitionOffsetsResponse> partitionOffsetEarliest = null;
                if (offsetResponseEarliest.ResponseMap.TryGetValue(topic, out partitionOffsetEarliest))
                {
                    foreach (var p in partitionOffsetEarliest)
                    {
                        if (p.Error == ErrorMapping.NoError && p.PartitionId == partitionId)
                        {
                            earliestOffset = p.Offsets[0];
                            //Cache
                            if (!this.TopicOffsetEarliest.ContainsKey(topic))
                            {
                                this.TopicOffsetEarliest.TryAdd(topic, new ConcurrentDictionary <int, long>());
                            }
                            this.TopicOffsetEarliest[topic][partitionId] = earliestOffset;
                        }
                    }
                }

                // Latest available offset
                Dictionary <string, List <PartitionOffsetRequestInfo> > offsetRequestInfoLatest = new Dictionary <string, List <PartitionOffsetRequestInfo> >();
                List <PartitionOffsetRequestInfo> offsetRequestInfoForPartitionsLatest          = new List <PartitionOffsetRequestInfo>();
                offsetRequestInfoForPartitionsLatest.Add(new PartitionOffsetRequestInfo(partitionId, OffsetRequest.LatestTime, 1));
                offsetRequestInfoLatest.Add(topic, offsetRequestInfoForPartitionsLatest);
                OffsetRequest offsetRequestLatest = new OffsetRequest(offsetRequestInfoLatest);

                OffsetResponse offsetResponseLatest = consumer.GetOffsetsBefore(offsetRequestLatest);
                List <PartitionOffsetsResponse> partitionOffsetLatest = null;
                if (offsetResponseLatest.ResponseMap.TryGetValue(topic, out partitionOffsetLatest))
                {
                    foreach (var p in partitionOffsetLatest)
                    {
                        if (p.Error == ErrorMapping.NoError && p.PartitionId == partitionId)
                        {
                            latestOffset = p.Offsets[0];
                            //Cache
                            if (!this.TopicOffsetLatest.ContainsKey(topic))
                            {
                                this.TopicOffsetLatest.TryAdd(topic, new ConcurrentDictionary <int, long>());
                            }
                            this.TopicOffsetLatest[topic][partitionId] = latestOffset;
                        }
                    }
                }
            }
        }
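A hedged usage sketch for RefreshAndGetOffset follows; offsetManager stands in for whatever object owns the method above, and the version, client id and correlation id values are placeholders.

            long earliestOffset;
            long latestOffset;

            // false => prefer cached offsets when present; pass true to force a broker round-trip.
            offsetManager.RefreshAndGetOffset(0, "offset-checker", 1, "my-topic", 0, false,
                                              out earliestOffset, out latestOffset);

            Console.WriteLine($"my-topic/0: earliest={earliestOffset}, latest={latestOffset}");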
Example #60
0
 public CodegenBlock Apply(Consumer<CodegenBlock> consumer)
 {
     CheckClosed();
     consumer.Invoke(this);
     return this;
 }
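Examples #51, #54 and #60 all accept a Consumer<T>, a Java-style callback whose definition is not shown in these snippets. Given the consumer.Invoke(this) call above, a void-returning delegate is one plausible shape; both the declaration and the usage below are assumptions, and statementBlock stands in for any object exposing TraverseExpressions.

 // One plausible definition: a delegate that accepts a single value and returns nothing.
 public delegate void Consumer<in T>(T value);

 // Usage sketch: collect every expression visited by a TraverseExpressions implementation.
 var expressions = new List<CodegenExpression>();
 statementBlock.TraverseExpressions(expr => expressions.Add(expr));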