/// <summary>
/// Emits 100 integers to a file-writing consumer on one thread, then streams the
/// resulting files back through a console-logging consumer, signaling
/// <paramref name="block"/> once every file has been observed.
/// </summary>
/// <param name="block">Signaled when all <c>filesToCreate</c> files have been processed.</param>
public void Execute(AutoResetEvent block)
{
    const int filesToCreate = 100;
    var filesProcessed = 0;

    // This setup will emit files on another thread
    var producesInts = new ObservingProducer<int>();
    producesInts.Produces(Observable.Range(1, filesToCreate)).Attach(new FileConsumer<int>());
    producesInts.Start();

    // This setup will output the contents of loaded files to the console
    var fileProducer = new FileProducer<int>();
    var logger = new ActionConsumer<int>(i =>
    {
        // The callback runs on the producer's worker thread, so the shared counter
        // must be incremented atomically; using Interlocked's return value also
        // guarantees the logged count matches this increment rather than a value
        // clobbered by a concurrent callback (the original `filesProcessed++` plus
        // separate read was a data race).
        var count = Interlocked.Increment(ref filesProcessed);
        Console.WriteLine(count);
        if (count >= filesToCreate)
        {
            block.Set();
        }
    });
    fileProducer.Attach(logger);
    fileProducer.Start();
}
/// <summary>
/// Benchmarks bulk-copy insertion of <paramref name="trials"/> users through a
/// batching consumer, blocking until every row has been flushed, then asserting
/// the final insert count.
/// </summary>
/// <param name="trials">Number of user rows to generate and insert.</param>
/// <param name="trace">When true, writes timing information to the console.</param>
public static void BulkCopyUsersWithConsumer(int trials, bool trace = false)
{
    var users = ResetUsers(trials);
    var consumer = new BulkCopyConsumer<User>(100);

    // The original leaked the ManualResetEvent; 'using' disposes it. Disposal is
    // safe because Set() is called exactly once, before WaitOne() returns.
    using (var block = new ManualResetEvent(false))
    {
        var producer = new ObservingProducer<User>().Produces(users, onCompleted: () =>
        {
            // The producer completes hand-off before the bulk copy flushes, so
            // poll until the consumer reports that every row was inserted.
            while (consumer.Consumed < trials)
            {
                Thread.Sleep(100);
            }
            block.Set();
        });
        producer.Consumes(consumer);
        var sw = Stopwatch.StartNew();
        producer.Start();
        block.WaitOne();
        var elapsed = sw.Elapsed;
        var count = AssertInsertCount(users.Count, elapsed, trace);
        if (trace)
        {
            Console.WriteLine("Inserting {0} records took {1}", count, elapsed);
        }
    }
}
/// <summary>
/// Streams 10,000 integers into a batching consumer that logs the size of each
/// 1,000-item batch, signaling <paramref name="block"/> when the range completes.
/// </summary>
/// <param name="block">Signaled once the observable range has been fully produced.</param>
public void Execute(AutoResetEvent block)
{
    var source = new ObservingProducer<int>();
    var batchLogger = new ActionBatchingConsumer<int>(batch => Console.WriteLine(batch.Count), itemsPerBatch: 1000);

    source.Produces(Observable.Range(1, 10000), onCompleted: () => block.Set());
    source.Attach(batchLogger);
    source.Start();
}
/// <summary>
/// Produces 10,000 integers and writes each one to the console, signaling
/// <paramref name="block"/> when the sequence completes.
/// </summary>
/// <param name="block">Signaled once the observable range has been fully produced.</param>
public void Execute(AutoResetEvent block)
{
    var source = new ObservingProducer<int>();
    source.Produces(Observable.Range(1, 10000), onCompleted: () => block.Set());

    // Console.WriteLine is attached directly as the consuming delegate.
    source.Attach(Console.WriteLine);
    source.Start();
}
/// <summary>
/// Produces 10,000 integers through a delegating consumer that logs each value
/// to the console, signaling <paramref name="block"/> on completion.
/// </summary>
/// <param name="block">Signaled once the observable range has been fully produced.</param>
public void Execute(AutoResetEvent block)
{
    var sink = new DelegatingConsumer<int>(Console.WriteLine);

    var source = new ObservingProducer<int>();
    source.Attach(sink);
    source.Produces(Observable.Range(1, 10000), onCompleted: () => block.Set());
    source.Start();
}
/// <summary>
/// Verifies that a consumer with a batch size of 1,000 receives a 10,000-item
/// payload as full 1,000-item batches.
/// </summary>
public void Receives_payload_in_batches_by_size()
{
    // Disposal is safe here: Set() fires exactly once (from onCompleted) before
    // the bounded wait returns.
    using (var block = new AutoResetEvent(false))
    {
        var producer = new ObservingProducer<int>();
        var consumer = new DelegatingBatchingConsumer<int>(ints => Assert.AreEqual(1000, ints.Count));
        producer.Attach(consumer);
        producer.Produces(Observable.Range(1, 10000), onCompleted: () => block.Set());
        producer.Start();

        // A bounded wait keeps the test from hanging forever if completion never
        // fires; the original WaitOne() had no timeout.
        Assert.IsTrue(block.WaitOne(TimeSpan.FromSeconds(10)), "Producer did not complete in time.");
    }
}
/// <summary>
/// Verifies that a consumer with a batch size of 1,000 receives a 10,000-item
/// payload as full 1,000-item batches.
/// </summary>
public void Receives_payload_in_batches_by_size()
{
    // Disposal is safe here: Set() fires exactly once (from onCompleted) before
    // the bounded wait returns.
    using (var block = new AutoResetEvent(false))
    {
        var producer = new ObservingProducer<int>();
        var consumer = new DelegatingBatchingConsumer<int>(ints => Assert.AreEqual(1000, ints.Count));
        producer.Attach(consumer);
        producer.Produces(Observable.Range(1, 10000), onCompleted: () => block.Set());
        producer.Start();

        // A bounded wait keeps the test from hanging forever if completion never
        // fires; the original WaitOne() had no timeout.
        Assert.IsTrue(block.WaitOne(TimeSpan.FromSeconds(10)), "Producer did not complete in time.");
    }
}
/// <summary>
/// Verifies that when the payload (500 items) is smaller than the batch size
/// (1,000), the interval timer (3s) still delivers the partial batch intact.
/// </summary>
public void Receives_payload_in_batches_by_size_or_interval_with_payload_smaller_than_size()
{
    // Intentionally NOT disposed with 'using': the interval-based consumer may
    // fire again on a background thread after the wait returns.
    var block = new AutoResetEvent(false);
    var producer = new ObservingProducer<int>();
    var consumer = new DelegatingBatchingConsumer<int>(ints =>
    {
        Assert.AreEqual(500, ints.Count);
        block.Set();
    }, 1000, TimeSpan.FromSeconds(3));
    producer.Attach(consumer);
    producer.Produces(Observable.Range(1, 500));
    producer.Start();

    // A bounded wait keeps the test from hanging forever if the batch is never
    // delivered; the original WaitOne() had no timeout.
    Assert.IsTrue(block.WaitOne(TimeSpan.FromSeconds(10)), "Batch was not delivered in time.");
}
/// <summary>
/// Round-trips 10,000 integers through a serialize/deserialize pipeline:
/// producer -> protocol producer (serialize) -> protocol consumer (deserialize)
/// -> console logger, signaling <paramref name="block"/> on completion.
/// </summary>
/// <param name="block">Signaled once the observable range has been fully produced.</param>
public void Execute(AutoResetEvent block)
{
    var serializer = new BinarySerializer();

    // Outbound consumes T events and produces a serialized data stream; inbound
    // consumes that stream and produces T events again.
    var outbound = new ProtocolProducer<int>(serializer);
    var inbound = new ProtocolConsumer<int>(serializer);
    var sink = new DelegatingConsumer<int>(i => Console.WriteLine(i));

    // Typically you'd put an enqueing consumer between outbound and inbound to
    // shuttle serialized events off-network.
    outbound.Attach(inbound);
    inbound.Attach(sink);

    var source = new ObservingProducer<int>();
    source.Attach(outbound);
    source.Produces(Observable.Range(1, 10000), onCompleted: () => block.Set());
    source.Start();
}
/// <summary>
/// Verifies that when the payload (500 items) is smaller than the batch size
/// (1,000), the interval timer (3s) still delivers the partial batch intact.
/// </summary>
public void Receives_payload_in_batches_by_size_or_interval_with_payload_smaller_than_size()
{
    // Intentionally NOT disposed with 'using': the interval-based consumer may
    // fire again on a background thread after the wait returns.
    var block = new AutoResetEvent(false);
    var producer = new ObservingProducer<int>();
    var consumer = new DelegatingBatchingConsumer<int>(ints =>
    {
        Assert.AreEqual(500, ints.Count);
        block.Set();
    }, 1000, TimeSpan.FromSeconds(3));
    producer.Attach(consumer);
    producer.Produces(Observable.Range(1, 500));
    producer.Start();

    // A bounded wait keeps the test from hanging forever if the batch is never
    // delivered; the original WaitOne() had no timeout.
    Assert.IsTrue(block.WaitOne(TimeSpan.FromSeconds(10)), "Batch was not delivered in time.");
}
/// <summary>
/// Verifies that an interval-based batching consumer delivers whatever portion
/// of a large payload accumulated within its one-second window.
/// </summary>
public void Receives_payload_in_batches_by_interval()
{
    // Intentionally NOT disposed with 'using': the interval-based consumer keeps
    // firing on a background thread after the wait returns.
    var block = new AutoResetEvent(false);
    var producer = new ObservingProducer<int>();
    var consumer = new DelegatingBatchingConsumer<int>(ints =>
    {
        Console.WriteLine("{0} in one second.", ints.Count);
        block.Set();
    }, TimeSpan.FromSeconds(1));
    producer.Attach(consumer);
    producer.Produces(Observable.Range(1, 1000000));
    producer.Start();

    // A bounded wait keeps the test from hanging forever if no batch arrives;
    // the original WaitOne() had no timeout.
    Assert.IsTrue(block.WaitOne(TimeSpan.FromSeconds(10)), "No batch was delivered in time.");
}
/// <summary>
/// Verifies that an interval-based batching consumer delivers whatever portion
/// of a large payload accumulated within its one-second window.
/// </summary>
public void Receives_payload_in_batches_by_interval()
{
    // Intentionally NOT disposed with 'using': the interval-based consumer keeps
    // firing on a background thread after the wait returns.
    var block = new AutoResetEvent(false);
    var producer = new ObservingProducer<int>();
    var consumer = new DelegatingBatchingConsumer<int>(ints =>
    {
        Console.WriteLine("{0} in one second.", ints.Count);
        block.Set();
    }, TimeSpan.FromSeconds(1));
    producer.Attach(consumer);
    producer.Produces(Observable.Range(1, 1000000));
    producer.Start();

    // A bounded wait keeps the test from hanging forever if no batch arrives;
    // the original WaitOne() had no timeout.
    Assert.IsTrue(block.WaitOne(TimeSpan.FromSeconds(10)), "No batch was delivered in time.");
}
/// <summary>
/// Publishes a payload of string events to a freshly named SQS queue, waits a
/// fixed grace period for delivery, then deletes the queue.
/// </summary>
public void Messages_are_queued()
{
    var client = new AmazonSQSClient(ConfigurationManager.AppSettings["AWSKey"], ConfigurationManager.AppSettings["AWSSecret"]);
    var queueName = Guid.NewGuid().ToString();
    var consumer = new SimpleQueueConsumer<StringEvent>(client, queueName);

    var block = new AutoResetEvent(false);
    var producer = new ObservingProducer<StringEvent>();
    producer.Attach(consumer);
    producer.Produces(GetPayload().ToObservable());
    producer.Start();

    // NOTE(review): nothing ever signals this event, so the timed wait acts as a
    // five-second grace period for messages to reach the queue before cleanup —
    // confirm this is intentional rather than a missing Set().
    block.WaitOne(TimeSpan.FromSeconds(5));
    client.DeleteQueue(new DeleteQueueRequest().WithQueueUrl(consumer.QueueUrl));
}
/// <summary>
/// Round-trips 10,000 integers through a serialize/deserialize pipeline:
/// producer -> protocol producer (serialize) -> protocol consumer (deserialize)
/// -> console logger, signaling <paramref name="block"/> on completion.
/// </summary>
/// <param name="block">Signaled once the observable range has been fully produced.</param>
public void Execute(AutoResetEvent block)
{
    var serializer = new BinarySerializer();

    // Outbound consumes T events and produces a serialized data stream; inbound
    // consumes that stream and produces T events again.
    var outbound = new ProtocolProducer<int>(serializer);
    var inbound = new ProtocolConsumer<int>(serializer);
    var sink = new DelegatingConsumer<int>(i => Console.WriteLine(i));

    // Typically you'd put an enqueing consumer between outbound and inbound to
    // shuttle serialized events off-network.
    outbound.Attach(inbound);
    inbound.Attach(sink);

    var source = new ObservingProducer<int>();
    source.Attach(outbound);
    source.Produces(Observable.Range(1, 10000), onCompleted: () => block.Set());
    source.Start();
}
/// <summary>
/// Publishes string events through a console-logging consumer that forwards to a
/// ZeroMQ socket, blocking until the payload completes.
/// </summary>
public void Messages_are_queued()
{
    var block = new AutoResetEvent(false);
    var serializer = new ProtocolBuffersSerializer();

    // 'using' guarantees the socket is disposed even if the wait (or anything
    // before it) throws; the original's trailing manual Dispose() call would be
    // skipped on an exception, leaking the socket.
    using (var socket = new ZmqConsumer<StringEvent>("tcp://127.0.0.1:5000", serializer))
    {
        var consumer = new DelegatingConsumer<StringEvent>(Console.WriteLine, socket); // logs to console before forwarding
        var producer = new ObservingProducer<StringEvent>();
        producer.Attach(consumer);
        producer.Produces(GetPayload(), onCompleted: () => block.Set());
        producer.Start();
        block.WaitOne();
    }
}
/// <summary>
/// Publishes string events through a console-logging consumer that forwards to a
/// ZeroMQ socket, blocking until the payload completes.
/// </summary>
public void Messages_are_queued()
{
    var block = new AutoResetEvent(false);
    var serializer = new ProtocolBuffersSerializer();

    // 'using' guarantees the socket is disposed even if the wait (or anything
    // before it) throws; the original's trailing manual Dispose() call would be
    // skipped on an exception, leaking the socket.
    using (var socket = new ZmqConsumer<StringEvent>("tcp://127.0.0.1:5000", serializer))
    {
        var consumer = new DelegatingConsumer<StringEvent>(Console.WriteLine, socket); // logs to console before forwarding
        var producer = new ObservingProducer<StringEvent>();
        producer.Attach(consumer);
        producer.Produces(GetPayload(), onCompleted: () => block.Set());
        producer.Start();
        block.WaitOne();
    }
}
/// <summary>
/// Publishes a payload of string events to a freshly named SQS queue, waits a
/// fixed grace period for delivery, then deletes the queue.
/// </summary>
public void Messages_are_queued()
{
    var client = new AmazonSQSClient(ConfigurationManager.AppSettings["AWSKey"], ConfigurationManager.AppSettings["AWSSecret"]);
    var queueName = Guid.NewGuid().ToString();
    var consumer = new SimpleQueueConsumer<StringEvent>(client, queueName);

    var block = new AutoResetEvent(false);
    var producer = new ObservingProducer<StringEvent>();
    producer.Attach(consumer);
    producer.Produces(GetPayload().ToObservable());
    producer.Start();

    // NOTE(review): nothing ever signals this event, so the timed wait acts as a
    // five-second grace period for messages to reach the queue before cleanup —
    // confirm this is intentional rather than a missing Set().
    block.WaitOne(TimeSpan.FromSeconds(5));
    client.DeleteQueue(new DeleteQueueRequest().WithQueueUrl(consumer.QueueUrl));
}