/// <summary>
/// Verifies that many concurrent request/reply round trips (using Tasks) are
/// processed in parallel rather than serially. Each reply is delayed up to
/// MAX_DELAY ms on the responder side; if the total wall-clock time for all
/// TEST_COUNT requests stays under 2 * MAX_DELAY, the requests must have
/// overlapped.
/// </summary>
/// <param name="useOldRequestStyle">Selects the legacy request implementation on the connection options.</param>
private void testRequestSafetyWithTasks(bool useOldRequestStyle)
{
    int MAX_DELAY = 1000;
    int TEST_COUNT = 300;

    // Make sure the thread pool can absorb the burst without ramp-up delays.
    ThreadPool.SetMinThreads(300, 300);

    Stopwatch sw = new Stopwatch();

    using (new NATSServer())
    {
        Options opts = utils.DefaultTestOptions;
        opts.UseOldRequestStyle = useOldRequestStyle;

        using (IConnection c1 = new ConnectionFactory().CreateConnection(opts),
                           c2 = new ConnectionFactory().CreateConnection(opts))
        {
            // Try parallel requests and check the performance.
            using (IAsyncSubscription s = c1.SubscribeAsync("foo", (sender, args) =>
            {
                // We cannot block this NATS thread... so copy our data, and spawn a task
                // to handle a delay and responding.
                TestReplier t = new TestReplier(c1, MAX_DELAY,
                    Encoding.UTF8.GetString(args.Message.Data),
                    args.Message.Reply, sw);
                Task.Run(async () => { await t.processAsync(); });
            }))
            {
                c1.Flush();

                // Depending on resources, Tasks can be queued up for quite a while.
                Task[] tasks = new Task[TEST_COUNT];
                Random r = new Random();

                sw.Start();

                for (int i = 0; i < TEST_COUNT; i++)
                {
                    // Compute the delay up front: System.Random is not thread
                    // safe, so it must not be called concurrently from the
                    // task bodies.
                    int delay = r.Next(100, 500);

                    // Task.Run unwraps the async delegate, so WaitAll below
                    // genuinely waits for each request to complete. (Passing
                    // an async lambda to the Task constructor would create a
                    // task that completes at the first await, making the
                    // timing assertion meaningless and leaving exceptions
                    // unobserved.)
                    tasks[i] = Task.Run(async () =>
                    {
                        // randomly delay for a bit to test potential timing issues.
                        await Task.Delay(delay);
                        c2.Request("foo", null, MAX_DELAY * 2);
                    });
                }

                Task.WaitAll(tasks);
                sw.Stop();

                // check that we didn't process the requests consecutively.
                Assert.True(sw.ElapsedMilliseconds < (MAX_DELAY * 2));
            }
        }
    }
}
/// <summary>
/// Verifies that many concurrent request/reply round trips (using raw
/// Threads for scheduling predictability) are processed in parallel rather
/// than serially. Each reply is delayed up to MAX_DELAY ms on the responder
/// side; if the total wall-clock time for all TEST_COUNT requests stays under
/// 2 * MAX_DELAY, the requests must have overlapped.
/// </summary>
/// <param name="useOldRequestStyle">Selects the legacy request implementation on the connection options.</param>
private void testRequestSafetyWithThreads(bool useOldRequestStyle)
{
    int MAX_DELAY = 1000;
    int TEST_COUNT = 300;

    Stopwatch sw = new Stopwatch();

    // Make sure the thread pool can absorb the burst without ramp-up delays.
    ThreadPool.SetMinThreads(300, 300);

    using (new NATSServer())
    {
        Options opts = utils.DefaultTestOptions;
        opts.UseOldRequestStyle = useOldRequestStyle;

        using (IConnection c1 = new ConnectionFactory().CreateConnection(opts),
                           c2 = new ConnectionFactory().CreateConnection(opts))
        {
            using (IAsyncSubscription s = c1.SubscribeAsync("foo", (sender, args) =>
            {
                // We cannot block this thread... so copy our data, and spawn a thread
                // to handle a delay and responding.
                TestReplier t = new TestReplier(c1, MAX_DELAY,
                    Encoding.UTF8.GetString(args.Message.Data),
                    args.Message.Reply, sw);
                new Thread(() => { t.process(); }).Start();
            }))
            {
                c1.Flush();

                // use lower level threads over tasks here for predictibility
                Thread[] threads = new Thread[TEST_COUNT];
                Random r = new Random();

                for (int i = 0; i < TEST_COUNT; i++)
                {
                    // Compute the delay before starting the thread:
                    // System.Random is not thread safe and must not be shared
                    // across 300 concurrently running thread bodies.
                    int delay = r.Next(100, 500);

                    threads[i] = new Thread(() =>
                    {
                        // randomly delay for a bit to test potential timing issues.
                        Thread.Sleep(delay);
                        c2.Request("foo", null, MAX_DELAY * 2);
                    });
                }

                // sleep for one second to allow the threads to initialize.
                Thread.Sleep(1000);

                sw.Start();

                // start all of the threads at the same time.
                for (int i = 0; i < TEST_COUNT; i++)
                {
                    threads[i].Start();
                }

                // wait for every thread to stop.
                for (int i = 0; i < TEST_COUNT; i++)
                {
                    threads[i].Join();
                }
                sw.Stop();

                // check that we didn't process the requests consecutively.
                Assert.True(sw.ElapsedMilliseconds < (MAX_DELAY * 2));
            }
        }
    }
}