public async Task PubAndUnSub()
{
    // Kafka is not available on the hosted CI agent, so skip there.
    if (Directory.Exists("/home/vsts/work"))
    {
        return;
    }

    int count = 0;
    var options = SpiderFactory.GetRequiredService<ISpiderOptions>();
    var logger = SpiderFactory.GetRequiredService<ILogger<KafkaMessageQueue>>();
    var mq = new KafkaMessageQueue(options, logger);
    mq.Subscribe("PubAndUnSub", msg => { Interlocked.Increment(ref count); });

    int i = 0;
    // Fire-and-forget publisher: 50 messages, 100 ms apart. Task.Run unwraps the
    // async lambda into a single Task; the previous
    // Task.Factory.StartNew(async ...) produced a Task<Task> whose outer task
    // completed immediately, and the trailing ConfigureAwait(false).GetAwaiter()
    // (without GetResult) observed nothing at all.
    _ = Task.Run(async () =>
    {
        for (; i < 50; ++i)
        {
            await mq.PublishAsync("PubAndUnSub", "a");
            await Task.Delay(100);
        }
    });

    // Let some messages flow, then unsubscribe mid-stream.
    await Task.Delay(1500);
    mq.Unsubscribe("PubAndUnSub");

    // Wait for the publisher loop to finish before asserting. Volatile.Read
    // guarantees we observe the other thread's writes to the captured counter.
    while (Volatile.Read(ref i) < 50)
    {
        await Task.Delay(100);
    }

    // After unsubscribing we must not have consumed an excessive number of
    // messages (threshold kept from the original test).
    Assert.True(count < 100);
}
public void RetryDownloadTimes()
{
    var spider = SpiderFactory.Create<Spider>();
    spider.NewGuidId();
    spider.Name = "RetryDownloadTimes";
    spider.RetryDownloadTimes = 5;
    spider.EmptySleepTime = 15;
    // Force every download to throw so the single request exhausts its retries.
    spider.DownloaderSettings.Type = DownloaderType.Exception;
    var scheduler = new QueueDistinctBfsScheduler();
    spider.Scheduler = scheduler;
    spider.AddRequests("http://www.RetryDownloadTimes.com");
    spider.Run();

    var statisticsStore = SpiderFactory.GetRequiredService<IStatisticsStore>();
    // GetAwaiter().GetResult() instead of .Result: same blocking wait, but a
    // faulted task surfaces its original exception instead of wrapping it in
    // an AggregateException.
    var s = statisticsStore.GetSpiderStatisticsAsync(spider.Id).GetAwaiter().GetResult();
    Assert.Equal(1, s.Total);
    Assert.Equal(1, s.Failed);
    Assert.Equal(0, s.Success);

    var dss = statisticsStore.GetDownloadStatisticsListAsync(1, 10).GetAwaiter().GetResult();
    var ds = dss[0];
    // 1 initial attempt + 5 retries = 6 failed downloads, zero successes.
    Assert.Equal(6, ds.Failed);
    Assert.Equal(0, ds.Success);
}
public void ParallelPubAndSub()
{
    // Kafka is not available on the hosted CI agent, so skip there.
    if (Directory.Exists("/home/vsts/work"))
    {
        return;
    }

    int count = 0;
    var options = SpiderFactory.GetRequiredService<ISpiderOptions>();
    var logger = SpiderFactory.GetRequiredService<ILogger<KafkaMessageQueue>>();
    var mq = new KafkaMessageQueue(options, logger);
    mq.Subscribe("ParallelPubAndSub", msg => { Interlocked.Increment(ref count); });

    // Publish 100 messages concurrently and wait for every publish to finish.
    // The old Parallel.For(0, 100, async (i) => ...) handed an async lambda to
    // an Action<int> parameter, making it async-void: Parallel.For returned
    // before any publish completed and publish exceptions were unobservable.
    var tasks = new Task[100];
    for (int i = 0; i < tasks.Length; ++i)
    {
        tasks[i] = mq.PublishAsync("ParallelPubAndSub", "a");
    }
    Task.WhenAll(tasks).GetAwaiter().GetResult();

    // Poll for up to ~75 s (150 * 500 ms) for all messages to be consumed.
    int j = 0;
    while (count < 100 && j < 150)
    {
        Thread.Sleep(500);
        ++j;
    }

    Assert.Equal(100, count);
}