// Verifies that bulk-loading 40,000 requests via Reload is faster than pushing
// them into the Redis scheduler one at a time.
// NOTE(review): method name typo ("Performace") kept — renaming would change the
// public test identifier used by runners/filters.
public void LoadPerformace()
{
    Spider spider = new DefaultSpider("test");
    spider.Monitor = new LogMonitor();
    Extension.Scheduler.RedisScheduler scheduler = GetRedisScheduler(spider.Identity);
    scheduler.Dispose(); // clear any leftover Redis state from a previous run

    // Baseline: push each request individually.
    // Stopwatch is used instead of DateTime.Now subtraction: DateTime.Now has
    // ~15ms resolution and is affected by system clock adjustments, which can
    // make this timing comparison flaky.
    var pushWatch = System.Diagnostics.Stopwatch.StartNew();
    for (int i = 0; i < 40000; i++)
    {
        scheduler.Push(new Request("http://www.a.com/" + i, null));
    }
    pushWatch.Stop();
    double seconds = pushWatch.Elapsed.TotalSeconds;

    scheduler.Dispose(); // reset scheduler state before the bulk-load run

    // Bulk path: build the same request set locally, then load it in one call.
    var reloadWatch = System.Diagnostics.Stopwatch.StartNew();
    HashSet<Request> list = new HashSet<Request>();
    for (int i = 0; i < 40000; i++)
    {
        list.Add(new Request("http://www.a.com/" + i, null));
    }
    scheduler.Reload(list);
    reloadWatch.Stop();
    double seconds1 = reloadWatch.Elapsed.TotalSeconds;

    // Bulk reload is expected to beat 40,000 individual round trips.
    Assert.True(seconds1 < seconds);
    scheduler.Dispose();
}
// Pushes four requests through an in-memory duplicate-removing queue, reloads
// them into a Redis-backed scheduler, and verifies they are polled back in
// reverse insertion (LIFO) order.
public void Load()
{
    QueueDuplicateRemovedScheduler scheduler = new QueueDuplicateRemovedScheduler();
    ISpider spider = new DefaultSpider("test", new Site());

    string[] urls =
    {
        "http://www.a.com/",
        "http://www.b.com/",
        "http://www.c.com/",
        "http://www.d.com/"
    };
    foreach (string url in urls)
    {
        scheduler.Push(new Request(url, null) { Site = spider.Site });
    }

    Extension.Scheduler.RedisScheduler redisScheduler = GetRedisScheduler(spider.Identity);
    redisScheduler.Dispose(); // start from a clean Redis state

    redisScheduler.Reload(scheduler.All);

    // Poll returns requests newest-first, so walk the URL list backwards.
    for (int i = urls.Length - 1; i >= 0; i--)
    {
        Assert.Equal(urls[i], redisScheduler.Poll().Url.ToString());
    }

    redisScheduler.Dispose();
}