/// <summary>
/// Verifies that bulk-loading 40000 requests via <c>Load</c> into the Redis
/// scheduler is faster than pushing them one at a time via <c>Push</c>.
/// NOTE(review): requires a Redis server on localhost — this is an
/// integration test, and timing comparisons can be flaky on loaded machines.
/// </summary>
public void LoadPerformace()
{
    Extension.Scheduler.RedisScheduler scheduler = new Extension.Scheduler.RedisScheduler("localhost", "");
    ISpider spider = new DefaultSpider("test", new Site());
    scheduler.Init(spider);
    scheduler.Clear();

    // Time 40000 individual Push calls.
    // Stopwatch instead of DateTime.Now subtraction: DateTime.Now has low
    // resolution (~15ms) and is affected by system clock adjustments, which
    // can skew an elapsed-time comparison.
    var pushWatch = System.Diagnostics.Stopwatch.StartNew();
    for (int i = 0; i < 40000; i++)
    {
        scheduler.Push(new Request("http://www.a.com/" + i, 1, null));
    }
    pushWatch.Stop();
    double seconds = pushWatch.Elapsed.TotalSeconds;

    scheduler.Clear();

    // Time a single bulk Load of the same 40000 requests.
    var loadWatch = System.Diagnostics.Stopwatch.StartNew();
    HashSet<Request> list = new HashSet<Request>();
    for (int i = 0; i < 40000; i++)
    {
        list.Add(new Request("http://www.a.com/" + i, 1, null));
    }
    scheduler.Load(list);
    loadWatch.Stop();
    double seconds1 = loadWatch.Elapsed.TotalSeconds;

    // Bulk load (including building the HashSet) must beat one-by-one pushes.
    Assert.True(seconds1 < seconds);
    scheduler.Clear();
}
/// <summary>
/// Pushes four requests into an in-memory queue scheduler, bulk-loads them
/// into a Redis scheduler, and asserts they are polled back in the expected
/// order (d, c, b, a — the reverse of insertion).
/// NOTE(review): requires a Redis server on localhost.
/// </summary>
public void Load()
{
    QueueDuplicateRemovedScheduler queueScheduler = new QueueDuplicateRemovedScheduler();
    ISpider spider = new DefaultSpider("test", new Site());
    queueScheduler.Init(spider);

    queueScheduler.Push(new Request("http://www.a.com/", 1, null));
    queueScheduler.Push(new Request("http://www.b.com/", 1, null));
    queueScheduler.Push(new Request("http://www.c.com/", 1, null));
    queueScheduler.Push(new Request("http://www.d.com/", 1, null));

    Extension.Scheduler.RedisScheduler redisScheduler = new Extension.Scheduler.RedisScheduler("localhost", "");
    redisScheduler.Init(spider);
    redisScheduler.Clear();

    // Transfer everything from the in-memory scheduler in one bulk operation.
    redisScheduler.Load(queueScheduler.ToList());

    Assert.Equal("http://www.d.com/", redisScheduler.Poll().Url.ToString());
    Assert.Equal("http://www.c.com/", redisScheduler.Poll().Url.ToString());
    Assert.Equal("http://www.b.com/", redisScheduler.Poll().Url.ToString());
    Assert.Equal("http://www.a.com/", redisScheduler.Poll().Url.ToString());

    redisScheduler.Clear();
}