static void Main(string[] args)
{
    Crawler myCrawler = new Crawler();
    string startUrls = "http://www.baidu.com/";
    if (args.Length >= 1)
    {
        startUrls = args[0];
    }
    myCrawler.urls.Add(startUrls, false);   // add the initial page

    // new Thread(myCrawler.Crawl).Start();
    Task[] tasks =
    {
        Task.Run(() => myCrawler.Crawl()),
        Task.Run(() => myCrawler.Crawl()),
    };
    Task.WaitAll(tasks);
}
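This entry point drives a Crawler instance through myCrawler.urls and myCrawler.Crawl(), but the class itself is not shown here. Below is a minimal sketch of the shape such a class might take, assuming urls is a Dictionary<string, bool> that maps each URL to a "processed" flag and that a lock guards it because two tasks call Crawl() at the same time; the actual implementation in the source may differ.

using System;
using System.Collections.Generic;
using System.Net.Http;

// Hypothetical Crawler skeleton: Crawl() repeatedly picks an unprocessed
// URL from the shared dictionary, marks it as processed, downloads it,
// and exits when nothing is left. The lock keeps the dictionary consistent
// when two tasks run Crawl() concurrently.
class Crawler
{
    public Dictionary<string, bool> urls = new Dictionary<string, bool>();
    private readonly object sync = new object();
    private readonly HttpClient client = new HttpClient();

    public void Crawl()
    {
        while (true)
        {
            string current = null;
            lock (sync)
            {
                // pick the first URL that has not been processed yet
                foreach (var pair in urls)
                {
                    if (!pair.Value) { current = pair.Key; break; }
                }
                if (current == null) return;   // nothing left to crawl
                urls[current] = true;          // mark as processed
            }

            try
            {
                string html = client.GetStringAsync(current).Result;
                Console.WriteLine($"Downloaded {current} ({html.Length} chars)");
                // a full crawler would extract links from html here and
                // add any unseen URLs to urls under the same lock
            }
            catch (Exception ex)
            {
                Console.WriteLine($"Failed {current}: {ex.Message}");
            }
        }
    }
}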
static void Main(string[] args)
{
    if (args.Length != 0)
    {
        Crawler.Crawl(args[0], true, Tuple.Create(true, 10));
    }
    else
    {
        Crawler.Crawl("https://github.com/", true, Tuple.Create(true, 10));
    }
}
static void Main(string[] args)
{
    Crawler myCrawler = new Crawler();
    string startUrl = "http://www.cnblogs.com/dstang2000/";
    if (args.Length >= 1)
    {
        startUrl = args[0];
    }
    myCrawler.urls.Add(startUrl, false);    // add the initial page

    // original code: new Thread(myCrawler.Crawl).Start();  // start crawling
    myCrawler.Crawl();   // start crawling
}