// by Noseratio - http://stackoverflow.com/a/23819021/1768303
// test: web-scrap a list of URLs, one at a time
static async Task ScrapSitesAsync(string[] urls, CancellationToken token)
{
    // Per-site deadline: cancel in 30s or when the main token is signalled.
    var perSiteTimeoutMs = (int)TimeSpan.FromSeconds(30).TotalMilliseconds;

    using (var pool = new WebBrowserPool(maxParallel: 2, token: token))
    {
        // Sequential: each site is fully scraped before the next one starts.
        foreach (var site in urls)
        {
            Console.WriteLine("URL:\n" + site);
            var html = await pool.ScrapSiteAsync(site, perSiteTimeoutMs);
            Console.WriteLine("HTML:\n" + html);
        }
    }
}
// by Noseratio - http://stackoverflow.com/a/23819021/1768303
// test: web-scrap a list of URLs concurrently, then print results in input order
static async Task ScrapSitesAsync(string[] urls, CancellationToken token)
{
    using (var pool = new WebBrowserPool(maxParallel: 2, token: token))
    {
        // cancel each site in 30s or when the main token is signalled
        var timeout = (int)TimeSpan.FromSeconds(30).TotalMilliseconds;

        // Pair each URL with its in-flight task. Unlike ToDictionary(url => url, ...),
        // this tolerates duplicate URLs (no ArgumentException) and guarantees the
        // results are reported in the original input order.
        var scrapes = urls
            .Select(url => (Url: url, Task: pool.ScrapSiteAsync(url, timeout)))
            .ToList();

        // Start-all-then-await: the pool throttles actual parallelism internally.
        await Task.WhenAll(scrapes.Select(s => s.Task));

        foreach (var (url, task) in scrapes)
        {
            Console.WriteLine("URL:\n" + url);
            // await rather than .Result: the task is already complete, but a faulted
            // task awaited here throws its original exception instead of the
            // AggregateException wrapper that .Result would produce.
            string html = await task;
            Console.WriteLine("HTML:\n" + html);
        }
    }
}