// Downloads every page listed in "directory" and collects the proxy-page links found on it.
// proxyPages is written from multiple threads, so it must be a thread-safe collection
// (e.g. ConcurrentBag<string>). The inner loop only adds items, so a plain foreach is
// enough; parallelizing it again would just add overhead.
internal static void scrapeProxyPages()
{
    Parallel.ForEach(directory, url =>
    {
        using (WebClient wb = new WebClient())
        {
            string html = wb.DownloadString(url);
            foreach (var link in ScraperLogic.NumberExtractor2(html))
            {
                proxyPages.Add(link);
            }
        }
    });
}
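WebClient is marked obsolete in current .NET, and the Parallel.ForEach here is spending threads on blocking network I/O. If changing the API is an option, the same step can be expressed with one shared HttpClient and async downloads. A minimal sketch, assuming "directory", "proxyPages" and NumberExtractor2 behave as above; the method and field names introduced here (ScrapeProxyPagesAsync, http) are illustrative, not part of the original code:

// requires: using System.Linq; using System.Net.Http; using System.Threading.Tasks;
private static readonly HttpClient http = new HttpClient();

internal static async Task ScrapeProxyPagesAsync()
{
    // Start one async download per directory page instead of one blocked thread per page.
    var downloads = directory.Select(async url =>
    {
        string html = await http.GetStringAsync(url);
        foreach (var link in ScraperLogic.NumberExtractor2(html))
        {
            proxyPages.Add(link);   // proxyPages must still be thread-safe
        }
    });

    await Task.WhenAll(downloads);
}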
private async void button3_Click(object sender, EventArgs e)
{
    status = scrapeStatus.scrapingProxies;

    await Task.Run(() =>
    {
        ScraperLogic.buildScrapeList();
        ScraperLogic.scrapeProxyPages();
        proxyList = new ConcurrentBag<string>(ScraperLogic.scrapeProxies().Distinct());

        // Running on a worker thread here, so marshal the UI update back to the UI thread.
        textBox1.BeginInvoke(new Action(() =>
        {
            textBox1.Lines = proxyList.ToArray();
        }));
    });

    status = scrapeStatus.testingProxies;
    testProxyList();
}
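The body of testProxyList is not shown here. For context, a single proxy check could look roughly like the sketch below: route a request through the proxy and treat any response within the timeout as "alive". The method name, target URL and timeout are assumptions, not the original implementation.

// requires: using System; using System.Net; using System.Net.Http; using System.Threading.Tasks;
private static async Task<bool> IsProxyAliveAsync(string proxy)   // proxy is an "ip:port" string
{
    var handler = new HttpClientHandler
    {
        Proxy = new WebProxy("http://" + proxy),
        UseProxy = true
    };

    using (var client = new HttpClient(handler) { Timeout = TimeSpan.FromSeconds(5) })
    {
        try
        {
            var response = await client.GetAsync("http://example.com/");
            return response.IsSuccessStatusCode;
        }
        catch
        {
            return false;   // timeout, refused connection, dead proxy, ...
        }
    }
}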
private async void button1_Click(object sender, EventArgs e)
{
    status = scrapeStatus.scrapingProxies;

    await Task.Run(() =>
    {
        label1.BeginInvoke(new Action(() =>
        {
            label1.ForeColor = Color.DarkGreen;
            label1.Text = "Scraping Proxies";
        }));

        ScraperLogic.buildScrapeList();
        ScraperLogic.scrapeProxyPages();
        proxyList = new ConcurrentBag<string>(ScraperLogic.scrapeProxies().Distinct());

        textBox1.BeginInvoke(new Action(() =>
        {
            textBox1.Lines = proxyList.ToArray();
        }));
    });
}
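button1_Click and button3_Click duplicate the scrape-and-display logic almost verbatim, so they can drift apart. One possible shape for consolidating them is sketched below; the helper name ScrapeAndDisplayProxiesAsync is illustrative. Because the handler awaits on the UI thread, code after the await resumes on the UI thread and no BeginInvoke is needed for the final display update.

private async Task ScrapeAndDisplayProxiesAsync()
{
    status = scrapeStatus.scrapingProxies;
    label1.ForeColor = Color.DarkGreen;
    label1.Text = "Scraping Proxies";

    await Task.Run(() =>
    {
        ScraperLogic.buildScrapeList();
        ScraperLogic.scrapeProxyPages();
        proxyList = new ConcurrentBag<string>(ScraperLogic.scrapeProxies().Distinct());
    });

    // Back on the UI thread after the await.
    textBox1.Lines = proxyList.ToArray();
}

private async void button1_Click(object sender, EventArgs e)
{
    await ScrapeAndDisplayProxiesAsync();
}

private async void button3_Click(object sender, EventArgs e)
{
    await ScrapeAndDisplayProxiesAsync();
    status = scrapeStatus.testingProxies;
    testProxyList();
}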
// Downloads every collected proxy page and extracts the proxies on it.
// Both the shared "proxies" bag and the returned bag are written from multiple
// threads, so they rely on ConcurrentBag's thread safety.
internal static ConcurrentBag<string> scrapeProxies()
{
    ConcurrentBag<string> returnList = new ConcurrentBag<string>();

    Parallel.ForEach(proxyPages, url =>
    {
        using (WebClient wb = new WebClient())
        {
            string html = wb.DownloadString(url);
            foreach (var proxy in ScraperLogic.proxyGetter(html))
            {
                proxies.Add(proxy);
                returnList.Add(proxy);
            }
        }
    });

    return returnList;
}