/// <summary>
/// Persists the selected leech mode (and custom regex, if any) to
/// <see cref="MainForm"/> when the options dialog is closing.
/// The radio buttons are mutually exclusive, so the checks are chained.
/// </summary>
private void LeechOptions_FormClosing(object sender, FormClosingEventArgs e)
{
    if (radioButton1.Checked)
    {
        MainForm._leechOption = "emailpass";
    }
    else if (radioButton2.Checked)
    {
        MainForm._leechOption = "userpass";
    }
    else if (radioButton3.Checked)
    {
        MainForm._leechOption = "proxies";
    }
    else if (radioButton4.Checked)
    {
        MainForm._leechOption = "emailonly";
    }
    else if (radioButton5.Checked)
    {
        if (string.IsNullOrEmpty(textBox1.Text))
        {
            ProgramUtils.ShowErrorMessage("Please insert a valid custom regex.", false);
            // BUG FIX: previously the dialog closed anyway after showing the
            // error, silently keeping the stale option. Cancel the close so
            // the user can correct the input.
            e.Cancel = true;
        }
        else
        {
            MainForm._leechOption = "custom";
            MainForm._customRegex = textBox1.Text;
        }
    }
}
/// <summary>
/// BackgroundWorker DoWork handler: drives the scraping run. Either scrapes
/// the user-supplied link list directly, or iterates every
/// engine x website x keyword combination and scrapes each one.
/// Scrape/ScrapeResult catch their own errors; this catch is a last resort.
/// </summary>
private void GetLinks(object sender, DoWorkEventArgs e)
{
    try
    {
        if (_useCustomLinks)
        {
            // User provided explicit links; no HttpRequest yet, so pass null
            // and let ScrapeResult build its own.
            ScrapeResult(_customLinks, null);
        }
        else
        {
            foreach (string engine in textBox3.Lines)
            {
                foreach (string website in textBox4.Lines)
                {
                    foreach (string keyword in textBox2.Lines)
                    {
                        // Reset the retry counter for each fresh combination.
                        _currentRetry = 1;
                        Scrape(engine, website, keyword, _currentRetry);
                    }
                }
            }
        }
    }
    catch (Exception ex)
    {
        if (_showScrapingErrors)
        {
            ProgramUtils.ShowErrorMessage(ex.Message + ex.StackTrace, true);
        }
        // BUG FIX: scraper errors are displayed on label14 everywhere else
        // (AppendResult, ScrapeResult, Scrape); label13 shows _programErrors.
        label14.Text = (++_scraperErrors).ToString();
    }
}
/// <summary>
/// Extracts results from a downloaded page body, dispatching on the active
/// leech mode to the matching extraction regex and output category.
/// </summary>
/// <param name="response">Raw HTML/text of the fetched page.</param>
private void AppendResult(string response)
{
    try
    {
        switch (_leechOption)
        {
            case "emailpass":
                // email:password combo lines
                GetResult(response, @"([a-zA-Z0-9_\-\.]+)@([a-zA-Z0-9_\-\.]+)\.([a-zA-Z]{2,5}):([a-zA-Z0-9_\-\.]+)", "combos");
                break;
            case "userpass":
                // username:password combo lines
                GetResult(response, @"[a-z0-9_-]{3,16}:([a-zA-Z0-9_\-\.]+)", "combos");
                break;
            case "proxies":
                // IPv4 host, optional separator, 2-5 digit port
                GetResult(response, @"(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})(?=[^\d])\s*:?\s*(\d{2,5})", "proxies");
                break;
            case "emailonly":
                GetResult(response, @"([a-zA-Z0-9_\-\.]+)@([a-zA-Z0-9_\-\.]+)\.([a-zA-Z]{2,5})", "emails");
                break;
            case "custom":
                // User-supplied pattern captured in the options dialog.
                GetResult(response, _customRegex, "result");
                break;
        }
    }
    catch (Exception ex)
    {
        if (_showScrapingErrors)
        {
            ProgramUtils.ShowErrorMessage(ex.Message + ex.StackTrace, true);
        }
        label14.Text = (++_scraperErrors).ToString();
    }
}
/// <summary>
/// Start button: disables the GUI, shows run info, and launches the scraping
/// run on the BackgroundWorker.
/// </summary>
private void button1_Click(object sender, EventArgs e)
{
    // Guard: RunWorkerAsync throws InvalidOperationException if a run is
    // already in progress.
    if (_worker.IsBusy)
    {
        return;
    }
    SetGuiElements(false);
    SetInformations(true);
    try
    {
        // BUG FIX: handlers were added on every click, so after N clicks each
        // run executed GetLinks N times and fired N completion callbacks.
        // Detach before attaching so exactly one subscription exists.
        _worker.DoWork -= GetLinks;
        _worker.DoWork += GetLinks;
        _worker.RunWorkerCompleted -= Worker_RunWorkerCompleted;
        _worker.RunWorkerCompleted += Worker_RunWorkerCompleted;
        _worker.RunWorkerAsync();
    }
    catch (Exception ex)
    {
        if (_showProgramErrors)
        {
            ProgramUtils.ShowErrorMessage(ex.Message + ex.StackTrace, false);
        }
        label13.Text = (++_programErrors).ToString();
    }
}

// Restores the GUI once a scraping run finishes (named handler so it can be
// detached, unlike the previous lambda subscription).
private void Worker_RunWorkerCompleted(object send, RunWorkerCompletedEventArgs ev)
{
    SetGuiElements(true);
    SetInformations(false);
    button2.Enabled = false;
}
/// <summary>
/// Downloads every link and feeds the responses to <see cref="AppendResult"/>.
/// If <paramref name="req"/> is null a fully configured request is created
/// (rebuilt until a proxy is obtained when proxies are enabled); otherwise the
/// caller's request is reused as-is.
/// </summary>
/// <param name="links">Candidate URLs; de-duplicated before scraping.</param>
/// <param name="req">Existing request to reuse, or null to build one.</param>
private void ScrapeResult(string[] links, HttpRequest req)
{
    try
    {
        if (req == null)
        {
            // Replaces the original `again:` / `goto again` construct: keep
            // creating fresh requests until a proxy is available (or proxies
            // are disabled), then scrape once and exit.
            while (true)
            {
                using (req = new HttpRequest())
                {
                    req.UserAgent = Utils.Randomizer.RandomUserAgent();
                    if (_useProxies)
                    {
                        ProxyClient client = RandomProxy();
                        if (client == null)
                        {
                            continue; // no proxy yet -> retry with a new request
                        }
                        req.Proxy = client;
                    }
                    ConfigureScrapeRequest(req);
                    ScrapeLinksWith(req, links);
                }
                break;
            }
        }
        else
        {
            ScrapeLinksWith(req, links);
        }
    }
    catch (Exception ex)
    {
        if (_showScrapingErrors)
        {
            ProgramUtils.ShowErrorMessage(ex.Message + ex.StackTrace, true);
        }
        label14.Text = (++_scraperErrors).ToString();
    }
}

// Applies the scraper's standard transport settings to a freshly created
// request (encoding, lenient error handling, optional reconnects, TLS 1.2
// with certificate validation disabled, cookies, timeouts, redirects, headers).
private void ConfigureScrapeRequest(HttpRequest req)
{
    req.EnableEncodingContent = true;
    req.IgnoreInvalidCookie = true;
    req.IgnoreProtocolErrors = true;
    if (_useRetries)
    {
        req.Reconnect = true;
        req.ReconnectDelay = _timeout;
        req.ReconnectLimit = _retries;
    }
    req.SslProtocols = SslProtocols.Tls12;
    // NOTE(review): accepts every certificate; intentional for scraping but
    // disables TLS authentication entirely.
    req.SslCertificateValidatorCallback += (sender, certificate, chain, sslPolicyErrors) => true;
    req.UseCookies = true;
    req.ConnectTimeout = _timeout;
    req.ReadWriteTimeout = _timeout;
    req.Cookies = new CookieStorage();
    req.AllowAutoRedirect = true;
    req.MaximumAutomaticRedirections = 10;
    req.AddHeader("Upgrade-Insecure-Requests", "1");
    req.AddHeader("Accept", "*/*");
}

// Shared scraping loop (was duplicated verbatim in both branches of
// ScrapeResult): fetch each de-duplicated link and hand the body to
// AppendResult; anonfiles.com pages get a second pass that extracts and
// fetches the embedded file links.
private void ScrapeLinksWith(HttpRequest req, string[] links)
{
    _links.Text = string.Join(Environment.NewLine, links.Distinct());
    label18.Text = $"Got {_links.Lines.Length} links, scraping result...";
    foreach (string link in _links.Lines)
    {
        if (_useCustomLinks)
        {
            label24.Text = link; // show progress when scraping user links
        }
        string response = req.Get(link).ToString();
        if (link.Contains("anonfiles.com"))
        {
            // NOTE(review): the dot before "anonfiles" is unescaped, so it
            // matches any character — presumably meant as a subdomain dot.
            MatchCollection regex = Regex.Matches(response, @"(https:\/\/.*.anonfiles.com\/.*)");
            List<string> arr = regex.OfType<Match>().Select(m => m.Value).Distinct().ToList();
            List<string> anonlinks = new List<string>();
            if (arr.Count > 0 && arr.Last() != Environment.NewLine)
            {
                arr.Add(Environment.NewLine);
                anonlinks.Add(string.Join(Environment.NewLine, arr).Replace("> <img", string.Empty).Replace("\"", string.Empty));
            }
            foreach (string anonlink in anonlinks)
            {
                string respo = req.Get(anonlink).ToString();
                AppendResult(respo);
            }
        }
        else
        {
            AppendResult(response);
        }
    }
}
/// <summary>
/// Runs one search-engine query (engine URL + keyword + site: filter),
/// extracts result links for the target website, and hands them to
/// ScrapeResult. On failure, retries the whole request (via goto) up to
/// _retries times when retries are enabled.
/// </summary>
/// <param name="engine">Search engine base URL; query is appended directly.</param>
/// <param name="website">Target site used in the site: filter and link regex.</param>
/// <param name="keyword">Search keyword.</param>
/// <param name="retry">Current attempt number, shown in the UI.</param>
private void Scrape(string engine, string website, string keyword, int retry)
{
    if (_past24Hours)
    {
        // Rewrite the engine URL to its "past 24 hours" variant.
        label18.Text = "Adjusting search engines...";
        if (engine.Contains("bing")) { engine = "https://www.bing.com/search?filters=ex1%3a%22ez1%22&q="; }
        else if (engine.Contains("yahoo")) { engine = "https://search.yahoo.com/search?age=1d&btf=d&q="; }
        else if (engine.Contains("yandex")) { engine = "https://yandex.com/search/?within=77&text="; }
        else if (engine.Contains("google")) { engine = "https://www.google.com/search?tbs=qdr:d&q="; }
        else if (engine.Contains("duckduckgo")) { engine = "https://duckduckgo.com/?df=d&ia=web&q="; }
        else if (engine.Contains("aol")) { engine = "https://search.aol.com/aol/search?age=1d&btf=d&q="; }
        else if (engine.Contains("rambler")) { engine = "https://nova.rambler.ru/search?period=day&query="; }
    }
    // Retry target: each attempt builds a brand-new request (and, when
    // proxies are enabled, may pick a different random proxy).
    again : try
    {
        using (HttpRequest req = new HttpRequest())
        {
            req.UserAgent = Utils.Randomizer.RandomUserAgent();
            if (_useProxies)
            {
                ProxyClient client = RandomProxy();
                if (client != null) { req.Proxy = client; }
                else { goto again; } // no proxy available yet -> rebuild request
            }
            req.EnableEncodingContent = true;
            req.IgnoreInvalidCookie = true;
            req.IgnoreProtocolErrors = true;
            if (_useRetries)
            {
                // NOTE(review): transport-level reconnects in addition to the
                // goto-based retry below — a failed request may be attempted
                // up to _retries * _retries times in total.
                req.Reconnect = true;
                req.ReconnectDelay = _timeout;
                req.ReconnectLimit = _retries;
            }
            req.SslProtocols = SslProtocols.Tls12;
            // Accepts every certificate (TLS authentication disabled).
            req.SslCertificateValidatorCallback += (sender, certificate, chain, sslPolicyErrors) => true;
            req.UseCookies = true;
            req.ConnectTimeout = _timeout;
            req.ReadWriteTimeout = _timeout;
            req.Cookies = new CookieStorage();
            req.AllowAutoRedirect = true;
            req.MaximumAutomaticRedirections = 10;
            req.AddHeader("Upgrade-Insecure-Requests", "1");
            req.AddHeader("Accept", "*/*");
            // Surface the current combination in the UI.
            label16.Text = keyword;
            label15.Text = website;
            label18.Text = "Scraping links...";
            label19.Text = retry.ToString();
            label21.Text = engine;
            string response = req.Get($"{engine}{keyword}+site:{website}").ToString();
            // Only links of the form https://<website>/<word-chars> are kept.
            MatchCollection regex = Regex.Matches(response, $@"(https:\/\/{website}\/\w+)");
            if (regex.Count != 0)
            {
                string[] arr = regex.OfType <Match>().Select(m => m.Value).ToArray();
                // Reuse this configured request for the result pages.
                ScrapeResult(arr, req);
            }
        }
    }
    catch (Exception ex)
    {
        if (_showScrapingErrors) { ProgramUtils.ShowErrorMessage(ex.Message + ex.StackTrace, true); }
        label14.Text = (++_scraperErrors).ToString();
        if (_useRetries)
        {
            // Retry the whole attempt until the counter exceeds the limit.
            if (retry <= _retries)
            {
                retry++;
                goto again;
            }
        }
    }
}