/// <summary>
/// Builds an Abot <c>CrawlConfiguration</c> from the supplied Smithy settings and
/// crawls <c>smithy.Url</c>. If basic-auth credentials are supplied, both username
/// and password must be present; otherwise the process exits with code 1.
/// </summary>
/// <param name="smithy">Run settings: threads, depth, delay, page cap, timeout, optional user/pass, and target URL.</param>
private static async Task DoCrawl(Smithy smithy)
{
    bool hasUser = !string.IsNullOrEmpty(smithy.User);
    bool hasPass = !string.IsNullOrEmpty(smithy.Pass);

    // Fail fast BEFORE building the configuration: basic auth needs both halves.
    if (hasUser != hasPass)
    {
        Console.WriteLine("Please specify both a username and a password if using basic auth");
        System.Environment.Exit(1);
    }

    // Single source of truth for the crawl settings (previously duplicated
    // verbatim in both the authenticated and unauthenticated branches).
    var config = new CrawlConfiguration
    {
        MaxConcurrentThreads = smithy.Threads,
        MaxCrawlDepth = smithy.Depth,
        MinCrawlDelayPerDomainMilliSeconds = smithy.Delay,
        MaxPagesToCrawl = smithy.MaxPages,
        MaxRetryCount = 1,
        //HttpServicePointConnectionLimit = 2000,
        HttpRequestTimeoutInSeconds = smithy.Timeout,
    };

    // Credentials are optional; only attach them when both were provided.
    if (hasUser && hasPass)
    {
        config.LoginUser = smithy.User;
        config.LoginPassword = smithy.Pass;
    }

    var crawler = new PoliteWebCrawler(config);
    crawler.PageCrawlCompleted += PageCrawlCompleted;

    // Result object was never inspected by the original code, so it is discarded;
    // words are harvested via the PageCrawlCompleted handler instead.
    await crawler.CrawlAsync(new Uri(smithy.Url));
}
/// <summary>
/// Entry point: parses command-line options, configures Serilog logging,
/// runs the crawl, derives a default output filename when none was given,
/// and writes the collected wordlist to disk, reporting total elapsed time.
/// </summary>
/// <param name="args">Raw command-line arguments.</param>
static async Task Main(string[] args)
{
    var timer = Stopwatch.StartNew();

    // Parse arguments passed
    var argParser = new Parser(settings =>
    {
        settings.CaseInsensitiveEnumValues = true;
        settings.CaseSensitive = false;
        settings.HelpWriter = null;
    });
    var parsed = argParser.ParseArguments<Options>(args);
    parsed
        .WithParsed<Options>(o => { Options.Instance = o; })
        .WithNotParsed(errs => DisplayHelp(parsed, errs));
    var options = Options.Instance;

    try
    {
        smithy = new Smithy();

        // Quiet mode raises the minimum log level so only errors reach the console.
        var logConfig = options.Quiet
            ? new LoggerConfiguration().MinimumLevel.Error()
            : new LoggerConfiguration().MinimumLevel.Information();
        Log.Logger = logConfig.WriteTo.Console().CreateLogger();

        await DoCrawl(smithy);

        // Derive a filesystem-safe name from the target host (dots -> underscores).
        var hostPart = new Uri(smithy.Url).Authority.Replace('.', '_');
        if (string.IsNullOrEmpty(smithy.Output))
        {
            smithy.Output = "wordlist_" + hostPart + DateTime.Now.ToString("_HH-mm-ss") + ".txt";

            // Fall back to a host-free name when the path would be excessively long.
            if (smithy.Output.Length > 250)
            {
                smithy.Output = "wordlist_" + DateTime.Now.ToString("M-dd-yyyy_HH-mm-ss") + ".txt";
            }
        }

        Console.WriteLine($"\n[+] Crawl finished, writing to file: {smithy.Output}\n");

        // Flush every harvested word to the output file, one per line.
        using (StreamWriter outfile = new StreamWriter(smithy.Output))
        {
            foreach (var word in wordlist)
            {
                await outfile.WriteLineAsync(word);
            }
        }

        timer.Stop();
        Console.WriteLine("Execution time: " + timer.ElapsedMilliseconds / 1000 + " Seconds");
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
    }
}