public void TestChainOneLink()
{
    MockRepository mocks = new MockRepository();
    var web = mocks.StrictMock<IWebInteractor>();
    var site = new Website("www.whocares.com", "whatever");
    var mockFSI = MockRepository.GenerateStub<IFileSystemInteractor>();

    CrawlResult testResult = new CrawlResult();
    testResult.ReturnCode = 200;
    testResult.ReturnStatus = "OK";
    testResult.Html = "href=\"/csse.html\"";

    var resultTwo = new CrawlResult();
    resultTwo.ReturnCode = 200;
    resultTwo.ReturnStatus = "OK";
    resultTwo.Html = "";

    Expect.On(web).Call(web.GetPage("www.test.com")).Return(testResult);
    Expect.On(web).Call(web.GetPage("www.test.com/csse.html")).Return(resultTwo);
    mocks.ReplayAll();

    Bot b = new Bot(new Website("www.test.com", "simplepath"), null, null, web, mockFSI);
    List<CrawlResult> results = b.CrawlSite(2);

    mocks.VerifyAll();
    Assert.AreEqual(2, results.Count);
}
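// A minimal sketch of the collaborator contract this test appears to exercise, inferred
// only from the calls above (GetPage returning a CrawlResult with ReturnCode, ReturnStatus
// and Html). The project's real definitions may differ; this is an illustration of the
// assumed shape, not the actual source.
public interface IWebInteractor
{
    // Fetches a single page and reports the HTTP status plus the raw HTML.
    CrawlResult GetPage(string url);
}

public class CrawlResult
{
    public int ReturnCode { get; set; }
    public string ReturnStatus { get; set; }
    public string Html { get; set; }
}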
public void TestBotCallsCrawlSiteOnWebInteractor()
{
    MockRepository mocks = new MockRepository();
    var web = MockRepository.GenerateStub<IWebInteractor>();
    var site = new Website("www.whocares.com", "whatever");
    var mockFSI = MockRepository.GenerateStub<IFileSystemInteractor>();

    var useableBot = new Bot(site, null, null, web, mockFSI);
    useableBot.CrawlSite(1);

    web.AssertWasCalled(s => s.GetPage("www.whocares.com"));
}
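// Note that a GenerateStub<IWebInteractor>() returns null from GetPage unless it is primed,
// so the test above relies on Bot.CrawlSite tolerating a null CrawlResult. A hedged variant
// (hypothetical name, using Rhino Mocks' AAA Stub/Return syntax and the CrawlResult shape
// assumed in the other tests) that feeds the stub a canned empty page avoids depending on
// that behaviour:
public void TestBotCallsCrawlSiteOnWebInteractor_WithCannedPage()
{
    var web = MockRepository.GenerateStub<IWebInteractor>();
    var site = new Website("www.whocares.com", "whatever");
    var mockFSI = MockRepository.GenerateStub<IFileSystemInteractor>();

    // Return an empty 200 page for any URL so the crawl completes without following links.
    var emptyPage = new CrawlResult { ReturnCode = 200, ReturnStatus = "OK", Html = "" };
    web.Stub(x => x.GetPage(Arg<string>.Is.Anything)).Return(emptyPage);

    var useableBot = new Bot(site, null, null, web, mockFSI);
    useableBot.CrawlSite(1);

    web.AssertWasCalled(s => s.GetPage("www.whocares.com"));
}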
public CrawlerController(String path, int depth, string email)
{
    Assembly assem = Assembly.GetEntryAssembly();
    AssemblyName aName = assem.GetName();
    Version ver = aName.Version;
    Console.Out.WriteLine("Application {0}, Version {1}", aName.Name, ver.ToString());

    //String outputPath = path.Substring(path.IndexOf('.') + 1, path.LastIndexOf('.') - path.IndexOf('.') - 1) + "-" + DateTime.Now.Month.ToString().PadLeft(2, '0') + "-" + DateTime.Now.Day.ToString().PadLeft(2, '0') + "-" + DateTime.Now.Year.ToString() + "_" + DateTime.Now.Hour.ToString() + "-" + DateTime.Now.Minute.ToString();
    string outputPath = string.Format("{0}_{1}", path, DateTime.Now.ToString("hh-mm_MM-dd-yyyy"));

    String arguments = path + " -g -r" + depth.ToString() + " -O " + outputPath;
    //DEBUG - Console.WriteLine("Running httrack with arguments: " + arguments);
    //Process p = Process.Start(Directory.GetCurrentDirectory() + "/httrack/httrack.exe", arguments);
    //p.WaitForExit();
    //Directory.CreateDirectory(output);

    String foldername = outputPath;
    outputPath = Directory.GetCurrentDirectory() + "\\" + outputPath;
    Directory.CreateDirectory(outputPath);

    //initialize the website
    Website site = new Website(path, outputPath);

    //initialize the log
    Log log = new Log(outputPath + "\\log.txt");
    log.writeInfo("Log created correctly");
    log.writeInfo("Website: " + path + " == CrawlLevel: " + depth.ToString());
    log.writeInfo("Running version: " + aName.Version.ToString());

    //initialize the database accessor class
    log.writeDebug("Creating database object");
    DatabaseAccessor dbAccess = null;
    try
    {
        dbAccess = new DatabaseAccessor(log, ConfigReader.ReadDatabaseAccessorString());
    }
    catch (Exception e)
    {
        Console.Out.WriteLine("Error creating database connection: " + e.Message);
        Console.Out.WriteLine("Reverting to default CrawlID");
        log.writeError("Error creating database connection: " + e.Message);
        log.writeError("Reverting to default CrawlID");
    }

    if (dbAccess != null)
        dbAccess.addWebsite(path, null, null, null);

    int crawlID;
    if (dbAccess != null)
    {
        crawlID = dbAccess.newCrawl(path, email);
    }
    else
    {
        crawlID = 0;
    }

    var fsi = new FileSystemInteractor(log);
    Bot b = new Bot(site, log, null, new WebInteractor(log), fsi);
    b.CrawlSite(depth);

    //Parse website
    WebsiteParser siteparser = new WebsiteParser(site, dbAccess, crawlID, log, fsi);
    List<String> result = siteparser.analyzeSite();

    //Try to analyse an SSL certificate, if there is one
    log.writeDebug("Creating SSL object");
    CrawlerPlugin ssl = new SSLConfirmationPlugin(site, dbAccess, crawlID, log);
    result.AddRange(ssl.analyzeSite());

    //Get headers
    CrawlerPlugin headers = new HttpHeaderPlugin(site, dbAccess, crawlID, log);
    result.AddRange(headers.analyzeSite());

    //HTML Parser
    log.writeDebug("Creating HTML parsing object");
    HTMLParsingModule HTMLParser = new HTMLParsingModule(site, dbAccess, crawlID, log);
    result.AddRange(HTMLParser.analyzeSite());
    log.writeDebug("Done parsing HTML");

    //Write to database, guarding against the connection having failed above
    if (dbAccess != null)
        dbAccess.AddVulnerabilities(crawlID, result);

    //notify
    log.writeDebug("Preparing to send message");
    try
    {
        NotifyClient.sendMessage(ConfigReader.ReadEmailAddress());
    }
    catch (Exception e)
    {
        Console.Out.WriteLine("Error in Notify client: " + e.Message);
        log.writeError("Error in Notify client: " + e.Message);
    }
    log.writeDebug("Done sending notification");

    log.writeDebug("Destroying log....program exiting");
    log.destroy();
}
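// The plugin calls in the controller suggest a common contract: each analysis step takes the
// crawled Website, the DatabaseAccessor, the crawl ID and the Log, and returns its findings
// as a List<String> that the controller aggregates. A minimal sketch of that assumed base
// class follows; the project's actual CrawlerPlugin may differ.
public abstract class CrawlerPlugin
{
    protected readonly Website site;
    protected readonly DatabaseAccessor dbAccess;
    protected readonly int crawlID;
    protected readonly Log log;

    protected CrawlerPlugin(Website site, DatabaseAccessor dbAccess, int crawlID, Log log)
    {
        this.site = site;
        this.dbAccess = dbAccess;
        this.crawlID = crawlID;
        this.log = log;
    }

    // Each plugin inspects the crawled site and reports its findings as strings.
    public abstract List<String> analyzeSite();
}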
public void TestStatus404()
{
    MockRepository mocks = new MockRepository();
    var web = mocks.StrictMock<IWebInteractor>();
    var site = new Website("www.whocares.com", "whatever");
    var mockFSI = MockRepository.GenerateStub<IFileSystemInteractor>();

    var retVal = new CrawlResult();
    retVal.ReturnCode = 404;
    retVal.Html = @"";

    Expect.On(web).Call(web.GetPage("www.whocares.com")).Return(retVal);
    mocks.ReplayAll();

    var useableBot = new Bot(site, null, null, web, mockFSI);
    var check = useableBot.CrawlSite(1);

    Assert.IsTrue(check[0].ReturnCode == 404);
    mocks.VerifyAll();
}