// Spiders a local file-based site rooted at testLocation; a "file:///" base URL
// scopes full parsing to that directory. Depth -1 means no depth limit.
public void FileBaseUrl( )
{
    uri = testLocation + "index.html";
    rs = new RunSpider(uri, "file:///" + testLocation, -1);
    PrintSpiderResults(rs);
    // Assertion.AssertEquals is (message, expected, actual); expected value
    // must come first so failure messages read correctly.
    Assertion.AssertEquals("Total Pages", 12, rs.Pages.Length);
    SiteAssert(rs, uri, true, "OK");
}
// Writes one console line per crawled page (status code, process status,
// absolute URL, description) when result output is enabled; otherwise a no-op.
private void PrintSpiderResults(RunSpider rs)
{
    if (!isResultsOutput)
    {
        return;
    }

    foreach (WebPageState page in rs.Pages)
    {
        Console.WriteLine(
            "Code: {0,4}, Process Status: {1}, Url: {2}, Desc: {3}",
            page.StatusCode,
            ProcessStatus(page),
            page.Uri.AbsoluteUri,
            page.StatusDescription);
    }
}
// TODO - Url works but the id value returns a bad page. Need to look into options here.
// Expects the spider to record exactly one page with a 404 result.
public void GoodUrlBadRef( )
{
    uri = "http://www.gotdotnet.com/Community/Workspaces/workspace.aspx?id=c20d12b0-af52-402b-9b7c-aaeb21d1f43";
    rs = new RunSpider(uri, 1);
    PrintSpiderResults(rs);
    // Assertion.AssertEquals is (message, expected, actual); expected first.
    Assertion.AssertEquals("Total Pages", 1, rs.Pages.Length);
    SiteAssert(rs, uri, false, "404");
}
// Good url, Base Url is domain, Do full parse on any part of the domain.
// NOTE(review): the expected page count (46) is pinned against a live site
// and will drift as the site changes — confirm before relying on it.
public void GoodUrlBaseUrl2( )
{
    uri = "http://www.holidaycoast.net.au/coffsharbour/";
    rs = new RunSpider(uri, "http://www.holidaycoast.net.au/", 5);
    PrintSpiderResults(rs);
    // Assertion.AssertEquals is (message, expected, actual); expected first.
    Assertion.AssertEquals("Total Pages", 46, rs.Pages.Length);
    SiteAssert(rs, uri, true, "OK");
}
// Good url, Base Url is a path. Only do full parse in this path.
// Passing the start URI as its own base restricts the crawl to that sub-path.
public void GoodUrlBaseUrl1( )
{
    uri = "http://www.holidaycoast.net.au/coffsharbour/";
    rs = new RunSpider(uri, uri, 5);
    PrintSpiderResults(rs);
    // Assertion.AssertEquals is (message, expected, actual); expected first.
    Assertion.AssertEquals("Total Pages", 14, rs.Pages.Length);
    SiteAssert(rs, uri, true, "OK");
}
// Good url, 39 links on this url (39 linked pages + the start page = 40).
public void GoodUrl2( )
{
    uri = "http://www.gotdotnet.com/Community/Workspaces/workspace.aspx?id=c20d12b0-af52-402b-9b7c-aaeb21d1f431";
    rs = new RunSpider(uri, 1);
    PrintSpiderResults(rs);
    // Assertion.AssertEquals is (message, expected, actual); expected first.
    Assertion.AssertEquals("Total Pages", 40, rs.Pages.Length);
    SiteAssert(rs, uri, true, "OK");
}
// Good url, 13 links on this url.
// NOTE(review): the comment says 13 links but the assertion expects 12 total
// pages — confirm which count is intended (elsewhere total = links + start page).
public void GoodUrl1( )
{
    uri = "http://www.holidaycoast.net.au/";
    rs = new RunSpider(uri, 1);
    PrintSpiderResults(rs);
    // Assertion.AssertEquals is (message, expected, actual); expected first.
    Assertion.AssertEquals("Total Pages", 12, rs.Pages.Length);
    SiteAssert(rs, uri, true, "OK");
}
// Asserts that the crawled page whose address matches 'uri' has the expected
// process result and status code. Pages with non-matching addresses are skipped.
private void SiteAssert(RunSpider rs, string uri, bool expProcessSuccesfull, string expStatusCode)
{
    foreach (WebPageState page in rs.Pages)
    {
        // BUG FIX: the original compared a string to a System.Uri object
        // (uri.Equals(page.Uri)), which is always false, so the assertions
        // below never executed. Compare against the absolute URI string.
        if (uri.Equals(page.Uri.AbsoluteUri))
        {
            Assertion.AssertEquals("Page Status", expProcessSuccesfull, page.ProcessSuccessfull);
            // NOTE(review): page.StatusCode's type is not visible here; callers
            // pass strings like "OK"/"404" — confirm the types actually match.
            Assertion.AssertEquals("Page Code", expStatusCode, page.StatusCode);
        }
    }
}
// Bad Host: an unresolvable hostname should yield a single failed page with a 502.
public void E502BadHost( )
{
    uri = "http://www.invalidspidersite.net.au/";
    rs = new RunSpider(uri, 1);
    PrintSpiderResults(rs);
    // Assertion.AssertEquals is (message, expected, actual); expected first.
    Assertion.AssertEquals("Total Pages", 1, rs.Pages.Length);
    SiteAssert(rs, uri, false, "502");
}
// Forbidden Url: a 403 response should yield a single failed page.
public void E403Forbidden( )
{
    uri = "http://www.midcoast.com.au/~jpilgrim";
    rs = new RunSpider(uri, 1);
    PrintSpiderResults(rs);
    // Assertion.AssertEquals is (message, expected, actual); expected first.
    Assertion.AssertEquals("Total Pages", 1, rs.Pages.Length);
    SiteAssert(rs, uri, false, "403");
}
// Bad Url: a nonexistent path should yield a single failed page with a 404.
public void E404BadUrl( )
{
    uri = "http://www.holidaycoast.net.au/nucklehead";
    rs = new RunSpider(uri, 1);
    PrintSpiderResults(rs);
    // Assertion.AssertEquals is (message, expected, actual); expected first.
    Assertion.AssertEquals("Total Pages", 1, rs.Pages.Length);
    SiteAssert(rs, uri, false, "404");
}
// Crawls the entire site with no depth limit (-1). Guarded by isLongRunning
// so the expensive crawl only runs when explicitly enabled.
public void EntireSiteLongRunning( )
{
    if (isLongRunning)
    {
        uri = "http://www.holidaycoast.net.au/";
        rs = new RunSpider(uri, -1);
        PrintSpiderResults(rs);
        // Assertion.AssertEquals is (message, expected, actual); expected first.
        Assertion.AssertEquals("Total Pages", 224, rs.Pages.Length);
        SiteAssert(rs, uri, true, "OK");
    }
}