// A Tuple.Create projection over a grouping must translate its
// Count(predicate) aggregates; the shared helper inspects the expression.
public void SelectProjectionCanUseTupleCreateWithCountPredicate()
{
    var query = Robots
        .GroupBy(g => g.Zone)
        .Select(g => Tuple.Create(g.Key, g.Count(r => r.EnergyUse > 1.5), g.Count(r => r.EnergyUse <= 2.0)));

    TestProjectionWithCountPredicate(query.Expression);
}
// Same contract as the Tuple.Create variant, but the projection builds
// an anonymous type; the shared helper inspects the expression.
public void SelectProjectionCanUseAnonymousConstructorWithCountPredicate()
{
    var query = Robots
        .GroupBy(g => g.Zone)
        .Select(g => new { g.Key, High = g.Count(r => r.EnergyUse > 1.5), Low = g.Count(r => r.EnergyUse <= 2.0) });

    TestProjectionWithCountPredicate(query.Expression);
}
// Every aggregate in the projection should become its own facet:
// one terms_stats facet per aggregated field, plus a single terms
// facet backing the plain Count().
public void SelectProjectionCreatesMultipleFacets()
{
    var statsFields = new[] { "cost", "energyUse", "started" };
    var query = Robots
        .GroupBy(r => r.Zone)
        .Select(g => new { Count = g.Count(), SumCost = g.Sum(a => a.Cost), AverageEnergyUse = g.Average(a => a.EnergyUse), MinStarted = g.Min(a => a.Started), });

    var translation = ElasticQueryTranslator.Translate(Mapping, query.Expression);

    var materializer = Assert.IsType<ListTermFacetsElasticMaterializer>(translation.Materializer);
    Assert.Contains("AnonymousType", materializer.ElementType.FullName);
    Assert.Equal("count", translation.SearchRequest.SearchType);
    Assert.Equal(statsFields.Length + 1, translation.SearchRequest.Facets.Count);

    // One unfiltered terms_stats facet per aggregated field, keyed by the group key.
    foreach (var field in statsFields)
    {
        var statsFacet = translation.SearchRequest.Facets.OfType<TermsStatsFacet>().Single(s => s.Value == field);
        Assert.Equal("zone", statsFacet.Key);
        Assert.Null(statsFacet.Filter);
    }

    // The bare Count() becomes a single unfiltered terms facet over the group key.
    var countFacet = translation.SearchRequest.Facets.OfType<TermsFacet>().Single();
    Assert.Contains("zone", countFacet.Fields);
    Assert.Null(countFacet.Filter);
}
// A projection mixing a plain aggregate and a predicated count should
// yield one terms_stats facet and one filtered terms facet.
public void SelectProjectionCanCreateMixedFacets()
{
    var query = Robots
        .GroupBy(r => r.Zone)
        .Select(g => new { AverageEnergyUse = g.Average(a => a.EnergyUse), CountHighEnergy = g.Count(a => a.EnergyUse > 50.0) });

    var translation = ElasticQueryTranslator.Translate(Mapping, query.Expression);

    var materializer = Assert.IsType<ListTermFacetsElasticMaterializer>(translation.Materializer);
    Assert.Contains("AnonymousType", materializer.ElementType.FullName);
    Assert.Equal("count", translation.SearchRequest.SearchType);
    Assert.Equal(2, translation.SearchRequest.Facets.Count);

    // Average() -> unfiltered terms_stats facet keyed by the group key.
    var statsFacet = translation.SearchRequest.Facets.OfType<TermsStatsFacet>().Single();
    Assert.Equal("zone", statsFacet.Key);
    Assert.Equal("energyUse", statsFacet.Value);
    Assert.Null(statsFacet.Filter);

    // Count(predicate) -> terms facet whose filter is the range criteria.
    var termsFacet = translation.SearchRequest.Facets.OfType<TermsFacet>().Single();
    var range = Assert.IsType<RangeCriteria>(termsFacet.Filter);
    Assert.Equal("energyUse", range.Field);
    Assert.Equal(1, range.Specifications.Count);
    var specification = range.Specifications[0];
    Assert.Equal(RangeComparison.GreaterThan, specification.Comparison);
    Assert.Equal(50.0, specification.Value);
}
// A bare, non-directive line after the user-agent marks the file malformed.
public void Robots_InvalidLine_Malformed()
{
    string robotsTxt = "User-agent: *" + nl + "foo";

    Robots robots = new Robots(robotsTxt);

    Assert.True(robots.Malformed);
}
// Crawler sets the htmlContent, htmlLinks and ID of a Webpage.
// Corners cut: only crawls English Wikipedia webpages.
// Crawls until 1000 webpages have been retrieved.
// Near-duplicate detection: only checks whether a webpage has the same URL.
//
// Returns the list of successfully downloaded pages.
public List <Webpage> Crawl()
{
    frontier.Enqueue(this._urlSeed);
    int fileNameNumber = 1;

    while (pages.Count < 1000 && frontier.Count > 0)
    {
        Uri url = frontier.Dequeue();
        Webpage currentPage = new Webpage(fileNameNumber, url);

        // Skip URLs we have already crawled (Webpage equality is URL-based).
        if (pages.Contains(currentPage))
        {
            continue;
        }

        try
        {
            currentPage.htmlContent = _webClient.DownloadString(currentPage.uri);
            string directory = AppDomain.CurrentDomain.BaseDirectory + @"\docs\doc" + fileNameNumber + ".html";
            _webClient.DownloadFile(url, directory);
        }
        catch (Exception e)
        {
            // Best-effort crawl: log the failure and move on to the next URL.
            Console.WriteLine("Exception: " + e);
            continue;
        }

        List <Uri> pageLinks = ExtractLinks(currentPage.uri);
        currentPage.htmlLinks = pageLinks;
        pages.Add(currentPage);

        Robots robot;
        foreach (Uri link in pageLinks)
        {
            if (!frontier.Contains(link) && !pages.Exists(x => x.uri == link))
            {
                if (!robotTxts.TryGetValue(currentPage.uri.Host, out robot))
                {
                    robot = new Robots("http://" + currentPage.uri.Host + "/robots.txt");
                    // BUG FIX: store the fetched robots.txt in the cache.
                    // Previously the dictionary was never populated, so
                    // robots.txt was re-downloaded for every single link.
                    robotTxts[currentPage.uri.Host] = robot;
                }

                // Corners cut: only follow links while on en.wikipedia.org.
                if (currentPage.uri.Host == "en.wikipedia.org")
                {
                    if (robot.IsPathAllowed("sw701crawlftwplz", link.ToString()))
                    {
                        frontier.Enqueue(link);
                    }
                }
            }
        }

        Console.WriteLine("Pages: " + pages.Count);
        fileNameNumber++;
    }

    return pages;
}
// Find(id) should GET the record, translate the JSON, and wire up the
// Parts subresource on the resulting model.
public void ShouldFetchARecord()
{
    // Expect a GET for robot 89, answered with canned JSON.
    var callRawExpectation = A.CallTo(() => Shopify.CallRaw(HttpMethod.Get, JsonFormatExpectation(), "/admin/robots/89", EmptyQueryParametersExpectation(), null));
    callRawExpectation.Returns(TaskForResult <string>("robot #89's json"));

    // Expect the canned JSON to be translated into a Robot instance.
    var translationExpectation = A.CallTo(() => Shopify.TranslateObject <Robot>("robot", "robot #89's json"));
    var translatedRobot = new Robot { Id = 89 };
    translationExpectation.Returns(translatedRobot);

    var answer = Robots.Find(89);
    answer.Wait();

    Assert.AreSame(answer.Result, translatedRobot);
    // The Parts subresource must be rooted under the fetched robot.
    Assert.AreEqual("/admin/robots/89/parts", answer.Result.Parts.Path());
    callRawExpectation.MustHaveHappened();
    translationExpectation.MustHaveHappened();
}
// Theory body: a single star-agent Disallow rule decides whether the
// given path is allowed; expected outcome arrives as a parameter.
public void IsPathAllowed_StarWildcard(string rule, string path, Boolean result)
{
    string robotsTxt = @"User-agent: *" + nl + "Disallow: " + rule;

    Robots robots = Robots.Load(robotsTxt);

    Assert.Equal(result, robots.IsPathAllowed("*", path));
}
// NOTE(review): "Parmaeters" in the method name is a typo for "Parameters";
// left unchanged here to avoid altering the test's public name.
// Verifies that Where(member, value) folds into the full parameter set.
public void ShouldAmalgamateQueryParmaetersWithWhereByMemberExpressions()
{
    var rrNexus = Robots.Where(r => r.RobotType, "nexus");

    var parameters = rrNexus.FullParameters();

    Assert.AreEqual("nexus", parameters["RobotType"]);
}
// AsListUnpaginated() should GET the collection endpoint, translate the
// JSON into models, and attach a Parts subresource to each one.
public void ShouldFetchAListOfAllMatchedModels()
{
    // Expect a GET against the collection endpoint, answered with canned JSON.
    var callRawExpectation = A.CallTo(() => Shopify.CallRaw(HttpMethod.Get, JsonFormatExpectation(), "/admin/robots", EmptyQueryParametersExpectation(), null));
    callRawExpectation.Returns(TaskForResult <string>("json text!"));

    // Expect the canned JSON to be translated into a one-element list.
    var translationExpectation = A.CallTo(() => Shopify.TranslateObject <List <Robot> >("robots", "json text!"));
    translationExpectation.Returns(new List <Robot>() { new Robot() { Id = 8889 } });

    var answer = Robots.AsListUnpaginated();
    answer.Wait();

    callRawExpectation.MustHaveHappened();
    translationExpectation.MustHaveHappened();
    Assert.AreEqual(1, answer.Result.Count);
    Assert.NotNull(answer.Result[0].Parts);
    Assert.AreEqual("/admin/robots/8889/parts", answer.Result[0].Parts.Path());
}
// Builds a start-job request from the cmdlet parameters and writes one
// Job model per job the API reports as started.
protected override void ProcessRecord()
{
    var startJob = new StartJobParameters
    {
        StartInfo = new StartProcessDto
        {
            ReleaseKey = Process.Key,
            Source = StartProcessDtoSource.Manual,
        }
    };

    // Strategy selection is first-match: -All wins over -RobotCount,
    // which wins over an explicit -Robots list.
    if (All.IsPresent)
    {
        startJob.StartInfo.Strategy = StartProcessDtoStrategy.All;
    }
    else if (RobotCount.HasValue)
    {
        startJob.StartInfo.Strategy = StartProcessDtoStrategy.RobotCount;
        startJob.StartInfo.NoOfRobots = RobotCount.Value;
    }
    else if (Robots != null)
    {
        startJob.StartInfo.Strategy = StartProcessDtoStrategy.Specific;
        startJob.StartInfo.RobotIds = Robots.Cast <long?>().ToList();
    }

    var jobs = HandleHttpOperationException(() => Api.Jobs.StartJobs(startJob));

    foreach (var jobDto in jobs.Value)
    {
        WriteObject(Models.Job.FromDto(jobDto));
    }
}
// A relative Sitemap directive still yields one entry in this parser's
// Sitemaps collection.
public void Robots_WithRelativeSitemapDirective_OnlyHasSitemapValue()
{
    string robotsTxt = "User-agent: *" + nl + "Sitemap: /sitemap.xml";

    Robots robots = new Robots(robotsTxt);

    Assert.Equal(1, robots.Sitemaps.Count);
}
// An absolute Sitemap directive yields exactly one sitemap entry.
public void Robots_WithAbsoluteSitemapDirective_HasSitemapUrlAndValue()
{
    string robotsTxt = "User-agent: *" + nl + "Sitemap: http://foo.com/sitemap.xml";

    Robots robots = new Robots(robotsTxt);

    Assert.Equal(1, robots.Sitemaps.Count);
}
// Count(predicate) over a grouping should translate into a single terms
// facet whose filter carries the predicate as a range criteria.
public void SelectCountPredicateCreatesTermsFacetWithFilter()
{
    var query = Robots.GroupBy(r => r.Zone).Select(g => g.Count(r => r.Cost > 5m));

    var translation = ElasticQueryTranslator.Translate(Mapping, query.Expression);

    var materializer = Assert.IsType<ListTermFacetsElasticMaterializer>(translation.Materializer);
    Assert.Equal(typeof(int), materializer.ElementType);
    Assert.Equal("count", translation.SearchRequest.SearchType);
    Assert.Equal(1, translation.SearchRequest.Facets.Count);

    var facet = Assert.IsType<TermsFacet>(translation.SearchRequest.Facets[0]);
    Assert.Equal(1, facet.Fields.Count);
    Assert.Equal("zone", facet.Fields[0]);

    // The predicate Cost > 5m becomes a "gt" range filter on "cost".
    var filter = Assert.IsType<RangeCriteria>(facet.Filter);
    Assert.Equal("cost", filter.Field);
    Assert.Equal(1, filter.Specifications.Count);
    var specification = filter.Specifications[0];
    Assert.Equal("gt", specification.Name);
    Assert.Equal(5m, specification.Value);
}
// After deserialization, a HasOne placeholder on the model must be
// replaced with a live single-instance subresource.
public void ShouldReplaceInstanceSubResourceForHasOnePlaceholder()
{
    var getRobotExpectation = A.CallTo(() => Shopify.CallRaw(HttpMethod.Get, JsonFormatExpectation(), "/admin/robots/420", EmptyQueryParametersExpectation(), null));
    getRobotExpectation.Returns(TaskForResult <string>("Robot #420's json"));

    // Robot #420 arrives with a has-one placeholder pointing at Brain #56.
    var translationExpectation = A.CallTo(() => Shopify.TranslateObject <Robot>("robot", "Robot #420's json"));
    var translatedRobot = new Robot { Id = 420, Brain = new HasOneDeserializationPlaceholder <Brain>(56) };
    translationExpectation.Returns(translatedRobot);

    var answer = Robots.Find(420);
    answer.Wait();

    getRobotExpectation.MustHaveHappened();
    translationExpectation.MustHaveHappened();
    Assert.IsInstanceOf <SingleInstanceSubResource <Brain> >(answer.Result.Brain);
    Assert.AreEqual(56, answer.Result.Brain.Id);
}
// A Disallow rule with no preceding User-agent applies to no agent,
// so the path stays allowed.
public void IsPathAllowed_RuleWithoutUserAgent_True()
{
    string robotsTxt = "Disallow: /";

    Robots robots = Robots.Load(robotsTxt);

    Assert.True(robots.IsPathAllowed("*", "/foo"));
}
// Runs the base service, then applies this station's effect on the robot
// (costs energy, raises happiness) and records the robot as serviced.
public override void DoService(IRobot robot, int procedureTime)
{
    base.DoService(robot, procedureTime);

    robot.Energy -= 6;
    robot.Happiness += 12;

    Robots.Add(robot);
}
// Where(r => true) matches everything, so the translated request should
// carry no query criteria at all.
public void FilterIsWipedWhenConstantTrue()
{
    var query = Robots.Where(r => true);

    var translation = ElasticQueryTranslator.Translate(Mapping, query.Expression);

    Assert.Null(translation.SearchRequest.Query);
}
// Helper: loads a one-rule robots.txt and reports whether the star
// agent may fetch the given path.
public bool IsPathAllowed_StarWildcard(string rule, string path)
{
    string robotsTxt = @"User-agent: *" + nl + "Disallow: " + rule;

    Robots robots = Robots.Load(robotsTxt);

    return robots.IsPathAllowed("*", path);
}
/** Sitemaps **************************************************************/

// Fetches robots.txt for the given URL (only when the robots protocol is
// enabled in preferences) and returns its sitemap entries as absolute URLs.
public async Task <List <string> > GetSitemapsAsList(string Url)
{
    List <string> SitemapsList = new List <string>();

    if (MacroscopePreferencesManager.GetFollowRobotsProtocol())
    {
        Robots robot = await this.FetchRobot(Url : Url);

        try
        {
            if ((robot != null) && (robot.Sitemaps != null))
            {
                foreach (Sitemap SitemapEntry in robot.Sitemaps)
                {
                    string SitemapUrl = SitemapEntry.Url.ToString();
                    string SitemapUrlAbs = MacroscopeHttpUrlUtils.MakeUrlAbsolute(BaseUrl: Url, Url: SitemapUrl);

                    SitemapsList.Add(SitemapUrlAbs);

                    this.DebugMsg(string.Format("ROBOTS SitemapUrl: {0}", SitemapUrl));
                }
            }
        }
        catch (Exception ex)
        {
            // Best effort: a bad sitemap entry must not abort the scan.
            this.DebugMsg(ex.Message);
        }
    }

    return SitemapsList;
}
// With the couch-style mapping, a constant-true filter still produces an
// exists criteria rather than an empty query.
public void TypeExistsCriteriaIsAppliedWhenFilterIsConstantTrue()
{
    var query = Robots.Where(r => true);

    var translation = ElasticQueryTranslator.Translate(CouchMapping, query.Expression);

    Assert.IsType <ExistsCriteria>(translation.SearchRequest.Query);
}
// A directive appearing before any User-agent line marks the file malformed.
public void Robots_DirectiveWithoutUserAgent_Malformed()
{
    string robotsTxt = "Disallow: /file.html";

    Robots robots = new Robots(robotsTxt);

    Assert.True(robots.Malformed);
}
// String.Contains is only translatable inside .Query(); using it in a
// .Where() after the query transition must throw with a clear message.
public static void WhereContainsFailsAfterQueryTransition()
{
    var query = Robots.Query(r => r.Name.StartsWith("a")).Where(r => r.Name.Contains("b"));

    var ex = Assert.Throws <NotSupportedException>(() => Translate(query));

    Assert.Contains("String.Contains can only be used within .Query()", ex.Message);
}
// An empty User-agent value marks the file malformed; with no valid
// agent group, the rule binds to nobody and the path stays allowed.
public void Robots_InvalidUserAgent_Malformed()
{
    string robotsTxt = "User-agent: " + nl + "Disallow: /file.html";

    Robots robots = new Robots(robotsTxt);

    Assert.True(robots.Malformed);
    Assert.True(robots.IsPathAllowed("myRobot", "/file.html"));
}
public void Robots_WithRelativeSitemapDirective_OnlyHasSitemapValue()
{
    string robotsTxt = "User-agent: *" + nl + "Sitemap: /sitemap.xml";

    Robots robots = new Robots(robotsTxt);

    // A sitemap must be an absolute URL, so a relative value is invalid
    // and must never be added to the collection.
    Assert.AreEqual(0, robots.Sitemaps.Count);
}
// NOTE(review): the method name claims "NotMalformed...DoesntHaveRules",
// but the assertions check Malformed == true AND HasRules == true.
// The name should be corrected (e.g. Robots_InvalidWithRules_MalformedAndHasRules);
// it is left unchanged here to avoid altering the test's public name.
public void Robots_InValidWithRules_NotMalformedAndDoesntHaveRules()
{
    string robotsTxt = "User-agent: *" + nl + "Disallow: /" + nl + "foo";

    Robots robots = new Robots(robotsTxt);

    Assert.True(robots.Malformed);
    Assert.True(robots.HasRules);
}
// NOTE(review): "HAve" in the method name is a typo for "Have"; left
// unchanged to avoid altering the test's public name.
// An unknown directive marks the file malformed and contributes no rules.
public void Robots_InvalidWithoutRules_MalformedAndDoesntHAveRules()
{
    string robotsTxt = "User-agent: *" + nl + "foo: bar";

    Robots robots = new Robots(robotsTxt);

    Assert.True(robots.Malformed);
    Assert.False(robots.HasRules);
}
// Whitespace-only lines between directives must be stripped during
// parsing: the blank line here must neither mark the file malformed
// nor break the user-agent/rule grouping.
public void Robots_WhiteSpaceOnlyLines_StripsOutWhiteSpaceOnlyLines() { string s = "User-agent: *" + nl + "    " + nl + "Disallow: /"; Robots r = new Robots(s); Assert.False(r.Malformed); Assert.True(r.HasRules); }
// A lone User-agent line is valid but contributes no rules.
public void Robots_ValidWithoutRules_NotMalformedAndDoesntHaveRules()
{
    string robotsTxt = "User-agent: *";

    Robots robots = new Robots(robotsTxt);

    Assert.False(robots.Malformed);
    Assert.False(robots.HasRules);
}
// The default Allow-rule strategy must be MoreSpecific.
public void Robots_DefaultAllowDirectiveImplementation_MoreSpecific()
{
    Robots robots = new Robots(String.Empty);

    AllowRuleImplementation actual = robots.AllowRuleImplementation;

    Assert.AreEqual(AllowRuleImplementation.MoreSpecific, actual);
}
// Diamond-shaped graph (0 -> 1,2 -> 3): robots placed at 0, 1 and 3
// are expected to survive.
public void SquareTest2()
{
    var graph = new Graph(4);
    graph.AddNeighbours(0, new List<int> { 1, 2 });
    graph.AddNeighbours(1, new List<int> { 3 });
    graph.AddNeighbours(2, new List<int> { 3 });

    var robots = new Robots(graph, new[] { 0, 1, 3 });

    Assert.IsFalse(robots.WillBeDestroyed());
}
// A single robot has nothing to collide with and must survive.
public void SingleRobotTest()
{
    var graph = new Graph(3);
    graph.AddNeighbours(0, new List<int> { 1, 2 });
    graph.AddNeighbours(1, new List<int> { 2 });
    graph.AddNeighbours(2, new List<int>());

    var robots = new Robots(graph, new[] { 0 });

    Assert.IsFalse(robots.WillBeDestroyed());
}
// On a 0 -> 1 -> 2 chain the outcome depends on the starting positions:
// {1, 2} survives while {2, 0} is destroyed.
public void WillBeDestroyedTest2()
{
    var graph = new Graph(3);
    graph.AddNeighbours(0, new List<int> { 1 });
    graph.AddNeighbours(1, new List<int> { 2 });

    var robots = new Robots(graph, new[] { 1, 2 });
    Assert.IsFalse(robots.WillBeDestroyed());

    var robots2 = new Robots(graph, new[] { 2, 0 });
    Assert.IsTrue(robots2.WillBeDestroyed());
}
// Robots starting at {0, 2} are destroyed regardless of the order in
// which the start vertices are listed.
public void WillBeDestroyedTest3()
{
    var graph = new Graph(4);
    graph.AddNeighbours(0, new List<int> { 1, 3 });
    graph.AddNeighbours(1, new List<int> { 3 });
    graph.AddNeighbours(2, new List<int> { 3 });

    var robots = new Robots(graph, new[] { 0, 2 });
    Assert.IsTrue(robots.WillBeDestroyed());

    var robots2 = new Robots(graph, new[] { 2, 0 });
    Assert.IsTrue(robots2.WillBeDestroyed());
}
// On a larger graph, {1, 4} is destroyed while {1, 5} survives.
public void WillBeDestroyedTest()
{
    var graph = new Graph(6);
    graph.AddNeighbours(0, new List<int> { 1 });
    graph.AddNeighbours(1, new List<int> { 2, 5, 0 });
    graph.AddNeighbours(2, new List<int> { 1 });
    graph.AddNeighbours(3, new List<int> { 4 });
    graph.AddNeighbours(4, new List<int> { 3, 5 });
    graph.AddNeighbours(5, new List<int> { 1, 4 });

    var robots = new Robots(graph, new[] { 1, 4 });
    Assert.IsTrue(robots.WillBeDestroyed());

    var robots2 = new Robots(graph, new[] { 1, 5 });
    Assert.IsFalse(robots2.WillBeDestroyed());
}
// Private constructor: seeds the backing Robots instance with the
// given base URI (instances are presumably created via a factory
// method elsewhere in this class — not visible here).
private RobotsBuilder(Uri baseUri)
{
    _robots = new Robots { BaseUri = baseUri };
}
// POST: inserts the posted robot and returns the refreshed list via Get.
public object Post(Robots request)
{
    using (var connection = DbFactory.OpenDbConnection())
    {
        connection.Insert(request.TranslateTo<Robot>());
        return Get(new Robots());
    }
}
// GET: optionally deletes a robot (when a Delete id is supplied), then
// returns the total row count plus either the single matching robot or
// the full list.
public object Get(Robots request)
{
    // BUG FIX: the connection was previously never disposed (no using),
    // unlike the sibling Post handler — a connection leak per request.
    using (var db = DbFactory.OpenDbConnection())
    {
        if (request.Delete.IsInt())
        {
            db.DeleteById<Robot>(request.Delete.ToInt());
        }

        return new RobotsResponse
        {
            Total = db.GetScalar<int>("select count(*) from Robot"),
            Results = request.Id != default(int)
                ? db.Select<Robot>(q => q.Id == request.Id)
                : db.Select<Robot>()
        };
    }
}