// Entry point: start the crawl controller, then block until a key press so the
// console window stays open while the crawl runs.
static void Main(string[] args)
{
    var controller = new CrawlController();
    controller.Start();
    Console.ReadKey();
}
// Constructs a crawl rooted at `position`, registered with the given controller.
// Each crawl claims a unique id from the shared CrawlNumber counter and seeds its
// own RNG from that counter, making per-crawl randomness reproducible.
public Crawl(CrawlController crawlController, int maxGrowth, Vector2 position, float growthMultiplier, float growthDivisor) {
    this.crawlController = crawlController;
    // Claim the current counter value as this crawl's id, then bump it for the next crawl.
    number = CrawlNumber;
    CrawlNumber++;
    GrowthList = new List <Vector2>();
    PixelList = new List <Vector2>();
    CrawlLocations = new HashSet <Vector2>();
    // NOTE(review): seeded AFTER the increment, so the seed is number + 1, not this
    // crawl's own id — confirm that offset is intentional.
    Random = new System.Random(CrawlNumber);
    this.maxGrowth = maxGrowth;
    //GrowByPosition(position);
    setupTestVariables();
    this.growthDivisor = growthDivisor;
    // Perform the initial growth step at the spawn position.
    GrowByPosition(position);
    //PixelList.Add(position);
    //crawlController.SetPixel((int)position.x, (int)position.y, Color.green);
    OriginPoint = position;
    growthMultiplierTest = growthMultiplier;
}
// Happy path: Post should wrap the spider's pages in a completed SiteMap and return 200 OK.
public async Task GetTest()
{
    // Arrange
    const int reqTimeout = 20;
    const string reqDomain = "https://sub.domain.com";
    var reqUrl = $"{reqDomain}/firstpage";
    var expectedPages = new[]
    {
        new PageDto($"{reqUrl}/about", new ConcurrentDictionary<string, short>()),
        new PageDto(reqUrl, new ConcurrentDictionary<string, short>())
    };
    spiderManagerMock
        .Setup(spider => spider.CreateSiteMap(reqUrl, It.IsAny<CancellationToken>()))
        .ReturnsAsync(expectedPages);
    var controller = new CrawlController(new NullLogger<CrawlController>(), spiderManagerMock.Object);

    // Act
    var result = (await controller.Post(reqUrl, reqTimeout)).Result as OkObjectResult;

    // Assert
    Assert.IsNotNull(result);
    var dto = result.Value as SiteMap;
    // FIX: assert the cast succeeded before dereferencing — a wrong payload type
    // previously surfaced as a NullReferenceException instead of a clear assert failure.
    Assert.IsNotNull(dto);
    Assert.AreEqual(reqUrl, dto.StartUrl);
    Assert.AreEqual(reqDomain, dto.RequestedDomain);
    Assert.IsTrue(dto.Completed);
    Assert.AreEqual(expectedPages, dto.Pages);
}
// An invalid/empty URL must short-circuit Post into a 400 Bad Request.
public async Task GetTestBadRequest(string reqUrl)
{
    // Arrange
    const int timeoutSeconds = 20;
    var sut = new CrawlController(new NullLogger<CrawlController>(), spiderManagerMock.Object);

    // Act
    var response = await sut.Post(reqUrl, timeoutSeconds);

    // Assert
    Assert.That(response.Result is BadRequestResult);
}
// Start is called before the first frame update.
// Initializes crawl bookkeeping, wires up callbacks, and schedules the
// repeating crawl-update tick every 0.1 s starting immediately.
void Start()
{
    crawls = new List<Crawl>();
    // FIX: nameof instead of a magic string — the callback reference now
    // survives a rename of updateCrawlObjects (same string value at runtime).
    InvokeRepeating(nameof(updateCrawlObjects), 0f, 0.1f);
    instance = this;
    BurntSpaces = new HashSet<Vector2>();
    initializedCallbacks();
    otherColor = new Color(0.165f, 0.784f, 0.263f, 1f);
    elementController = GetComponent<ElementController>();
}
// Post must run the collector over the submitted URL and persist the resulting
// word metrics exactly once each.
public void GivenPostIsCalledThenCollectAndSaveAreCalled()
{
    // Arrange
    var url = "https://www.bbc.co.uk/news/uk-politics-45621354";
    var collector = new Mock<ICollector>();
    collector.Setup(c => c.Collect(url)).Returns(new List<WordMetric>());
    var repository = new Mock<IWordRepository>();
    var controller = new CrawlController(collector.Object, repository.Object);

    // Act
    controller.Post(new FormData { url = url });

    // Assert
    collector.Verify(c => c.Collect(url), Times.Once);
    repository.Verify(r => r.Save(It.IsAny<List<WordMetric>>()), Times.Once);
}
/// <summary>
/// Builds the set of integer grid points inside a circle of radius <paramref name="r"/>
/// centered on <paramref name="startOrigin"/> (snapped to the grid), asks the controller
/// to crawl each unique point, and returns (all points, number of successful crawls,
/// the deduplicated point list in visit order).
/// </summary>
public static (HashSet <Vector2>, int, List <Vector2>) MakeCircleHashSet(Vector2 startOrigin, int r, CrawlController crawlController)
{
    HashSet<Vector2> castingList = new HashSet<Vector2>();
    Vector2 origin = startOrigin;
    origin.x = (int)origin.x; // snap the center onto the integer grid
    origin.y = (int)origin.y;
    List<Vector2> tmpList = new List<Vector2>();
    List<Vector2> list = new List<Vector2>();
    float rSquared = r * r; // squared radius avoids a square root per point

    // Collect first-quadrant offsets inside the circle.
    // FIX: x now starts at 0 so the vertical axis points (0, ±y) are generated —
    // previously the loop began at x = 1 and the circle was missing its vertical
    // diameter (every point directly above/below the origin).
    for (int x = 0; x <= r; x++)
    {
        // (0,0) is the origin itself, which is added separately below.
        for (int y = (x == 0 ? 1 : 0); y <= r; y++)
        {
            Vector2 v = new Vector2(x, y);
            if (Vector2.SqrMagnitude(v) <= rSquared)
            {
                tmpList.Add(v);
            }
            else
            {
                break; // y only grows, so the rest of this column is outside the circle
            }
        }
    }

    list.Add(origin);
    // Mirror each first-quadrant offset into the other three quadrants.
    // Axis points produce duplicates here; they are filtered out below.
    foreach (Vector2 v in tmpList)
    {
        Vector2 vMirr = new Vector2((int)v.x, (int)-1 * v.y);
        list.Add(origin + v);
        list.Add(origin + new Vector2((int)v.x * -1, (int)v.y * -1));
        list.Add(origin + vMirr);
        list.Add(origin + new Vector2((int)vMirr.x * -1, (int)vMirr.y * -1));
    }

    int totalGrowth = 0;
    List<Vector2> consumeList = new List<Vector2>();
    // Visit each unique point once; count how many locations accept a crawl.
    foreach (Vector2 location in list)
    {
        if (!castingList.Contains(location))
        {
            consumeList.Add(location);
            castingList.Add(location);
            if (crawlController.CrawlHere(location))
            {
                totalGrowth += 1;
            }
        }
    }
    return (castingList, totalGrowth, consumeList);
}