public async Task GScraperAvailableTest(string query, int limit, bool safeSearch)
{
    // Arrange
    var scraper = new GoogleScraper();

    // Act
    var results = await scraper.GetImagesAsync(query, limit, safeSearch);

    // Assert
    Assert.NotEmpty(results);
}
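Because this test takes parameters, it would normally be declared as an xUnit [Theory] with inline data rather than a plain [Fact]. A minimal sketch of that wiring, assuming xUnit is the framework in use (the Assert calls suggest it) and using placeholder data values that are not taken from the original test:

// Sketch only: illustrative [Theory] wiring, not the project's actual test data.
// Assumes: using Xunit; and the GoogleScraper type from GScraper.
[Theory]
[InlineData("kant", 20, false)]
[InlineData("kant", 20, true)]
public async Task GScraperAvailableTest(string query, int limit, bool safeSearch)
{
    var scraper = new GoogleScraper();
    var results = await scraper.GetImagesAsync(query, limit, safeSearch);
    Assert.NotEmpty(results);
}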
public void CountOccurenceOfUrl_ReturnsExpectedCount_WhenUrlInHtml()
{
    // Arrange
    var htmlContent = TestHelper.GetEmbeddedResource("TestFiles.kant.html");
    var url = "en.wikipedia.org";

    // Act
    var result = GoogleScraper.CountOccurenceOfUrl(htmlContent, url);

    // Assert
    Assert.Equal(1, result);
}
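TestHelper.GetEmbeddedResource is a project helper that is not shown in this section. A minimal sketch of what such a helper typically looks like, assuming the HTML fixture is compiled into the test assembly as an embedded resource and looked up by name suffix:

using System.IO;
using System.Linq;
using System.Reflection;

internal static class TestHelper
{
    // Sketch only: the real helper may resolve resources differently.
    // Finds the embedded resource whose manifest name ends with the given suffix
    // (e.g. "TestFiles.kant.html") and returns its contents as a string.
    public static string GetEmbeddedResource(string nameSuffix)
    {
        var assembly = Assembly.GetExecutingAssembly();
        var resourceName = assembly.GetManifestResourceNames()
            .First(name => name.EndsWith(nameSuffix));

        using var stream = assembly.GetManifestResourceStream(resourceName);
        using var reader = new StreamReader(stream);
        return reader.ReadToEnd();
    }
}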
public void CountOccurenceOfUrl_ReturnsZeroCount_WhenUrlNotInHtml()
{
    // Arrange
    var htmlContent = "dummy_html_with_no_url";
    var url = "en.wikipedia.org";

    // Act
    var result = GoogleScraper.CountOccurenceOfUrl(htmlContent, url);

    // Assert
    Assert.Equal(0, result);
}
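The CountOccurenceOfUrl implementation itself is not part of this section; the two tests above only pin down its contract (the expected count when the URL appears in the HTML, zero when it does not). A minimal sketch of a substring-based counter that fits that contract, purely illustrative since the real method may match differently (for example, only inside result links):

using System;

// Hypothetical implementation, not the library's actual code.
public static int CountOccurenceOfUrl(string htmlContent, string url)
{
    // Count non-overlapping occurrences of the URL in the raw HTML.
    int count = 0;
    int index = 0;

    while ((index = htmlContent.IndexOf(url, index, StringComparison.OrdinalIgnoreCase)) >= 0)
    {
        count++;
        index += url.Length;
    }

    return count;
}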
private static async Task Main() { Console.WriteLine("GScraper Example Program"); var scraper = new GoogleScraper(); while (true) { Console.Write("Query (enter \'e\' to exit): "); string text = Console.ReadLine(); if (text == null || text == "e") { break; } Console.Write("Limit?: "); if (!int.TryParse(Console.ReadLine(), NumberStyles.Integer, CultureInfo.InvariantCulture, out int limit)) { continue; } IReadOnlyList <ImageResult> images; try { images = await scraper.GetImagesAsync(text, limit).ConfigureAwait(false); } catch (HttpRequestException e) { Console.WriteLine(e); continue; } catch (GScraperException e) { Console.WriteLine(e); continue; } foreach (var image in images) { Console.WriteLine($"Title: {image.Title}"); Console.WriteLine($"Link: {image.Link}"); Console.WriteLine($"ThumbnailLink: {image.ThumbnailLink}"); Console.WriteLine($"ContextLink: {image.ContextLink}"); Console.WriteLine($"DisplayLink: {image.DisplayLink}"); Console.WriteLine($"Width: {image.Width}"); Console.WriteLine($"Height: {image.Height}"); Console.WriteLine(); } } scraper.Dispose(); }
public static async Task Download(string query, string aspectRatio)
{
    var scraper = new GoogleScraper();
    IReadOnlyList<ImageResult> images = await scraper.GetImagesAsync(query + " music wallpaper " + aspectRatio, 1).ConfigureAwait(false);
    Console.WriteLine($"Link: {images[0].Link}");
    scraper.Dispose();

    Directory.CreateDirectory("downloads");
    Directory.CreateDirectory("downloads\\" + query);

    using (WebClient webClient = new WebClient())
    {
        webClient.DownloadFile(images[0].Link, "downloads\\" + query + "\\image.jpg");
    }

    Console.WriteLine("Downloaded");
}
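WebClient is obsolete on current .NET, so the download step above produces warnings on newer target frameworks. A minimal sketch of the same download using HttpClient instead, assuming the Link returned by GetImagesAsync is a direct URL to the image file and that a modern .NET target with File.WriteAllBytesAsync is available (the method name and parameters here are hypothetical):

using System;
using System.IO;
using System.Net.Http;
using System.Threading.Tasks;

// Hypothetical replacement for the WebClient-based download above.
public static async Task DownloadWithHttpClientAsync(string imageUrl, string query)
{
    // Builds the same "downloads/<query>/image.jpg" layout as the WebClient version,
    // but uses Path.Combine so it also works with non-Windows path separators.
    string directory = Path.Combine("downloads", query);
    Directory.CreateDirectory(directory);

    using var httpClient = new HttpClient();
    byte[] bytes = await httpClient.GetByteArrayAsync(imageUrl).ConfigureAwait(false);
    await File.WriteAllBytesAsync(Path.Combine(directory, "image.jpg"), bytes).ConfigureAwait(false);

    Console.WriteLine("Downloaded");
}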
private static async Task Main() { Console.WriteLine("GScraper Example Program"); using var scraper = new GoogleScraper(); // Other scrapers: // using var scraper = new GScraper.DuckDuckGo.DuckDuckGoScraper(); // using var scraper = new GScraper.Brave.BraveScraper(); while (true) { Console.Write("Query (enter 'e' to exit): "); string text = Console.ReadLine(); if (string.IsNullOrEmpty(text)) { continue; } if (text == "e") { break; } IEnumerable <IImageResult> images; try { images = await scraper.GetImagesAsync(text); } catch (Exception e) when(e is HttpRequestException or GScraperException) { Console.WriteLine(e); continue; } bool enumerateAll = false; bool stop = false; foreach (var image in images) { Console.WriteLine(); Console.WriteLine(JsonSerializer.Serialize(image, image.GetType(), new JsonSerializerOptions { WriteIndented = true })); Console.WriteLine(); if (!enumerateAll) { Console.Write("Press 'n' to send the next image, 'a' to enumerate all images and 's' to stop: "); var key = Console.ReadKey().Key; Console.WriteLine(); switch (key) { case ConsoleKey.A: enumerateAll = true; break; case ConsoleKey.S: stop = true; break; default: break; } } if (stop) { break; } } } } }