/// <summary>
/// Verifies that Query issues an HTTP GET for the base query URL plus the
/// caller-supplied parameters, using the configured user agent.
/// </summary>
public async Task ExecutesQueriesViaHttpClient()
{
    var httpClient = new Mock<IHttpClient>(MockBehavior.Strict);
    var client = new WikimediaClient(httpClient.Object, "Foo", 5);

    // A strict mock throws on any call without a matching setup, so the
    // expected GET must be configured before Query runs.
    httpClient
        .Setup(s => s.Get(client.BaseQueryUrl + "&foo=bar", "Foo"))
        .ReturnsAsync("{'query':{}}");

    // Await the query so any failure inside the async call surfaces in this
    // test instead of being lost on an unobserved task.
    await client.Query("foo=bar");

    httpClient.Verify(s => s.Get(client.BaseQueryUrl + "&foo=bar", "Foo"));
}
/// <summary>
/// Continuously downloads random Wikipedia articles into the "Articles"
/// collection, keeping a fixed number of download tasks in flight until the
/// user interrupts the process with Ctrl+C.
/// </summary>
/// <param name="db">Database holding the "Articles" collection.</param>
static async Task MainAsync(IMongoDatabase db)
{
    var client = new WikimediaClient(USER_CLIENT, MAX_LAG);
    client.LaggedResponse += (sender, response) =>
        Console.WriteLine($"Response was lagged {response.Error.SecondsLagged} seconds. Waiting to issue next query.");

    var articles = db.GetCollection<BsonDocument>("Articles");
    await articles.Indexes.CreateOneAsync("{PageId: 1}");

    var start = await articles.CountAsync("{}");
    Console.WriteLine($"Starting download. {start:#,###,##0} articles currently into the database. Press Ctrl+C to interrupt.");

    // Keep the same number of download tasks running at the same time.
    const int maxTasks = 50;
    var queue = new List<Task>(maxTasks);
    for (int i = 0; i < maxTasks; i++)
    {
        queue.Add(InsertRandomDocumentsIntoDatabase(client, articles));
    }

    int completedTasks = 0;
    var sw = Stopwatch.StartNew();
    while (true)
    {
        // Keep replacing finished tasks with new ones until the process is
        // interrupted by the user.
        var finishedTask = await Task.WhenAny(queue);
        queue.Remove(finishedTask);

        // Observe the task's outcome: a failed batch is reported rather than
        // silently discarded as an unobserved exception.
        if (finishedTask.IsFaulted)
        {
            Console.WriteLine($"A download task failed: {finishedTask.Exception?.GetBaseException().Message}");
        }

        queue.Add(InsertRandomDocumentsIntoDatabase(client, articles));
        completedTasks++;

        if (completedTasks % 100 == 0)
        {
            var total = await articles.CountAsync("{}");
            var soFar = total - start;
            // TotalSeconds is already a double, so this division does not truncate.
            var speed = soFar / sw.Elapsed.TotalSeconds;
            Console.WriteLine($"Total of {total:#,###,##0} articles inserted into the database ({speed:0.00} articles/sec). Press Ctrl+C to interrupt.");
        }
    }
}
/// <summary>
/// Fetches one batch of random articles and upserts each page into the
/// collection, keyed by its Wikipedia page id.
/// </summary>
/// <param name="client">Client used to query the Wikimedia API.</param>
/// <param name="collection">Target collection for the article documents.</param>
static async Task InsertRandomDocumentsIntoDatabase(WikimediaClient client, IMongoCollection<BsonDocument> collection)
{
    var response = await client.GetMaxRandomArticles();

    // The same upsert configuration applies to every replace, so allocate it
    // once instead of per page inside the lambda.
    var upsertOptions = new UpdateOptions { IsUpsert = true };

    var updates = response.Pages.Values.Select(page =>
        collection.ReplaceOneAsync(
            Builders<BsonDocument>.Filter.Eq(d => d["PageId"], page.PageId),
            DocumentFromPage(page),
            upsertOptions));

    await Task.WhenAll(updates);
}
/// <summary>
/// Integration test: issues a real request to the Wikimedia API and expects
/// at least one page in the response.
/// </summary>
public async Task GetsRandomArticles()
{
    var wikimedia = new WikimediaClient(USER_CLIENT, 5);

    var result = await wikimedia.GetMaxRandomArticles();

    Assert.IsNotEmpty(result.Pages);
}
/// <summary>
/// A payload containing a "pages" object yields a response whose Pages
/// collection is populated (non-null).
/// </summary>
public async Task ReturnsQueryResponsesWithPages()
{
    var http = new Mock<IHttpClient>(MockBehavior.Strict);
    var wikimedia = new WikimediaClient(http.Object, "Foo", 5);
    var expectedUrl = wikimedia.BaseQueryUrl + "&foo=bar";
    http.Setup(c => c.Get(expectedUrl, "Foo")).ReturnsAsync("{'query':{'pages':{}}}");

    var result = await wikimedia.Query("foo=bar");

    Assert.NotNull(result.Pages);
}
/// <summary>
/// BaseQueryUrl combines the base URL (with maxlag) and the fixed query
/// parameters for article extraction.
/// </summary>
public void HasBaseQueryUrl()
{
    var wikimedia = new WikimediaClient(Mock.Of<IHttpClient>(), "foo", 52);

    const string expected =
        "https://en.wikipedia.org/w/api.php?format=json&maxlag=52&action=query&prop=extracts|info&inprop=url&exlimit=max&exintro=1";
    Assert.AreEqual(expected, wikimedia.BaseQueryUrl);
}
/// <summary>
/// When no IHttpClient is passed, the client falls back to the shared
/// default instance.
/// </summary>
public void UsesDefaultClientWhenNotSupplied()
{
    var wikimedia = new WikimediaClient("foo", 2);

    Assert.AreSame(HttpClient.Default, wikimedia.HttpClient);
}
/// <summary>
/// The maxlag value passed to the constructor is exposed via MaxLag.
/// </summary>
public void HasMaxLag()
{
    var wikimedia = new WikimediaClient(Mock.Of<IHttpClient>(), "foo", 5);

    Assert.AreEqual(5, wikimedia.MaxLag);
}
/// <summary>
/// BaseUrl points at the English Wikipedia API with JSON format and the
/// configured maxlag baked into the query string.
/// </summary>
public void HasBaseUrl()
{
    var wikimedia = new WikimediaClient(Mock.Of<IHttpClient>(), "foo", 25);

    const string expected = "https://en.wikipedia.org/w/api.php?format=json&maxlag=25";
    Assert.AreEqual(expected, wikimedia.BaseUrl);
}
/// <summary>
/// The user-agent string passed to the constructor is exposed via UserAgent.
/// </summary>
public void HasUserAgent()
{
    var wikimedia = new WikimediaClient(Mock.Of<IHttpClient>(), "foo", 5);

    Assert.AreEqual("foo", wikimedia.UserAgent);
}
/// <summary>
/// The IHttpClient passed to the constructor is the exact instance exposed
/// via HttpClient.
/// </summary>
public void HasHttpClient()
{
    var injected = Mock.Of<IHttpClient>();
    var wikimedia = new WikimediaClient(injected, "foo", 5);

    Assert.AreSame(injected, wikimedia.HttpClient);
}
/// <summary>
/// The client keeps reissuing a query while the API reports a maxlag error,
/// and returns the first successful response.
/// </summary>
public async Task RetriesHttpQueryWhileHasMaxLagError()
{
    var httpClient = new Mock<IHttpClient>(MockBehavior.Strict);
    // minimumDelaySeconds: 0 avoids real waits between retries during tests.
    var client = new WikimediaClient(httpClient.Object, "FooAgent", 5, minimumDelaySeconds: 0);
    var errorResponse = "{'error':{'code':'maxlag','info':'0 seconds lagged'}}";
    var successResponse = "{'query':{}}";

    // Two maxlag errors followed by a success: the client should retry until
    // the third response. ReturnsAsync matches the style used by the other
    // mock setups in this file.
    httpClient
        .SetupSequence(c => c.Get(client.BaseQueryUrl + "&foo=bar", "FooAgent"))
        .ReturnsAsync(errorResponse)
        .ReturnsAsync(errorResponse)
        .ReturnsAsync(successResponse);

    var response = await client.Query("foo=bar");

    Assert.NotNull(response);
}
/// <summary>
/// GetMaxRandomArticles hits the random-article generator endpoint with the
/// maximum page limit.
/// </summary>
public async Task QueriesMaxNumberOfRandomArticles()
{
    var http = new Mock<IHttpClient>(MockBehavior.Strict);
    var wikimedia = new WikimediaClient(http.Object, "Foo", 5);
    var expectedUrl = wikimedia.BaseQueryUrl + "&generator=random&grnnamespace=0&grnlimit=max";
    http.Setup(c => c.Get(expectedUrl, "Foo")).ReturnsAsync("{'query':{}}");

    var result = await wikimedia.GetMaxRandomArticles();

    Assert.NotNull(result);
}
/// <summary>
/// Page fields in the JSON payload (here, the title) are deserialized into
/// the corresponding page objects of the response.
/// </summary>
public async Task ReturnsQueryResponsesWithPageData()
{
    var http = new Mock<IHttpClient>(MockBehavior.Strict);
    var wikimedia = new WikimediaClient(http.Object, "Foo", 5);
    var expectedUrl = wikimedia.BaseQueryUrl + "&foo=bar";
    http.Setup(c => c.Get(expectedUrl, "Foo")).ReturnsAsync("{'query':{'pages':{'a':{'title':'Hello world'}}}}");

    var result = await wikimedia.Query("foo=bar");

    Assert.AreEqual("Hello world", result.Pages["a"].Title);
}