public void RunAsync(IpcRequest request, Action<IpcResponse> callback)
{
    CreateServerProcessAsync().ContinueWith(t =>
    {
        // Register callback so that we can reply with error if needed
        _callbacks.Add(request, callback);

        if (t.Exception != null)
        {
            // Skip the "AggregateException"
            var error = t.Exception.InnerExceptions.Count == 1
                ? t.Exception.InnerExceptions[0]
                : t.Exception;

            // Reply error to callback (this will also fire general "server is down" event)
            HandleSendRequestError(request, error);
        }
        else
        {
            // The queue is guaranteed to be started at this point, so enqueue the request
            // so it is sent to the server
            _requestQueue.Enqueue(request);
        }
    },
    CancellationToken.None,
    TaskContinuationOptions.ExecuteSynchronously,
    // Make sure to run on thread pool even if called from a UI thread
    TaskScheduler.Default);
}
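A caller-side sketch of how this RunAsync might be used, assuming an instance of the class above (here called client) and hypothetical IpcRequest/IpcResponse shapes; the Method, Error, and Payload members are illustrative assumptions, not part of the snippet above.

// Hypothetical caller; the IpcRequest/IpcResponse members used here are assumptions.
var request = new IpcRequest { Method = "Ping" };
client.RunAsync(request, response =>
{
    if (response.Error != null)
        Console.Error.WriteLine($"IPC call failed: {response.Error}");
    else
        Console.WriteLine($"IPC reply: {response.Payload}");
});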
public void RunAsync(IpcRequest request, Action<IpcResponse> callback)
{
    CreateServerProcess();

    // Order is important below to avoid race conditions!
    _callbacks.Add(request, callback);
    _requestQueue.Enqueue(request);
}
private Guid EnqueueRequest(string methodName, OfflineBehavior behavior, params object[] arguments)
{
    Request request = CreateRequest(methodName, behavior, arguments);
    requestQueue.Enqueue(request);
    return request.RequestId;
}
private void EnqueueNewRequest(IRequestQueue requestQueue, string endpoint, string tag, int stamps)
{
    Request request = new Request();
    request.Endpoint = endpoint;
    request.Behavior.Tag = tag;
    request.Behavior.Stamps = stamps;

    requestQueue.Enqueue(request);
}
private async Task RqTestDefaultWorkflow(IRequestQueue rq, long crawlId)
{
    TestContext.WriteLine($"CrawlId = {crawlId}");

    var sw = new Stopwatch();
    sw.Start();

    // Enqueue 3 URLs
    await rq.EnqueueAsync(crawlId, new[] { "j1", "j2", "j3" });

    // Dequeue and finish 1
    await foreach (var d in rq.DequeueAsync(crawlId, 1, DateTimeOffset.UtcNow.AddSeconds(10)))
    {
        TestContext.WriteLine($"[{crawlId}] Processing... {d}");
        await Task.Delay(200);
        await rq.DeleteAsync(crawlId, d);
    }

    // Dequeue 1 and fail to finish in time
    await foreach (var d in rq.DequeueAsync(crawlId, 1, DateTimeOffset.UtcNow.AddSeconds(-1)))
    {
        TestContext.WriteLine($"[{crawlId}] Failing... {d}");
        await Task.Delay(200);
    }

    // Enqueue 2 more
    rq.Enqueue(crawlId, new[] { "j4", "j5" });

    // Dequeue up to 10
    var d1 = rq.Dequeue(crawlId, 10, DateTimeOffset.UtcNow.AddSeconds(10));
    foreach (var d in d1)
    {
        TestContext.WriteLine($"[{crawlId}] Processing... {d}");
    }
    rq.Delete(crawlId, d1);

    // That should have been 3 from the queue and 1 from the failed jobs
    Assert.AreEqual(4, d1.Count);

    // Queue should be empty
    var d2 = rq.Dequeue(crawlId, 10, DateTimeOffset.UtcNow.AddSeconds(10));
    Assert.AreEqual(0, d2.Count, message: $"[{crawlId}] Unexpected: {string.Join(",", d2)}");

    sw.Stop();
    TestContext.WriteLine($"[{crawlId}] ElapsedMilliseconds: {sw.ElapsedMilliseconds}");
}
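For readability, here is a minimal sketch of the IRequestQueue surface the test above exercises. The member signatures are inferred from the calls in the test and may not match the real interface.

using System;
using System.Collections.Generic;
using System.Threading.Tasks;

// Inferred sketch only; parameter and return types are assumptions based on the test above.
public interface IRequestQueue
{
    Task EnqueueAsync(long crawlId, IEnumerable<string> items);
    void Enqueue(long crawlId, IEnumerable<string> items);

    // Dequeued items stay invisible until the lease expires, then become available again.
    IAsyncEnumerable<string> DequeueAsync(long crawlId, int maxCount, DateTimeOffset leaseUntil);
    IReadOnlyCollection<string> Dequeue(long crawlId, int maxCount, DateTimeOffset leaseUntil);

    Task DeleteAsync(long crawlId, string item);
    void Delete(long crawlId, IReadOnlyCollection<string> items);
}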
public bool Push(Request request)
{
    if (request == null)
    {
        return false;
    }

    // Drop requests the duplicate filter has already seen, unless filtering is disabled for this request
    if (!request.DontFilter)
    {
        if (_dupeFilter.Seen(request))
        {
            return false;
        }
    }

    _cacheQueue.Enqueue(request);
    return true;
}
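The _dupeFilter.Seen call above implies a request de-duplication filter. A minimal hash-based sketch follows; using Request.Url as the key is an assumption for illustration, not the filter's actual implementation.

using System.Collections.Concurrent;

// Minimal duplicate-filter sketch; keying on Request.Url is an assumption.
public sealed class HashDupeFilter
{
    private readonly ConcurrentDictionary<string, byte> _seen = new();

    // Returns true if the request was already seen; otherwise records it and returns false.
    public bool Seen(Request request) => !_seen.TryAdd(request.Url, 0);
}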
private async Task<IActionResult> ProcessRequest(RequestQueueItem.ApiEndpoint endpoint, IActionResult result)
{
    var queueItem = new RequestQueueItem(Request, endpoint);

    var enqueueResponse = _requestQueue.Enqueue(queueItem);
    if (enqueueResponse != EnqueueResponse.Ok)
    {
        var response = StatusCode(TooManyRequestHttpCode, $"Too many requests. {enqueueResponse} occurred.");
        LogMessage(LogLevel.Error, response);
        return response;
    }

    await Task.Delay(_random.Next(500, 2000));
    LogMessage(LogLevel.Information, result);

    _requestQueue.Dequeu();
    return result;
}
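A hypothetical controller action built on ProcessRequest might look like the following; the route, the ApiEndpoint value, and the Ok() payload are illustrative assumptions.

// Hypothetical action; endpoint name and payload are assumptions for illustration.
[HttpGet("status")]
public Task<IActionResult> GetStatus() =>
    ProcessRequest(RequestQueueItem.ApiEndpoint.Status, Ok(new { healthy = true }));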
public void RunAsync(IpcRequest request, Action<IpcResponse> callback)
{
    CreateServerProcessAsync().ContinueWith(t =>
    {
        // Register callback so that we can reply with error if needed
        _callbacks.Add(request, callback);

        if (t.Exception != null)
        {
            // Skip the "AggregateException"
            var error = t.Exception.InnerExceptions.Count == 1
                ? t.Exception.InnerExceptions[0]
                : t.Exception;

            // Reply error to callback (this will also fire general "server is down" event)
            HandleSendRequestError(request, error);
        }
        else
        {
            // The queue is guaranteed to be started at this point, so enqueue the request
            // so it is sent to the server
            _requestQueue.Enqueue(request);
        }
    });
}