/// <summary>
/// Records a failed job and either re-queues it for another attempt or,
/// once the retry budget is exhausted, removes it and raises DownloaderBadJob.
/// </summary>
/// <param name="job">The job that failed to download.</param>
/// <param name="threadIndex">Index of the downloader thread reporting the failure.</param>
private void HandleBadJob(IThreadedWebClientJob job, int threadIndex)
{
    // Increment the failure count atomically. The previous
    // ContainsKey + GetOrAdd + indexer++ sequence raced on the
    // concurrent dictionary and could lose counts under contention.
    int failures = this.badJobs.AddOrUpdate(job, 1, (key, count) => count + 1);
    if (failures < this.badJobRetry)
    {
        // Typo fixes: "retires" -> "retries".
        logger.Debug("Downloader {0} is adding job {1} back to the queue. {2} of {3} retries left", threadIndex, job, failures, this.badJobRetry - 1);
        this.Enqueue(job);
    }
    else
    {
        logger.Debug("Downloader {0}'s job {1} is out of retries.", threadIndex, job);
        // The job has failed multiple times: stop tracking it and notify listeners.
        int removedCount;
        this.badJobs.TryRemove(job, out removedCount);
        if (this.DownloaderBadJob != null)
        {
            this.DownloaderBadJob(this, threadIndex, job);
        }
    }
}
/// <summary>
/// Wraps a single follow-up job in the list shape that job execution
/// results are expected to use.
/// </summary>
/// <param name="self">The job this extension is invoked on (not read).</param>
/// <param name="job">The single job to return.</param>
/// <returns>A new list containing only <paramref name="job"/>.</returns>
public static List<IThreadedWebClientJob> Return(this IThreadedWebClientJob self, IThreadedWebClientJob job)
{
    var result = new List<IThreadedWebClientJob>();
    result.Add(job);
    return result;
}
/// <summary>
/// Forwards a downloader thread's job-changed notification to this
/// object's own DownloaderThreadJobChanged subscribers.
/// </summary>
private void OnDownloaderThreadJobChanged(object sender, int threadId, IThreadedWebClientJob job)
{
    // Copy the delegate to a local before the null check so the
    // check and the invocation see the same subscriber list.
    var handler = this.DownloaderThreadJobChanged;
    if (handler != null)
    {
        handler(sender, threadId, job);
    }
}
/// <summary>
/// Updates the displayed status of a download thread; a null job marks
/// the thread as idle.
/// </summary>
private void OnDownloaderThreadJobChanged(object sender, int threadId, IThreadedWebClientJob job)
{
    string status = job == null ? "Inactive" : job.ToString();
    SetDownloadThreadStatus(threadId, status);
}
/// <summary>
/// Updates the displayed status of a worker thread; a null job marks
/// the thread as idle, otherwise the job is shown as running.
/// </summary>
private void OnWorkerThreadStatusChanged(object sender, int threadId, IThreadedWebClientJob job)
{
    string status = job == null ? "Inactive" : "Running: " + job;
    SetWorkerThreadStatus(threadId, status);
}
/// <summary>
/// Runs a job chain depth-first on the calling context, for testing a
/// scrape without the threaded downloader: downloads each job, executes
/// it, and pushes any follow-up jobs it produces.
/// </summary>
/// <param name="initialJob">The root job to start from.</param>
/// <returns>
/// A task that completes when the job chain is exhausted. Changed from
/// async void so exceptions (including the deliberately rethrown
/// WebException below) are observable by the caller instead of crashing
/// the process; existing fire-and-forget call sites still compile.
/// </returns>
public async Task TestJob(IThreadedWebClientJob initialJob)
{
    Stack<IThreadedWebClientJob> queue = new Stack<IThreadedWebClientJob>();
    queue.Push(initialJob);

    // One HttpClient for the whole run. The original created a new client
    // per job inside the loop, which is the classic socket-exhaustion
    // anti-pattern; a single instance is the documented usage.
    using (HttpClient client = new HttpClient(new HttpClientHandler() { AutomaticDecompression = DecompressionMethods.GZip | DecompressionMethods.Deflate }))
    {
        // Add default headers to the client to simulate a real browser
        ScraperHelper.AddHeadersToClient(client);
        while (queue.Count > 0)
        {
            IThreadedWebClientJob job = queue.Pop();
            try
            {
                // CancellationTokenSource is disposable; the original leaked it.
                using (CancellationTokenSource cancelToken = new CancellationTokenSource())
                {
                    // NOTE(review): TimeSpan(days, hours, minutes, seconds)
                    // == 1 hour. Confirm this was not meant to be 1 minute.
                    cancelToken.CancelAfter(new TimeSpan(0, 1, 0, 00));
                    await job.ExecuteDownload(client, cancelToken.Token);
                }
                List<IThreadedWebClientJob> newJobs;
                try
                {
                    newJobs = job.Execute();
                    foreach (var t in newJobs)
                    {
                        queue.Push(t);
                    }
                }
                catch (Exception exp)
                {
                    job.FailedExecute(exp);
                }
            }
            // WebException may be a proxy error: deliberately propagate it
            // rather than routing it to FailedDownload.
            catch (WebException)
            {
                throw;
            }
            // Uncaught error
            catch (Exception exp)
            {
                job.FailedDownload(exp);
            }
        }
    }
}
/// <summary>
/// Adds a job to the pending-work collection — the stack when running
/// depth-first, the FIFO queue otherwise — and raises JobInQueueChanged
/// with the new pending count.
/// </summary>
/// <param name="job">The job to enqueue.</param>
private void Enqueue(IThreadedWebClientJob job)
{
    if (this.useDepthFirst)
    {
        this.JobsQueueStack.Push(job);
    }
    else
    {
        this.jobsQueue.Enqueue(job);
    }

    // Snapshot the delegate so the null check and the call are consistent.
    var handler = this.JobInQueueChanged;
    if (handler != null)
    {
        int pendingCount = this.useDepthFirst ? this.JobsQueueStack.Count : this.jobsQueue.Count;
        handler(this, pendingCount);
    }
}
/// <summary>
/// Marks a worker thread's job as finished in the status display and
/// bumps the completed-job counter.
/// </summary>
private void OnWorkerThreadJobDone(object sender, int threadId, IThreadedWebClientJob job)
{
    string status = "Finished: " + job;
    SetWorkerThreadStatus(threadId, status);
    SetWorkerJobDone();
}
/// <summary>
/// Surfaces a worker thread failure by showing the exception message as
/// that thread's status. The job argument is part of the handler
/// signature but is not used here.
/// </summary>
private void OnWorkerThreadError(object sender, int threadId, IThreadedWebClientJob job, Exception exp)
    => SetWorkerThreadStatus(threadId, exp.Message);
/// <summary>
/// Add a job to be executed.
/// </summary>
/// <param name="job">The job to place on the pending-work collection via
/// Enqueue; the addition is traced before it happens.</param>
public void AddJob(IThreadedWebClientJob job) { logger.Trace("Adding download job {0}", job); this.Enqueue(job); }
/// <summary>
/// Produces an empty follow-up-job list, for jobs whose execution spawns
/// no further work.
/// </summary>
/// <param name="self">The job this extension is invoked on (not read).</param>
/// <returns>A new, empty job list.</returns>
public static List<IThreadedWebClientJob> ReturnNothing(this IThreadedWebClientJob self)
{
    var empty = new List<IThreadedWebClientJob>();
    return empty;
}
/// <summary>
/// Forwards a job to the wrapped client's queue.
/// </summary>
/// <param name="Job">The job to add. NOTE(review): parameter is
/// PascalCased unlike the rest of the file; renaming it would break any
/// named-argument callers, so it is documented rather than changed.</param>
public void AddJob(IThreadedWebClientJob Job) { this.client.AddJob(Job); }