/// <summary>
/// Entry point for a parser job: loads the job's root URL, records the number of
/// pattern matches on that page, marks the job as Processing, then fans the page's
/// links out into a batch of child jobs. For shallow jobs (Depth &lt;= 1) the batch
/// is awaited here so the job is marked Completed once every child finishes; deeper
/// jobs are completed by the deepest ParseChildUrl level instead.
/// </summary>
/// <param name="id">Identifier of the parser job to run.</param>
public void ParseUrl(int id)
{
    var job = ParserJobsRepository.GetById(id);

    var document = HtmlService.Load(job.Url);
    job.Count = HtmlService.CountText(document, job.Pattern);
    job.Status = ParserJobStatus.Processing;
    ParserJobsRepository.SaveChanges();

    var childUrls = HtmlService.GetUrls(document);

    // Children start at depth 1; job.Depth acts as maxDepth for the whole crawl.
    var jobBatchId = StartBatchJobs(id, childUrls, job.IsCritical, job.Pattern, 1, job.Depth);

    // FIX: was `job.Depth == 1`. For Depth <= 0 the children all bail out on the
    // depth guard and no deeper level exists to attach the completion callback,
    // so such jobs were left stuck in Processing forever. `<= 1` covers every
    // case where the batch started above is the final level of the crawl.
    if (job.Depth <= 1)
    {
        BatchJobClient.AwaitBatch(jobBatchId, x =>
        {
            if (job.IsCritical)
            {
                x.Enqueue(() => SetCriticalJobStatus(job.Id, ParserJobStatus.Completed));
            }
            else
            {
                x.Enqueue(() => SetJobStatus(job.Id, ParserJobStatus.Completed));
            }
        });
    }
}
/// <summary>
/// Processes one child URL of a parser job: counts pattern matches on the page,
/// accumulates the count onto the job, and fans the page's links out into the next
/// batch level. When the batch started here is the deepest level of the crawl, it
/// is awaited and the job is marked Completed.
/// </summary>
/// <param name="id">Identifier of the owning parser job.</param>
/// <param name="url">Child URL to load and scan.</param>
/// <param name="isCritical">Whether completion must go through the critical-status path.</param>
/// <param name="pattern">Text pattern counted on each page.</param>
/// <param name="depth">Depth of this child in the crawl (root children start at 1).</param>
/// <param name="maxDepth">Maximum crawl depth; levels at or beyond it do no work.</param>
public void ParseChildUrl(int id, string url, bool isCritical, string pattern, int depth, int maxDepth)
{
    // Guard first: the early-return path needs nothing from the repository,
    // so don't pay for a GetById lookup on levels past maxDepth.
    if (maxDepth <= depth)
    {
        return;
    }

    var job = ParserJobsRepository.GetById(id);
    var document = HtmlService.Load(url);
    var count = HtmlService.CountText(document, pattern);
    ParserJobsRepository.UpdateCount(job, count);

    var childUrls = HtmlService.GetUrls(document);
    var jobBatchId = StartBatchJobs(id, childUrls, isCritical, pattern, depth + 1, maxDepth);

    // BUG FIX: was `depth + 1 > maxDepth`, which is unreachable — the guard above
    // guarantees depth < maxDepth, hence depth + 1 <= maxDepth — so jobs with
    // maxDepth > 1 were never marked Completed. With `>=` this fires exactly at the
    // deepest working level (depth == maxDepth - 1), whose children all bail out on
    // the guard, making its batch the last one to finish.
    if (depth + 1 >= maxDepth)
    {
        BatchJobClient.AwaitBatch(jobBatchId, x =>
        {
            if (isCritical)
            {
                x.Enqueue(() => SetCriticalJobStatus(id, ParserJobStatus.Completed));
            }
            else
            {
                x.Enqueue(() => SetJobStatus(id, ParserJobStatus.Completed));
            }
        });
    }
}