/// <summary>
/// Creates event arguments carrying the job produced by a completed grab.
/// </summary>
/// <param name="job">The grabbed job (response, result and metadata) to expose to subscribers.</param>
public GrabCompleteEventArgs(GrabbedJob job)
{
    Job = job;
}
/// <summary>
/// Timer callback that executes the scheduled grab on a background worker thread.
/// For one-time schedules the timer is stopped first so the job never fires again.
/// The worker's result is a <see cref="GrabCompleteEventArgs"/> handed to subscribers
/// via <c>GrabComplete</c> (or <c>GrabFailed</c> when the worker itself faulted).
/// </summary>
/// <param name="sender">The timer raising the event (unused).</param>
/// <param name="e">Elapsed-event data (unused).</param>
private void tmr_Elapsed(object sender, System.Timers.ElapsedEventArgs e)
{
    // A one-time job must not fire again on the next interval.
    if (ScheduledGrab.GrabSchedule == Constants.GrabSchedules.OneTime)
        tmr.Stop();

    BackgroundWorker worker = new BackgroundWorker();

    worker.DoWork += (s, dwe) =>
    {
        // Name the pool thread once so log lines can be correlated to a job/thread.
        if (string.IsNullOrEmpty(Thread.CurrentThread.Name))
            Thread.CurrentThread.Name = string.Format("Grab Job #{0} - Thread #{1}",
                ScheduledGrab.GrabID, Thread.CurrentThread.ManagedThreadId);

        Logger.Log(string.Format("Grabbing Job #{0} on thread {1}",
            ScheduledGrab.GrabID, Thread.CurrentThread.ManagedThreadId));

        GrabbedJob grabbedJob = new GrabbedJob(this, null);
        GrabEventArgs grabParams = ScheduledGrab.GrabParams;

        // Debug trace of which worker thread handles which wait slot.
        Console.WriteLine(Thread.CurrentThread.ManagedThreadId + ":" + grabbedJob.WaitIndex);

        if (!string.IsNullOrWhiteSpace(grabParams.Url.Url))
        {
            try
            {
                GrabResponse response = new GrabResponse();

                switch (ScheduledGrab.GrabMode)
                {
                    case Constants.GrabModes.Regex:
                        response.RawResponse = GetStringContents(ScheduledGrab.GrabSource, grabParams.Url.Url);

                        int numExtracted = 0;
                        MatchCollection matches = grabParams.GrabExpression.Matches(response.RawResponse);
                        foreach (Match match in matches)
                        {
                            if (!match.Success)
                                continue;

                            // Group 0 is the whole match; collect the captures of each explicit group.
                            List<string> captures = new List<string>();
                            for (int i = 1; i < match.Groups.Count; i++)
                            {
                                foreach (Capture capture in match.Groups[i].Captures)
                                {
                                    captures.Add(capture.Value);
                                }
                            }

                            response.ParsedResponse.Add(captures);
                            numExtracted++;
                        }

                        Logger.Log(string.Format("Extracted {0} matches from {1}", numExtracted, grabParams.Url.Url));
                        break;

                    case Constants.GrabModes.Scrape:
                        response.BinaryResponse = GetBinaryContents(ScheduledGrab.GrabSource, grabParams.Url.Url);
                        Logger.Log(string.Format("Extracted page content from {0} - {1} bytes",
                            grabParams.Url.Url, response.BinaryResponse.Length));
                        break;
                }

                // BUG FIX: the original attached the response and marked success only in
                // the Regex branch, so Scrape-mode content was silently discarded and the
                // job never reported success. Attach the response for every mode once the
                // grab has completed without throwing.
                grabbedJob.Response = response;
                grabbedJob.Result = Constants.GrabResult.Success;
            }
            catch (Exception ex)
            {
                grabbedJob.Result = Constants.GrabResult.Fail;
                Logger.Log(string.Format("Error reading matches from {0}", grabParams.Url.Url) + ": " + ex.Message, true);
            }
        }

        Console.WriteLine(Thread.CurrentThread.ManagedThreadId + ":" + grabbedJob.WaitIndex);

        // Hand the finished job to RunWorkerCompleted via the worker result.
        dwe.Result = new GrabCompleteEventArgs(grabbedJob);
    };

    worker.RunWorkerCompleted += (s, rwe) =>
    {
        IsBusy = false;

        if (rwe.Error != null)
        {
            // The worker faulted outside our try/catch (e.g. while building the job).
            Logger.Log("Error grabbing URLs" + ": " + rwe.Error.Message, true);
            if (GrabFailed != null)
                GrabFailed(this, EventArgs.Empty);
        }
        else
        {
            GrabCompleteEventArgs args = rwe.Result as GrabCompleteEventArgs;
            if (args != null)
            {
                if (GrabComplete != null)
                    GrabComplete(this, args);

                Logger.Log(string.Format("Grabbing of job #{0} ({1}) complete",
                    ScheduledGrab.GrabID, ScheduledGrab.Name));
            }
        }
    };

    // NOTE(review): IsBusy is set but never checked here, so overlapping timer ticks
    // can start concurrent workers for the same job — confirm whether callers gate on it.
    IsBusy = true;
    worker.RunWorkerAsync();
}