public void CrawlAsync(ICrawlDaddy crawl)
{
    // Create an AsyncOperation for this crawl, keyed by its CrawlerId.
    _asyncOp = AsyncOperationManager.CreateOperation(crawl.CrawlerId);

    crawl.DomainCrawlStarted += crawl_DomainCrawlStarted;
    crawl.DomainCrawlEnded += crawl_DomainCrawlEnded;
    crawl.LinkCrawlCompleted += crawl_LinkCrawlCompleted;
    crawl.ExternalLinksFound += crawl_ExternalLinksFound;

    // Start the asynchronous operation on a thread-pool thread.
    WorkerEventHandler workerDelegate = new WorkerEventHandler(CrawlWorker);
    workerDelegate.BeginInvoke(crawl, _asyncOp, null, null);
}
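CrawlAsync leans on a few members the listing doesn't show: the WorkerEventHandler delegate type plus the _asyncOp and _onCompletedDelegate fields that CrawlEnded uses below. Here is a minimal sketch of those declarations in the usual event-based asynchronous pattern shape; the constructor wiring is an assumption:

// Hypothetical supporting declarations for the wrapper class. Only the
// names WorkerEventHandler, _asyncOp, and _onCompletedDelegate appear in
// the listings; the rest is assumed.
private delegate void WorkerEventHandler(ICrawlDaddy crawl, AsyncOperation asyncOp);

private AsyncOperation _asyncOp;
private SendOrPostCallback _onCompletedDelegate;

public CrawlDaddyAsyncWrapper()
{
    // PostOperationCompleted expects a SendOrPostCallback, so bind the
    // completion callback (sketched after CrawlEnded below) once here.
    _onCompletedDelegate = new SendOrPostCallback(CrawlCompletedCallback);
}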
public void InitializeCrawl(ICrawlDaddy crawl)
{
    // Wrapper-based initialization: the wrapper's CrawlCompleted event is
    // marshaled back to this thread via PostOperationCompleted (see
    // CrawlEnded below).
    _wrapper = new CrawlDaddyAsyncWrapper();
    _wrapper.ProgressChanged += _wrapper_ProgressChanged;
    _wrapper.CrawlCompleted += _wrapper_CrawlCompleted;

    _externalLinks = new List<string>();
    _linksCrawled = new List<string>();
    Reset();

    _crawl = crawl;
    lblCrawlerId.Text = _crawl.CrawlerId.ToString();
    txtSeedUrl.Text = _crawl.Seed.AbsoluteUri;
    lblBaseDomain.Text = _crawl.BaseDomain;
}
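With the wrapper wired up, starting the crawl from the form is one call. A minimal usage sketch; the button name and its handler are assumptions:

// Hypothetical start handler; _wrapper and _crawl are the fields set up
// in InitializeCrawl above.
private void btnStartCrawl_Click(object sender, EventArgs e)
{
    _wrapper.CrawlAsync(_crawl);
}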
public void InitializeCrawl(ICrawlDaddy crawl)
{
    // Direct-subscription initialization: the crawl raises these events on
    // its worker thread, so handlers must marshal UI updates themselves.
    _externalLinks = new List<string>();
    _linksCrawled = new List<string>();
    Reset();

    _crawl = crawl;
    _crawl.DomainCrawlStarted += _crawl_DomainCrawlStarted;
    _crawl.DomainCrawlEnded += _crawl_DomainCrawlEnded;
    _crawl.ExternalLinksFound += _crawl_ExternalLinksFound;
    _crawl.LinkCrawlCompleted += _crawl_LinkCrawlCompleted;

    lblCrawlerId.Text = _crawl.CrawlerId.ToString();
    txtSeedUrl.Text = _crawl.Seed.AbsoluteUri;
    lblBaseDomain.Text = _crawl.BaseDomain;
}
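In the direct-subscription variant, a WinForms handler has to marshal back to the UI thread before touching controls. A minimal sketch of one such handler; the event-args type and the lblLinksCrawled label are assumptions:

// Hypothetical handler for the direct-subscription version. The actual
// event-args type is not shown in the listings, so plain EventArgs is
// assumed here.
private void _crawl_LinkCrawlCompleted(object sender, EventArgs e)
{
    if (InvokeRequired)
    {
        // Re-enter this handler on the UI thread.
        BeginInvoke(new EventHandler(_crawl_LinkCrawlCompleted), sender, e);
        return;
    }

    // Safe to touch controls now; lblLinksCrawled is an assumed label name.
    lblLinksCrawled.Text = _linksCrawled.Count.ToString();
}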
private void CrawlEnded(ICrawlDaddy crawl, Exception exception, bool canceled, AsyncOperation asyncOp)
{
    crawl.DomainCrawlStarted -= crawl_DomainCrawlStarted;
    crawl.DomainCrawlEnded -= crawl_DomainCrawlEnded;
    crawl.LinkCrawlCompleted -= crawl_LinkCrawlCompleted;
    crawl.ExternalLinksFound -= crawl_ExternalLinksFound;

    // Package the results of the operation in a
    // CrawlDaddyCompletedEventArgs.
    var e = new CrawlDaddyCompletedEventArgs("DONE", exception, canceled, asyncOp.UserSuppliedState);

    // End the task. The asyncOp object is responsible
    // for marshaling the call back to the thread that created it.
    asyncOp.PostOperationCompleted(_onCompletedDelegate, e);

    // Note that after the call to PostOperationCompleted,
    // asyncOp is no longer usable, and any attempt to use it
    // will cause an exception to be thrown.
}
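PostOperationCompleted marshals the supplied callback onto the thread that created the AsyncOperation, which in a WinForms app is the UI thread. A minimal sketch of the callback _onCompletedDelegate might point at; the method name and the CrawlCompleted event signature are assumptions:

// Hypothetical completion callback bound to _onCompletedDelegate. By the
// time this runs, AsyncOperationManager has already marshaled the call to
// the thread that called CrawlAsync.
private void CrawlCompletedCallback(object operationState)
{
    var e = (CrawlDaddyCompletedEventArgs)operationState;

    // Raise the wrapper's public CrawlCompleted event (assumed to be an
    // EventHandler<CrawlDaddyCompletedEventArgs>).
    var handler = CrawlCompleted;
    if (handler != null)
        handler(this, e);
}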
private void CrawlWorker(ICrawlDaddy crawl, AsyncOperation asyncOp)
{
    Exception e = null;

    // Check that the task is still active.
    // The operation may have been canceled before
    // the thread was scheduled.
    if (!_cancelPending)
    {
        try
        {
            crawl.StartCrawl();
        }
        catch (Exception ex)
        {
            e = ex;
        }
    }

    CrawlEnded(crawl, e, _cancelPending, asyncOp);
}
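CrawlWorker consults _cancelPending before starting, and CrawlEnded reports it as the canceled flag, but nothing in the listings ever sets it. A minimal cancellation entry point in the usual EAP shape; the method name is an assumption:

// Hypothetical cancellation support; only the _cancelPending field appears
// in the listings. volatile ensures the worker thread sees the write
// without extra locking.
private volatile bool _cancelPending;

public void CancelAsync()
{
    _cancelPending = true;
}

Note that the flag is only checked before StartCrawl runs, so once the crawl is underway this won't stop it unless ICrawlDaddy exposes a stop mechanism of its own.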