/// <summary>
/// Stop this Crawler and dispose of its resources.
/// Safe to call more than once: a null <c>_lock</c> marks the instance as already disposed.
/// </summary>
public void Dispose() {
  // already disposed — _lock is nulled at the end of a completed Dispose
  if (_lock == null) { return; }
  if (Running) {
    // crawler is mid-run: register Dispose as the stop callback and flag the run to abort
    OnStop = new ActionSet(Dispose);
    _abort = true;
    _nextUrlSet = false;
    // NOTE(review): despite scheduling a deferred Dispose via OnStop, execution falls
    // through and tears down immediately below — confirm whether an early return was
    // intended here, otherwise Dispose may effectively run twice.
  }
  // serialize teardown against any concurrent use of this crawler
  _lock.Take();
  _timer.Run = false;
  _time.Stop();
  Connecting = false;
  Processing = false;
  // release references so the request/response/parser state can be collected
  _request = null;
  _response = null;
  _parseUrl = null;
  _ascii = null;
  _utf8 = null;
  _lock.Release();
  // null _lock last: it doubles as the "disposed" sentinel checked at the top
  _lock = null;
}
/// <summary>
/// Assign an extractor to the crawler.
/// </summary>
/// <param name="parseUrl">Parser used to extract urls from crawled content.</param>
/// <param name="parseAll">Optional parser applied to all content; may be null.</param>
public void SetParsers(ParseUrl parseUrl, Parse parseAll) {
  // Fix: Dispose nulls _lock as its "disposed" sentinel; mirror its guard here so a
  // call after disposal is a no-op instead of a NullReferenceException on _lock.Take().
  if (_lock == null) { return; }
  _lock.Take();
  // abort if running to refresh parser
  _abort |= Running;
  _parseUrl = parseUrl;
  _parseAll = parseAll;
  // cache whether an all-content parser was supplied
  _parseAllSet = _parseAll != null;
  _lock.Release();
}
/// <summary>
/// Create the extraction structure for the current crawling session.
/// Returns a pair of the url parser (always present) and the optional page parser.
/// </summary>
/// <param name="crawler">Crawler instance passed through to the url callback.</param>
private Teple<ParseUrl, Parse> GetParsers(Crawler crawler) {
  // lazily construct the url extractor the first time parsers are requested
  if (ExtractorUrl == null) {
    ExtractorUrl = new ExtractUrl(null, new ArrayRig<Protocol>(Protocols));
  }

  var urlParser = (ParseUrl)ExtractorUrl.GetParser();
  // route discovered urls back through OnUrl for this crawler
  urlParser.OnUrl = new ActionSet<string, Crawler>(OnUrl, null, crawler);

  // the page extractor is optional; null means no page-level parsing this session
  Parse pageParser;
  if (_session.PageExtractor == null) {
    pageParser = null;
  } else {
    pageParser = _session.PageExtractor.GetParser();
  }

  return new Teple<ParseUrl, Parse>(urlParser, pageParser);
}
/// <summary>
/// Build the full api url from the posted url parts and fetch the matching stocks.
/// </summary>
/// <param name="urlParts">Url components posted in the request body.</param>
/// <returns>The stocks returned by the stock api for the assembled url.</returns>
public async Task<List<Stock>> PostStocks([FromBody] StockUrl urlParts)
    => await _stocks.CallStockApi(ParseUrl.Parse(urlParts), urlParts);