/// <summary>
/// Runs one crawl pass: dequeues up to 10 sites, crawls each one for links,
/// stores the resulting base-content entries, and stamps the last-crawl date
/// on every site that crawled successfully. A cache flag
/// (<c>CrawlLinkIsInProccessCacheKey</c>) guards against overlapping passes.
/// </summary>
/// <returns>
/// <c>true</c> when a pass ran to completion; <c>false</c> when another pass
/// was already in progress or this pass failed with an exception.
/// </returns>
public async Task<bool> CrawlLinkManager()
{
    // Guard clause: another pass is already running — skip this invocation.
    if (IsInProccess(CrawlLinkIsInProccessCacheKey))
    {
        return false;
    }

    try
    {
        SetScheduleInProccess(SchedulerStat.inProccess, CrawlLinkIsInProccessCacheKey);

        var contentManagerRepository = new Repository.ContentManagerRepository();
        var baseContentRepository = new Repository.BaseContentRepository();

        var successList = new List<Models.ContentManager.Site>();
        var baseContentList = new List<Models.BaseContent.BaseContent>();

        var dequeuedSites = await contentManagerRepository.DequeSite(10);
        foreach (var site in dequeuedSites)
        {
            // Picks the site's RSS entry that has an empty url — presumably the
            // placeholder feed used for crawled links. TODO(review): confirm
            // this selection rule against how GetSiteAllRss is populated.
            var rssModel = (await contentManagerRepository.GetSiteAllRss(site._id))
                .FirstOrDefault(q => string.IsNullOrEmpty(q.url));

            var crawledLinks = await CrawlerLink(site);
            if (crawledLinks == null)
            {
                continue;
            }

            successList.Add(site);
            foreach (var rss in crawledLinks)
            {
                baseContentList.Add(new Models.BaseContent.BaseContent()
                {
                    dateticks = rss.dateticks,
                    description = rss.description,
                    // NOTE(review): local-time ticks; consider DateTime.UtcNow
                    // if these are compared across machines — confirm before changing.
                    insertdateticks = DateTime.Now.Ticks,
                    rssid = rssModel?._id ?? "",
                    title = rss.title,
                    url = rss.url,
                    userid = site.userid,
                    bycrawled = true
                });
            }
        }

        // TODO: Resolve concurrency problem — duplicates can be inserted if a
        // previous task is still writing; consider awaiting/serializing passes.
        var addRes = await baseContentRepository.Add(baseContentList);
        var changeRes = await contentManagerRepository.ChangeLastCarawlDateSite(successList);
        // Cache URL in "is repeated URL" store if not repeated ...
        //var AddToRedisRes = await baseContentRepository.AddRssURlInRedis(baseContentList);

        return true;
    }
    catch (Exception)
    {
        // TODO(review): log the exception here — swallowing it silently hides
        // crawl failures from operators. Return false so the caller knows the
        // pass did not complete.
        return false;
    }
    finally
    {
        // Always release the in-process flag, on success AND failure, so a
        // thrown exception can never leave the scheduler stuck "in process".
        SetScheduleInProccess(SchedulerStat.idle, CrawlLinkIsInProccessCacheKey);
    }
}