private List<FeedModel> GetFeedJobs(string baseUrl, string proxyUrl, FeedNode node)
{
    Logger.GetLogger(baseUrl).Info("start get feed");
    try
    {
        if (NodeConfigurationSection.Standalone)
        {
            // Standalone mode: read the next page of feeds from the local LiteDB store.
            var paging = new Paging();
            paging.CurrentPage = _page;
            paging.PageSize = 50;
            var feeds = FeedLiteDb.GetAvailableFeeds(paging);

            // Wrap around to the first page once the current page runs dry.
            if (_page > 0 && feeds.Count == 0)
            {
                _page = 0;
                paging.CurrentPage = _page;
                feeds = FeedLiteDb.GetAvailableFeeds(paging);
            }

            _page++;
            Logger.GetLogger(baseUrl).Info("get feed jobs:" + feeds.Count);
            return feeds;
        }
        else
        {
            // Clustered mode: resolve the page list assigned to this node, then ask the feed proxy for jobs.
            var d = node.GetData("/config/feed/" + node.BaseUrl);
            var config = JsonConvert.DeserializeObject<NodeConfig>(d.Data);
            var pages = config.Pages == null ? "" : string.Join(",", config.Pages);

            var client = new RestClient("http://" + proxyUrl);
            var restRequest = new RestRequest("api/feed/job?pages=" + pages);
            restRequest.Method = Method.GET;
            var restResponse = client.Execute(restRequest);

            var feeds = JsonConvert.DeserializeObject<List<FeedModel>>(restResponse.Content);
            Logger.GetLogger(baseUrl).Info("get feed jobs:" + feeds.Count);
            return feeds;
        }
    }
    catch (Exception ex)
    {
        Logger.GetLogger(baseUrl).Info("get feed error " + ex.Message);
        return new List<FeedModel>();
    }
}
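// Note: the Paging type is not shown in this file. Below is a minimal sketch of the shape implied by
// the usage above and in SyncFeed (CurrentPage/PageSize setters consumed by
// FeedLiteDb.GetAvailableFeeds); the real class in this repository may carry additional members,
// e.g. a total count.
public class Paging
{
    // Page index requested from the store. It appears zero-based in GetFeedJobs (_page is reset to 0)
    // but starts at 1 in SyncFeed, so the backing query presumably treats it as an offset hint.
    public int CurrentPage { get; set; }

    // Number of feeds fetched per round trip.
    public int PageSize { get; set; }
}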
private List<FeedModel> GetFeedJobs(string proxyUrl, FeedNode node)
{
    try
    {
        // Resolve the page list assigned to this node, then request its feed jobs from the proxy.
        var d = node.GetData("/config/feed/" + node.BaseUrl);
        var config = JsonConvert.DeserializeObject<NodeConfig>(d.Data);
        var pages = config.Pages == null ? "" : string.Join(",", config.Pages);

        var client = new RestClient("http://" + proxyUrl);
        var restRequest = new RestRequest("api/feed/job?pages=" + pages);
        restRequest.Method = Method.GET;
        var restResponse = client.Execute(restRequest);

        var feeds = JsonConvert.DeserializeObject<List<FeedModel>>(restResponse.Content);
        return feeds;
    }
    catch (Exception)
    {
        // Swallow transport/deserialization failures and fall back to an empty job list.
        return new List<FeedModel>();
    }
}
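// Note: NodeConfig is deserialized from the node's /config/feed/<baseUrl> data but is not defined in
// this file. A minimal sketch inferred from the usage here and in SyncFeed (only Pages is read: it is
// null-checked, its Length inspected, and its elements joined with commas). The element type is
// assumed to be string; an int[] would satisfy the same usage, and the real type likely has more members.
public class NodeConfig
{
    // Page identifiers assigned to this node; sent to the feed proxy as a comma-separated list.
    public string[] Pages { get; set; }
}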
public Task SyncFeed()
{
    return Task.Run(() =>
    {
        try
        {
            scheduler.Clear();

            if (NodeConfigurationSection.Standalone)
            {
                // Standalone mode: walk every page of the local feed store and schedule each feed.
                var page = 1;
                var paging = new Paging();
                paging.CurrentPage = page;
                paging.PageSize = 5000;
                var feeds = FeedLiteDb.GetAvailableFeeds(paging);
                while (feeds.Count != 0)
                {
                    foreach (var feed in feeds)
                    {
                        AddJob(feed);
                    }
                    Logger.GetLogger(baseUrl).Info(baseUrl + " sync feed and add feed jobs:" + feeds.Count);
                    paging.CurrentPage = ++page;
                    feeds = FeedLiteDb.GetAvailableFeeds(paging);
                }
            }
            else
            {
                // Clustered mode: read this node's page assignment, then pull its feed jobs from the proxy.
                var data = feedNode.GetData("/config/feed/" + baseUrl);
                var config = JsonConvert.DeserializeObject<NodeConfig>(data.Data);
                if (config.Pages == null || config.Pages.Length == 0)
                {
                    return;
                }
                feedPages = config.Pages;

                var feedsResponse = NodeVisitor.Feeder.GetFeedJobs(string.Join(",", config.Pages));
                if (string.IsNullOrEmpty(feedsResponse))
                {
                    throw new Exception("feedproxy can't connect");
                }

                var feeds = JsonConvert.DeserializeObject<List<FeedModel>>(feedsResponse);
                foreach (var feed in feeds)
                {
                    AddJob(feed);
                }
                Logger.GetLogger(baseUrl).Info(baseUrl + " sync feed and add feed jobs:" + feeds.Count);
            }
        }
        catch (Exception ex)
        {
            Logger.GetLogger(baseUrl).Error(baseUrl + " sync feed error " + ex.Message);
        }
    });
}
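// Usage sketch (not part of the original source): SyncFeed clears the scheduler and rebuilds all feed
// jobs, so a host would typically call it at startup and then periodically. The loop below is a
// hypothetical caller using only BCL types (System, System.Threading, System.Threading.Tasks); the
// FeedManager name and the one-hour interval are assumptions, not part of this repository.
public static class FeedSyncHost
{
    public static async Task RunAsync(FeedManager manager, CancellationToken token)
    {
        while (!token.IsCancellationRequested)
        {
            // Clear and repopulate the feed jobs, then wait before the next full resync.
            await manager.SyncFeed();
            try
            {
                await Task.Delay(TimeSpan.FromHours(1), token);
            }
            catch (TaskCanceledException)
            {
                // Shutdown requested during the delay; exit the loop.
            }
        }
    }
}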