/// <summary>
/// Persists the downloaded RSS items of the given feed, skipping any item
/// whose link is already stored. Entries with a missing or clearly bogus
/// publish date (before 1900) are stamped with the current time instead.
/// </summary>
/// <param name="channel">Feed whose RSS channel items should be saved.</param>
private void saveFeedItems(FeedSource channel)
{
    RssItemList posts = channel.RssChannel.RssItems;

    // Iterate backwards so the oldest items (at the end of the list) are
    // inserted first, preserving chronological order in the store.
    for (int i = posts.Count - 1; i >= 0; i--)
    {
        RssItem post = posts[i];

        // Check for an existing entry BEFORE building a new FeedEntry so we
        // don't allocate and populate an object that is immediately discarded.
        if (entryService.GetByLink(post.Link) != null)
        {
            continue;
        }

        FeedEntry entry = new FeedEntry();
        entry.FeedSource = channel;
        entry.Author = post.Author;
        entry.Category = post.Category;
        entry.Description = post.Description;
        entry.Link = post.Link;
        entry.PubDate = post.PubDate;

        // Guard against feeds that omit the publish date or report garbage
        // (e.g. DateTime.MinValue); fall back to "now".
        if (entry.PubDate == null || entry.PubDate < new DateTime(1900, 1, 1))
        {
            entry.PubDate = DateTime.Now;
        }

        entry.Title = post.Title;
        entry.Created = DateTime.Now;

        db.insert(entry);
    }
}
/// <summary>
/// Fetches the RSS document for this control (string data sources are cached
/// for five minutes, with concurrent fetches of the same feed de-duplicated),
/// copies up to <c>MaxItems</c> items into the control's DataSource, then
/// performs the base data binding.
/// </summary>
public override void DataBind()
{
    try
    {
        RssItemList rssData = new RssItemList();
        RssDocument rssDoc = null;

        // Get the rssData
        if (Feed != null)
        {
            try
            {
                // Resolves the data source regardless of form
                // (URL, file path, XmlReader, etc.).
                RssEngine rssEng = new RssEngine();

                if (DataSource is string)
                {
                    // Serve from cache when possible.
                    object cachedRssDoc = HttpContext.Current.Cache.Get(string.Concat("RssFeed||", this.Feed));
                    if (cachedRssDoc != null)
                    {
                        rssDoc = (RssDocument)cachedRssDoc;
                    }
                    else
                    {
                        // De-duplicate concurrent fetches of the same feed:
                        // only one request downloads; the others skip this cycle.
                        bool isPending = false;
                        lock (pendingRequests)
                        {
                            isPending = pendingRequests.ContainsKey((string)DataSource);
                            if (!isPending)
                            {
                                pendingRequests.Add((string)DataSource, DateTime.Now);
                            }
                            else if (pendingRequests[(string)DataSource] < DateTime.Now.AddMinutes(-5))
                            {
                                // The previous fetch is stale (it likely died without
                                // cleaning up). Take over the pending slot so the feed
                                // is actually re-fetched by THIS request instead of
                                // merely clearing the slot and skipping again.
                                pendingRequests[(string)DataSource] = DateTime.Now;
                                isPending = false;
                            }
                        }

                        if (!isPending)
                        {
                            try
                            {
                                rssDoc = rssEng.GetDataSource(DataSource);
                                HttpContext.Current.Cache.Insert(string.Concat("RssFeed||", Feed), rssDoc, null, DateTime.Now.AddMinutes(5), TimeSpan.Zero);
                            }
                            finally
                            {
                                // Must be synchronized like every other access to the
                                // shared pendingRequests map.
                                lock (pendingRequests)
                                {
                                    pendingRequests.Remove((string)DataSource);
                                }
                            }
                        }
                    }
                }
                // NOTE(review): non-string data sources are currently never fetched
                // (the direct call below is commented out) — confirm this is intentional.
                //rssDoc = rssEng.GetDataSource(DataSource);

                if (rssDoc == null)
                {
                    return;
                }

                // NOTE(review): when MaxItems <= 0 no items are copied at all —
                // verify that 0 is not meant to mean "unlimited".
                for (int i = 0; i < rssDoc.Items.Count && (this.MaxItems > 0 && i < this.MaxItems); i++)
                {
                    rssData.Add(rssDoc.Items[i]);
                }
            }
            catch (XmlException)
            {
                // Malformed feed XML: deliberately swallowed so a broken feed
                // does not take down the page; the control simply renders empty.
                //isValidXml = false;
            }
        }

        this.DataSource = rssData;
    }
    catch (Exception ex)
    {
        //log.Error(string.Format("RSS control error for {0}", Url), ex);
        return;
    }

    base.DataBind();
}
/// <summary>
/// Scans the configured RSS feeds for items matching each missing episode,
/// converts matches into download actions, removes the satisfied missing
/// items, and reports progress along the way.
/// </summary>
/// <param name="prog">Callback used to report progress percentage.</param>
/// <param name="startpct">Percentage at which this phase starts.</param>
/// <param name="totPct">Percentage at which this phase ends.</param>
public override void Check(SetProgressDelegate prog, int startpct, int totPct)
{
    int c = ActionList.Count + 2;
    int n = 1;
    prog.Invoke(startpct);

    // Download every configured RSS feed up front.
    // ReSharper disable once InconsistentNaming
    RssItemList RSSList = new RssItemList();
    foreach (string s in TVSettings.Instance.RSSURLs)
    {
        RSSList.DownloadRSS(s, TVSettings.Instance.FNPRegexs);
    }

    ItemList newItems = new ItemList();
    ItemList toRemove = new ItemList();

    foreach (ItemMissing action in ActionList.MissingItems())
    {
        if (ActionCancel)
        {
            return;
        }

        prog.Invoke(startpct + ((totPct - startpct) * (++n) / (c)));

        ProcessedEpisode pe = action.Episode;
        string simpleShowName = Helpers.SimplifyName(pe.Show.ShowName);
        string simpleSeriesName = Helpers.SimplifyName(pe.TheSeries.Name);

        foreach (RSSItem rss in RSSList)
        {
            // Accept the item if its show name matches, or — when the feed
            // did not supply a show name — its title matches the series name.
            if (!FileHelper.SimplifyAndCheckFilename(rss.ShowName, simpleShowName, true, false)
                && !(string.IsNullOrEmpty(rss.ShowName)
                     && FileHelper.SimplifyAndCheckFilename(rss.Title, simpleSeriesName, true, false)))
            {
                continue;
            }

            if (rss.Season != pe.AppropriateSeasonNumber)
            {
                continue;
            }

            if (rss.Episode != pe.AppropriateEpNum)
            {
                continue;
            }

            LOGGER.Info($"Adding {rss.URL} as it appears to be a match for {action.Episode.Show.ShowName} S{action.Episode.AppropriateSeasonNumber}E{action.Episode.AppropriateEpNum}");
            newItems.Add(new ActionTDownload(rss, action.TheFileNoExt, pe));

            // Several feed items can match the same missing episode; queue the
            // missing item for removal only once so we don't attempt multiple
            // removals from ActionList later.
            if (!toRemove.Contains(action))
            {
                toRemove.Add(action);
            }
        }
    }

    // Drop duplicate downloads that target the same episode.
    foreach (ActionTDownload x in FindDuplicates(newItems))
    {
        newItems.Remove(x);
    }

    foreach (Item i in toRemove)
    {
        ActionList.Remove(i);
    }

    foreach (Item action in newItems)
    {
        ActionList.Add(action);
    }

    prog.Invoke(totPct);
}