/// <summary>
/// Looks up the stored RSS entry whose Guid matches the given feed's Guid.
/// </summary>
/// <param name="rss">Feed carrying the Guid to search for.</param>
/// <returns>The matching RSS record from the database, or null when none exists.</returns>
public static async Task <RSS> GetFeedAsync(RSS rss)
{
    return await Task.Run(() =>
    {
        using var db = new LiteDatabase(LiteDbContext.ConnectionString);
        return db.GetCollection <RSS>(LiteDbContext.RSSs)
                 .FindOne(x => x.Guid == rss.Guid);
    });
}
// GET: RSS
// Renders the latest stock news as a partial view.
public ActionResult RSSContent()
{
    // FIX: the previous version allocated a NewsView that was immediately
    // overwritten by the RSSNews() result; the dead allocation is removed.
    RSS rssService = new RSS();
    NewsView stockNews = rssService.RSSNews();
    return PartialView(stockNews);
}
// Loads the RSS feed on page load and binds title/link/description of each
// item to the rptRSS repeater.
protected void Page_Load(object sender, EventArgs e)
{
    // Local helper: inner text of a named child node, or "" when the node is
    // missing. Replaces three copies of the same null-check pattern.
    string NodeText(System.Xml.XmlNode item, string childName)
    {
        System.Xml.XmlNode detail = item.SelectSingleNode(childName);
        return detail != null ? detail.InnerText : "";
    }

    var objRSS = new RSS();
    objRSS.ExecuteUrlRead();
    System.Xml.XmlNodeList rssItems = objRSS.Document.SelectNodes("rss/channel/item");
    objRSS.LstRSS = new List <RSSItem>();
    for (int i = 0; i < rssItems.Count; i++)
    {
        System.Xml.XmlNode item = rssItems.Item(i);
        objRSS.LstRSS.Add(new RSSItem
        {
            Title = NodeText(item, "title"),
            Description = NodeText(item, "description"),
            Link = NodeText(item, "link")
        });
    }
    rptRSS.DataSource = objRSS.LstRSS;
    rptRSS.DataBind();
}
/// <summary>
/// Inserts a new RSS record into the LiteDB store.
/// </summary>
/// <param name="rss">The feed entry to persist.</param>
/// <returns>The inserted RSS carrying its database-assigned id, or null when the insert failed.</returns>
public static async Task <RSS> AddNewFeedAsync(RSS rss)
{
    return await Task.Run(() =>
    {
        using var db = new LiteDatabase(LiteDbContext.ConnectionString);
        db.GetCollection <RSS>(LiteDbContext.RSSs).Insert(rss);
        // LiteDB assigns the id during Insert; a non-positive id signals failure.
        return rss.Id > 0 ? rss : null;
    });
}
// Adds a new feed record (validates the model first).
public HttpResponseMessage AddRSS([FromBody] RSS rss)
{
    if (!ModelState.IsValid)
    {
        // Model validation failed — echo the validation state back to the caller.
        return Request.CreateResponse(HttpStatusCode.BadRequest, ModelState);
    }

    rsscontext.RSST.Add(rss);
    rsscontext.SaveChanges();
    return Request.CreateResponse(HttpStatusCode.Created, rss);
}
// Seeds the database with a default feed collection containing two feeds.
protected override void Seed(NewsModel context)
{
    // FIX: the society feed URL previously contained a leading space, which
    // would have produced an invalid request URI at fetch time.
    var rss1 = new RSS("Sport news", "http://fakty.ua/rss_feed/sport");
    var rss2 = new RSS("Society news", "http://fakty.ua/rss_feed/society");
    var collection = new FeedCollection
    {
        Feeds = new List <Feed> { rss1, rss2 }
    };
    context.FeedCollections.Add(collection);
    context.SaveChanges();
}
/// <summary>
/// Extracts and creates a list of feed items from RSS/ATOM URLs.
/// </summary>
/// <param name="feedUriList">List of URLs to extract feed items from.</param>
/// <param name="feedItemCount">Number of feed items to be included from each source RSS/ATOM URL.</param>
/// <param name="feedSummaryCharLength">Number of characters to be included in the feed summary.</param>
/// <param name="stripHtmlTags">Strips the HTML tags out of the feed summary.</param>
/// <returns>List of Feed items, newest first.</returns>
public static List<RSS> BuildFeed(List<string> feedUriList, int feedItemCount, int feedSummaryCharLength, bool stripHtmlTags)
{
    List<RSS> feedItemsList = new List<RSS>();

    // From each feed URL, collect the newest feedItemCount items.
    foreach (string feedUri in feedUriList)
    {
        SyndicationFeed syndicationFeed;
        // FIX: the XmlReader was never disposed; wrap it in a using block.
        using (XmlReader reader = XmlReader.Create(feedUri))
        {
            syndicationFeed = SyndicationFeed.Load(reader);
        }

        var newestFromSource = syndicationFeed.Items
            .Select(syndicationItem => new RSS
            {
                FeedSource = syndicationFeed.Title.Text,
                FeedPublishDateTime = syndicationItem.PublishDate,
                FeedTitle = syndicationItem.Title.Text,
                FeedUrl = syndicationItem.Id,
                FeedSummary = syndicationItem.Summary.Text
            })
            .OrderByDescending(feedItem => feedItem.FeedPublishDateTime)
            .Take(feedItemCount);

        feedItemsList.AddRange(newestFromSource);
    }

    // Merge-sort all collected items by publish date, newest first.
    feedItemsList = feedItemsList.OrderByDescending(feedItem => feedItem.FeedPublishDateTime).ToList();

    foreach (RSS feedItem in feedItemsList)
    {
        if (stripHtmlTags)
        {
            feedItem.FeedSummary = HtmlFilterHelper.Filter(feedItem.FeedSummary, new string[] { });
        }

        // FIX: Substring(0, feedSummaryCharLength) threw ArgumentOutOfRangeException
        // whenever a summary was shorter than the requested length; only truncate
        // (and append an ellipsis) when the summary actually exceeds the limit.
        if (feedItem.FeedSummary.Length > feedSummaryCharLength)
        {
            feedItem.FeedSummary = feedItem.FeedSummary.Substring(0, feedSummaryCharLength) + "...";
        }
    }

    return feedItemsList;
}
/// <summary>
/// Fetches the RSS feed and binds its items to the list view.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private async void BtnGet_Click(object sender, EventArgs e)
{
    var url = "https://blogs.windows.com/feed/";
    var client = new RSSClient();
    var xml = await client.Open(url);

    _rss = RSS.Load(xml);

    // Show each feed item in a simple one-line Android list row.
    lv.Adapter = new ArrayAdapter <RSSItem>(
        this, Android.Resource.Layout.SimpleListItem1, _rss.channel.Items);
    lv.ItemClick += Lv_ItemClick;
}
// Loads the stored feed's source URL and returns its items, with each
// description trimmed to at most 180 characters ending on a word boundary.
public HttpResponseMessage Get(int id)
{
    RSS rssValue = rsscontext.RSST.Find(id);
    // FIX: Find returns null for an unknown id; the previous version then
    // dereferenced rssValue.Link and threw a NullReferenceException (HTTP 500).
    if (rssValue == null)
    {
        return Request.CreateResponse(HttpStatusCode.NotFound);
    }

    XDocument feedXml = XDocument.Load(rssValue.Link);
    var feeds = from feed in feedXml.Descendants("item")
                select new RSS
                {
                    Title = feed.Element("title").Value,
                    Link = feed.Element("link").Value,
                    Description = Regex.Match(feed.Element("description").Value, @"^.{1,180}\b(?<!\s)").Value
                };
    return Request.CreateResponse(HttpStatusCode.OK, feeds);
}
// Deletes a feed by id; 404 when the feed does not exist.
public HttpResponseMessage DeleteRSS(int id)
{
    RSS rss = rsscontext.RSST.Find(id);
    if (rss == null)
    {
        return Request.CreateResponse(HttpStatusCode.NotFound);
    }

    rsscontext.RSST.Remove(rss);
    rsscontext.SaveChanges();
    return Request.CreateResponse(HttpStatusCode.OK, rss);
}
// Returns a single feed by id, or 404 with a message when it does not exist.
public HttpResponseMessage GetRSS(int id)
{
    RSS rss = rsscontext.RSST.Find(id);
    if (rss == null)
    {
        return Request.CreateResponse(HttpStatusCode.NotFound, "Лента не Найдена");
    }

    return Request.CreateResponse(HttpStatusCode.OK, rss);
}
// Serializes the channel's title/description/link into the site's config.yaml.
private void WriteConfig(RSS content, ConverterOptions options)
{
    var channel = content.Channel;
    var config = new Config
    {
        Name = channel.Title,
        Description = channel.Description,
        Url = channel.Link
    };

    string fileName = Path.Combine(options.OutputDirectory, "config.yaml");
    string yaml = _yamlSerializer.Serialize(config);
    File.WriteAllText(fileName, yaml, Encoding.UTF8);
    _logger.LogInformation($"Written '{fileName}'.");
}
// Updates an existing feed; the route id must match the body's id and the
// model must validate.
public HttpResponseMessage EditRSS(int id, [FromBody] RSS rss)
{
    if (id != rss.Id)
    {
        return Request.CreateResponse(HttpStatusCode.BadRequest, "Лента не найдена!");
    }

    if (!ModelState.IsValid)
    {
        // Model validation failed — echo the validation state back.
        return Request.CreateResponse(HttpStatusCode.BadRequest, ModelState);
    }

    rsscontext.Entry(rss).State = EntityState.Modified;
    rsscontext.SaveChanges();
    return Request.CreateResponse(HttpStatusCode.OK, rss);
}
// Writes one Staticman comment YAML file per mapped comment and a CSV listing
// each unique commenter ("email,lower-cased name") for moderation tooling.
private void WriteComments(RSS content, ConverterOptions options)
{
    var posts = content.Channel.Items
        .Where(item => item.PostType == "post" || item.PostType == "page")
        .ToList();
    var staticmanComments = posts.SelectMany(post => _mapper.Map <IEnumerable <HugoModels.Comment> >(post));
    var knownCommenters = new HashSet <string>();
    // FIX: "data\\comments" used a hard-coded Windows separator; build the
    // path portably with Path.Combine.
    var commentsDirectory = Directory.CreateDirectory(
        Path.Combine(options.OutputDirectory, "data", "comments"));

    foreach (var comment in staticmanComments)
    {
        // HashSet.Add is a no-op on duplicates, so no Contains pre-check is needed.
        knownCommenters.Add($"{comment.Metadata.Email},{comment.Metadata.Name.ToLowerInvariant()}");

        // Ensure any sub-directory embedded in the comment's file name exists.
        // (The previous version assigned the same fileName in both branches.)
        string directory = Path.GetDirectoryName(comment.FileName);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(Path.Combine(commentsDirectory.FullName, directory));
        }

        string fileName = Path.Combine(commentsDirectory.FullName, comment.FileName);
        var yaml = _yamlSerializer.Serialize(comment.Metadata);
        File.WriteAllText(fileName, yaml, Encoding.UTF8);
        _logger.LogInformation($"Written '{fileName}'.");
    }

    string filename = Path.Combine(commentsDirectory.FullName, "known-commenters.csv");
    StringBuilder knownCommentersCsvContent = new StringBuilder();
    foreach (var knownCommenter in knownCommenters)
    {
        knownCommentersCsvContent.AppendLine(knownCommenter);
    }
    File.WriteAllText(filename, knownCommentersCsvContent.ToString(), Encoding.UTF8);
}
public void AddRSSFeedTest()
{
    // Arrange: a repository whose single collection already holds one feed.
    var mockFeedCollRepository = Substitute.For <IGenericRepository <FeedCollection> >();
    var existingFeed = new RSS() { Id = 1, Name = "Zero", RssUrl = "123" };
    var coll = new FeedCollection()
    {
        Id = 0,
        Feeds = new List <Feed>() { existingFeed }
    };
    mockFeedCollRepository.Get().Returns(coll);

    // Act: add a new feed to collection 0.
    editSubSys.AddRSSFeed(0, "test", "www.test");
}
/// <summary>
/// Converts each WordPress post/page in the feed into a Hugo content file
/// (YAML front matter + body), copying any referenced images alongside.
/// </summary>
private void WritePosts(RSS content, ConverterOptions options)
{
    // Only real posts and pages are converted; attachments are indexed by
    // post id for lookup during mapping.
    var posts = content.Channel.Items.Where(item => item.PostType == "post" || item.PostType == "page").ToList();
    _logger.LogInformation($"Found {posts.Count} posts/pages.");
    var attachments = content.Channel.Items.Where(item => item.PostType == "attachment").ToDictionary(key => key.PostId);
    // Map each post through AutoMapper, passing the attachment table, site URL
    // and page-resource option through the mapping context items.
    var hugoPosts = posts.Select(post => _mapper.Map <Post>(post, opts =>
    {
        opts.Items[ConverterLibraryAutoMapperProfile.ItemNameAttachments] = attachments;
        opts.Items[ConverterLibraryAutoMapperProfile.ItemNameSiteUrl] = content.Channel.Link;
        opts.Items[ConverterLibraryAutoMapperProfile.ItemNamePageResources] = options.PageResources;
    }));
    foreach (var hugoPost in hugoPosts)
    {
        // Pages land in the output root; posts go under the Hugo content directory.
        string outputDirectory = hugoPost.Metadata.Type == "page" ? options.OutputDirectory : Path.Combine(options.OutputDirectory, HugoContentDirectoryName);
        string postFileName = Path.Combine(outputDirectory, hugoPost.Filename);
        string postFullDirectory = Path.GetDirectoryName(postFileName);
        Directory.CreateDirectory(postFullDirectory);
        // With page resources enabled the images live next to the post, so no
        // base URL is needed; otherwise images resolve under "/uploads".
        string imageBaseUrl = options.PageResources ? null : GetImageBaseUrl(hugoPost, "/uploads");
        var replacedImages = _imageReplacer.Replace(hugoPost, content.Channel.Link, imageBaseUrl, options.ImageShortCode);
        CopyReplacedImagesToOutputDirectory(options, replacedImages, postFullDirectory);
        var yaml = _yamlSerializer.Serialize(hugoPost.Metadata);
        // Assemble the final file: "---"-delimited YAML front matter, then the content.
        StringBuilder hugoYaml = new StringBuilder();
        hugoYaml.AppendLine("---");
        hugoYaml.AppendLine(yaml);
        hugoYaml.AppendLine("---");
        hugoYaml.AppendLine(hugoPost.Content);
        File.WriteAllText(postFileName, hugoYaml.ToString(), Encoding.UTF8);
        _logger.LogInformation($"Written '{postFileName}'.");
    }
}
// Loads settings.xml from the current working directory and deserializes it
// into an RSS instance; any failure is reported to the user via a message box.
private void LoadXMLFile()
{
    try
    {
        // FIX: removed the unused xmlOutputData local and the redundant
        // string.Empty pre-initializations that were immediately overwritten.
        Serializer ser = new Serializer();
        string path = Directory.GetCurrentDirectory() + @"\settings.xml";
        string xmlInputData = File.ReadAllText(path);
        RSS rss = ser.Deserialize <RSS>(xmlInputData);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
// Builds one HeroCard per job posting whose summary starts with the requested
// category/location prefix; emits a single "not found" card when none match.
public static List <HeroCard> GetResults(string url, string cat, string loc)
{
    List <HeroCard> CardList = new List <HeroCard>();
    RSS rss = new RSS(url);
    SyndicationFeed Feed = rss.Get();
    if (Feed == null)
    {
        return CardList;
    }

    string expectedPrefix = "Category: " + cat + " <br><br>Location: " + loc;
    int matches = 0;
    foreach (var item in Feed.Items)
    {
        if (!item.Summary.Text.ToString().StartsWith(expectedPrefix))
        {
            continue;
        }

        matches = matches + 1;
        CardList.Add(new HeroCard
        {
            Title = item.Title.Text,
            Buttons = new List <CardAction>
            {
                new CardAction(ActionTypes.OpenUrl, "Link", value: item.Links.FirstOrDefault().Uri.ToString())
            }
        });
    }

    if (matches == 0)
    {
        CardList.Add(new HeroCard { Title = "No jobs found for your selection" });
    }

    return CardList;
}
public void ReadNewsTest()
{
    Arrange();

    var presentView = new List <NewsPresent>()
    {
        new NewsPresent() { Description = "Desc", Title = "Title", PublicationDate = DateTime.Now }
    };

    // The repository is mocked to return a single collection holding one feed.
    var mockFeedCollectionRepository = kernel.Get <IGenericRepository <FeedCollection> >();
    uow.FeedCollectionRepository.Returns(mockFeedCollectionRepository);
    var mockRSS = new RSS() { Id = 1, Name = "Name", RssUrl = "http://fakty.ua/rss_feed/ukraina" };
    mockFeedCollectionRepository.Get().Returns(new List <FeedCollection>()
    {
        new FeedCollection() { Id = 1, Feeds = new List <Feed>() { mockRSS } }
    });

    var newsPres = new NewsPresent() { Description = "info", Title = "title", PublicationDate = DateTime.Now };
    var backList = new List <NewsPresent>() { newsPres };

    // Reading collection 1 should not yield exactly 20 items.
    Assert.AreNotEqual(prss.ReadNews(1).Count, 20);
}
/// <summary>
/// Refreshes the RSS feed list for the given site.
/// </summary>
/// <param name="item">Site information.</param>
/// <param name="isListUpdate">Whether to refresh the ListBox display.</param>
private void UpdateListBox(RssSiteInfo item, Boolean isListUpdate)
{
    String url = item?.Link;
    if (url == null)
    {
        return;
    }
    Int32 masterID = item.ID;
    IEnumerable <FeedItem> feedItems = null;
    if (!IsOnline())
    {
        // No internet connection, so force offline mode.
        App.Configure.IsOffLine = true;
    }
    using (var db = new SQLite(MASTER_PATH))
    {
        db.Open();
        // Check the refresh interval and the offline-mode option before
        // hitting the network.
        if (CanRSSRead(db, masterID) && !(App.Configure?.IsOffLine ?? false))
        {
            // Download the feed data.
            feedItems = RSS.ReadFeedItems(url);
            // Record the download timestamp.
            UpdateLastSync(db, masterID);
        }
        // Merge the downloaded items (may be null when skipped) with the DB
        // and optionally refresh the on-screen list.
        var items = GetFeedItems(db, feedItems, masterID, isListUpdate);
        if (isListUpdate)
        {
            this.FeedList.ItemsSource = items;
        }
    }
}
/// <summary>
/// Registers a site.
/// </summary>
/// <param name="url">Web site URL.</param>
/// <param name="masterID">Master ID assigned in the database (ERROR_RESULT on failure).</param>
/// <param name="title">Site name read from the feed.</param>
/// <returns>Whether the registration was committed.</returns>
private Boolean InsertRSS(String url, out Int32 masterID, out String title)
{
    masterID = ERROR_RESULT;
    // Fetch the RSS once to verify the URL is valid.
    title = RSS.ReadFeedTitle(url);
    if (title == null)
    {
        MessageBox.Show("Failed to get information.");
        return(false);
    }
    var isCommit = false;
    using (var db = new SQLite(MASTER_PATH))
    {
        db.Open();
        // Check whether the site is already registered in the DB.
        if (SiteExists(db, url))
        {
            MessageBox.Show("It is already registered.");
        }
        else
        {
            try
            {
                db.BeginTransaction();
                masterID = SiteRegist(db, title, url);
                isCommit = true;
            }
            catch (Exception)
            {
                isCommit = false;
            }
            finally
            {
                // Commit or roll back depending on whether registration succeeded.
                db.EndTransaction(isCommit);
            }
        }
    }
    return(isCommit);
}
// Downloads each configured RSS source, parses its items into RSS models and
// stores the new ones, logging read/saved counts per source.
public async Task ReadAsync(List <RssSource> listSource)
{
    // FIX: a new HttpClient was created for every source (socket-exhaustion
    // risk) and never disposed; one shared, disposed client now serves all
    // requests.
    using (HttpClient httpClient = new HttpClient())
    {
        foreach (var source in listSource)
        {
            if (String.IsNullOrEmpty(source.Url))
            {
                continue;
            }

            var rssContent = await httpClient.GetStringAsync(source.Url);
            rssContent = rssContent.Trim('\n');

            using (XmlReader reader = XmlReader.Create(new StringReader(rssContent)))
            {
                SyndicationFeed feed = SyndicationFeed.Load(reader);
                Console.WriteLine(feed.Links[0].Uri);

                // FIX: the item list previously accumulated across sources, so
                // AddRange re-saved earlier sources' items on every iteration
                // and the logged counts were cumulative; build a fresh list per
                // source instead.
                List <RSS> rssList = new List <RSS>();
                foreach (SyndicationItem item in feed.Items)
                {
                    rssList.Add(new RSS
                    {
                        Headline = item.Title.Text,
                        Date = item.PublishDate.UtcDateTime.ToLocalTime(),
                        // Strip HTML tags from the summary text.
                        Description = Regex.Replace(item.Summary.Text, "<.*?>", string.Empty).Trim('\n'),
                        Url = item.Links.FirstOrDefault().Uri.ToString(),
                        SourceId = source.Id
                    });
                }

                var newList = AddRange(rssList);
                Console.WriteLine("Прочитано новостей: " + rssList.Count());
                Console.WriteLine("Сохранено новостей: " + newList.Count());
                Console.WriteLine();
            }
        }
    }
}
/// <summary>
/// ParseRSS gets only RSS items
/// </summary>
/// <param name="RSS">string RSS (NOTE: the parameter shadows the RSS type
/// name; kept unchanged for caller compatibility)</param>
/// <returns>items</returns>
private IEnumerable <RSS> ParseRSS(string RSS)
{
    RSS = DoRemovespace(RSS);
    XDocument xdoc = XDocument.Parse(RSS, LoadOptions.PreserveWhitespace);
    var items = from item in xdoc.Descendants().Elements("item") select item;
    var resp = new List <RSS>();
    foreach (var item in items)
    {
        // FIX: Element(...) returns null when a child is missing, and the
        // previous .Value access crashed with a NullReferenceException; the
        // explicit string cast yields null instead, and the date falls back
        // to default(DateTime) when pubDate is absent.
        string pubDate = (string)item.Element("pubDate");
        var rss = new RSS
        {
            Title = (string)item.Element("title"),
            Description = (string)item.Element("description"),
            LinkToOriginal = (string)item.Element("link"),
            Date = pubDate != null ? DateTime.Parse(pubDate) : default(DateTime)
        };
        resp.Add(rss);
    }
    return(resp);
}
// Building from a null element must be rejected with ArgumentNullException.
public void when_element_is_null()
{
    Assert.Throws <ArgumentNullException>(() => RSS.Build(null));
}
// -------------------------------------------------------------------------------
// -------------------------------------------------------------------------------
/// <summary>
/// This method retrieves enclosures of a specific mime type
/// </summary>
/// <param name="medium">General medium filter ("image"/"video"/"audio"), matched against the enclosure mime type.</param>
/// <param name="mimeType">Mime Type. NOTE... paramters such as image, video, audio alone are also allowed. </param>
/// <param name="ExitAfterOneFound">Stop after the first matching enclosure.</param>
/// <returns>The matching enclosures (all of them when no filter is given).</returns>
// -------------------------------------------------------------------------------
// -------------------------------------------------------------------------------
public IEnumerable <RssCoreItemEnclosure> GetEnclosures(string medium = "", string mimeType = "", bool ExitAfterOneFound = false)
{
    List <RssCoreItemEnclosure> rt = new List <RssCoreItemEnclosure>();
    StringComparer cmp = StringComparer.OrdinalIgnoreCase;
    bool TestMedium = false;
    bool TestMimeType = false;
    // set the test flags
    if (medium.Length > 0)
    {
        TestMedium = true;
    }
    if (mimeType.Length > 0)
    {
        TestMimeType = true;
    }
    // now loop through each enclosure
    for (int i = 0; i < enclosures.Count; i++)
    {
        bool bRslt = false;
        if (!bRslt && TestMimeType)
        {
            // NOTE(review): this inner check repeats the outer TestMimeType
            // condition and is redundant.
            if (TestMimeType)
            {
                // Exact, case-insensitive mime-type match.
                if (cmp.Compare(mimeType, enclosures[i].type) == 0)
                {
                    bRslt = true;
                }
            }
        }
        // if not good and we are testing for the general image, video, or
        // audio mime types
        if (!bRslt && TestMedium)
        {
            switch (medium)
            {
            case RSS.MEDIUM_TYPE_IMAGE:
                if (RSS.IsImageMimeType(enclosures[i].type))
                {
                    bRslt = true;
                }
                break;

            case RSS.MEDIUM_TYPE_VIDEO:
                if (RSS.IsVideoMimeType(enclosures[i].type))
                {
                    bRslt = true;
                }
                break;

            case RSS.MEDIUM_TYPE_AUDIO:
                if (RSS.IsAudioMimeType(enclosures[i].type))
                {
                    bRslt = true;
                }
                break;
            }
        }
        // if we are testing the url?
        // NOTE(review): this branch switches on mimeType but compares it to the
        // MEDIUM_TYPE_* constants and inspects the enclosure URL — presumably a
        // fallback for when a medium word ("image"/"video"/"audio") was passed
        // via the mimeType parameter; confirm that this is the intent.
        if (!bRslt && !TestMimeType)
        {
            switch (mimeType)
            {
            case RSS.MEDIUM_TYPE_IMAGE:
                if (RSS.IsImageUrl(enclosures[i].url))
                {
                    bRslt = true;
                }
                break;

            case RSS.MEDIUM_TYPE_VIDEO:
                if (RSS.IsVideoUrl(enclosures[i].url))
                {
                    bRslt = true;
                }
                break;

            case RSS.MEDIUM_TYPE_AUDIO:
                if (RSS.IsAudioUrl(enclosures[i].url))
                {
                    bRslt = true;
                }
                break;
            }
        }
        // if there were no tests,
        if (!bRslt && !TestMimeType && !TestMedium)
        {
            bRslt = true;
        }
        // if enclosure is a goodie
        if (bRslt)
        {
            // add the item to the returned enclosures list
            rt.Add(enclosures[i]);
            // now, if we are looking for only the first one, then we need to break
            if (ExitAfterOneFound)
            {
                break;
            }
        }
    }
    return(rt);
}
/// <summary>
/// Gets all the feeds from database (with-in limits in settings)
/// the try to gets all the new stuff from your sources
/// add the new ones to the database if there is any
/// then show the latest (with-in limits in settings)
/// </summary>
/// <param name="progress">Receives a running count of processed sources for UI progress.</param>
/// <param name="token">Cancels the load; on cancellation the loading flag is reset and the token source recreated.</param>
/// <returns>Task Type</returns>
public async Task LoadDataAsync(IProgress <int> progress, CancellationToken token)
{
    IsLoadingData = true;
    FilterSources.Clear();
    Feeds.Clear();
    ProgressCurrent = 0;
    bool hasLoadedFeedNewItems = false;
    // Shows the user what's new in this version
    await WhatsNewDisplayService.ShowIfAppropriateAsync();
    // Set Httpclient userAgent to the user selected one
    await RssRequest.SetCustomUserAgentAsync();
    // Load the locally stored feeds first (bounded by the "FeedsLimit" setting).
    foreach (var rss in await RSSDataService.GetFeedsDataAsync(await ApplicationData.Current.LocalSettings.ReadAsync <int>("FeedsLimit")))
    {
        Feeds.Add(rss);
    }
    SyndicationFeed feed = null;
    var sourcesDataList = await SourceDataService.GetSourcesDataAsync();
    ProgressMax = sourcesDataList.Count();
    int progressCount = 0;
    foreach (var source in sourcesDataList)
    {
        FilterSources.Add(source);
        if (token.IsCancellationRequested)
        {
            IsLoadingData = false;
            TokenSource = new CancellationTokenSource();
            MarkAsReadCommand.OnCanExecuteChanged();
            return;
        }
    }
    // if there is no internet just cut our loses and get out of here we already loaded the local data
    if (!new NetworkInformationHelper().HasInternetAccess)
    {
        await new MessageDialog("CheckInternetMessageDialog".GetLocalized()).ShowAsync();
        return;
    }
    var WaitAfterLastCheckInMinutes = await ApplicationData.Current.LocalSettings.ReadAsync <int>("WaitAfterLastCheck");
    foreach (var sourceItem in FilterSources)
    {
        bool isFirstItemInFeed = true;
        if (token.IsCancellationRequested)
        {
            IsLoadingData = false;
            TokenSource = new CancellationTokenSource();
            MarkAsReadCommand.OnCanExecuteChanged();
            return;
        }
        // don't get source feed if x number of minutes haven't passed since the last one - default is 2 hours
        var checkSourceAfter = sourceItem.LastBuildCheck.AddMinutes(WaitAfterLastCheckInMinutes);
        if (checkSourceAfter >= DateTimeOffset.Now)
        {
            continue;
        }
        if (!new NetworkInformationHelper().HasInternetAccess)
        {
            continue;
        }
        progress.Report(++progressCount);
        //if getting the feed crushed for (internet - not xml rss - other reasons)
        //move to the next source on the list to try it instead of stopping every thing
        try
        {
            var feedString = await RssRequest.GetFeedAsStringAsync(sourceItem.RssUrl, token);
            feed = new SyndicationFeed();
            if (string.IsNullOrWhiteSpace(feedString))
            {
                continue;
            }
            else
            {
                feed.Load(feedString);
                // Saves rss items count and last check time to source
                sourceItem.CurrentRssItemsCount = feed.Items.Count;
                sourceItem.LastBuildCheck = DateTimeOffset.Now;
                await SourceDataService.UpdateSourceAsync(sourceItem);
            }
        }
        catch (Exception ex)
        {
            Debug.WriteLine(ex);
            continue;
        }
        // Iterate through each feed item.
        foreach (SyndicationItem syndicationItem in feed.Items)
        {
            if (token.IsCancellationRequested)
            {
                IsLoadingData = false;
                TokenSource = new CancellationTokenSource();
                MarkAsReadCommand.OnCanExecuteChanged();
                return;
            }
            //handle edge cases like when they don't send that stuff or misplace them like freaking reddit r/worldnews
            if (syndicationItem.Title == null)
            {
                syndicationItem.Title = new SyndicationText("MainViewModelNoTitleFound".GetLocalized());
            }
            if (syndicationItem.Summary == null)
            {
                syndicationItem.Summary = new SyndicationText("MainViewModelNoSummaryFound".GetLocalized());
            }
            // A publish year before 2000 is treated as "missing": fall back to
            // the last-updated time, then to now.
            if (syndicationItem.PublishedDate.Year < 2000)
            {
                syndicationItem.PublishedDate = syndicationItem.LastUpdatedTime.Year > 2000 ?
                                                syndicationItem.LastUpdatedTime : DateTimeOffset.Now;
            }
            Uri itemNewUri = syndicationItem.ItemUri;
            if (itemNewUri == null)
            {
                if (syndicationItem.Links.Count > 0)
                {
                    itemNewUri = syndicationItem.Links.FirstOrDefault().Uri;
                }
            }
            // NOTE(review): itemNewUri can still be null here when the item has
            // neither an ItemUri nor any links; the ToString() below would then
            // throw — confirm all handled feeds carry at least one link.
            if (string.IsNullOrWhiteSpace(syndicationItem.Id))
            {
                syndicationItem.Id = itemNewUri.ToString();
            }
            var rss = new RSS
            {
                PostTitle = syndicationItem.Title.Text,
                Description = syndicationItem.Summary.Text,
                Authors = new List <Author>(),
                URL = itemNewUri,
                CreatedAt = syndicationItem.PublishedDate.DateTime,
                Guid = syndicationItem.Id,
                PostSource = sourceItem
            };
            foreach (var author in syndicationItem.Authors)
            {
                rss.Authors.Add(new Author { Name = author.Name, Email = author.Email, Uri = author.Uri });
            }
            // Only persist items not already in the database.
            if (!await RSSDataService.FeedExistAsync(rss))
            {
                var newRss = await RSSDataService.AddNewFeedAsync(rss);
                Feeds.Add(newRss);
                hasLoadedFeedNewItems = true;
                // Add first item in each source feed to Windows Live Tiles
                if (isFirstItemInFeed)
                {
                    Singleton <LiveTileService> .Instance.SampleUpdate(newRss.PostSource.SiteTitle, ShortenText(newRss.PostTitle, 80), ShortenText(newRss.Description, 95));
                }
                isFirstItemInFeed = false;
            }
        }
    }
    // New items were stored: reload from the database so the list reflects
    // the stored ordering and the configured limit.
    if (hasLoadedFeedNewItems)
    {
        Feeds.Clear();
        foreach (var rss in await RSSDataService.GetFeedsDataAsync(await ApplicationData.Current.LocalSettings.ReadAsync <int>("FeedsLimit")))
        {
            Feeds.Add(rss);
        }
    }
    IsLoadingData = false;
    MarkAsReadCommand.OnCanExecuteChanged();
}
/// <summary>
/// Fetches the channel's latest videos via the YouTube RSS feed.
/// </summary>
/// <param name="channelID">YouTube channel id.</param>
/// <returns>Feed items for the channel's newest uploads.</returns>
private FeedItem[] GetItems(String channelID)
{
    var feedUrl = $@"https://www.youtube.com/feeds/videos.xml?channel_id={channelID}";
    return RSS.ReadFeedItems(feedUrl).ToArray();
}
/// <summary>
/// Gets all the feeds from database (with-in limits in settings)
/// the try to gets all the new stuff from your sources
/// add the new ones to the database if there is any
/// then show the latest (with-in limits in settings)
/// </summary>
/// <param name="progress">Receives a running count of processed sources for UI progress.</param>
/// <param name="token">Cancels the load; on cancellation the loading flag is reset.</param>
/// <returns>Task Type</returns>
public async Task LoadDataAsync(IProgress <int> progress, CancellationToken token)
{
    IsLoadingData = true;
    FilterSources.Clear();
    Feeds.Clear();
    bool hasLoadedFeedNewItems = false;
    // Load the locally stored feeds first (bounded by the "FeedsLimit" setting).
    foreach (var rss in await RSSDataService.GetFeedsDataAsync(await ApplicationData.Current.LocalSettings.ReadAsync <int>("FeedsLimit")))
    {
        Feeds.Add(rss);
    }
    SyndicationFeed feed = new SyndicationFeed();
    var sourcesDataList = await SourceDataService.GetSourcesDataAsync();
    ProgressMax = sourcesDataList.Count();
    ProgressCurrent = 0;
    int progressCount = 0;
    foreach (var source in sourcesDataList)
    {
        FilterSources.Add(source);
        if (token.IsCancellationRequested)
        {
            IsLoadingData = false;
            return;
        }
    }
    // if there is no internet just cut our loses and get out of here we already loaded the local data
    if (!new NetworkInformationHelper().HasInternetAccess)
    {
        await new MessageDialog("CheckInternetMessageDialog".GetLocalized()).ShowAsync();
        return;
    }
    foreach (var sourceItem in FilterSources)
    {
        if (token.IsCancellationRequested)
        {
            IsLoadingData = false;
            return;
        }
        if (!new NetworkInformationHelper().HasInternetAccess)
        {
            continue;
        }
        progress.Report(++progressCount);
        //if getting the feed crushed for (internet - not xml rss - other reasons)
        //move to the next source on the list to try it instead of stoping every thing
        try
        {
            var feedString = await RssRequest.GetFeedAsStringAsync(sourceItem.RssUrl);
            if (string.IsNullOrWhiteSpace(feedString))
            {
                continue;
            }
            else
            {
                // Leading whitespace breaks XML parsing, so trim before loading.
                var xmlFeed = feedString.TrimStart();
                feed.Load(xmlFeed);
            }
        }
        catch (Exception ex)
        {
            Debug.WriteLine(ex);
            continue;
        }
        // Iterate through each feed item.
        foreach (SyndicationItem syndicationItem in feed.Items)
        {
            if (token.IsCancellationRequested)
            {
                IsLoadingData = false;
                return;
            }
            //handle edge cases like when they don't send that stuff or misplace them like freaking reddit r/worldnews
            if (syndicationItem.Title == null)
            {
                syndicationItem.Title = new SyndicationText("MainViewModelNoTitleFound".GetLocalized());
            }
            if (syndicationItem.Summary == null)
            {
                syndicationItem.Summary = new SyndicationText("MainViewModelNoSummaryFound".GetLocalized());
            }
            // A publish year before 2000 is treated as "missing": fall back to
            // the last-updated time, then to now.
            if (syndicationItem.PublishedDate.Year < 2000)
            {
                syndicationItem.PublishedDate = syndicationItem.LastUpdatedTime.Year > 2000 ?
                                                syndicationItem.LastUpdatedTime : DateTimeOffset.Now;
            }
            Uri itemNewUri = syndicationItem.ItemUri;
            if (itemNewUri == null)
            {
                if (syndicationItem.Links.Count > 0)
                {
                    itemNewUri = syndicationItem.Links.FirstOrDefault().Uri;
                }
            }
            var rss = new RSS
            {
                PostTitle = syndicationItem.Title.Text,
                Description = syndicationItem.Summary.Text,
                Authors = new List <Author>(),
                URL = itemNewUri,
                CreatedAt = syndicationItem.PublishedDate.DateTime,
                Guid = syndicationItem.Id,
                PostSource = sourceItem
            };
            foreach (var author in syndicationItem.Authors)
            {
                rss.Authors.Add(new Author { Name = author.Name, Email = author.Email, Uri = author.Uri });
            }
            // Only persist items not already in the database.
            if (!await RSSDataService.FeedExistAsync(rss))
            {
                var newRss = await RSSDataService.AddNewFeedAsync(rss);
                Feeds.Add(newRss);
                hasLoadedFeedNewItems = true;
            }
        }
        //shorten the text for windows 10 Live Tile
        Singleton <LiveTileService> .Instance.SampleUpdate(feed.Title.Text, ShortenText(feed.Items.FirstOrDefault()?.Title.Text, 80), ShortenText(feed.Items.FirstOrDefault()?.Summary.Text, 95));
    }
    // New items were stored: reload from the database so the list reflects
    // the stored ordering and the configured limit.
    if (hasLoadedFeedNewItems)
    {
        Feeds.Clear();
        foreach (var rss in await RSSDataService.GetFeedsDataAsync(await ApplicationData.Current.LocalSettings.ReadAsync <int>("FeedsLimit")))
        {
            Feeds.Add(rss);
        }
    }
    MarkAsReadCommand.OnCanExecuteChanged();
    IsLoadingData = false;
}
// Background-worker entry point: downloads the series detail XML, walks every
// episode across all moviesets, and posts new episodes to reddit, keeping the
// local SQLite episode table in sync (insert before posting, update the post
// URL on success, delete the row on failure).
public void GetShowDataAndPost(object o, DoWorkEventArgs args)
{
    string XML = string.Empty;
    using (WebClient WebClient = new WebClient())
    {
        WebClient.Headers.Add("Cache-Control", "no-cache");
        if (MainLogic.WebProxy != null)
        {
            WebClient.Proxy = MainLogic.WebProxy;
        }
        try
        {
            XML = WebClient.DownloadString(API_URL + "api2/seriesdetail/" + Source);
        }
        catch (WebException)
        {
            args.Result = "Failed connect for " + Title;
            return;
        }
        // This occurs when retrieving something that is not in your region.
        if (XML == "FAILED" || string.IsNullOrEmpty(XML))
        {
            return;
        }
    }
    GetDatabaseData();
    XElement RSS;
    try
    {
        RSS = XElement.Parse(XML);
    }
    catch (System.Xml.XmlException)
    {
        args.Result = "Failed XML parse for " + Title;
        return;
    }
    // "abstruct" is the element name used by the upstream API (sic).
    DAISUKISeriesTitle = RSS.Element("abstruct").Element("title").Value;
    List <XElement> Episodes = new List <XElement>();
    // We need to iterate over every movieset because depending on the region you're in
    // episodes may be in a different movieset. There's also cases where episodes are split across
    // moviesets. Special episodes always have their own movieset.
    IEnumerable <XElement> Moviesets = RSS.Elements("movieset");
    foreach (XElement Movieset in Moviesets)
    {
        IEnumerable <XElement> SubEpisodes = Movieset.Element("items").Elements("item");
        foreach (XElement SubEpisode in SubEpisodes)
        {
            Episodes.Add(SubEpisode);
        }
    }
    // Sort the episodes. Episodes that can't be parsed to a decimal are put in front of the list
    decimal dummy;
    Episodes = Episodes.OrderBy(e => (decimal.TryParse(e.Element("chapter").Value, out dummy) ?
                                      decimal.Parse(e.Element("chapter").Value) : 0)).ToList();
    foreach (XElement XElement in Episodes)
    {
        // Skip this show if the results are not for the current show
        if (DAISUKISeriesTitle != InternalTitle)
        {
            return;
        }
        ParseDAISUKIData(XElement);
        if (DAISUKIEpisodeNumber == null || !ApplyOffsetAndCheckValidity())
        {
            continue;
        }
        if (IsNewEpisode())
        {
            // How does posting to reddit work?:
            // 1 - Insert the episode into the database without PostURL so that other bots won't post in the meantime
            // 2 - Post to reddit
            // 3 - If the post failed remove the entry from the database. If it succeeded update PostURL with the URL
            try
            {
                string InsertEpisodeQuery = @" INSERT INTO Episodes VALUES (@Id, @EpisodeNumber, '')";
                using (SQLiteCommand InsertEpisodeCommand = new SQLiteCommand(InsertEpisodeQuery, MainLogic.CurrentDB))
                {
                    InsertEpisodeCommand.Parameters.AddWithValue("@Id", Id);
                    InsertEpisodeCommand.Parameters.AddWithValue("@EpisodeNumber", DAISUKIEpisodeNumber);
                    InsertEpisodeCommand.ExecuteNonQuery();
                }
            }
            catch
            {
                // Insert failed: report on the UI thread and skip this episode.
                MainLogic.MainForm.Invoke(new MethodInvoker(delegate()
                {
                    MainLogic.MainForm.ErrorListBox.Items.Insert(0, (DateTime.Now.ToString("HH:mm:ss: ") + "Failed insert in database for " + Title + " episode " + DAISUKIEpisodeNumber));
                }));
                continue;
            }
            if (PostOnReddit())
            {
                try
                {
                    string UpdateEpisodeQuery = @" UPDATE Episodes SET PostURL = @PostURL WHERE Id = @Id AND EpisodeNumber = @EpisodeNumber";
                    using (SQLiteCommand UpdateEpisodeCommand = new SQLiteCommand(UpdateEpisodeQuery, MainLogic.CurrentDB))
                    {
                        UpdateEpisodeCommand.Parameters.AddWithValue("@PostURL", PostURL);
                        UpdateEpisodeCommand.Parameters.AddWithValue("@Id", Id);
                        UpdateEpisodeCommand.Parameters.AddWithValue("@EpisodeNumber", DAISUKIEpisodeNumber);
                        UpdateEpisodeCommand.ExecuteNonQuery();
                        MainLogic.MainForm.Invoke(new MethodInvoker(delegate()
                        {
                            MainLogic.MainForm.RecentListBox.Items.Insert(0, (DateTime.Now.ToString("HH:mm:ss: ") + "Successful post for " + Title + " episode " + DAISUKIEpisodeNumber + " (" + PostURL + ')'));
                        }));
                    }
                }
                catch
                {
                    // The reddit post exists but the DB row lacks its URL — flagged
                    // as !ALERT! because manual reconciliation is needed.
                    MainLogic.MainForm.Invoke(new MethodInvoker(delegate()
                    {
                        MainLogic.MainForm.ErrorListBox.Items.Insert(0, (DateTime.Now.ToString("HH:mm:ss: ") + "!ALERT! Failed update in database for " + Title + " episode " + DAISUKIEpisodeNumber));
                    }));
                    continue;
                }
            }
            else
            {
                MainLogic.MainForm.Invoke(new MethodInvoker(delegate()
                {
                    MainLogic.MainForm.ErrorListBox.Items.Insert(0, (DateTime.Now.ToString("HH:mm:ss: ") + "Failed reddit post for " + Title + " episode " + DAISUKIEpisodeNumber));
                }));
                try
                {
                    // Roll back the reservation row so another run can retry the post.
                    string DeleteEpisodeQuery = @" DELETE FROM Episodes WHERE Id = @Id AND EpisodeNumber = @EpisodeNumber";
                    using (SQLiteCommand DeleteEpisodeCommand = new SQLiteCommand(DeleteEpisodeQuery, MainLogic.CurrentDB))
                    {
                        DeleteEpisodeCommand.Parameters.AddWithValue("@Id", Id);
                        DeleteEpisodeCommand.Parameters.AddWithValue("@EpisodeNumber", DAISUKIEpisodeNumber);
                        DeleteEpisodeCommand.ExecuteNonQuery();
                    }
                }
                catch
                {
                    MainLogic.MainForm.Invoke(new MethodInvoker(delegate()
                    {
                        MainLogic.MainForm.ErrorListBox.Items.Insert(0, (DateTime.Now.ToString("HH:mm:ss: ") + "!ALERT! Failed delete in database for " + Title + " episode " + DAISUKIEpisodeNumber));
                    }));
                    continue;
                }
            }
        }
        else
        {
            continue;
        }
    }
}
// A root element other than <rss> must be rejected with ArgumentException.
public void when_element_is_not_rss()
{
    var notRss = XElement.Parse("<i></i>");
    Assert.Throws <ArgumentException>(() => RSS.Build(notRss));
}
/// <summary>
/// Downloads the RSS document at the given URL and builds an RSS model from it.
/// </summary>
/// <param name="rssUrl">Address of the RSS feed.</param>
/// <returns>The parsed RSS model.</returns>
public async Task <RSS> GetRSSAsync(string rssUrl)
{
    var rssStream = await _httpClient.GetStreamAsync(rssUrl);
    var root = XElement.Load(rssStream);
    return RSS.Build(root);
}
// Downloads Flickr's public RSS feed, deserializes it, then fetches each
// item's thumbnail texture one request at a time.
private IEnumerator LoadFlickrFeed()
{
    // Flickr's public feed
    WWW www = new WWW("http://api.flickr.com/services/feeds/photos_public.gne?lang=en-us&format=rss_200");
    yield return www;

    if (!string.IsNullOrEmpty(www.error))
    {
        // Feed download failed; nothing to show.
        yield break;
    }

    var serializer = new XmlSerializer(typeof(RSS));
    rss = serializer.Deserialize(new StringReader(www.text)) as RSS;

    foreach (RSS.Item item in rss.channel.items)
    {
        www = new WWW(item.thumbnail.url);
        yield return www;

        if (string.IsNullOrEmpty(www.error))
        {
            item.small = www.textureNonReadable;
        }
        else
        {
            Debug.LogError(www.error + " " + www.url);
        }
    }
}