/// <summary>
/// Flattens a <see cref="PageScrapeHistory"/> into a dynamic row: a "Date" member
/// holding the scrape's import start time, plus one member per scraped page
/// (keyed by page name) holding that page's fan count.
/// </summary>
/// <param name="scrape">The scrape history to flatten; must not be null.</param>
/// <returns>An <see cref="ExpandoObject"/> suitable for tabular export.</returns>
public static dynamic MapPageScrape(PageScrapeHistory scrape)
{
    dynamic expanded = new ExpandoObject();
    IDictionary<string, object> expandedDictionary = (IDictionary<string, object>)expanded;

    expanded.Date = scrape.ImportStart;

    foreach (PageMetadata page in scrape.Pages)
    {
        // Indexer assignment rather than Add: a duplicate page name (or a page
        // literally named "Date") overwrites instead of throwing ArgumentException.
        expandedDictionary[page.Name] = page.FanCount;
    }

    return expanded;
}
/// <summary>
/// Scrapes the fan counts of the requested pages (or of every known page when the
/// request body is null), persists each page's updated metadata, and records the
/// run as a new <see cref="PageScrapeHistory"/>.
/// </summary>
/// <param name="request">Page ids to scrape, or null to scrape all pages.</param>
/// <returns>The saved scrape-history record for this run.</returns>
public PageScrapeHistory ScrapePages([FromBody] IEnumerable <string> request)
{
    // If no pages were specified, scrape them all.
    PageMetadata[] pagesToScrape;
    if (request == null)
    {
        pagesToScrape = PageMetadataRepository.All().Data.ToArray();
    }
    else
    {
        // NOTE(review): assumes every requested id resolves; if Get returns null
        // for an unknown id, the loop below would fault — confirm Get's contract.
        pagesToScrape = request.Select(id => PageMetadataRepository.Get(id)).ToArray();
    }

    // NOTE(review): local wall-clock time; confirm whether downstream consumers
    // expect UTC for ImportStart/ImportEnd.
    DateTime scrapeStart = DateTime.Now;
    ScrapedPage[] pages = PageScraper.Scrape(pagesToScrape, scrapeStart).ToArray();

    // Now update the per-page list of all scraped pages.
    foreach (PageMetadata pageMetadata in pagesToScrape)
    {
        // First (not FirstOrDefault): throws InvalidOperationException if the
        // scraper returned no result for this page's id.
        ScrapedPage scrapedPage = pages.First(p => p.FacebookId == pageMetadata.FacebookId);
        pageMetadata.FanCountHistory.Insert(0, new DatedFanCount
        {
            Date = scrapedPage.Date,
            FanCount = scrapedPage.FanCount,
        });
        pageMetadata.LatestScrape = scrapeStart;

        // Save the metadata with its full fan-count history BEFORE truncating;
        // the truncation below only affects the in-memory copy embedded in the
        // scrape-history record, not what was just persisted.
        PageMetadataRepository.Save(pageMetadata, Refresh.False);

        // Only save the fan count on this date.
        pageMetadata.FanCountHistory = pageMetadata.FanCountHistory.Take(1).ToList();
    }

    // Now update the total-page list of the scrape.
    var pageScrapeHistory = new PageScrapeHistory
    {
        Id = Guid.NewGuid().ToString(),
        ImportStart = scrapeStart,
        ImportEnd = DateTime.Now,
        Pages = pagesToScrape
    };

    return(PageScrapeHistoryRepository.Save(pageScrapeHistory));
}