/// <summary>
/// Resolves the picture list for a search, trying in-memory cache first, then the
/// on-disk cache, then a fresh fetch from the search provider.
/// </summary>
/// <param name="ps">The search to satisfy; must carry an initialized provider.</param>
/// <returns>
/// The resolved <see cref="PictureList"/>, or null when the search/provider is
/// missing or the provider returns nothing.
/// </returns>
public PictureList GetPictureList(PictureSearch ps)
{
    // Nothing to do without a search and a live provider instance.
    if (ps == null || ps.SearchProvider == null || ps.SearchProvider.Instance == null)
    {
        return null;
    }

    // Load any in-memory cached results.
    PictureList pictures = ps.SearchProvider.SearchResults;

    // Cache file is keyed by the search-settings hash plus the provider type, so
    // different searches/providers never collide on disk.
    var fPath = Path.Combine(ps.SaveFolder,
        "CACHE_" + ps.GetSearchHash().ToString() + "_" + ps.SearchProvider.Instance.GetType().ToString() + ".xml");

    var loadedFromFile = false;
    if (pictures == null)
    {
        // Nothing in memory, so try to load from disc.
        pictures = LoadCachedSearch(ps, fPath);
        loadedFromFile = pictures != null;
    }

    // If we have no pictures to work with, or our cached data has expired, fetch fresh ones.
    if (pictures == null || pictures.Pictures.Count == 0 || pictures.ExpirationDate < DateTime.Now)
    {
        pictures = ((IInputProvider)ps.SearchProvider.Instance).GetPictures(ps);

        // Guard against a provider returning null instead of an empty list
        // (the original code would have thrown a NullReferenceException here).
        if (pictures == null)
        {
            return null;
        }

        pictures.SearchSettingsHash = ps.GetSearchHash();
        loadedFromFile = false;
    }

    // Cache the picture list to file unless it came straight from that file.
    if (!loadedFromFile)
    {
        // Make sure the provider instance GUID has been injected into all pictures.
        pictures.Pictures.ForEach(x => x.ProviderInstance = ps.SearchProvider.ProviderInstanceID);
        pictures.Save(fPath);
    }

    // Return whatever list of pictures was found.
    return pictures;
}
/// <summary>
/// Attempts to deserialize a previously cached search result from disk.
/// </summary>
/// <param name="ps">The search the cache belongs to (not read here; kept for signature compatibility).</param>
/// <param name="cachePath">Full path of the cache XML file.</param>
/// <returns>The cached list, or null when the file is absent or cannot be parsed.</returns>
public PictureList LoadCachedSearch(PictureSearch ps, string cachePath)
{
    // No cache file on disk means nothing to load.
    if (!File.Exists(cachePath))
    {
        return null;
    }

    try
    {
        return PictureList.LoadFromFile(cachePath);
    }
    catch (Exception ex)
    {
        // A corrupt/unreadable cache is not fatal — log it and fall back to "no cache".
        Log.Logger.Write(string.Format("Error loading picture cache from file, cache will not be used. Exception details: {0}", ex.ToString()), Log.LoggerLevels.Errors);
        return null;
    }
}
/// <summary>
/// Builds a fresh <see cref="PictureBatch"/> containing one picture list per
/// active input provider, then hands the batch off for processing (and,
/// optionally, pre-fetching).
/// </summary>
protected void DownloadNextPicture()
{
    // Nothing to download when no input providers are active.
    if (CurrentInputProviders.Count == 0)
    {
        return;
    }

    // Create the new picture batch, chained to the previous one.
    var batch = new PictureBatch() { PreviousBatch = CurrentBatch };

    // Snapshot the provider collection: the live one can change if the user
    // edits options mid-run, which would break enumeration.
    var providerSnapshot = CurrentInputProviders.ToArray();
    foreach (KeyValuePair<Guid, ActiveProviderInfo> entry in providerSnapshot)
    {
        ActiveProviderInfo providerInfo = entry.Value;

        var search = new PictureSearch()
        {
            SaveFolder = Settings.CurrentSettings.CachePath,
            MaxPictureCount = Settings.CurrentSettings.MaxPictureDownloadCount,
            SearchProvider = providerInfo,
            BannedURLs = Settings.CurrentSettings.BannedImages
        };

        // Get new pictures for this provider and record them in the batch.
        batch.AllPictures.Add(PictureManager.GetPictureList(search));
    }

    // Process the downloaded picture lists.
    ProcessDownloadedPicture(batch);

    // If prefetch is enabled, validate that all pictures have been downloaded.
    if (Settings.CurrentSettings.PreFetch)
    {
        DownloadManager.PreFetchFiles(batch);
    }
}
/// <summary>
/// Scrapes the APOD archive page, follows a random subset of the per-day picture
/// pages, and collects direct image URLs (skipping banned ones).
/// </summary>
/// <param name="ps">Search settings: max count and banned-URL list are honored.</param>
/// <returns>A list of APOD pictures; may be smaller than requested if pages lack images.</returns>
public Pulse.Base.PictureList GetPictures(Pulse.Base.PictureSearch ps)
{
    var pl = new Pulse.Base.PictureList() { FetchDate = DateTime.Now };

    // Regex out the links to the individual pages, and the image URL inside each page.
    Regex reg = new Regex("<a href=\"(?<picPage>ap.*\\.html)\">");
    Regex regPic = new Regex("<IMG SRC=\"(?<picURL>image.*)\"");

    // WebClient is IDisposable — the original leaked the outer client and allocated a
    // fresh undisposed client per picture page; one shared, disposed client suffices.
    using (WebClient wc = new WebClient())
    {
        // Download the archive webpage listing all picture pages.
        var pg = wc.DownloadString(_url);
        var matches = reg.Matches(pg);

        // If max picture count is 0 there is no maximum; otherwise pad the limit by the
        // number of banned APOD URLs so bans don't shrink the final result set.
        var maxPictureCount = ps.MaxPictureCount > 0 ?
            (ps.MaxPictureCount + ps.BannedURLs.Where(u => u.StartsWith("http://apod.nasa.gov/apod/")).Count()) :
            int.MaxValue;
        maxPictureCount = Math.Min(matches.Count, maxPictureCount);

        // Randomly order the pages and take only the desired number. Counts might be a
        // bit off in the event of bannings, but hopefully it won't be too far off.
        var matchesToGet = (from Match c in matches select c)
            .OrderBy(x => Guid.NewGuid())
            .Take(maxPictureCount);

        // Visit each picture page, extract the image URL, and skip banned items.
        foreach (Match c in matchesToGet)
        {
            var photoPage = wc.DownloadString("http://apod.nasa.gov/apod/" + c.Groups["picPage"].Value);
            var photoURL = "http://apod.nasa.gov/apod/" + regPic.Match(photoPage).Groups["picURL"].Value;

            if (ps.BannedURLs.Contains(photoURL))
            {
                continue;
            }

            pl.Pictures.Add(new Picture()
            {
                Url = photoURL,
                Id = System.IO.Path.GetFileNameWithoutExtension(photoURL)
            });
        }
    }

    return pl;
}
/// <summary>
/// Downloads the National Geographic wallpaper index page, then parses each
/// referenced XML file into pictures until the requested maximum is reached.
/// </summary>
/// <param name="ps">Search settings: max count and banned-URL list are honored.</param>
/// <returns>The collected picture list; files that fail to parse are logged and skipped.</returns>
public Pulse.Base.PictureList GetPictures(Pulse.Base.PictureSearch ps)
{
    PictureList pl = new PictureList() { FetchDate = DateTime.Now };

    // A MaxPictureCount of 0 (or less) means "no limit"; hoisted out of the loop
    // instead of being recomputed on every iteration as before.
    var maxPictures = ps.MaxPictureCount > 0 ? ps.MaxPictureCount : int.MaxValue;

    string content;
    // WebClient is IDisposable — the original never disposed it.
    using (WebClient wc = new WebClient())
    {
        // Download the pictures index page.
        content = wc.DownloadString(_baseURL + "/wallpaper/download");
    }

    // Get paths to the xml files referenced by the index page.
    var xmlPaths = ParseXMLPaths(content);

    // Download and parse each xml file; a failure in one file is logged and skipped
    // so the remaining files still contribute pictures.
    foreach (string xmlFile in xmlPaths)
    {
        try
        {
            var pics = ParsePictures(xmlFile);

            // Clear out banned images.
            pics = (from c in pics where !ps.BannedURLs.Contains(c.Url) select c).ToList();

            pl.Pictures.AddRange(pics);
        }
        catch (Exception ex)
        {
            Log.Logger.Write(string.Format("Error loading/parsing National Geographic pictures from XML. XML file URL: '{0}'. Exception details: {1}", _baseURL + xmlFile, ex.ToString()), Log.LoggerLevels.Errors);
        }

        // Stop once we have gathered enough pictures.
        if (pl.Pictures.Count >= maxPictures)
        {
            break;
        }
    }

    return pl;
}