public Pulse.Base.PictureList GetPictures(Pulse.Base.PictureSearch ps)
{
    WebClient wc = new WebClient();
    //download archive webpage
    var pg = wc.DownloadString(_url);

    //regex out the links to the individual pages
    Regex reg = new Regex("<a href=\"(?<picPage>ap.*\\.html)\">");
    Regex regPic = new Regex("<IMG SRC=\"(?<picURL>image.*)\"");

    var matches = reg.Matches(pg);

    var pl = new Pulse.Base.PictureList() { FetchDate = DateTime.Now };

    //if max picture count is 0, then no maximum, else specified max
    var maxPictureCount = ps.MaxPictureCount > 0 ?
        (ps.MaxPictureCount + ps.BannedURLs.Where(u => u.StartsWith("http://apod.nasa.gov/apod/")).Count()) :
        int.MaxValue;
    maxPictureCount = Math.Min(matches.Count, maxPictureCount);

    //counts might be a bit off in the event of bannings, but hopefully it won't be too far off.
    var matchesToGet = (from Match c in matches select c)
        .OrderBy(x => Guid.NewGuid())
        .Take(maxPictureCount);

    //build URLs, skip banned items, randomly sort the items and only bring back the desired number,
    // all in one go
    pl.Pictures.AddRange((from Match c in matchesToGet
                          let photoPage = new WebClient().DownloadString("http://apod.nasa.gov/apod/" + c.Groups["picPage"].Value)
                          let photoURL = "http://apod.nasa.gov/apod/" + regPic.Match(photoPage).Groups["picURL"].Value
                          where !ps.BannedURLs.Contains(photoURL)
                          select new Picture() { Url = photoURL, Id = System.IO.Path.GetFileNameWithoutExtension(photoURL) }));

    return pl;
}
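As a rough illustration (not part of the Pulse source), the APOD provider above could be exercised on its own along these lines; the class name ApodProvider and the initializer values are assumptions, and BannedURLs is initialized so the LINQ queries above do not hit a null list:

//hypothetical usage sketch: call the APOD provider directly and list what it found
var search = new Pulse.Base.PictureSearch()
{
    MaxPictureCount = 5,                 //assumed value; 0 would mean "no maximum"
    BannedURLs = new List<string>()      //empty ban list so nothing is filtered out
};
var provider = new ApodProvider();       //assumed class name for this provider
Pulse.Base.PictureList result = provider.GetPictures(search);
foreach (var p in result.Pictures)
{
    Console.WriteLine("{0} -> {1}", p.Id, p.Url);
}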
public PictureList GetPictureList(PictureSearch ps)
{
    PictureList Pictures = null;

    if (ps == null || ps.SearchProvider == null || ps.SearchProvider.Instance == null) { return Pictures; }

    //load any in memory cached results
    Pictures = ps.SearchProvider.SearchResults;
    var loadedFromFile = false;
    var fPath = Path.Combine(ps.SaveFolder,
        "CACHE_" + ps.GetSearchHash().ToString() + "_" + ps.SearchProvider.Instance.GetType().ToString() + ".xml");

    if (Pictures == null)
    {
        //if nothing in memory then try to load from disc
        Pictures = LoadCachedSearch(ps, fPath);
        loadedFromFile = Pictures != null;
    }
    else
    {
        loadedFromFile = false;
    }

    //if we have no pictures to work with, or our cached data has expired, try and get them
    if (Pictures == null || Pictures.Pictures.Count == 0 || Pictures.ExpirationDate < DateTime.Now)
    {
        Pictures = ((IInputProvider)ps.SearchProvider.Instance).GetPictures(ps);
        Pictures.SearchSettingsHash = ps.GetSearchHash();
        loadedFromFile = false;
    }

    //cache the picture list to file
    if (!loadedFromFile)
    {
        //make sure the API GuID has been injected into all pictures
        Pictures.Pictures.ForEach(x => x.ProviderInstance = ps.SearchProvider.ProviderInstanceID);
        //save it
        Pictures.Save(fPath);
    }

    //return whatever list of pictures was found
    return Pictures;
}
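A minimal sketch of the cache-aware call path, assuming a configured ActiveProviderInfo; the names activeProvider and the folder path are placeholders, not taken from the Pulse source:

//hypothetical usage sketch: resolve a picture list through the cache-aware path
var ps = new PictureSearch()
{
    SaveFolder = @"C:\Temp\PulseCache",   //assumed cache folder
    MaxPictureCount = 10,
    SearchProvider = activeProvider       //placeholder for a configured ActiveProviderInfo
};
PictureList pictures = PictureManager.GetPictureList(ps);
//a second call with identical settings is served from the in-memory SearchResults or from
//CACHE_<searchHash>_<providerType>.xml on disc (until ExpirationDate passes) instead of
//querying the provider again
PictureList again = PictureManager.GetPictureList(ps);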
public PictureList LoadCachedSearch(PictureSearch ps, string cachePath)
{
    PictureList result = null;

    //check if we should load from file
    if (File.Exists(cachePath))
    {
        try
        {
            result = PictureList.LoadFromFile(cachePath);
        }
        catch (Exception ex)
        {
            Log.Logger.Write(string.Format("Error loading picture cache from file, cache will not be used. Exception details: {0}", ex.ToString()), Log.LoggerLevels.Errors);
        }
    }

    return result;
}
protected void DownloadNextPicture()
{
    if (CurrentInputProviders.Count == 0) { return; }

    //create the new picture batch
    PictureBatch pb = new PictureBatch() { PreviousBatch = CurrentBatch };

    //create another view of the input providers, otherwise if the list changes
    // because user changes options then it breaks :)
    foreach (KeyValuePair<Guid, ActiveProviderInfo> kvpGAPI in CurrentInputProviders.ToArray())
    {
        ActiveProviderInfo api = kvpGAPI.Value;

        var ps = new PictureSearch()
        {
            SaveFolder = Settings.CurrentSettings.CachePath,
            MaxPictureCount = Settings.CurrentSettings.MaxPictureDownloadCount,
            SearchProvider = api,
            BannedURLs = Settings.CurrentSettings.BannedImages
        };

        //get new pictures
        PictureList pl = PictureManager.GetPictureList(ps);

        //save to picturebatch
        pb.AllPictures.Add(pl);
    }

    //process downloaded picture list
    ProcessDownloadedPicture(pb);

    //if prefetch is enabled, validate that all pictures have been downloaded
    if (Settings.CurrentSettings.PreFetch)
    {
        DownloadManager.PreFetchFiles(pb);
    }
}
/// <summary>
/// Retrieves a random picture from the picture list
/// </summary>
/// <param name="pl">Picture list from which to retrieve pictures</param>
/// <param name="currentPicture">(optional) the current picture, to avoid repeats. Pass null if not needed or this is the first picture.</param>
/// <param name="queueForDownload">Whether the selected picture should be queued for download</param>
public PictureDownload GetPicture(PictureList pl, Picture currentPicture, bool queueForDownload)
{
    Picture pic = null;

    if (pl == null || pl.Pictures.Count == 0) { return null; }

    //pick the next picture at random
    // only "non-random" bit is that we make sure that the next random picture isn't the same as our current one
    // (skip that check when the list holds a single picture, otherwise the loop would never terminate)
    var index = 0;
    do
    {
        index = rnd.Next(pl.Pictures.Count);
    } while (pl.Pictures.Count > 1 && currentPicture != null && currentPicture.Url == pl.Pictures[index].Url);

    pic = pl.Pictures[index];

    //download current picture first
    PictureDownload pd = GetPicture(pic, queueForDownload);

    return pd;
}
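A minimal sketch of how a caller might use this overload, assuming a downloader instance and an already-fetched PictureList; the names downloader, currentList and currentWallpaper are placeholders, not taken from the Pulse source:

//hypothetical usage sketch: pull the next random wallpaper, avoiding an immediate repeat
PictureDownload next = downloader.GetPicture(currentList, currentWallpaper, true);
if (next != null)
{
    //hand the download off to whatever applies the wallpaper once the file is available
}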