/// <summary>
/// Searches the lookup table for entries matching any whitespace-separated
/// term in <paramref name="query"/>, trying progressively looser matching:
/// exact match first, then case-sensitive substring, then case-insensitive
/// substring.
/// </summary>
/// <param name="query">One or more whitespace-separated search terms.</param>
/// <returns>
/// The matching (Name, Uri) pairs; empty when the lookups are empty or the
/// query is blank.
/// </returns>
public HashSet<(string Name, Uri uri)> Search(string query)
{
    if (!Lookups.Any() || string.IsNullOrWhiteSpace(query))
    {
        return new HashSet<(string Name, Uri uri)>();
    }

    // Split on any whitespace; drop empty entries so runs of spaces don't
    // produce "" terms (Contains("") would match every key).
    var nameQueries = query.Split((char[])null, StringSplitOptions.RemoveEmptyEntries);

    // Materialize each stage once: the original deferred IEnumerable was
    // re-evaluated by every Any() call and again by the final Select.

    // Exact match:
    var matches = Lookups
        .Where(kv => nameQueries.Any(q => kv.Key.Equals(q)))
        .ToList();

    // Case-sensitive substring:
    if (!matches.Any())
    {
        matches = Lookups
            .Where(kv => nameQueries.Any(q => kv.Key.Contains(q)))
            .ToList();
    }

    // Case-insensitive substring (IndexOf with OrdinalIgnoreCase avoids the
    // per-key ToLowerInvariant() string allocations of the original):
    if (!matches.Any())
    {
        matches = Lookups
            .Where(kv => nameQueries.Any(q =>
                kv.Key.IndexOf(q, StringComparison.OrdinalIgnoreCase) >= 0))
            .ToList();
    }

    return matches.Select(kv => (kv.Key, kv.Value)).ToHashSet();
}
/// <summary>
/// Searches the lookups (see <see cref="Search(string)"/>) and resolves each
/// matching name to its full wiki item.
/// </summary>
/// <param name="query">One or more whitespace-separated search terms.</param>
/// <returns>
/// The wiki items for every match that could be retrieved; matches whose
/// retrieval fails are skipped. Empty when there are no lookups.
/// </returns>
public HashSet<SDLWikiApiItem> SearchForWikiItems(string query)
{
    if (!Lookups.Any())
    {
        return new HashSet<SDLWikiApiItem>();
    }

    var results = new HashSet<SDLWikiApiItem>();

    foreach (var match in Search(query))
    {
        try
        {
            var item = GetWikiItem(match.Name, out _);
            if (item != null)
            {
                results.Add(item);
            }
        }
        catch (Exception ex)
        {
            // Best-effort: a single failed item shouldn't abort the whole
            // search, but the original bare catch swallowed the failure
            // silently — at least record what went wrong.
            _logger.Warn($"Failed to retrieve wiki item '{match.Name}': {ex.Message}");
        }
    }

    return results;
}
/// <summary>
/// Refreshes every expired item in the Wiki API cache from the remote host.
/// No-op when the cache itself has not yet expired or the host URI is unset.
/// The cache file is saved periodically during the update (at most once a
/// minute) and once more at the end.
/// </summary>
private void UpdateCache()
{
    if (DateTime.Now - Cache.LastUpdate >= SDLWikiApiCache.Expiration)
    {
        _logger.Info($"Updating the Wiki API Cache since it has expired (age: {(DateTime.Now - Cache.LastUpdate).TotalDays} days)");
    }
    else
    {
        return;
    }

    if (HostUri == null)
    {
        _logger.Error($"Aborting Wiki API Cache Update, the Host URI for the {GetType().Name} is invalid or not set");
        return;
    }

    if (Lookups.Any())
    {
        int updatedNodeCount = 0;
        DateTime lastFileUpdateTime = DateTime.Now; // Start with now so that the first save isn't immediate

        foreach (var lookupItem in Lookups)
        {
            var name = lookupItem.Key;
            var uri = lookupItem.Value;

            // Save the cache every minute while it's updating:
            if (DateTime.Now - lastFileUpdateTime >= TimeSpan.FromMinutes(1))
            {
                if (!string.IsNullOrWhiteSpace(CacheFile))
                {
                    Export(CacheFile);
                }

                // BUGFIX: reset the timer after saving. The original never
                // did, so once the first minute elapsed the cache file was
                // exported on every single iteration.
                lastFileUpdateTime = DateTime.Now;
            }

            if (!Cache.TryGetItem(name, out SDLWikiApiItem apiItem))
            {
                apiItem = new SDLWikiApiItem() { Name = name, Uri = uri };
                Cache.AddOrUpdate(apiItem);
            }

            if (DateTime.Now - apiItem.LastUpdate >= SDLWikiApiItem.Expiration)
            {
                apiItem.Update(_webClient);
                Cache.Update();
                updatedNodeCount++;
            }
            else
            {
                _logger.Info($"Skipping category named '{name}' because it has not expired (age: {(DateTime.Now - apiItem.LastUpdate).TotalDays} days)");
                continue;
            }
        }

        // The original counted updates but never reported them.
        _logger.Info($"Wiki API Cache update complete, {updatedNodeCount} item(s) refreshed");
    }
    else
    {
        _logger.Warn("The lookups are empty so the cache cannot be automatically updated");
    }

    if (!string.IsNullOrWhiteSpace(CacheFile))
    {
        Export(CacheFile);
    }
}
/// <summary>
/// Gets a wiki item by name, returning the cached copy when it is still
/// fresh and otherwise downloading the raw MoinMoin document from the wiki
/// host (via <c>?action=raw</c>).
/// </summary>
/// <param name="name">The wiki page / item name to retrieve.</param>
/// <param name="errorMessage">
/// Empty on success; otherwise a description of why the item could not be
/// retrieved.
/// </param>
/// <returns>
/// The item — <c>Live</c> is true when freshly downloaded, false when served
/// from cache — or null on failure.
/// </returns>
public SDLWikiApiItem GetWikiItem(string name, out string errorMessage)
{
    if (!Cache.TryGetItem(name, out SDLWikiApiItem item) || DateTime.Now - item.LastUpdate >= SDLWikiApiItem.Expiration)
    {
        // Maybe the casing is wrong, or maybe there's a partial match.
        // (Recursion terminates because we only recurse when the matched
        // name differs from the requested one.)
        if (Lookups.Any())
        {
            var match = Search(name).FirstOrDefault();
            if (match != default && match.Name != name)
            {
                return GetWikiItem(match.Name, out errorMessage);
            }
        }

        // Don't hammer the website:
        SurgeProtection.CheckBeforeRequest();

        // Download the document:
        string documentRawText;
        Uri documentUri = new Uri(HostUri, $"{name.Replace("sdl_", "SDL_")}?action=raw");
        try
        {
            documentRawText = _webClient.DownloadString(documentUri);
        }
        catch (WebException ex)
        {
            errorMessage = ex.Message;
            return null;
        }

        // The site responds with content indicating the Wiki document doesn't exist.
        // BUGFIX: the original lowercased only the document text and then searched it
        // for the mixed-case needle ("Page {name} not found"), which could never match
        // when the needle contained upper-case characters. Compare case-insensitively
        // on both sides instead.
        string notFoundText = $"Page {name} not found";
        string documentStart = string.Concat(documentRawText.Take(notFoundText.Length + 1));
        if (documentStart.IndexOf(notFoundText, StringComparison.OrdinalIgnoreCase) >= 0)
        {
            errorMessage = documentRawText;
            return null;
        }

        // The page should not be valid HTML since ?action=raw should return MoinMoin markup:
        var htmlDoc = new HtmlDocument();
        htmlDoc.LoadHtml(documentRawText);
        if (!htmlDoc.ParseErrors.Any())
        {
            errorMessage = "Couldn't receive the correct document because the content was in HTML rather than Markup";
            return null;
        }

        item = new SDLWikiApiItem(name, new Uri(HostUri, name));
        item.Update(documentRawText);
        item.Live = true;
        Cache.AddOrUpdate(item);

        // Guard the export like every other call site in this class does:
        if (!string.IsNullOrWhiteSpace(CacheFile))
        {
            Export(CacheFile);
        }
    }
    else
    {
        // Pulled directly from cache:
        if (item != null)
        {
            item.Live = false;
        }
    }

    errorMessage = string.Empty;
    return item;
}