/// <summary>
/// Builds the robots.txt body from the supplied settings: one or more
/// User-agent lines (wildcard "*" when none are usable), Disallow/Allow
/// rules, an optional Crawl-delay, and an optional Sitemap URL.
/// </summary>
/// <param name="settings">Robots configuration: agents, allow/disallow paths, crawl delay, sitemap URL.</param>
/// <param name="context">Current HTTP context; used to absolutize a relative sitemap path against the request's scheme/host.</param>
/// <returns>The robots.txt content with uniform "\n" line endings.</returns>
private static string MakeRobots(ISiteRobotSettings settings, HttpContext context)
{
    var wtr = new StringBuilder();

    // Filter out null/whitespace entries up front. If nothing usable remains
    // (null list, empty list, or all-whitespace entries) fall back to the
    // wildcard group so the file always contains a User-agent line.
    var agents = (settings.UserAgents ?? Enumerable.Empty<string>())
        .Where(a => !string.IsNullOrWhiteSpace(a))
        .ToList();

    if (agents.Count > 0)
    {
        foreach (var agent in agents)
        {
            wtr.Append("User-agent: ").Append(agent).Append('\n');
        }
    }
    else
    {
        // Note: "\n" throughout — the original mixed AppendLine
        // (Environment.NewLine) with literal "\n", yielding mixed EOLs.
        wtr.Append("User-agent: *\n");
    }

    if (settings.DisallowPaths != null)
    {
        foreach (var path in settings.DisallowPaths.Where(p => !string.IsNullOrWhiteSpace(p)))
        {
            wtr.Append("Disallow: ").Append(path).Append('\n');
        }
    }

    if (settings.AllowPaths != null)
    {
        foreach (var path in settings.AllowPaths.Where(p => !string.IsNullOrWhiteSpace(p)))
        {
            wtr.Append("Allow: ").Append(path).Append('\n');
        }
    }

    if (settings.CrawlDelay.HasValue)
    {
        // Invariant culture: a fractional delay must never render as "1,5".
        wtr.AppendFormat(
            System.Globalization.CultureInfo.InvariantCulture,
            "Crawl-delay: {0}\n",
            settings.CrawlDelay.Value);
    }

    if (!string.IsNullOrWhiteSpace(settings.SitemapUrl))
    {
        string sitemapUri;

        // Absolute http(s) URLs are emitted as-is; anything else is treated
        // as a path relative to the current request's scheme and host.
        // (Replaces the regex ^http(?:s|):// with two ordinal prefix checks.)
        if (settings.SitemapUrl.StartsWith("http://", StringComparison.OrdinalIgnoreCase) ||
            settings.SitemapUrl.StartsWith("https://", StringComparison.OrdinalIgnoreCase))
        {
            sitemapUri = settings.SitemapUrl;
        }
        else
        {
            sitemapUri = new UriBuilder(context.Request.Url) { Path = settings.SitemapUrl }.ToString();
        }

        // Both branches above yield a non-whitespace value, so the original's
        // second IsNullOrWhiteSpace(sitemapUri) re-check was dead code.
        wtr.Append("Sitemap: ").Append(sitemapUri).Append('\n');
    }

    return wtr.ToString();
}
/// <summary>
/// Returns the robots.txt content, generating and memoizing it on first use.
/// Subsequent calls serve the cached value from <c>_robotsContent</c>.
/// </summary>
/// <param name="settings">Robots configuration forwarded to <see cref="MakeRobots"/>.</param>
/// <param name="context">Current HTTP context forwarded to <see cref="MakeRobots"/>.</param>
/// <returns>The cached or freshly generated robots.txt text.</returns>
private static string GetRobotsContent(ISiteRobotSettings settings, HttpContext context)
{
    // Fast path: already built (and non-blank) — reuse it.
    if (!string.IsNullOrWhiteSpace(_robotsContent))
    {
        return _robotsContent;
    }

    _robotsContent = MakeRobots(settings, context);
    return _robotsContent;
}
/// <summary>
/// Replaces the currently active robots settings with <paramref name="settings"/>.
/// </summary>
/// <param name="settings">The settings instance to make current.</param>
public static void Set(ISiteRobotSettings settings) => _current = settings;