Example #1
 /// <summary>
 /// Whether the given user agent is allowed to crawl the root URL.
 /// NOTE: "userAgentString" is not necessarily the same as the user agent from the crawl configuration.
 /// </summary>
 /// <param name="userAgentString">The user agent to check against the robots.txt rules</param>
 /// <returns>True if the robots.txt content allows this user agent to crawl the root URL</returns>
 public bool IsUserAgentAllowed(string userAgentString)
 {
     return !string.IsNullOrEmpty(userAgentString) &&
            Robots.Allowed(RootUri.AbsoluteUri, userAgentString);
 }
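
As a rough usage sketch: assuming the method above lives on a robots.txt wrapper class (called RobotsDotText here; the class name, its constructor, and the RootUri/Robots members are assumptions, not shown in the snippet), a caller might check a specific user agent like this:

 // Hypothetical usage; RobotsDotText and its constructor are assumed.
 var robots = new RobotsDotText(
     new Uri("https://example.com/"),           // becomes RootUri
     "User-agent: *\nDisallow: /private/");     // raw robots.txt content

 // True when the robots.txt content allows this user agent to crawl the root URL.
 bool allowed = robots.IsUserAgentAllowed("MyCrawlerBot/1.0");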
Example #2
 /// <summary>
 /// Whether the spider is allowed to crawl the given URL.
 /// </summary>
 /// <param name="url">The URL to check</param>
 /// <param name="userAgentString">The user agent to check against the robots.txt rules</param>
 public bool IsUrlAllowed(string url, string userAgentString)
 {
     // URLs outside RootUri are not governed by this robots.txt and are
     // treated as allowed; URLs under RootUri are checked against the rules.
     return !RootUri.IsBaseOf(new Uri(url)) ||
            Robots.Allowed(url, userAgentString);
 }
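
Note the short-circuit: a URL on a different host or outside the root is always reported as allowed, because this robots.txt file has no authority over it. A hedged usage sketch, reusing the assumed robots instance from Example #1:

 // A URL on another host is always "allowed" by this check.
 bool external = robots.IsUrlAllowed("https://other-site.com/page", "MyCrawlerBot/1.0");   // true

 // A URL under RootUri is checked against the robots.txt rules;
 // with the "Disallow: /private/" rule assumed above, this is likely false.
 bool blocked = robots.IsUrlAllowed("https://example.com/private/x", "MyCrawlerBot/1.0");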