/// <summary>
/// Intercepts navigation before it starts. Handles, in order: quit URLs
/// (raises <c>QuitUrlVisited</c>), blocked requests (raises
/// <c>RequestBlocked</c> for main-frame navigations only), and configuration
/// files (starts a download instead of navigating). Anything else is passed
/// through to the base handler.
/// </summary>
protected override bool OnBeforeBrowse(IWebBrowser webBrowser, IBrowser browser, IFrame frame, IRequest request, bool userGesture, bool isRedirect)
{
    // Quit URLs terminate navigation and notify listeners.
    if (IsQuitUrl(request))
    {
        QuitUrlVisited?.Invoke(request.Url);
        return true;
    }

    if (Block(request))
    {
        // Sub-resource blocks are silent; only top-level navigations
        // surface the event to subscribers.
        if (request.ResourceType == ResourceType.MainFrame)
        {
            RequestBlocked?.Invoke(request.Url);
        }

        return true;
    }

    // Configuration files are downloaded rather than navigated to.
    if (IsConfigurationFile(request, out var downloadUrl))
    {
        browser.GetHost().StartDownload(downloadUrl);
        return true;
    }

    return base.OnBeforeBrowse(webBrowser, browser, frame, request, userGesture, isRedirect);
}
/// <summary>
/// Cancels navigation for any request matched by <c>Block</c>, raising
/// <c>RequestBlocked</c> with the offending URL; otherwise defers to the
/// base handler.
/// </summary>
protected override bool OnBeforeBrowse(IWebBrowser chromiumWebBrowser, IBrowser browser, IFrame frame, IRequest request, bool userGesture, bool isRedirect)
{
    if (!Block(request))
    {
        // Not blocked — let the default handler decide.
        return base.OnBeforeBrowse(chromiumWebBrowser, browser, frame, request, userGesture, isRedirect);
    }

    RequestBlocked?.Invoke(request.Url);
    return true;
}
/// <summary>
/// Callback invoked when a request matches the blacklist. Raises the
/// <c>RequestBlocked</c> event and sends the block response back on the
/// session.
/// </summary>
/// <param name="args">The proxy session carrying the blocked request.</param>
/// <param name="url">The blacklisted URL.</param>
/// <param name="categories">Matched filter category ids; only the first entry is reported in the event.</param>
/// <returns>Always 0 (return value required by the callback contract; not otherwise used here).</returns>
public int OnBlacklist(Session args, string url, int[] categories)
{
    try
    {
        // BUGFIX: the original indexed categories[0] unconditionally. A null
        // or empty array threw before sendBlockResponse ran, so the client
        // never received the block page — the failure was only logged.
        // Fall back to category 0 ("unknown") and still send the response.
        short reportedCategory = (categories != null && categories.Length > 0)
            ? (short)categories[0]
            : (short)0;

        RequestBlocked?.Invoke(reportedCategory, BlockType.None, new Uri(url), "NOT AVAILABLE");
        sendBlockResponse(args, url, categories);
    }
    catch (Exception ex)
    {
        m_logger.Error("Exception occurred while processing blacklist notification.");
        LoggerUtil.RecursivelyLogException(m_logger, ex);
    }

    return 0;
}
/// <summary>
/// Pre-navigation hook: quit URLs cancel navigation and raise
/// <c>QuitUrlVisited</c>; blocked requests cancel navigation and raise
/// <c>RequestBlocked</c> (main-frame only). Everything else is delegated
/// to the base implementation.
/// </summary>
protected override bool OnBeforeBrowse(IWebBrowser webBrowser, IBrowser browser, IFrame frame, IRequest request, bool userGesture, bool isRedirect)
{
    // Quit URL check comes first so it wins over the block list.
    if (IsQuitUrl(request))
    {
        QuitUrlVisited?.Invoke(request.Url);
        return true;
    }

    if (!Block(request))
    {
        return base.OnBeforeBrowse(webBrowser, browser, frame, request, userGesture, isRedirect);
    }

    // Only announce blocks of top-level navigations; blocked
    // sub-resources are cancelled silently.
    if (request.ResourceType == ResourceType.MainFrame)
    {
        RequestBlocked?.Invoke(request.Url);
    }

    return true;
}
/// <summary>
/// Applies the configured rate-limit rules to an incoming request, raising
/// <c>RequestBlocked</c> when a limit is exceeded (short-circuiting with a
/// quota-exceeded response) and <c>Requested</c> when the request passes.
/// </summary>
/// <param name="e">Request event args carrying the HTTP request being limited.</param>
/// <param name="clientId">Client ID; when non-empty it overrides the ID resolved from the request.</param>
/// <param name="clientIp">Client IP; when non-empty it overrides the IP resolved from the request.</param>
/// <returns>A task that completes when rate-limit processing has finished.</returns>
public async Task Invoke(EventRequestingArgs e, string clientId = null, string clientIp = null)
{
    HttpRequest httpRequest = e.Request;

    // Compute the client identity from the request, honoring explicit overrides.
    var identity = ResolveIdentity(httpRequest);
    if (!String.IsNullOrEmpty(clientId))
    {
        identity.ClientId = clientId;
    }
    if (!String.IsNullOrEmpty(clientIp))
    {
        identity.ClientIp = clientIp;
    }

    // Whitelisted clients bypass rate limiting entirely.
    if (_processor.IsWhitelisted(identity))
    {
        return;
    }

    var rules = await _processor.GetMatchingRulesAsync(identity);
    var rulesDict = new Dictionary<RateLimitRule, RateLimitCounter>();

    foreach (var rule in rules)
    {
        // Increment the counter for this (identity, rule) pair.
        var rateLimitCounter = await _processor.ProcessRequestAsync(identity, rule);

        if (rule.Limit > 0)
        {
            // Skip counters whose period has already expired.
            if (rateLimitCounter.Timestamp + rule.PeriodTimespan.Value < DateTime.UtcNow)
            {
                continue;
            }

            // Limit reached: notify listeners, send the quota-exceeded
            // response, and stop processing further rules.
            if (rateLimitCounter.Count > rule.Limit)
            {
                var retryAfter = rateLimitCounter.Timestamp.RetryAfterFrom(rule);

                RequestBlocked?.Invoke(this, new EventRequestBlockedArgs()
                {
                    identity = identity,
                    rateLimitCounter = rateLimitCounter,
                    rateLimitRule = rule
                });

                // BUGFIX: was ReturnQuotaExceededResponse(...).Wait(), which
                // blocks a thread inside an async method (deadlock risk on
                // sync-context hosts) and wraps failures in AggregateException.
                // The zero-limit branch below already awaited; now both do.
                await ReturnQuotaExceededResponse(e, rule, retryAfter);
                return;
            }
        }
        else
        {
            // A non-positive limit blocks every request matching this rule.
            RequestBlocked?.Invoke(this, new EventRequestBlockedArgs()
            {
                identity = identity,
                rateLimitCounter = rateLimitCounter,
                rateLimitRule = rule
            });

            // Int32.MaxValue is used to represent an infinite retry-after.
            await ReturnQuotaExceededResponse(e, rule, int.MaxValue.ToString(System.Globalization.CultureInfo.InvariantCulture));
            return;
        }

        rulesDict.Add(rule, rateLimitCounter);
    }

    // Emit X-Rate-Limit headers for the rule with the longest period.
    if (rulesDict.Any() && !_options.DisableRateLimitHeaders)
    {
        var rule = rulesDict.OrderByDescending(x => x.Key.PeriodTimespan).FirstOrDefault();
        var headers = _processor.GetRateLimitHeaders(rule.Value, rule.Key);
        await SetRateLimitHeaders(e, headers);
    }

    // Request passed all limits — notify listeners with the evaluated rules.
    if (rulesDict.Any())
    {
        Requested?.Invoke(this, new EventRequestedArgs()
        {
            identity = identity,
            rules = rulesDict
        });
    }
}
/// <summary>
/// Proxy response hook: inspects the response for certificate problems and
/// classifiable content (HTML / JSON / plain text), and — via the finally
/// block — replaces the response with a custom block page whenever
/// <c>nextAction</c> ends up as <c>DropConnection</c>.
/// </summary>
/// <param name="args">The proxy session holding the request/response pair.</param>
internal void OnBeforeResponse(GoproxyWrapper.Session args)
{
    // Default: allow, but keep inspecting content. The finally block keys
    // off nextAction, so every early return below still goes through it.
    ProxyNextAction nextAction = ProxyNextAction.AllowButRequestContentInspection;
    bool shouldBlock = false; // NOTE(review): set but never read — candidate for removal.
    string customBlockResponseContentType = null;
    byte[] customBlockResponse = null;

    // Don't allow filtering if our user has been denied access and they
    // have not logged back in.
    if (m_ipcServer != null && m_ipcServer.WaitingForAuth)
    {
        return;
    }

    // The only thing we can really do in this callback, and the only thing we care to do, is
    // try to classify the content of the response payload, if there is any.
    try
    {
        Uri uri = new Uri(args.Request.Url);
        Uri serviceProviderPath = new Uri(CompileSecrets.ServiceProviderApiPath);

        // Never filter traffic to our own service provider.
        if (uri.Host == serviceProviderPath.Host)
        {
            return;
        }

        // Check our certificate exemptions to see if we should allow this site through or not.
        // Unverified certificate with no exemption for this host: serve the
        // bad-SSL page (the early return is routed through finally below).
        if (args.Response.CertificateCount > 0 && !args.Response.IsCertificateVerified && !m_certificateExemptions.IsExempted(uri.Host, args.Response.Certificates[0]))
        {
            customBlockResponseContentType = "text/html";
            customBlockResponse = m_templates.ResolveBadSslTemplate(new Uri(args.Request.Url), args.Response.Certificates[0].Thumbprint);
            nextAction = ProxyNextAction.DropConnection;
            return;
        }

        string contentType = null;
        if (args.Response.Headers.HeaderExists("Content-Type"))
        {
            contentType = args.Response.Headers.GetFirstHeader("Content-Type").Value;

            bool isHtml = contentType.IndexOf("html") != -1;
            bool isJson = contentType.IndexOf("json") != -1;
            bool isTextPlain = contentType.IndexOf("text/plain") != -1;

            // Only classify MIME types with a realistic risk of carrying
            // explicit content:
            //   text/plain, text/html, application/json
            // Anything else passes through unclassified.
            if (!(isHtml || isJson || isTextPlain))
            {
                return;
            }
        }

        // Note: if the response carries no Content-Type header at all,
        // contentType stays null and classification is skipped entirely.
        if (contentType != null && args.Response.HasBody)
        {
            contentType = contentType.ToLower();

            BlockType blockType;
            string textTrigger;
            string textCategory;

            byte[] responseBody = args.Response.Body;

            // Positive result (> 0) means the classifier matched a category.
            var contentClassResult = OnClassifyContent(responseBody, contentType, out blockType, out textTrigger, out textCategory);

            if (contentClassResult > 0)
            {
                shouldBlock = true;
                List<MappedFilterListCategoryModel> categories = new List<MappedFilterListCategoryModel>();
                nextAction = ProxyNextAction.DropConnection;

                // Pick the block payload to match what the client expects:
                // an HTML block page, or an empty JSON body for API calls.
                if (contentType.IndexOf("html") != -1)
                {
                    customBlockResponseContentType = "text/html";
                    customBlockResponse = m_templates.ResolveBlockedSiteTemplate(new Uri(args.Request.Url), contentClassResult, categories, blockType, textCategory);
                }
                else if (contentType.IndexOf("application/json", StringComparison.InvariantCultureIgnoreCase) != -1)
                {
                    customBlockResponseContentType = "application/json";
                    customBlockResponse = new byte[0];
                }

                RequestBlocked?.Invoke(contentClassResult, blockType, new Uri(args.Request.Url), "");
                m_logger.Info("Response blocked by content classification.");
            }
        }
    }
    catch (Exception e)
    {
        // Classification failures are logged and the response is allowed
        // through (nextAction unchanged), rather than failing closed.
        LoggerUtil.RecursivelyLogException(m_logger, e);
    }
    finally
    {
        // Centralized block delivery: every path that decided to drop the
        // connection (bad SSL or content classification) lands here.
        if (nextAction == ProxyNextAction.DropConnection)
        {
            // TODO: Do we really need this as Info?
            m_logger.Info("Response blocked: {0}", args.Request.Url);

            if (customBlockResponse != null)
            {
                args.SendCustomResponse((int)HttpStatusCode.OK, customBlockResponseContentType, customBlockResponse);
            }
        }
    }
}