/// <summary>
/// Sends a single queued web hook notification via HTTP POST. If the endpoint
/// responds 410 Gone, the web hook registration is deleted.
/// </summary>
/// <param name="context">Queue entry context carrying the <c>WebHookNotification</c> payload.</param>
/// <returns><c>JobResult.Success</c> on completion, or <c>JobResult.FromException</c> when the POST throws.</returns>
protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <WebHookNotification> context) {
    WebHookNotification body = context.QueueEntry.Value;
    // Suppress per-entry logging for the internal project to avoid log noise.
    bool shouldLog = body.ProjectId != Settings.Current.InternalProjectId;
    _logger.Trace().Project(body.ProjectId).Message("Process web hook call: id={0} project={1} url={2}", context.QueueEntry.Id, body.ProjectId, body.Url).WriteIf(shouldLog);

    // BUG FIX: the per-call HttpClient was never disposed, leaking the underlying
    // handler/sockets on every queue entry. NOTE(review): a single shared client
    // (or IHttpClientFactory) would be better still — confirm against the class design.
    using (var client = new HttpClient()) {
        try {
            var response = await client.PostAsJsonAsync(body.Url, body.Data.ToJson(Formatting.Indented, _jsonSerializerSettings), context.CancellationToken).AnyContext();
            if (response.StatusCode == HttpStatusCode.Gone) {
                // The endpoint is permanently gone; remove the hook so we stop posting to it.
                _logger.Warn().Project(body.ProjectId).Message("Deleting web hook: org={0} project={1} url={2}", body.OrganizationId, body.ProjectId, body.Url).Write();
                await _webHookRepository.RemoveAsync(body.WebHookId).AnyContext();
            }

            _logger.Info().Project(body.ProjectId).Message("Web hook POST complete: status={0} org={1} project={2} url={3}", response.StatusCode, body.OrganizationId, body.ProjectId, body.Url).WriteIf(shouldLog);
        } catch (Exception ex) {
            return JobResult.FromException(ex);
        }
    }

    return JobResult.Success;
}
/// <summary>
/// Downloads the MaxMind GeoLite2 City database (gzip) and stores the
/// decompressed file, skipping the download if storage already has a copy
/// modified today.
/// </summary>
/// <param name="context">Job context supplying the cancellation token.</param>
/// <returns>Success, a failure message when the download is rejected, or the captured exception.</returns>
protected override async Task <JobResult> RunInternalAsync(JobContext context) {
    try {
        // Skip the download when the stored database was already refreshed today.
        var fi = await _storage.GetFileInfoAsync(MaxMindGeoIpService.GEO_IP_DATABASE_PATH).AnyContext();
        if (fi != null && fi.Modified.IsAfter(SystemClock.UtcNow.StartOfDay())) {
            _logger.Info("The GeoIP database is already up-to-date.");
            return JobResult.Success;
        }

        _logger.Info("Downloading GeoIP database.");
        // BUG FIX: the HttpClient and the HttpResponseMessage were never disposed,
        // leaking connections on every run; wrap both in using blocks.
        using (var client = new HttpClient())
        using (var file = await client.GetAsync("http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz", context.CancellationToken).AnyContext()) {
            if (!file.IsSuccessStatusCode)
                return JobResult.FailedWithMessage("Unable to download GeoIP database.");

            _logger.Info("Extracting GeoIP database");
            // GZipStream disposes the wrapped response stream when it is disposed.
            using (var decompressionStream = new GZipStream(await file.Content.ReadAsStreamAsync().AnyContext(), CompressionMode.Decompress))
                await _storage.SaveFileAsync(MaxMindGeoIpService.GEO_IP_DATABASE_PATH, decompressionStream, context.CancellationToken).AnyContext();
        }
    } catch (Exception ex) {
        _logger.Error(ex, "An error occurred while downloading the GeoIP database.");
        return JobResult.FromException(ex);
    }

    _logger.Info("Finished downloading GeoIP database.");
    return JobResult.Success;
}
/// <summary>
/// Sends a queued web hook notification using the shared <c>_client</c>.
/// Handles rate limiting (429) and removes the web hook integration on
/// 401/403/404/410 responses.
/// </summary>
/// <param name="context">Queue entry context carrying the <c>WebHookNotification</c> payload.</param>
/// <returns>Success, a rate-limit failure, or the exception thrown by the POST.</returns>
protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <WebHookNotification> context) {
    var body = context.QueueEntry.Value;
    // Suppress per-entry logging for the internal project to avoid log noise.
    bool shouldLog = body.ProjectId != Settings.Current.InternalProjectId;
    _logger.Trace().Project(body.ProjectId).Message("Process web hook call: id={0} project={1} url={2}", context.QueueEntry.Id, body.ProjectId, body.Url).WriteIf(shouldLog);

    HttpResponseMessage response = null;
    try {
        response = await _client.PostAsJsonAsync(body.Url, body.Data.ToJson(Formatting.Indented, _jsonSerializerSettings), context.CancellationToken).AnyContext();
    } catch (Exception ex) {
        // response is always null here (the assignment never completed), so status logs as empty.
        _logger.Error().Exception(ex).Project(body.ProjectId).Message("Error calling web hook: status={0} org={1} project={2} url={3}", response?.StatusCode, body.OrganizationId, body.ProjectId, body.Url).WriteIf(shouldLog);
        return JobResult.FromException(ex);
    }

    // BUG FIX: the Retry-After header may be absent on a 429 response; the original
    // dereferenced response.Headers.RetryAfter.Date unconditionally and could throw
    // a NullReferenceException. Use null-conditional access instead.
    if ((int)response.StatusCode == 429 && response.Headers.RetryAfter?.Date != null) {
        // TODO: Better handle rate limits
        // throw new RateLimitException { RetryAfter = response.Headers.RetryAfter.Date.Value.UtcDateTime };
        _logger.Warn().Project(body.ProjectId).Message("Web hook rate limit reached: status={0} org={1} project={2} url={3}", response.StatusCode, body.OrganizationId, body.ProjectId, body.Url).WriteIf(shouldLog);
        return JobResult.FailedWithMessage("Rate limit exceeded");
    }

    // These statuses indicate the endpoint is invalid or no longer authorized;
    // remove the integration so we stop posting to it.
    if (response.StatusCode == HttpStatusCode.Unauthorized || response.StatusCode == HttpStatusCode.Forbidden || response.StatusCode == HttpStatusCode.NotFound || response.StatusCode == HttpStatusCode.Gone) {
        _logger.Warn().Project(body.ProjectId).Message("Deleting web hook: status={0} org={1} project={2} url={3}", response.StatusCode, body.OrganizationId, body.ProjectId, body.Url).WriteIf(shouldLog);
        await RemoveIntegrationAsync(body).AnyContext();
    }

    _logger.Info().Project(body.ProjectId).Message("Web hook POST complete: status={0} org={1} project={2} url={3}", response.StatusCode, body.OrganizationId, body.ProjectId, body.Url).WriteIf(shouldLog);
    return JobResult.Success;
}
/// <summary>
/// Replaces the stored GeoIP database: deletes any existing copy, downloads
/// the GeoLite2 City gzip archive, and saves the decompressed database.
/// </summary>
/// <param name="context">Job run context supplying the cancellation token.</param>
/// <returns>Success, a failure message when the download is rejected, or the captured exception.</returns>
protected override async Task <JobResult> RunInternalAsync(JobRunContext context) {
    try {
        // Delete-before-download: unlike the date-checked variant, this job always refreshes.
        if (await _storage.ExistsAsync(MaxMindGeoIpService.GEO_IP_DATABASE_PATH).AnyContext()) {
            Logger.Info().Message("Deleting existing GeoIP database.").Write();
            await _storage.DeleteFileAsync(MaxMindGeoIpService.GEO_IP_DATABASE_PATH, context.CancellationToken).AnyContext();
        }

        Logger.Info().Message("Downloading GeoIP database.").Write();
        // BUG FIX: the HttpClient and the HttpResponseMessage were never disposed,
        // leaking connections on every run; wrap both in using blocks.
        using (var client = new HttpClient())
        using (var file = await client.GetAsync("http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz", context.CancellationToken).AnyContext()) {
            if (!file.IsSuccessStatusCode)
                return JobResult.FailedWithMessage("Unable to download GeoIP database.");

            Logger.Info().Message("Extracting GeoIP database").Write();
            using (GZipStream decompressionStream = new GZipStream(await file.Content.ReadAsStreamAsync().AnyContext(), CompressionMode.Decompress))
                await _storage.SaveFileAsync(MaxMindGeoIpService.GEO_IP_DATABASE_PATH, decompressionStream, context.CancellationToken).AnyContext();
        }
    } catch (Exception ex) {
        Logger.Error().Exception(ex).Message("An error occurred while downloading the GeoIP database.").Write();
        return JobResult.FromException(ex);
    }

    Logger.Info().Message("Finished downloading GeoIP database.").Write();
    return JobResult.Success;
}
/// <summary>
/// Invokes <see cref="RunInternalAsync"/> and converts any synchronously thrown
/// exception into a faulted-result task, so callers always receive a
/// <c>Task&lt;JobResult&gt;</c> rather than an exception.
/// </summary>
/// <param name="token">Cancellation token forwarded to the job implementation.</param>
private Task <JobResult> TryRunAsync(CancellationToken token) {
    Task <JobResult> result;
    try {
        result = RunInternalAsync(token);
    } catch (Exception ex) {
        // Only exceptions thrown before the first await land here; faults after
        // that surface through the returned task itself.
        result = Task.FromResult(JobResult.FromException(ex));
    }
    return result;
}
/// <summary>
/// Sends a single queued mail message through the configured mail sender.
/// </summary>
/// <param name="context">Queue entry context carrying the <c>MailMessage</c> to send.</param>
/// <returns>Success when sent; the captured exception otherwise.</returns>
protected override async Task <JobResult> ProcessQueueEntryAsync(JobQueueEntryContext <MailMessage> context) {
    var entry = context.QueueEntry;
    Logger.Trace().Message("Processing message '{0}'.", entry.Id).Write();

    try {
        await _mailSender.SendAsync(entry.Value).AnyContext();
        Logger.Info().Message("Sent message: to={0} subject=\"{1}\"", entry.Value.To, entry.Value.Subject).Write();
    } catch (Exception ex) {
        return JobResult.FromException(ex);
    }

    return JobResult.Success;
}
/// <summary>
/// Sends a single queued mail message through the configured mail sender.
/// </summary>
/// <param name="context">Queue entry context carrying the <c>MailMessage</c> to send.</param>
/// <returns>Success when sent; the captured exception otherwise.</returns>
protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <MailMessage> context) {
    var message = context.QueueEntry.Value;
    _logger.LogTrace("Processing message {Id}.", context.QueueEntry.Id);

    try {
        await _mailSender.SendAsync(message).AnyContext();
        _logger.LogInformation("Sent message: to={To} subject={Subject}", message.To, message.Subject);
    } catch (Exception ex) {
        return JobResult.FromException(ex);
    }

    return JobResult.Success;
}
/// <summary>
/// Drains the EventUserDescription queue, applying each user description to
/// its event, until cancellation or the configured work-item limit is hit.
/// </summary>
/// <returns>Success when the loop exits normally; the captured exception on fatal errors.</returns>
protected async override Task <JobResult> RunInternalAsync() {
    Log.Info().Message("Process user description job starting").Write();
    int totalUserDescriptionsProcessed = 0;
    // -1 means no work-item limit.
    int totalUserDescriptionsToProcess = Context.GetWorkItemLimit();
    while (!CancelPending && (totalUserDescriptionsToProcess == -1 || totalUserDescriptionsProcessed < totalUserDescriptionsToProcess)) {
        QueueEntry <EventUserDescription> queueEntry = null;
        try {
            queueEntry = await _queue.DequeueAsync();
        } catch (Exception ex) {
            // A TimeoutException just means the queue was empty; anything else ends the run.
            if (!(ex is TimeoutException)) {
                Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next EventUserDescription: {0}", ex.Message).Write();
                return JobResult.FromException(ex);
            }
        }
        if (queueEntry == null)
            continue;

        _statsClient.Counter(StatNames.EventsUserDescriptionDequeued);
        Log.Info().Message("Processing EventUserDescription '{0}'.", queueEntry.Id).Write();
        try {
            ProcessUserDescription(queueEntry.Value);
            totalUserDescriptionsProcessed++;
            _statsClient.Counter(StatNames.EventsUserDescriptionProcessed);
        } catch (DocumentNotFoundException ex) {
            _statsClient.Counter(StatNames.EventsUserDescriptionErrors);
            // BUG FIX: was AbandonAsync().Wait() — blocking on an async call inside an
            // async method risks deadlock/thread-pool starvation; await it instead.
            await queueEntry.AbandonAsync();
            Log.Error().Exception(ex).Message("An event with this reference id \"{0}\" has not been processed yet or was deleted. Queue Id: {1}", ex.Id, queueEntry.Id).Write();
            continue;
        } catch (Exception ex) {
            _statsClient.Counter(StatNames.EventsUserDescriptionErrors);
            // BUG FIX: was AbandonAsync().Wait(); await instead (see above).
            await queueEntry.AbandonAsync();
            // TODO: Add the EventUserDescription to the logged exception.
            Log.Error().Exception(ex).Message("An error occurred while processing the EventUserDescription '{0}': {1}", queueEntry.Id, ex.Message).Write();
            return JobResult.FromException(ex);
        }

        await queueEntry.CompleteAsync();
    }
    return JobResult.Success;
}
/// <summary>
/// Dequeues a single web hook notification and POSTs it synchronously.
/// A 410 Gone response causes the web hook URL to be removed.
/// </summary>
/// <param name="token">Cancellation token (unused in this legacy synchronous implementation).</param>
/// <returns>Success, or the exception thrown by the POST.</returns>
protected async override Task <JobResult> RunInternalAsync(CancellationToken token) {
    // NOTE(review): this method is async but contains no awaits (sync Dequeue/PostAsJson);
    // kept as-is to match the base-class signature.
    Log.Trace().Message("Web hook job starting").Write();
    QueueEntry <WebHookNotification> queueEntry = null;
    try {
        queueEntry = _queue.Dequeue();
    } catch (Exception ex) {
        // A TimeoutException just means the queue was empty; anything else is fatal.
        if (!(ex is TimeoutException)) {
            Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next WebHookNotification: {0}", ex.Message).Write();
            return JobResult.FromException(ex);
        }
    }
    if (queueEntry == null)
        return JobResult.Success;

    Log.Trace().Message("Processing WebHookNotification '{0}'.", queueEntry.Id).Write();
    WebHookNotification body = queueEntry.Value;
    Log.Trace().Project(body.ProjectId).Message("Process web hook call: project={0} url={1}", body.ProjectId, body.Url).Write();

    // BUG FIX: the per-call HttpClient was never disposed, leaking the underlying
    // handler/sockets on every run; wrap it in a using block.
    using (var client = new HttpClient()) {
        try {
            var result = client.PostAsJson(body.Url, body.Data.ToJson(Formatting.Indented));
            if (result.StatusCode == HttpStatusCode.Gone) {
                // Endpoint permanently gone; remove the hook so we stop posting to it.
                _webHookRepository.RemoveByUrl(body.Url);
                Log.Warn().Project(body.ProjectId).Message("Deleting web hook: org={0} project={1} url={2}", body.OrganizationId, body.ProjectId, body.Url).Write();
            }
            queueEntry.Complete();
            Log.Info().Project(body.ProjectId).Message("Web hook POST complete: status={0} org={1} project={2} url={3}", result.StatusCode, body.OrganizationId, body.ProjectId, body.Url).Write();
        } catch (Exception ex) {
            queueEntry.Abandon();
            return JobResult.FromException(ex);
        }
    }

    return JobResult.Success;
}
/// <summary>
/// Dequeues a single EventUserDescription entry and applies it to the matching
/// event. Runs once per invocation; returns Success when the queue is empty.
/// </summary>
/// <param name="token">Cancellation token (not consulted in this implementation).</param>
/// <returns>Success, or the captured exception when dequeue/processing fails.</returns>
protected async override Task <JobResult> RunInternalAsync(CancellationToken token) {
    QueueEntry <EventUserDescription> queueEntry = null;
    try {
        queueEntry = _queue.Dequeue();
    } catch (Exception ex) {
        // A TimeoutException just means the queue was empty; anything else is fatal for this run.
        if (!(ex is TimeoutException)) {
            Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next EventUserDescription: {0}", ex.Message).Write();
            return(JobResult.FromException(ex));
        }
    }
    // Nothing to process (empty queue or dequeue timeout).
    if (queueEntry == null) {
        return(JobResult.Success);
    }
    await _metricsClient.CounterAsync(MetricNames.EventsUserDescriptionDequeued);
    Log.Trace().Message("Processing user description: id={0}", queueEntry.Id).Write();
    try {
        ProcessUserDescription(queueEntry.Value);
        Log.Info().Message("Processed user description: id={0}", queueEntry.Id).Write();
        await _metricsClient.CounterAsync(MetricNames.EventsUserDescriptionProcessed);
    } catch (DocumentNotFoundException ex) {
        // The referenced event may not have been ingested yet (or was deleted);
        // abandon the entry so the queue can retry it later.
        // TODO: Change to async once vnext is released.
        _metricsClient.Counter(MetricNames.EventsUserDescriptionErrors);
        queueEntry.Abandon();
        Log.Error().Exception(ex).Message("An event with this reference id \"{0}\" has not been processed yet or was deleted. Queue Id: {1}", ex.Id, queueEntry.Id).Write();
        return(JobResult.FromException(ex));
    } catch (Exception ex) {
        // TODO: Change to async once vnext is released.
        _metricsClient.Counter(MetricNames.EventsUserDescriptionErrors);
        queueEntry.Abandon();
        Log.Error().Exception(ex).Message("An error occurred while processing the EventUserDescription '{0}': {1}", queueEntry.Id, ex.Message).Write();
        return(JobResult.FromException(ex));
    }
    queueEntry.Complete();
    return(JobResult.Success);
}
/// <summary>
/// Applies a queued user description to its matching event.
/// </summary>
/// <param name="context">Queue entry context carrying the <c>EventUserDescription</c>.</param>
/// <returns>Success, or the captured exception (including when the referenced event is not found yet).</returns>
protected override async Task <JobResult> ProcessQueueEntryAsync(JobQueueEntryContext <EventUserDescription> context) {
    string queueId = context.QueueEntry.Id;
    Logger.Trace().Message("Processing user description: id={0}", queueId).Write();

    try {
        await ProcessUserDescriptionAsync(context.QueueEntry.Value).AnyContext();
        Logger.Info().Message("Processed user description: id={0}", queueId).Write();
    } catch (DocumentNotFoundException ex) {
        // The referenced event may not have been ingested yet (or was deleted).
        Logger.Error().Exception(ex).Message("An event with this reference id \"{0}\" has not been processed yet or was deleted. Queue Id: {1}", ex.Id, queueId).Write();
        return JobResult.FromException(ex);
    } catch (Exception ex) {
        Logger.Error().Exception(ex).Message("An error occurred while processing the EventUserDescription '{0}': {1}", queueId, ex.Message).Write();
        return JobResult.FromException(ex);
    }

    return JobResult.Success;
}
/// <summary>
/// Applies a queued user description to its matching event.
/// </summary>
/// <param name="context">Queue entry context carrying the <c>EventUserDescription</c>.</param>
/// <returns>Success, or the captured exception (including when the referenced event is not found yet).</returns>
protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <EventUserDescription> context) {
    string queueId = context.QueueEntry.Id;
    _logger.LogTrace("Processing user description: id={0}", queueId);

    try {
        await ProcessUserDescriptionAsync(context.QueueEntry.Value).AnyContext();
        _logger.LogInformation("Processed user description: id={Id}", queueId);
    } catch (DocumentNotFoundException ex) {
        // The referenced event may not have been ingested yet (or was deleted).
        _logger.LogError(ex, "An event with this reference id {ReferenceId} has not been processed yet or was deleted. Queue Id: {Id}", ex.Id, queueId);
        return JobResult.FromException(ex);
    } catch (Exception ex) {
        _logger.LogError(ex, "An error occurred while processing the EventUserDescription {Id}: {Message}", queueId, ex.Message);
        return JobResult.FromException(ex);
    }

    return JobResult.Success;
}
/// <summary>
/// Drains the mail queue, sending each message until cancellation or the
/// configured work-item limit is hit. Send failures are logged and the entry
/// abandoned; the loop continues (best-effort delivery).
/// </summary>
/// <returns>Success when the loop exits normally; the captured exception on dequeue failure.</returns>
protected async override Task <JobResult> RunInternalAsync() {
    Log.Info().Message("Process email message job starting").Write();
    int totalEmailsProcessed = 0;
    // -1 means no work-item limit.
    int totalEmailsToProcess = Context.GetWorkItemLimit();
    while (!CancelPending && (totalEmailsToProcess == -1 || totalEmailsProcessed < totalEmailsToProcess)) {
        QueueEntry <MailMessage> queueEntry = null;
        try {
            queueEntry = await _queue.DequeueAsync();
        } catch (Exception ex) {
            // A TimeoutException just means the queue was empty; anything else ends the run.
            if (!(ex is TimeoutException)) {
                Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next MailMessageNotification: {0}", ex.Message).Write();
                return JobResult.FromException(ex);
            }
        }
        if (queueEntry == null)
            continue;

        _statsClient.Counter(StatNames.EmailsDequeued);
        Log.Info().Message("Processing MailMessageNotification '{0}'.", queueEntry.Id).Write();
        try {
            await _mailSender.SendAsync(queueEntry.Value);
            totalEmailsProcessed++;
            _statsClient.Counter(StatNames.EmailsSent);
        } catch (Exception ex) {
            _statsClient.Counter(StatNames.EmailsSendErrors);
            // BUG FIX (x2): was AbandonAsync().Wait() — blocking on async inside an async
            // method — and the original then fell through to CompleteAsync, settling an
            // entry it had just abandoned. Await the abandon and skip the complete.
            await queueEntry.AbandonAsync();
            Log.Error().Exception(ex).Message("Error sending message '{0}': {1}", queueEntry.Id, ex.Message).Write();
            continue;
        }

        await queueEntry.CompleteAsync();
    }
    return JobResult.Success;
}
/// <summary>
/// Dequeues a single EventUserDescription entry and applies it to the matching
/// event. Runs once per invocation; returns Success when the queue is empty.
/// NOTE(review): declared async but contains no awaits (sync Dequeue/stats calls).
/// </summary>
/// <param name="token">Cancellation token (not consulted in this implementation).</param>
/// <returns>Success, or the captured exception when dequeue/processing fails.</returns>
protected async override Task <JobResult> RunInternalAsync(CancellationToken token) {
    Log.Info().Message("Process user description job starting").Write();
    QueueEntry <EventUserDescription> queueEntry = null;
    try {
        queueEntry = _queue.Dequeue();
    } catch (Exception ex) {
        // A TimeoutException just means the queue was empty; anything else is fatal for this run.
        if (!(ex is TimeoutException)) {
            Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next EventUserDescription: {0}", ex.Message).Write();
            return(JobResult.FromException(ex));
        }
    }
    // Nothing to process (empty queue or dequeue timeout).
    if (queueEntry == null) {
        return(JobResult.Success);
    }
    _statsClient.Counter(StatNames.EventsUserDescriptionDequeued);
    Log.Info().Message("Processing EventUserDescription '{0}'.", queueEntry.Id).Write();
    try {
        ProcessUserDescription(queueEntry.Value);
        _statsClient.Counter(StatNames.EventsUserDescriptionProcessed);
    } catch (DocumentNotFoundException ex) {
        // The referenced event may not have been ingested yet (or was deleted);
        // abandon the entry so the queue can retry it later.
        _statsClient.Counter(StatNames.EventsUserDescriptionErrors);
        queueEntry.Abandon();
        Log.Error().Exception(ex).Message("An event with this reference id \"{0}\" has not been processed yet or was deleted. Queue Id: {1}", ex.Id, queueEntry.Id).Write();
        return(JobResult.FromException(ex));
    } catch (Exception ex) {
        _statsClient.Counter(StatNames.EventsUserDescriptionErrors);
        queueEntry.Abandon();
        // TODO: Add the EventUserDescription to the logged exception.
        Log.Error().Exception(ex).Message("An error occurred while processing the EventUserDescription '{0}': {1}", queueEntry.Id, ex.Message).Write();
        return(JobResult.FromException(ex));
    }
    queueEntry.Complete();
    return(JobResult.Success);
}
/// <summary>
/// Downloads the MaxMind GeoLite2 City database using the configured license
/// key, skipping the download when storage already has a copy modified today.
/// </summary>
/// <param name="context">Job context supplying the cancellation token.</param>
/// <returns>Success (including when no license key is configured), a failure message, or the captured exception.</returns>
protected override async Task <JobResult> RunInternalAsync(JobContext context) {
    _lastRun = SystemClock.UtcNow;

    string licenseKey = _options.Value.MaxMindGeoIpKey;
    if (String.IsNullOrEmpty(licenseKey)) {
        _logger.LogInformation("Configure {SettingKey} to download GeoIP database.", nameof(AppOptions.MaxMindGeoIpKey));
        return JobResult.Success;
    }

    try {
        // Skip the download when the stored database was already refreshed today.
        var fi = await _storage.GetFileInfoAsync(GEO_IP_DATABASE_PATH).AnyContext();
        if (fi != null && fi.Modified.IsAfter(SystemClock.UtcNow.StartOfDay())) {
            _logger.LogInformation("The GeoIP database is already up-to-date.");
            return JobResult.Success;
        }

        _logger.LogInformation("Downloading GeoIP database.");
        // The license key is embedded in the URL; keep it out of log output.
        string url = $"https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-City&license_key={licenseKey}&suffix=tar.gz";
        // BUG FIX: the HttpClient and the HttpResponseMessage were never disposed,
        // leaking connections on every run; wrap both in using blocks.
        using (var client = new HttpClient())
        using (var file = await client.GetAsync(url, context.CancellationToken).AnyContext()) {
            if (!file.IsSuccessStatusCode)
                return JobResult.FailedWithMessage("Unable to download GeoIP database.");

            _logger.LogInformation("Extracting GeoIP database");
            using (var decompressionStream = new GZipStream(await file.Content.ReadAsStreamAsync().AnyContext(), CompressionMode.Decompress))
                await _storage.SaveFileAsync(GEO_IP_DATABASE_PATH, decompressionStream, context.CancellationToken).AnyContext();
        }
    } catch (Exception ex) {
        _logger.LogError(ex, "An error occurred while downloading the GeoIP database.");
        return JobResult.FromException(ex);
    }

    _logger.LogInformation("Finished downloading GeoIP database.");
    return JobResult.Success;
}
/// <summary>
/// Dequeues a single mail message and sends it through the configured mail sender.
/// </summary>
/// <param name="token">Cancellation token (not consulted in this implementation).</param>
/// <returns>Success, or the captured exception when dequeue/send fails.</returns>
protected async override Task <JobResult> RunInternalAsync(CancellationToken token) {
    QueueEntry <MailMessage> queueEntry = null;
    try {
        queueEntry = _queue.Dequeue();
    } catch (Exception ex) {
        // A TimeoutException just means the queue was empty; anything else is fatal.
        if (!(ex is TimeoutException)) {
            Log.Error().Exception(ex).Message("Error trying to dequeue message: {0}", ex.Message).Write();
            return JobResult.FromException(ex);
        }
    }
    if (queueEntry == null)
        return JobResult.Success;

    await _metricsClient.CounterAsync(MetricNames.EmailsDequeued);
    Log.Trace().Message("Processing message '{0}'.", queueEntry.Id).Write();
    try {
        await _mailSender.SendAsync(queueEntry.Value);
        await _metricsClient.CounterAsync(MetricNames.EmailsSent);
        Log.Info().Message("Sent message: to={0} subject=\"{1}\"", queueEntry.Value.To, queueEntry.Value.Subject).Write();
    } catch (Exception ex) {
        // TODO: Change to async once vnext is released.
        _metricsClient.Counter(MetricNames.EmailsSendErrors);
        Log.Error().Exception(ex).Message("Error sending message: id={0} error={1}", queueEntry.Id, ex.Message).Write();
        // BUG FIX: the original abandoned the entry and then fell through to Complete(),
        // settling the same entry twice. Abandon and report the failure instead.
        queueEntry.Abandon();
        return JobResult.FromException(ex);
    }

    queueEntry.Complete();
    return JobResult.Success;
}
/// <summary>
/// Runs the job, optionally serialized behind a distributed lock.
/// </summary>
/// <param name="token">Optional cancellation token; defaults to <c>CancellationToken.None</c>.</param>
/// <returns>The job result; a failure result when the lock cannot be acquired within one minute.</returns>
public async Task <JobResult> RunAsync(CancellationToken?token = null) {
    if (!token.HasValue) {
        token = CancellationToken.None;
    }
    if (LockProvider == null) {
        return await TryRunAsync(token.Value);
    }
    try {
        // BUG FIX: the original returned the Task from inside the using block without
        // awaiting it, so the lock was disposed as soon as the task was *created* —
        // releasing it while the job was still running. Await inside the using so the
        // lock is held for the full duration of the job.
        using (LockProvider.AcquireLock(GetType().FullName, acquireTimeout: TimeSpan.FromMinutes(1)))
            return await TryRunAsync(token.Value);
    } catch (TimeoutException) {
        return JobResult.FailedWithMessage("Timeout attempting to acquire lock.");
    } catch (Exception ex) {
        return JobResult.FromException(ex);
    }
}
/// <summary>
/// Downloads the GeoLite2 City database and extracts it to the configured
/// local file path, replacing any existing file.
/// </summary>
/// <param name="token">Cancellation token forwarded to the HTTP download.</param>
/// <returns>Success, a failure message (no path configured / download rejected), or the captured exception.</returns>
protected override async Task <JobResult> RunInternalAsync(CancellationToken token) {
    var path = PathHelper.ExpandPath(Settings.Current.GeoIPDatabasePath);
    if (String.IsNullOrEmpty(path)) {
        Log.Error().Message("No GeoIPDatabasePath was specified.").Write();
        return JobResult.FailedWithMessage("No GeoIPDatabasePath was specified.");
    }

    try {
        // Delete first so FileMode.CreateNew below cannot fail on an existing file.
        if (File.Exists(path)) {
            Log.Info().Message("Deleting existing GeoIP database \"{0}\".", path).Write();
            File.Delete(path);
        }

        Log.Info().Message("Downloading GeoIP database.").Write();
        // BUG FIX: the HttpClient and the HttpResponseMessage were never disposed,
        // leaking connections on every run; wrap both in using blocks.
        using (var client = new HttpClient())
        using (var file = await client.GetAsync("http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz", token)) {
            if (!file.IsSuccessStatusCode)
                return JobResult.FailedWithMessage("Unable to download GeoIP database.");

            Log.Info().Message("Extracting GeoIP database to \"{0}\".", path).Write();
            using (FileStream decompressedFileStream = new FileStream(path, FileMode.CreateNew))
            using (GZipStream decompressionStream = new GZipStream(await file.Content.ReadAsStreamAsync(), CompressionMode.Decompress)) {
                decompressionStream.CopyTo(decompressedFileStream);
            }
        }
    } catch (Exception ex) {
        Log.Error().Exception(ex).Message("An error occurred while downloading the GeoIP database \"{0}\".", path).Write();
        return JobResult.FromException(ex);
    }

    Log.Info().Message("Finished downloading GeoIP database.").Write();
    return JobResult.Success;
}
/// <summary>
/// Dequeues a single mail message and sends it through the configured mail sender.
/// </summary>
/// <param name="token">Cancellation token (not consulted in this implementation).</param>
/// <returns>Success, or the captured exception when dequeue/send fails.</returns>
protected async override Task <JobResult> RunInternalAsync(CancellationToken token) {
    Log.Info().Message("Process email message job starting").Write();
    QueueEntry <MailMessage> queueEntry = null;
    try {
        queueEntry = _queue.Dequeue();
    } catch (Exception ex) {
        // A TimeoutException just means the queue was empty; anything else is fatal.
        if (!(ex is TimeoutException)) {
            Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next MailMessageNotification: {0}", ex.Message).Write();
            return JobResult.FromException(ex);
        }
    }
    if (queueEntry == null)
        return JobResult.Success;

    _statsClient.Counter(StatNames.EmailsDequeued);
    Log.Info().Message("Processing MailMessageNotification '{0}'.", queueEntry.Id).Write();
    try {
        await _mailSender.SendAsync(queueEntry.Value);
        _statsClient.Counter(StatNames.EmailsSent);
    } catch (Exception ex) {
        _statsClient.Counter(StatNames.EmailsSendErrors);
        queueEntry.Abandon();
        Log.Error().Exception(ex).Message("Error sending message '{0}': {1}", queueEntry.Id, ex.Message).Write();
        // BUG FIX: the original fell through to Complete() after abandoning, settling
        // the same entry twice. Abandon and report the failure instead.
        return JobResult.FromException(ex);
    }

    queueEntry.Complete();
    return JobResult.Success;
}
/// <summary>
/// Dequeues one EventPost, loads its payload from storage, parses it into
/// events, runs them through the event pipeline (capped by the organization's
/// remaining event limit), and settles the queue entry and stored file based
/// on the outcome. Failed events from a multi-event post are re-enqueued
/// individually for retry.
/// </summary>
/// <param name="token">Cancellation token checked between the major phases.</param>
/// <returns>Success, Cancelled, a failure message, or the captured exception.</returns>
protected async override Task <JobResult> RunInternalAsync(CancellationToken token) {
    QueueEntry <EventPost> queueEntry = null;
    try {
        queueEntry = _queue.Dequeue(TimeSpan.FromSeconds(1));
    } catch (Exception ex) {
        // A TimeoutException just means the queue was empty; anything else is fatal for this run.
        if (!(ex is TimeoutException)) {
            Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next EventPost: {0}", ex.Message).Write();
            return(JobResult.FromException(ex));
        }
    }
    // Nothing to process (empty queue or dequeue timeout).
    if (queueEntry == null) {
        return(JobResult.Success);
    }
    if (token.IsCancellationRequested) {
        queueEntry.Abandon();
        return(JobResult.Cancelled);
    }
    // Load the raw post payload and mark the file as actively being processed.
    EventPostInfo eventPostInfo = _storage.GetEventPostAndSetActive(queueEntry.Value.FilePath);
    if (eventPostInfo == null) {
        queueEntry.Abandon();
        _storage.SetNotActive(queueEntry.Value.FilePath);
        return(JobResult.FailedWithMessage(String.Format("Unable to retrieve post data '{0}'.", queueEntry.Value.FilePath)));
    }
    // Suppress per-post logging for the internal project to avoid log noise.
    bool isInternalProject = eventPostInfo.ProjectId == Settings.Current.InternalProjectId;
    _statsClient.Counter(MetricNames.PostsDequeued);
    Log.Info().Message("Processing post: id={0} path={1} project={2} ip={3} v={4} agent={5}", queueEntry.Id, queueEntry.Value.FilePath, eventPostInfo.ProjectId, eventPostInfo.IpAddress, eventPostInfo.ApiVersion, eventPostInfo.UserAgent).WriteIf(!isInternalProject);
    List <PersistentEvent> events = null;
    try {
        // Parse inside a timed block so parse duration is reported as a metric.
        _statsClient.Time(() => {
            events = ParseEventPost(eventPostInfo);
            Log.Info().Message("Parsed {0} events for post: id={1}", events.Count, queueEntry.Id).WriteIf(!isInternalProject);
        }, MetricNames.PostsParsingTime);
        _statsClient.Counter(MetricNames.PostsParsed);
        _statsClient.Gauge(MetricNames.PostsEventCount, events.Count);
    } catch (Exception ex) {
        _statsClient.Counter(MetricNames.PostsParseErrors);
        queueEntry.Abandon();
        _storage.SetNotActive(queueEntry.Value.FilePath);
        Log.Error().Exception(ex).Message("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message).Write();
        return(JobResult.FromException(ex, String.Format("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message)));
    }
    if (token.IsCancellationRequested) {
        queueEntry.Abandon();
        return(JobResult.Cancelled);
    }
    if (events == null) {
        queueEntry.Abandon();
        _storage.SetNotActive(queueEntry.Value.FilePath);
        return(JobResult.Success);
    }
    int eventsToProcess = events.Count;
    bool isSingleEvent = events.Count == 1;
    if (!isSingleEvent) {
        var project = _projectRepository.GetById(eventPostInfo.ProjectId, true);
        // Don't process all the events if it will put the account over its limits.
        eventsToProcess = _organizationRepository.GetRemainingEventLimit(project.OrganizationId);
        // Add 1 because we already counted 1 against their limit when we received the event post.
        if (eventsToProcess < Int32.MaxValue) {
            eventsToProcess += 1;
        }
        // Increment by count - 1 since we already incremented it by 1 in the OverageHandler.
        _organizationRepository.IncrementUsage(project.OrganizationId, false, events.Count - 1);
    }
    // NOTE(review): dead code — events was already null-checked (and returned on) above.
    if (events == null) {
        queueEntry.Abandon();
        _storage.SetNotActive(queueEntry.Value.FilePath);
        return(JobResult.Success);
    }
    var errorCount = 0;
    var created = DateTime.UtcNow;
    try {
        // Stamp all events with a single creation time before running the pipeline.
        events.ForEach(e => e.CreatedUtc = created);
        var results = _eventPipeline.Run(events.Take(eventsToProcess).ToList());
        Log.Info().Message("Ran {0} events through the pipeline: id={1} project={2} success={3} error={4}", results.Count, queueEntry.Id, eventPostInfo.ProjectId, results.Count(r => r.IsProcessed), results.Count(r => r.HasError)).WriteIf(!isInternalProject);
        foreach (var eventContext in results) {
            if (eventContext.IsCancelled) {
                continue;
            }
            if (!eventContext.HasError) {
                continue;
            }
            Log.Error().Exception(eventContext.Exception).Project(eventPostInfo.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, eventContext.ErrorMessage).Write();
            // Validation failures are not retryable; don't count them as errors.
            if (eventContext.Exception is ValidationException) {
                continue;
            }
            errorCount++;
            if (!isSingleEvent) {
                // Put this single event back into the queue so we can retry it separately.
                _queue.Enqueue(new EventPostInfo { ApiVersion = eventPostInfo.ApiVersion, CharSet = eventPostInfo.CharSet, Data = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(eventContext.Event)), IpAddress = eventPostInfo.IpAddress, MediaType = eventPostInfo.MediaType, ProjectId = eventPostInfo.ProjectId, UserAgent = eventPostInfo.UserAgent }, _storage, false);
            }
        }
    } catch (ArgumentException ex) {
        // Treated as a permanently bad post: log and complete so it is not retried.
        Log.Error().Exception(ex).Project(eventPostInfo.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, ex.Message).Write();
        queueEntry.Complete();
    } catch (Exception ex) {
        Log.Error().Exception(ex).Project(eventPostInfo.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, ex.Message).Write();
        errorCount++;
    }
    if (isSingleEvent && errorCount > 0) {
        // A single failed event is abandoned so the whole post is retried.
        queueEntry.Abandon();
        _storage.SetNotActive(queueEntry.Value.FilePath);
    } else {
        queueEntry.Complete();
        if (queueEntry.Value.ShouldArchive) {
            _storage.CompleteEventPost(queueEntry.Value.FilePath, eventPostInfo.ProjectId, created, queueEntry.Value.ShouldArchive);
        } else {
            _storage.DeleteFile(queueEntry.Value.FilePath);
            _storage.SetNotActive(queueEntry.Value.FilePath);
        }
    }
    return(JobResult.Success);
}
/// <summary>
/// Dequeues one EventNotification and emails every project user whose
/// notification settings match the event (new/critical/regression), subject to
/// premium-feature, per-stack and per-project throttling, bot filtering, and
/// (outside production) an outbound-address allow list.
/// </summary>
/// <param name="token">Cancellation token checked between the major phases.</param>
/// <returns>Success, Cancelled, or a failure result when required documents cannot be loaded.</returns>
protected async override Task <JobResult> RunInternalAsync(CancellationToken token) {
    QueueEntry <EventNotification> queueEntry = null;
    try {
        queueEntry = _queue.Dequeue();
    } catch (Exception ex) {
        // A TimeoutException just means the queue was empty; anything else is fatal for this run.
        if (!(ex is TimeoutException)) {
            return(JobResult.FromException(ex, "An error occurred while trying to dequeue the next EventNotification: {0}", ex.Message));
        }
    }
    // Nothing to process (empty queue or dequeue timeout).
    if (queueEntry == null) {
        return(JobResult.Success);
    }
    var eventNotification = queueEntry.Value;
    // Suppress per-notification logging for the internal project to avoid log noise.
    bool shouldLog = eventNotification.Event.ProjectId != Settings.Current.InternalProjectId;
    int emailsSent = 0;
    Log.Trace().Message("Process notification: project={0} event={1} stack={2}", eventNotification.Event.ProjectId, eventNotification.Event.Id, eventNotification.Event.StackId).WriteIf(shouldLog);
    // Load the project, organization and stack; abandon the entry if any is missing
    // so the notification can be retried once the documents exist.
    var project = _projectRepository.GetById(eventNotification.Event.ProjectId, true);
    if (project == null) {
        queueEntry.Abandon();
        return(JobResult.FailedWithMessage("Could not load project {0}.", eventNotification.Event.ProjectId));
    }
    Log.Trace().Message("Loaded project: name={0}", project.Name).WriteIf(shouldLog);
    var organization = _organizationRepository.GetById(project.OrganizationId, true);
    if (organization == null) {
        queueEntry.Abandon();
        return(JobResult.FailedWithMessage("Could not load organization {0}.", project.OrganizationId));
    }
    Log.Trace().Message("Loaded organization: name={0}", organization.Name).WriteIf(shouldLog);
    var stack = _stackRepository.GetById(eventNotification.Event.StackId);
    if (stack == null) {
        queueEntry.Abandon();
        return(JobResult.FailedWithMessage("Could not load stack {0}.", eventNotification.Event.StackId));
    }
    // Email notifications are a premium feature.
    if (!organization.HasPremiumFeatures) {
        queueEntry.Complete();
        Log.Info().Message("Skipping \"{0}\" because organization \"{1}\" does not have premium features.", eventNotification.Event.Id, eventNotification.Event.OrganizationId).WriteIf(shouldLog);
        return(JobResult.Success);
    }
    if (stack.DisableNotifications || stack.IsHidden) {
        queueEntry.Complete();
        Log.Info().Message("Skipping \"{0}\" because stack \"{1}\" notifications are disabled or stack is hidden.", eventNotification.Event.Id, eventNotification.Event.StackId).WriteIf(shouldLog);
        return(JobResult.Success);
    }
    if (token.IsCancellationRequested) {
        queueEntry.Abandon();
        return(JobResult.Cancelled);
    }
    Log.Trace().Message("Loaded stack: title={0}", stack.Title).WriteIf(shouldLog);
    int totalOccurrences = stack.TotalOccurrences;
    // after the first 2 occurrences, don't send a notification for the same stack more then once every 30 minutes
    // NOTE(review): throttle comparisons use DateTime.Now while the rest of the file
    // uses UtcNow, and the cache entry below is written with a 15-minute expiry but
    // checked against a 30-minute window — confirm whether this mismatch is intended.
    var lastTimeSent = _cacheClient.Get <DateTime>(String.Concat("notify:stack-throttle:", eventNotification.Event.StackId));
    if (totalOccurrences > 2 && !eventNotification.IsRegression && lastTimeSent != DateTime.MinValue && lastTimeSent > DateTime.Now.AddMinutes(-30)) {
        queueEntry.Complete();
        Log.Info().Message("Skipping message because of stack throttling: last sent={0} occurrences={1}", lastTimeSent, totalOccurrences).WriteIf(shouldLog);
        return(JobResult.Success);
    }
    // don't send more than 10 notifications for a given project every 30 minutes
    var projectTimeWindow = TimeSpan.FromMinutes(30);
    string cacheKey = String.Concat("notify:project-throttle:", eventNotification.Event.ProjectId, "-", DateTime.UtcNow.Floor(projectTimeWindow).Ticks);
    long notificationCount = _cacheClient.Increment(cacheKey, 1, projectTimeWindow);
    if (notificationCount > 10 && !eventNotification.IsRegression) {
        queueEntry.Complete();
        Log.Info().Project(eventNotification.Event.ProjectId).Message("Skipping message because of project throttling: count={0}", notificationCount).WriteIf(shouldLog);
        return(JobResult.Success);
    }
    if (token.IsCancellationRequested) {
        queueEntry.Abandon();
        return(JobResult.Cancelled);
    }
    // Evaluate each user's notification settings for this project.
    foreach (var kv in project.NotificationSettings) {
        var settings = kv.Value;
        Log.Trace().Message("Processing notification: user={0}", kv.Key).WriteIf(shouldLog);
        var user = _userRepository.GetById(kv.Key);
        // Skip users that cannot receive email: missing, unverified, opted out, or
        // no longer a member of the organization.
        if (user == null || String.IsNullOrEmpty(user.EmailAddress)) {
            Log.Error().Message("Could not load user {0} or blank email address {1}.", kv.Key, user != null ? user.EmailAddress : "").Write();
            continue;
        }
        if (!user.IsEmailAddressVerified) {
            Log.Info().Message("User {0} with email address {1} has not been verified.", kv.Key, user != null ? user.EmailAddress : "").WriteIf(shouldLog);
            continue;
        }
        if (!user.EmailNotificationsEnabled) {
            Log.Info().Message("User {0} with email address {1} has email notifications disabled.", kv.Key, user != null ? user.EmailAddress : "").WriteIf(shouldLog);
            continue;
        }
        if (!user.OrganizationIds.Contains(project.OrganizationId)) {
            Log.Error().Message("Unauthorized user: project={0} user={1} organization={2} event={3}", project.Id, kv.Key, project.OrganizationId, eventNotification.Event.Id).Write();
            continue;
        }
        Log.Trace().Message("Loaded user: email={0}", user.EmailAddress).WriteIf(shouldLog);
        // Combine the user's per-category settings with the notification's flags.
        bool shouldReportNewError = settings.ReportNewErrors && eventNotification.IsNew && eventNotification.Event.IsError();;
        bool shouldReportCriticalError = settings.ReportCriticalErrors && eventNotification.IsCritical && eventNotification.Event.IsError();;
        bool shouldReportRegression = settings.ReportEventRegressions && eventNotification.IsRegression;
        bool shouldReportNewEvent = settings.ReportNewEvents && eventNotification.IsNew;
        bool shouldReportCriticalEvent = settings.ReportCriticalEvents && eventNotification.IsCritical;
        bool shouldReport = shouldReportNewError || shouldReportCriticalError || shouldReportRegression || shouldReportNewEvent || shouldReportCriticalEvent;
        Log.Trace().Message("Settings: newerror={0} criticalerror={1} regression={2} new={3} critical={4}", settings.ReportNewErrors, settings.ReportCriticalErrors, settings.ReportEventRegressions, settings.ReportNewEvents, settings.ReportCriticalEvents).WriteIf(shouldLog);
        Log.Trace().Message("Should process: newerror={0} criticalerror={1} regression={2} new={3} critical={4}", shouldReportNewError, shouldReportCriticalError, shouldReportRegression, shouldReportNewEvent, shouldReportCriticalEvent).WriteIf(shouldLog);
        var requestInfo = eventNotification.Event.GetRequestInfo();
        // check for known bots if the user has elected to not report them
        if (shouldReport && requestInfo != null && !String.IsNullOrEmpty(requestInfo.UserAgent)) {
            ClientInfo info = null;
            try {
                info = Parser.GetDefault().Parse(requestInfo.UserAgent);
            } catch (Exception ex) {
                Log.Warn().Project(eventNotification.Event.ProjectId).Message("Unable to parse user agent {0}. Exception: {1}", requestInfo.UserAgent, ex.Message).Write();
            }
            var botPatterns = project.Configuration.Settings.ContainsKey(SettingsDictionary.KnownKeys.UserAgentBotPatterns) ? project.Configuration.Settings.GetStringCollection(SettingsDictionary.KnownKeys.UserAgentBotPatterns).ToList() : new List <string>();
            // NOTE(review): && binds tighter than ||, so this reads as
            // (info != null && info.Device.IsSpider) || wildcard-match — confirm intended.
            if (info != null && info.Device.IsSpider || requestInfo.UserAgent.AnyWildcardMatches(botPatterns)) {
                shouldReport = false;
                Log.Info().Message("Skipping because event is from a bot \"{0}\".", requestInfo.UserAgent).WriteIf(shouldLog);
            }
        }
        if (!shouldReport) {
            continue;
        }
        var model = new EventNotificationModel(eventNotification) { ProjectName = project.Name, TotalOccurrences = totalOccurrences };
        // don't send notifications in non-production mode to email addresses that are not on the outbound email list.
        if (Settings.Current.WebsiteMode != WebsiteMode.Production && !Settings.Current.AllowedOutboundAddresses.Contains(v => user.EmailAddress.ToLowerInvariant().Contains(v))) {
            Log.Info().Message("Skipping because email is not on the outbound list and not in production mode.").WriteIf(shouldLog);
            continue;
        }
        Log.Trace().Message("Sending email to {0}...", user.EmailAddress).Write();
        _mailer.SendNotice(user.EmailAddress, model);
        emailsSent++;
        Log.Trace().Message("Done sending email.").WriteIf(shouldLog);
    }
    // if we sent any emails, mark the last time a notification for this stack was sent.
    if (emailsSent > 0) {
        _cacheClient.Set(String.Concat("notify:stack-throttle:", eventNotification.Event.StackId), DateTime.Now, DateTime.Now.AddMinutes(15));
        Log.Info().Message("Notifications sent: event={0} stack={1} count={2}", eventNotification.Event.Id, eventNotification.Event.StackId, emailsSent).WriteIf(shouldLog);
    }
    queueEntry.Complete();
    return(JobResult.Success);
}
/// <summary>
/// Dequeues EventPost entries and runs the parsed events through the processing
/// pipeline until cancellation is requested or the work item limit is reached.
/// Returns a failed JobResult only for unexpected dequeue errors; parse and
/// processing errors are logged and the entry is abandoned or retried instead.
/// </summary>
protected async override Task<JobResult> RunInternalAsync() {
    Log.Info().Message("Process events job starting").Write();
    int totalEventsProcessed = 0;
    // -1 means no limit on the number of events processed this run.
    int totalEventsToProcess = Context.GetWorkItemLimit();

    while (!CancelPending && (totalEventsToProcess == -1 || totalEventsProcessed < totalEventsToProcess)) {
        QueueEntry<EventPost> queueEntry = null;
        try {
            queueEntry = await _queue.DequeueAsync();
        } catch (Exception ex) {
            // A TimeoutException just means the queue was empty; anything else aborts the run.
            if (!(ex is TimeoutException)) {
                Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next EventPost: {0}", ex.Message).Write();
                return JobResult.FromException(ex);
            }
        }

        if (queueEntry == null)
            continue;

        _statsClient.Counter(StatNames.PostsDequeued);
        Log.Info().Message("Processing EventPost '{0}'.", queueEntry.Id).Write();

        List<PersistentEvent> events = null;
        try {
            // Time the parse and record batch-size stats.
            _statsClient.Time(() => {
                events = ParseEventPost(queueEntry.Value);
            }, StatNames.PostsParsingTime);
            _statsClient.Counter(StatNames.PostsParsed);
            _statsClient.Gauge(StatNames.PostsBatchSize, events.Count);
        } catch (Exception ex) {
            _statsClient.Counter(StatNames.PostsParseErrors);
            // FIX: was AbandonAsync().Wait() — blocking on an async call inside an async
            // method risks deadlocks and thread-pool starvation; await it instead.
            await queueEntry.AbandonAsync();

            // TODO: Add the EventPost to the logged exception.
            Log.Error().Exception(ex).Message("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message).Write();
            continue;
        }

        if (events == null) {
            // FIX: was AbandonAsync().Wait() — see note above.
            await queueEntry.AbandonAsync();
            continue;
        }

        int eventsToProcess = events.Count;
        bool isSingleEvent = events.Count == 1;
        if (!isSingleEvent) {
            var project = _projectRepository.GetById(queueEntry.Value.ProjectId, true);
            // Don't process all the events if it will put the account over its limits.
            eventsToProcess = _organizationRepository.GetRemainingEventLimit(project.OrganizationId);

            // Add 1 because we already counted 1 against their limit when we received the event post.
            if (eventsToProcess < Int32.MaxValue)
                eventsToProcess += 1;

            // Increment by count - 1 since we already incremented it by 1 in the OverageHandler.
            _organizationRepository.IncrementUsage(project.OrganizationId, events.Count - 1);
        }

        int errorCount = 0;
        foreach (PersistentEvent ev in events.Take(eventsToProcess)) {
            try {
                _eventPipeline.Run(ev);
                totalEventsProcessed++;
                if (totalEventsToProcess > 0 && totalEventsProcessed >= totalEventsToProcess)
                    break;
            } catch (ValidationException ex) {
                // Validation failures are logged but do not count toward errorCount,
                // so the entry is still completed below.
                Log.Error().Exception(ex).Project(queueEntry.Value.ProjectId).Message("Event validation error occurred: {0}", ex.Message).Write();
            } catch (Exception ex) {
                Log.Error().Exception(ex).Project(queueEntry.Value.ProjectId).Message("Error while processing event: {0}", ex.Message).Write();

                if (!isSingleEvent) {
                    // Put this single event back into the queue so we can retry it separately.
                    // FIX: was EnqueueAsync(...).Wait() — await instead of blocking.
                    await _queue.EnqueueAsync(new EventPost {
                        Data = Encoding.UTF8.GetBytes(ev.ToJson()).Compress(),
                        ContentEncoding = "gzip",
                        ProjectId = ev.ProjectId,
                        CharSet = "utf-8",
                        MediaType = "application/json",
                    });
                }
                errorCount++;
            }
        }

        // A failed single event is abandoned for redelivery; batches are completed since any
        // failed members were re-enqueued individually above.
        // FIX: was AbandonAsync().Wait() / CompleteAsync().Wait() — await instead of blocking.
        if (isSingleEvent && errorCount > 0)
            await queueEntry.AbandonAsync();
        else
            await queueEntry.CompleteAsync();
    }

    return JobResult.Success;
}
// Dequeues a single EventPostFileInfo entry, loads the raw post payload from storage,
// parses it into events and runs each one through the processing pipeline. Processes at
// most one queue entry per invocation; returns Success when the queue is empty.
// NOTE(review): the method is marked async but contains no awaits, and the
// CancellationToken parameter is never consulted here — confirm cancellation is
// handled by the caller.
protected async override Task <JobResult> RunInternalAsync(CancellationToken token) {
    Log.Info().Message("Process events job starting").Write();
    QueueEntry <EventPostFileInfo> queueEntry = null;
    try {
        // Short-timeout dequeue; a TimeoutException is expected when the queue is idle.
        queueEntry = _queue.Dequeue(TimeSpan.FromSeconds(1));
    } catch (Exception ex) {
        if (!(ex is TimeoutException)) {
            Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next EventPost: {0}", ex.Message).Write();
            return(JobResult.FromException(ex));
        }
    }
    if (queueEntry == null) {
        return(JobResult.Success);
    }
    // Load the post data and mark the file as in-progress so other workers skip it.
    EventPost eventPost = _storage.GetEventPostAndSetActive(queueEntry.Value.FilePath);
    if (eventPost == null) {
        queueEntry.Abandon();
        _storage.SetNotActive(queueEntry.Value.FilePath);
        return(JobResult.FailedWithMessage(String.Format("Unable to retrieve post data '{0}'.", queueEntry.Value.FilePath)));
    }
    _statsClient.Counter(StatNames.PostsDequeued);
    Log.Info().Message("Processing EventPost '{0}'.", queueEntry.Id).Write();
    List <PersistentEvent> events = null;
    try {
        // Time the parse and record batch-size stats.
        _statsClient.Time(() => {
            events = ParseEventPost(eventPost);
        }, StatNames.PostsParsingTime);
        _statsClient.Counter(StatNames.PostsParsed);
        _statsClient.Gauge(StatNames.PostsBatchSize, events.Count);
    } catch (Exception ex) {
        // Parse failure: abandon the entry and release the file so it can be retried.
        _statsClient.Counter(StatNames.PostsParseErrors);
        queueEntry.Abandon();
        _storage.SetNotActive(queueEntry.Value.FilePath);
        // TODO: Add the EventPost to the logged exception.
        Log.Error().Exception(ex).Message("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message).Write();
        return(JobResult.FromException(ex, String.Format("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message)));
    }
    if (events == null) {
        queueEntry.Abandon();
        _storage.SetNotActive(queueEntry.Value.FilePath);
        return(JobResult.Success);
    }
    int eventsToProcess = events.Count;
    bool isSingleEvent = events.Count == 1;
    if (!isSingleEvent) {
        var project = _projectRepository.GetById(eventPost.ProjectId, true);
        // Don't process all the events if it will put the account over its limits.
        eventsToProcess = _organizationRepository.GetRemainingEventLimit(project.OrganizationId);
        // Add 1 because we already counted 1 against their limit when we received the event post.
        if (eventsToProcess < Int32.MaxValue) {
            eventsToProcess += 1;
        }
        // Increment by count - 1 since we already incremented it by 1 in the OverageHandler.
        _organizationRepository.IncrementUsage(project.OrganizationId, events.Count - 1);
    }
    int errorCount = 0;
    // All events in the batch share one creation timestamp.
    DateTime created = DateTime.UtcNow;
    foreach (PersistentEvent ev in events.Take(eventsToProcess)) {
        try {
            ev.CreatedUtc = created;
            _eventPipeline.Run(ev);
        } catch (ValidationException ex) {
            // Validation failures are logged but do not count toward errorCount.
            Log.Error().Exception(ex).Project(eventPost.ProjectId).Message("Event validation error occurred: {0}", ex.Message).Write();
        } catch (Exception ex) {
            Log.Error().Exception(ex).Project(eventPost.ProjectId).Message("Error while processing event: {0}", ex.Message).Write();
            if (!isSingleEvent) {
                // Put this single event back into the queue so we can retry it separately.
                // NOTE(review): ContentEncoding is set to "application/json" (a media type, not an
                // encoding) while Data is uncompressed UTF-8 JSON; compare the sibling job that uses
                // ContentEncoding = "gzip" with compressed data — confirm downstream parsing expects this.
                _queue.Enqueue(new EventPost {
                    ApiVersion = eventPost.ApiVersion,
                    CharSet = eventPost.CharSet,
                    ContentEncoding = "application/json",
                    Data = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(ev)),
                    IpAddress = eventPost.IpAddress,
                    MediaType = eventPost.MediaType,
                    ProjectId = eventPost.ProjectId,
                    UserAgent = eventPost.UserAgent
                }, _storage, false);
            }
            errorCount++;
        }
    }
    if (isSingleEvent && errorCount > 0) {
        // A failed single event is abandoned for redelivery and the file released.
        queueEntry.Abandon();
        _storage.SetNotActive(queueEntry.Value.FilePath);
    } else {
        // Batches complete even with failures, since failed members were re-enqueued above.
        queueEntry.Complete();
        if (queueEntry.Value.ShouldArchive) {
            _storage.CompleteEventPost(queueEntry.Value.FilePath, eventPost.ProjectId, created, queueEntry.Value.ShouldArchive);
        } else {
            _storage.DeleteFile(queueEntry.Value.FilePath);
            _storage.SetNotActive(queueEntry.Value.FilePath);
        }
    }
    return(JobResult.Success);
}
// Async variant of the event-post processor: loads the post payload for the dequeued
// file-info entry, parses it into events, runs the whole batch through the pipeline in
// one call, and re-enqueues individually any batch members that failed. The queue entry
// is abandoned (for redelivery) on parse failure, cancellation, or a failed single
// event; otherwise it is completed via CompleteEntryAsync.
protected override async Task <JobResult> ProcessQueueEntryAsync(JobQueueEntryContext <EventPost> context) {
    var queueEntry = context.QueueEntry;
    // Load the post data and mark the file as in-progress so other workers skip it.
    EventPostInfo eventPostInfo = await _storage.GetEventPostAndSetActiveAsync(queueEntry.Value.FilePath, context.CancellationToken).AnyContext();
    if (eventPostInfo == null) {
        await queueEntry.AbandonAsync().AnyContext();
        await _storage.SetNotActiveAsync(queueEntry.Value.FilePath).AnyContext();
        return(JobResult.FailedWithMessage($"Unable to retrieve post data '{queueEntry.Value.FilePath}'."));
    }
    // Suppress informational logging for the internal project to avoid self-generated noise.
    bool isInternalProject = eventPostInfo.ProjectId == Settings.Current.InternalProjectId;
    Logger.Info().Message("Processing post: id={0} path={1} project={2} ip={3} v={4} agent={5}", queueEntry.Id, queueEntry.Value.FilePath, eventPostInfo.ProjectId, eventPostInfo.IpAddress, eventPostInfo.ApiVersion, eventPostInfo.UserAgent).WriteIf(!isInternalProject);
    List <PersistentEvent> events = null;
    try {
        // Time the parse and record event-count metrics.
        _metricsClient.Time(() => {
            events = ParseEventPost(eventPostInfo);
            Logger.Info().Message("Parsed {0} events for post: id={1}", events.Count, queueEntry.Id).WriteIf(!isInternalProject);
        }, MetricNames.PostsParsingTime);
        await _metricsClient.CounterAsync(MetricNames.PostsParsed).AnyContext();
        await _metricsClient.GaugeAsync(MetricNames.PostsEventCount, events.Count).AnyContext();
    } catch (Exception ex) {
        // Parse failure: abandon the entry and release the file so it can be retried.
        await queueEntry.AbandonAsync().AnyContext();
        await _metricsClient.CounterAsync(MetricNames.PostsParseErrors).AnyContext();
        await _storage.SetNotActiveAsync(queueEntry.Value.FilePath).AnyContext();
        Logger.Error().Exception(ex).Message("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message).Write();
        return(JobResult.FromException(ex, $"An error occurred while processing the EventPost '{queueEntry.Id}': {ex.Message}"));
    }
    if (!events.Any() || context.CancellationToken.IsCancellationRequested) {
        // Nothing to do (or cancelled): abandon and release the file.
        // NOTE(review): an empty-but-successfully-parsed post is abandoned rather than
        // completed, so it will be redelivered — confirm that is intended.
        await queueEntry.AbandonAsync().AnyContext();
        await _storage.SetNotActiveAsync(queueEntry.Value.FilePath).AnyContext();
        return(!events.Any() ? JobResult.Success : JobResult.Cancelled);
    }
    int eventsToProcess = events.Count;
    bool isSingleEvent = events.Count == 1;
    if (!isSingleEvent) {
        var project = await _projectRepository.GetByIdAsync(eventPostInfo.ProjectId, true).AnyContext();
        if (project == null) {
            // NOTE: This could archive the data for a project that no longer exists.
            Logger.Error().Project(eventPostInfo.ProjectId).Message($"Unable to process EventPost \"{queueEntry.Value.FilePath}\": Unable to load project: {eventPostInfo.ProjectId}").Write();
            await CompleteEntryAsync(queueEntry, eventPostInfo, DateTime.UtcNow).AnyContext();
            return(JobResult.Success);
        }
        // Don't process all the events if it will put the account over its limits.
        eventsToProcess = await _organizationRepository.GetRemainingEventLimitAsync(project.OrganizationId).AnyContext();
        // Add 1 because we already counted 1 against their limit when we received the event post.
        if (eventsToProcess < Int32.MaxValue) {
            eventsToProcess += 1;
        }
        // Increment by count - 1 since we already incremented it by 1 in the OverageHandler.
        await _organizationRepository.IncrementUsageAsync(project.OrganizationId, false, events.Count - 1).AnyContext();
    }
    var errorCount = 0;
    // All events in the batch share one creation timestamp.
    var created = DateTime.UtcNow;
    try {
        events.ForEach(e => e.CreatedUtc = created);
        // Run the whole (limit-capped) batch through the pipeline in one call.
        var results = await _eventPipeline.RunAsync(events.Take(eventsToProcess).ToList()).AnyContext();
        Logger.Info().Message("Ran {0} events through the pipeline: id={1} project={2} success={3} error={4}", results.Count, queueEntry.Id, eventPostInfo.ProjectId, results.Count(r => r.IsProcessed), results.Count(r => r.HasError)).WriteIf(!isInternalProject);
        foreach (var eventContext in results) {
            if (eventContext.IsCancelled) {
                continue;
            }
            if (!eventContext.HasError) {
                continue;
            }
            Logger.Error().Exception(eventContext.Exception).Project(eventPostInfo.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, eventContext.ErrorMessage).Write();
            // Validation failures are logged but not retried and do not count toward errorCount.
            if (eventContext.Exception is ValidationException) {
                continue;
            }
            errorCount++;
            if (!isSingleEvent) {
                // Put this single event back into the queue so we can retry it separately.
                await _queue.EnqueueAsync(new EventPostInfo {
                    ApiVersion = eventPostInfo.ApiVersion,
                    Data = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(eventContext.Event)),
                    IpAddress = eventPostInfo.IpAddress,
                    MediaType = eventPostInfo.MediaType,
                    CharSet = eventPostInfo.CharSet,
                    ProjectId = eventPostInfo.ProjectId,
                    UserAgent = eventPostInfo.UserAgent
                }, _storage, false, context.CancellationToken).AnyContext();
            }
        }
    } catch (Exception ex) {
        Logger.Error().Exception(ex).Project(eventPostInfo.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, ex.Message).Write();
        // Non-recoverable errors (bad argument / missing document) complete the entry so it
        // is not retried; anything else is treated as a processing failure.
        if (ex is ArgumentException || ex is DocumentNotFoundException) {
            await queueEntry.CompleteAsync().AnyContext();
        } else {
            errorCount++;
        }
    }
    if (isSingleEvent && errorCount > 0) {
        // A failed single event is abandoned for redelivery and the file released.
        await queueEntry.AbandonAsync().AnyContext();
        await _storage.SetNotActiveAsync(queueEntry.Value.FilePath).AnyContext();
    } else {
        // Batches complete even with failures, since failed members were re-enqueued above.
        await CompleteEntryAsync(queueEntry, eventPostInfo, created).AnyContext();
    }
    return(JobResult.Success);
}
/// <summary>
/// Posts a web hook notification payload to its configured URL with a 5 second timeout.
/// Consecutive failures are tracked in the cache: after more than 10 consecutive errors,
/// further attempts are blocked for 15 minutes, and the integration is disabled outright
/// on Unauthorized/Forbidden/Gone responses or when failures persist across multiple days.
/// </summary>
protected override async Task<JobResult> ProcessQueueEntryAsync(QueueEntryContext<WebHookNotification> context) {
    var body = context.QueueEntry.Value;
    // Suppress per-call logging for the internal project to avoid self-generated noise.
    bool shouldLog = body.ProjectId != _appOptions.Value.InternalProjectId;
    using (_logger.BeginScope(new ExceptionlessState().Organization(body.OrganizationId).Project(body.ProjectId))) {
        if (shouldLog) {
            // FIX: the project placeholder was "{1}" — a positional leftover that produces a
            // structured-log property literally named "1"; use a named placeholder like the
            // rest of the template.
            _logger.LogTrace("Process web hook call: id={Id} project={Project} url={Url}", context.QueueEntry.Id, body.ProjectId, body.Url);
        }

        if (!await IsEnabledAsync(body).AnyContext()) {
            _logger.LogInformation("Web hook cancelled: Web hook is disabled");
            return JobResult.Cancelled;
        }

        // Failure-tracking state is scoped per web hook instance.
        var cache = new ScopedCacheClient(_cacheClient, GetCacheKeyScope(body));
        long consecutiveErrors = await cache.GetAsync<long>(ConsecutiveErrorsCacheKey, 0).AnyContext();
        if (consecutiveErrors > 10) {
            // Back off: allow a retry only 15 minutes after the last failed attempt.
            var lastAttempt = await cache.GetAsync(LastAttemptCacheKey, SystemClock.UtcNow).AnyContext();
            var nextAttemptAllowedAt = lastAttempt.AddMinutes(15);
            if (nextAttemptAllowedAt >= SystemClock.UtcNow) {
                _logger.LogInformation("Web hook cancelled due to {FailureCount} consecutive failed attempts. Will be allowed to try again at {NextAttempt}.", consecutiveErrors, nextAttemptAllowedAt);
                return JobResult.Cancelled;
            }
        }

        bool successful = true;
        HttpResponseMessage response = null;
        try {
            // Cap the POST at 5 seconds, also honoring the job's own cancellation token.
            using (var timeoutCancellationTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(5))) {
                using (var postCancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(context.CancellationToken, timeoutCancellationTokenSource.Token)) {
                    response = await _client.PostAsJsonAsync(body.Url, body.Data.ToJson(Formatting.Indented, _jsonSerializerSettings), postCancellationTokenSource.Token).AnyContext();
                    if (!response.IsSuccessStatusCode) {
                        successful = false;
                    } else if (consecutiveErrors > 0) {
                        // Success after failures: reset all failure-tracking state.
                        await cache.RemoveAllAsync(_cacheKeys).AnyContext();
                    }
                }
            }
        } catch (OperationCanceledException ex) {
            // Timeout (or job cancellation); the finally block still records the failure.
            successful = false;
            if (shouldLog) {
                _logger.LogError(ex, "Timeout calling web hook: status={Status} org={organization} project={project} url={Url}", response?.StatusCode, body.OrganizationId, body.ProjectId, body.Url);
            }
            return JobResult.Cancelled;
        } catch (Exception ex) {
            successful = false;
            if (shouldLog) {
                _logger.LogError(ex, "Error calling web hook: status={Status} org={organization} project={project} url={Url}", response?.StatusCode, body.OrganizationId, body.ProjectId, body.Url);
            }
            return JobResult.FromException(ex);
        } finally {
            // Failure bookkeeping runs even on the early returns above.
            if (successful) {
                _logger.LogInformation("Web hook POST complete: status={Status} org={organization} project={project} url={Url}", response?.StatusCode, body.OrganizationId, body.ProjectId, body.Url);
            } else if (response != null && (response.StatusCode == HttpStatusCode.Unauthorized || response.StatusCode == HttpStatusCode.Forbidden || response.StatusCode == HttpStatusCode.Gone)) {
                // Auth failures and Gone mean the endpoint will never accept us: disable now.
                _logger.LogWarning("Disabling Web hook instance {WebHookId} due to status code: status={Status} org={organization} project={project} url={Url}", body.Type == WebHookType.Slack ? "Slack" : body.WebHookId, response.StatusCode, body.OrganizationId, body.ProjectId, body.Url);
                await DisableIntegrationAsync(body).AnyContext();
                await cache.RemoveAllAsync(_cacheKeys).AnyContext();
            } else {
                // Record this failure; all tracking keys expire after 3 days of quiet.
                var now = SystemClock.UtcNow;
                await cache.SetAsync(LastAttemptCacheKey, now, TimeSpan.FromDays(3)).AnyContext();
                consecutiveErrors = await cache.IncrementAsync(ConsecutiveErrorsCacheKey, TimeSpan.FromDays(3)).AnyContext();
                DateTime firstAttempt;
                if (consecutiveErrors == 1) {
                    await cache.SetAsync(FirstAttemptCacheKey, now, TimeSpan.FromDays(3)).AnyContext();
                    firstAttempt = now;
                } else {
                    firstAttempt = await cache.GetAsync(FirstAttemptCacheKey, now).AnyContext();
                }

                if (consecutiveErrors >= 10) {
                    // don't retry any more
                    context.QueueEntry.MarkCompleted();

                    // disable if more than 10 consecutive errors over the course of multiple days
                    if (firstAttempt.IsBefore(now.SubtractDays(2))) {
                        _logger.LogWarning("Disabling Web hook instance {WebHookId} due to too many consecutive failures.", body.Type == WebHookType.Slack ? "Slack" : body.WebHookId);
                        await DisableIntegrationAsync(body).AnyContext();
                        await cache.RemoveAllAsync(_cacheKeys).AnyContext();
                    }
                }
            }
        }
    }

    return JobResult.Success;
}