private void UpdateStats() {
    if (_stats == null || String.IsNullOrEmpty(QueueSizeStatName))
        return;

    long count = GetQueueCount();
    _stats.Gauge(QueueSizeStatName, count);
}
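A gauge like this only has value if something samples it on an interval. A minimal sketch of such a sampler, assuming the owning class can host a System.Threading.Timer (the field, the StartStatsReporting name, and the five-second period are all illustrative, not from the original source):

private Timer _statsTimer; // System.Threading.Timer

private void StartStatsReporting() {
    // Push the current queue depth to the stats client every five seconds.
    _statsTimer = new Timer(_ => UpdateStats(), null, TimeSpan.Zero, TimeSpan.FromSeconds(5));
}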
protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) {
    if (!IsEventPost(request))
        return base.SendAsync(request, cancellationToken);

    if (_cacheClient.TryGet("ApiDisabled", false))
        return CreateResponse(request, HttpStatusCode.ServiceUnavailable, "Service Unavailable");

    var project = request.GetDefaultProject();
    if (project == null)
        return CreateResponse(request, HttpStatusCode.Unauthorized, "Unauthorized");

    bool tooBig = false;
    if (request.Content != null && request.Content.Headers != null) {
        long size = request.Content.Headers.ContentLength.GetValueOrDefault();
        _statsClient.Gauge(StatNames.PostsSize, size);
        if (size > Settings.Current.MaximumEventPostSize) {
            Log.Warn().Message("Event submission discarded for being too large: {0}", size).Project(project.Id).Write();
            _statsClient.Counter(StatNames.PostsDiscarded);
            tooBig = true;
        }
    }

    bool overLimit = _organizationRepository.IncrementUsage(project.OrganizationId, tooBig);

    // Block large submissions, but return a success status code so the client doesn't keep sending them.
    if (tooBig)
        return CreateResponse(request, HttpStatusCode.Accepted, "Event submission discarded for being too large.");

    if (overLimit) {
        _statsClient.Counter(StatNames.PostsBlocked);
        return CreateResponse(request, HttpStatusCode.PaymentRequired, "Event limit exceeded.");
    }

    return base.SendAsync(request, cancellationToken);
}
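A DelegatingHandler like this one only runs if it is added to the Web API pipeline. A minimal sketch of that wiring, assuming standard ASP.NET Web API hosting and that the handler class is the OverageHandler referenced in the job comments below (the RegisterHandlers method is illustrative; the handler instance would come from whatever container builds its dependencies):

public static void RegisterHandlers(HttpConfiguration config, OverageHandler overageHandler) {
    // Message handlers see every request before routing selects a controller,
    // which is what lets the overage check short-circuit event posts early.
    config.MessageHandlers.Add(overageHandler);
}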
protected override async Task<JobResult> RunInternalAsync(CancellationToken token) {
    Log.Info().Message("Process events job starting").Write();

    QueueEntry<EventPostFileInfo> queueEntry = null;
    try {
        queueEntry = _queue.Dequeue(TimeSpan.FromSeconds(1));
    } catch (Exception ex) {
        if (!(ex is TimeoutException)) {
            Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next EventPost: {0}", ex.Message).Write();
            return JobResult.FromException(ex);
        }
    }

    if (queueEntry == null)
        return JobResult.Success;

    EventPost eventPost = _storage.GetEventPostAndSetActive(queueEntry.Value.FilePath);
    if (eventPost == null) {
        queueEntry.Abandon();
        _storage.SetNotActive(queueEntry.Value.FilePath);
        return JobResult.FailedWithMessage(String.Format("Unable to retrieve post data '{0}'.", queueEntry.Value.FilePath));
    }

    _statsClient.Counter(StatNames.PostsDequeued);
    Log.Info().Message("Processing EventPost '{0}'.", queueEntry.Id).Write();

    List<PersistentEvent> events = null;
    try {
        _statsClient.Time(() => {
            events = ParseEventPost(eventPost);
        }, StatNames.PostsParsingTime);
        _statsClient.Counter(StatNames.PostsParsed);
        _statsClient.Gauge(StatNames.PostsBatchSize, events.Count);
    } catch (Exception ex) {
        _statsClient.Counter(StatNames.PostsParseErrors);
        queueEntry.Abandon();
        _storage.SetNotActive(queueEntry.Value.FilePath);

        // TODO: Add the EventPost to the logged exception.
        Log.Error().Exception(ex).Message("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message).Write();
        return JobResult.FromException(ex, String.Format("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message));
    }

    if (events == null) {
        queueEntry.Abandon();
        _storage.SetNotActive(queueEntry.Value.FilePath);
        return JobResult.Success;
    }

    int eventsToProcess = events.Count;
    bool isSingleEvent = events.Count == 1;
    if (!isSingleEvent) {
        var project = _projectRepository.GetById(eventPost.ProjectId, true);

        // Don't process all the events if it will put the account over its limits.
        eventsToProcess = _organizationRepository.GetRemainingEventLimit(project.OrganizationId);

        // Add 1 because we already counted 1 against their limit when we received the event post.
        if (eventsToProcess < Int32.MaxValue)
            eventsToProcess += 1;

        // Increment by count - 1 since we already incremented it by 1 in the OverageHandler.
        _organizationRepository.IncrementUsage(project.OrganizationId, events.Count - 1);
    }

    int errorCount = 0;
    DateTime created = DateTime.UtcNow;
    foreach (PersistentEvent ev in events.Take(eventsToProcess)) {
        try {
            ev.CreatedUtc = created;
            _eventPipeline.Run(ev);
        } catch (ValidationException ex) {
            Log.Error().Exception(ex).Project(eventPost.ProjectId).Message("Event validation error occurred: {0}", ex.Message).Write();
        } catch (Exception ex) {
            Log.Error().Exception(ex).Project(eventPost.ProjectId).Message("Error while processing event: {0}", ex.Message).Write();

            if (!isSingleEvent) {
                // Put this single event back into the queue so we can retry it separately.
                _queue.Enqueue(new EventPost {
                    ApiVersion = eventPost.ApiVersion,
                    CharSet = eventPost.CharSet,
                    ContentEncoding = "application/json",
                    Data = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(ev)),
                    IpAddress = eventPost.IpAddress,
                    MediaType = eventPost.MediaType,
                    ProjectId = eventPost.ProjectId,
                    UserAgent = eventPost.UserAgent
                }, _storage, false);
            }
            errorCount++;
        }
    }

    if (isSingleEvent && errorCount > 0) {
        queueEntry.Abandon();
        _storage.SetNotActive(queueEntry.Value.FilePath);
    } else {
        queueEntry.Complete();
        if (queueEntry.Value.ShouldArchive) {
            _storage.CompleteEventPost(queueEntry.Value.FilePath, eventPost.ProjectId, created, queueEntry.Value.ShouldArchive);
        } else {
            _storage.DeleteFile(queueEntry.Value.FilePath);
            _storage.SetNotActive(queueEntry.Value.FilePath);
        }
    }

    return JobResult.Success;
}
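Each call to this version processes at most one queue entry and then returns, so a long-running worker has to invoke the job in a loop. A minimal sketch of such a driver, assuming a public RunAsync entry point that delegates to RunInternalAsync (the EventPostsJob type name, RunAsync, and the 100 ms idle delay are illustrative, not from the original source):

private static async Task RunContinuouslyAsync(EventPostsJob job, CancellationToken token) {
    while (!token.IsCancellationRequested) {
        // RunAsync is assumed to wrap RunInternalAsync; the short delay keeps
        // an empty queue from turning into a busy-wait loop.
        await job.RunAsync(token);
        await Task.Delay(TimeSpan.FromMilliseconds(100), token);
    }
}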
protected override async Task<JobResult> RunInternalAsync() {
    Log.Info().Message("Process events job starting").Write();

    int totalEventsProcessed = 0;
    int totalEventsToProcess = Context.GetWorkItemLimit(); // -1 is treated as no limit below

    while (!CancelPending && (totalEventsToProcess == -1 || totalEventsProcessed < totalEventsToProcess)) {
        QueueEntry<EventPost> queueEntry = null;
        try {
            queueEntry = await _queue.DequeueAsync();
        } catch (Exception ex) {
            if (!(ex is TimeoutException)) {
                Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next EventPost: {0}", ex.Message).Write();
                return JobResult.FromException(ex);
            }
        }

        if (queueEntry == null)
            continue;

        _statsClient.Counter(StatNames.PostsDequeued);
        Log.Info().Message("Processing EventPost '{0}'.", queueEntry.Id).Write();

        List<PersistentEvent> events = null;
        try {
            _statsClient.Time(() => {
                events = ParseEventPost(queueEntry.Value);
            }, StatNames.PostsParsingTime);
            _statsClient.Counter(StatNames.PostsParsed);
            _statsClient.Gauge(StatNames.PostsBatchSize, events.Count);
        } catch (Exception ex) {
            _statsClient.Counter(StatNames.PostsParseErrors);
            queueEntry.AbandonAsync().Wait();

            // TODO: Add the EventPost to the logged exception.
            Log.Error().Exception(ex).Message("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message).Write();
            continue;
        }

        if (events == null) {
            queueEntry.AbandonAsync().Wait();
            continue;
        }

        int eventsToProcess = events.Count;
        bool isSingleEvent = events.Count == 1;
        if (!isSingleEvent) {
            var project = _projectRepository.GetById(queueEntry.Value.ProjectId, true);

            // Don't process all the events if it will put the account over its limits.
            eventsToProcess = _organizationRepository.GetRemainingEventLimit(project.OrganizationId);

            // Add 1 because we already counted 1 against their limit when we received the event post.
            if (eventsToProcess < Int32.MaxValue)
                eventsToProcess += 1;

            // Increment by count - 1 since we already incremented it by 1 in the OverageHandler.
            _organizationRepository.IncrementUsage(project.OrganizationId, events.Count - 1);
        }

        int errorCount = 0;
        foreach (PersistentEvent ev in events.Take(eventsToProcess)) {
            try {
                _eventPipeline.Run(ev);
                totalEventsProcessed++;
                if (totalEventsToProcess > 0 && totalEventsProcessed >= totalEventsToProcess)
                    break;
            } catch (ValidationException ex) {
                Log.Error().Exception(ex).Project(queueEntry.Value.ProjectId).Message("Event validation error occurred: {0}", ex.Message).Write();
            } catch (Exception ex) {
                Log.Error().Exception(ex).Project(queueEntry.Value.ProjectId).Message("Error while processing event: {0}", ex.Message).Write();

                if (!isSingleEvent) {
                    // Put this single event back into the queue so we can retry it separately.
                    _queue.EnqueueAsync(new EventPost {
                        Data = Encoding.UTF8.GetBytes(ev.ToJson()).Compress(),
                        ContentEncoding = "gzip",
                        ProjectId = ev.ProjectId,
                        CharSet = "utf-8",
                        MediaType = "application/json"
                    }).Wait();
                }
                errorCount++;
            }
        }

        if (isSingleEvent && errorCount > 0)
            queueEntry.AbandonAsync().Wait();
        else
            queueEntry.CompleteAsync().Wait();
    }

    return JobResult.Success;
}
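This version re-enqueues failed events gzip-compressed through a Compress() extension that is not shown in these snippets. A minimal sketch of such an extension using GZipStream (the real helper in this codebase may differ):

// Requires System.IO and System.IO.Compression.
public static class ByteArrayExtensions {
    public static byte[] Compress(this byte[] data) {
        using (var output = new MemoryStream()) {
            // Dispose the GZipStream before reading the buffer so the gzip
            // footer is flushed to the underlying stream.
            using (var gzip = new GZipStream(output, CompressionMode.Compress))
                gzip.Write(data, 0, data.Length);
            return output.ToArray();
        }
    }
}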
protected override async Task<JobResult> RunInternalAsync(CancellationToken token) {
    QueueEntry<EventPost> queueEntry = null;
    try {
        queueEntry = _queue.Dequeue(TimeSpan.FromSeconds(1));
    } catch (Exception ex) {
        if (!(ex is TimeoutException)) {
            Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next EventPost: {0}", ex.Message).Write();
            return JobResult.FromException(ex);
        }
    }

    if (queueEntry == null)
        return JobResult.Success;

    if (token.IsCancellationRequested) {
        queueEntry.Abandon();
        return JobResult.Cancelled;
    }

    EventPostInfo eventPostInfo = _storage.GetEventPostAndSetActive(queueEntry.Value.FilePath);
    if (eventPostInfo == null) {
        queueEntry.Abandon();
        _storage.SetNotActive(queueEntry.Value.FilePath);
        return JobResult.FailedWithMessage(String.Format("Unable to retrieve post data '{0}'.", queueEntry.Value.FilePath));
    }

    bool isInternalProject = eventPostInfo.ProjectId == Settings.Current.InternalProjectId;
    _statsClient.Counter(StatNames.PostsDequeued);
    Log.Info().Message("Processing post: id={0} path={1} project={2} ip={3} v={4} agent={5}", queueEntry.Id, queueEntry.Value.FilePath, eventPostInfo.ProjectId, eventPostInfo.IpAddress, eventPostInfo.ApiVersion, eventPostInfo.UserAgent).WriteIf(!isInternalProject);

    List<PersistentEvent> events = null;
    try {
        _statsClient.Time(() => {
            events = ParseEventPost(eventPostInfo);
            Log.Info().Message("Parsed {0} events for post: id={1}", events.Count, queueEntry.Id).WriteIf(!isInternalProject);
        }, StatNames.PostsParsingTime);
        _statsClient.Counter(StatNames.PostsParsed);
        _statsClient.Gauge(StatNames.PostsEventCount, events.Count);
    } catch (Exception ex) {
        _statsClient.Counter(StatNames.PostsParseErrors);
        queueEntry.Abandon();
        _storage.SetNotActive(queueEntry.Value.FilePath);
        Log.Error().Exception(ex).Message("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message).Write();
        return JobResult.FromException(ex, String.Format("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message));
    }

    if (token.IsCancellationRequested) {
        queueEntry.Abandon();
        return JobResult.Cancelled;
    }

    if (events == null) {
        queueEntry.Abandon();
        _storage.SetNotActive(queueEntry.Value.FilePath);
        return JobResult.Success;
    }

    int eventsToProcess = events.Count;
    bool isSingleEvent = events.Count == 1;
    if (!isSingleEvent) {
        var project = _projectRepository.GetById(eventPostInfo.ProjectId, true);

        // Don't process all the events if it will put the account over its limits.
        eventsToProcess = _organizationRepository.GetRemainingEventLimit(project.OrganizationId);

        // Add 1 because we already counted 1 against their limit when we received the event post.
        if (eventsToProcess < Int32.MaxValue)
            eventsToProcess += 1;

        // Increment by count - 1 since we already incremented it by 1 in the OverageHandler.
        _organizationRepository.IncrementUsage(project.OrganizationId, false, events.Count - 1);
    }

    var errorCount = 0;
    var created = DateTime.UtcNow;
    try {
        events.ForEach(e => e.CreatedUtc = created);
        var results = _eventPipeline.Run(events.Take(eventsToProcess).ToList());
        Log.Info().Message("Ran {0} events through the pipeline: id={1} project={2} success={3} error={4}", results.Count, queueEntry.Id, eventPostInfo.ProjectId, results.Count(r => r.IsProcessed), results.Count(r => r.HasError)).WriteIf(!isInternalProject);

        foreach (var eventContext in results) {
            if (eventContext.IsCancelled)
                continue;

            if (!eventContext.HasError)
                continue;

            Log.Error().Exception(eventContext.Exception).Project(eventPostInfo.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, eventContext.ErrorMessage).Write();
            if (eventContext.Exception is ValidationException)
                continue;

            errorCount++;
            if (!isSingleEvent) {
                // Put this single event back into the queue so we can retry it separately.
                _queue.Enqueue(new EventPostInfo {
                    ApiVersion = eventPostInfo.ApiVersion,
                    CharSet = eventPostInfo.CharSet,
                    ContentEncoding = "application/json",
                    Data = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(eventContext.Event)),
                    IpAddress = eventPostInfo.IpAddress,
                    MediaType = eventPostInfo.MediaType,
                    ProjectId = eventPostInfo.ProjectId,
                    UserAgent = eventPostInfo.UserAgent
                }, _storage, false);
            }
        }
    } catch (ArgumentException ex) {
        Log.Error().Exception(ex).Project(eventPostInfo.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, ex.Message).Write();
        queueEntry.Complete();
    } catch (Exception ex) {
        Log.Error().Exception(ex).Project(eventPostInfo.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, ex.Message).Write();
        errorCount++;
    }

    if (isSingleEvent && errorCount > 0) {
        queueEntry.Abandon();
        _storage.SetNotActive(queueEntry.Value.FilePath);
    } else {
        queueEntry.Complete();
        if (queueEntry.Value.ShouldArchive) {
            _storage.CompleteEventPost(queueEntry.Value.FilePath, eventPostInfo.ProjectId, created, queueEntry.Value.ShouldArchive);
        } else {
            _storage.DeleteFile(queueEntry.Value.FilePath);
            _storage.SetNotActive(queueEntry.Value.FilePath);
        }
    }

    return JobResult.Success;
}
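All three job versions depend on a ParseEventPost method that never appears in these snippets. A minimal sketch of its likely shape, consistent with the encodings produced above (the Decompress() helper and the plugin-manager entry point are assumptions, not the actual implementation):

private List<PersistentEvent> ParseEventPost(EventPostInfo ep) {
    byte[] data = ep.Data;

    // Mirror the producers above: payloads re-enqueued with ContentEncoding
    // "gzip" have to be decompressed before decoding.
    if (String.Equals(ep.ContentEncoding, "gzip", StringComparison.OrdinalIgnoreCase))
        data = data.Decompress(); // assumed counterpart to the Compress() extension

    string input = Encoding.GetEncoding(ep.CharSet ?? "utf-8").GetString(data);

    // A plugin-based parser is assumed; it fans one post out into one or more events.
    return _eventParserPluginManager.ParseEvents(input, ep.ApiVersion, ep.UserAgent);
}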