Example #1
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <AddressTransactedWorkItem> context)
        {
            string address = context.QueueEntry.Value.Address;

            if (string.IsNullOrEmpty(address))
            {
                return(JobResult.SuccessWithMessage("Address was empty (maybe a contract deployment?), skipping"));
            }

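            // Skip addresses that are already in the cache; they were processed on a previous pass.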
            if (await _addressCache.ExistsAsync(address).AnyContext())
            {
                return(JobResult.SuccessWithMessage($"Address {address} exists in cache, skipping"));
            }

            try
            {
                if (!await _addressRepository.AddressExists(address).AnyContext())
                {
                    await _addressRepository.UpsertAddress(new CinderAddress { Hash = address, ForceRefresh = true }).AnyContext();
                }

                await _addressCache.AddAsync(address, string.Empty).AnyContext();
            }
            catch (Exception e)
            {
                return(JobResult.FromException(e));
            }

            return(JobResult.Success);
        }
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <WebHookNotification> context)
        {
            WebHookNotification body = context.QueueEntry.Value;
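            // Suppress verbose logging for the internal project; WriteIf(shouldLog) only writes for other projects.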
            bool shouldLog           = body.ProjectId != Settings.Current.InternalProjectId;

            _logger.Trace().Project(body.ProjectId).Message("Process web hook call: id={0} project={1} url={2}", context.QueueEntry.Id, body.ProjectId, body.Url).WriteIf(shouldLog);

            var client = new HttpClient();
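            // Note: creating a new HttpClient per queue entry can exhaust sockets under load; the revised variant of this job below reuses a shared _client field instead.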

            try {
                var response = await client.PostAsJsonAsync(body.Url, body.Data.ToJson(Formatting.Indented, _jsonSerializerSettings), context.CancellationToken).AnyContext();

                if (response.StatusCode == HttpStatusCode.Gone)
                {
                    _logger.Warn().Project(body.ProjectId).Message("Deleting web hook: org={0} project={1} url={2}", body.OrganizationId, body.ProjectId, body.Url).Write();
                    await _webHookRepository.RemoveAsync(body.WebHookId).AnyContext();
                }

                _logger.Info().Project(body.ProjectId).Message("Web hook POST complete: status={0} org={1} project={2} url={3}", response.StatusCode, body.OrganizationId, body.ProjectId, body.Url).WriteIf(shouldLog);
            } catch (Exception ex) {
                return(JobResult.FromException(ex));
            }

            return(JobResult.Success);
        }
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <WebHookNotification> context)
        {
            var  body      = context.QueueEntry.Value;
            bool shouldLog = body.ProjectId != Settings.Current.InternalProjectId;

            _logger.Trace().Project(body.ProjectId).Message("Process web hook call: id={0} project={1} url={2}", context.QueueEntry.Id, body.ProjectId, body.Url).WriteIf(shouldLog);

            HttpResponseMessage response = null;

            try {
                response = await _client.PostAsJsonAsync(body.Url, body.Data.ToJson(Formatting.Indented, _jsonSerializerSettings), context.CancellationToken).AnyContext();
            } catch (Exception ex) {
                _logger.Error().Exception(ex).Project(body.ProjectId).Message("Error calling web hook: status={0} org={1} project={2} url={3}", response?.StatusCode, body.OrganizationId, body.ProjectId, body.Url).WriteIf(shouldLog);
                return(JobResult.FromException(ex));
            }

            if ((int)response.StatusCode == 429 && response.Headers.RetryAfter?.Date != null)
            {
                // TODO: Better handle rate limits
                // throw new RateLimitException { RetryAfter = response.Headers.RetryAfter.Date.Value.UtcDateTime };

                _logger.Warn().Project(body.ProjectId).Message("Web hook rate limit reached: status={0} org={1} project={2} url={3}", response.StatusCode, body.OrganizationId, body.ProjectId, body.Url).WriteIf(shouldLog);
                return(JobResult.FailedWithMessage("Rate limit exceeded"));
            }

            if (response.StatusCode == HttpStatusCode.Unauthorized || response.StatusCode == HttpStatusCode.Forbidden || response.StatusCode == HttpStatusCode.NotFound || response.StatusCode == HttpStatusCode.Gone)
            {
                _logger.Warn().Project(body.ProjectId).Message("Deleting web hook: status={0} org={1} project={2} url={3}", response.StatusCode, body.OrganizationId, body.ProjectId, body.Url).WriteIf(shouldLog);
                await RemoveIntegrationAsync(body).AnyContext();
            }

            _logger.Info().Project(body.ProjectId).Message("Web hook POST complete: status={0} org={1} project={2} url={3}", response.StatusCode, body.OrganizationId, body.ProjectId, body.Url).WriteIf(shouldLog);
            return(JobResult.Success);
        }
Example #4
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <NetInfoWorkItem> context)
        {
            try
            {
                NetInfoWorkItem block = context.QueueEntry.Value;
                _logger.LogDebug("NetStatsJob fired, Block: {@Block}", block);

                NetInfo netInfo = await _statsCache.GetAsync(NetInfo.DefaultCacheKey, new NetInfo()).AnyContext();

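                // Fold the latest block's data into the cached network stats snapshot, then write it back below.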
                netInfo.BestBlock          = block.BlockNumber;
                netInfo.BestBlockTimestamp = block.Timestamp;
                netInfo.Difficulty         = await _netInfoService.GetDifficulty(block.Difficulty).AnyContext();

                netInfo.AverageBlockTime = await _netInfoService.GetAverageBlockTime(block.Timestamp).AnyContext();

                netInfo.AverageNetworkHashRate = await _netInfoService.GetAverageNetworkHashRate(block.Difficulty).AnyContext();

                netInfo.ConnectedPeerCount = await _netInfoService.GetConnectedPeerCount().AnyContext();

                await _statsCache.SetAsync(NetInfo.DefaultCacheKey, netInfo).AnyContext();
            }
            catch (Exception e)
            {
                return(JobResult.FromException(e));
            }

            return(JobResult.Success);
        }
Example #5
        protected override Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <Message> context)
        {
            var message = context.QueueEntry.Value;

            _broadcastMessage.BroadcastMessage(message.Body, message.Username);

            return(Task.FromResult(JobResult.Success));
        }
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <MailMessage> context)
        {
            _logger.LogTrace("Processing message {Id}.", context.QueueEntry.Id);

            try {
                await _mailSender.SendAsync(context.QueueEntry.Value).AnyContext();

                _logger.LogInformation("Sent message: to={To} subject={Subject}", context.QueueEntry.Value.To, context.QueueEntry.Value.Subject);
            } catch (Exception ex) {
                return(JobResult.FromException(ex));
            }

            return(JobResult.Success);
        }
Example #7
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <PingRequest> context)
        {
            Interlocked.Increment(ref _runCount);

            _logger.Info(() => $"Got {RunCount.ToOrdinal()} ping. Sending pong!");
            await SystemClock.SleepAsync(TimeSpan.FromMilliseconds(1)).AnyContext();

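            // Throw randomly, based on the work item's configured percentage, to exercise the queue's retry/abandon handling.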
            if (RandomData.GetBool(context.QueueEntry.Value.PercentChanceOfException))
            {
                throw new ApplicationException("Boom!");
            }

            return(JobResult.Success);
        }
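All of these overrides follow the same Foundatio queue-job pattern: a class derives from QueueJobBase<T>, the job host dequeues entries, and ProcessQueueEntryAsync decides whether each entry is completed (JobResult.Success) or abandoned for retry (by throwing, or by returning a failed result). Below is a minimal sketch of that shape, assuming Foundatio's QueueJobBase<T>; the work item type and class name are illustrative and do not appear in the examples on this page.

        using System.Threading.Tasks;
        using Foundatio.Jobs;
        using Foundatio.Queues;
        using Microsoft.Extensions.Logging;

        // Hypothetical work item type, used only for this sketch.
        public class ExampleWorkItem
        {
            public string Payload { get; set; }
        }

        public class ExampleWorkItemJob : QueueJobBase<ExampleWorkItem>
        {
            // Base constructor signatures vary slightly between Foundatio versions;
            // the (queue, optional logger factory) form is the common one.
            public ExampleWorkItemJob(IQueue<ExampleWorkItem> queue, ILoggerFactory loggerFactory = null)
                : base(queue, loggerFactory) { }

            protected override Task<JobResult> ProcessQueueEntryAsync(QueueEntryContext<ExampleWorkItem> context)
            {
                var workItem = context.QueueEntry.Value;

                if (string.IsNullOrEmpty(workItem.Payload))
                {
                    return Task.FromResult(JobResult.FailedWithMessage("Empty payload"));
                }

                // Returning Success completes the queue entry; an exception or a failed
                // result causes the entry to be abandoned and retried by the queue.
                return Task.FromResult(JobResult.Success);
            }
        }

A host then runs such a job continuously (for example via Foundatio's JobRunner), dequeuing entries and invoking this method once per entry.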
Example #8
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <MailMessage> context)
        {
            _logger.Trace().Message("Processing message '{0}'.", context.QueueEntry.Id).Write();

            try {
                await _mailSender.SendAsync(context.QueueEntry.Value).AnyContext();

                _logger.Info().Message("Sent message: to={0} subject=\"{1}\"", context.QueueEntry.Value.To, context.QueueEntry.Value.Subject).Write();
            } catch (Exception ex) {
                return(JobResult.FromException(ex));
            }

            return(JobResult.Success);
        }
Example #9
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <MailMessage> context)
        {
            _logger.Trace("Processing message '{0}'.", context.QueueEntry.Id);

            try {
                await _mailSender.SendAsync(context.QueueEntry.Value).ConfigureAwait(false);

                _logger.Info()
                .Message(() => $"Sent message: to={context.QueueEntry.Value.To.ToDelimitedString()} subject=\"{context.QueueEntry.Value.Subject}\"")
                .Write();
            } catch (Exception ex) {
                await context.QueueEntry.AbandonAsync().AnyContext();

                return(JobResult.FromException(ex));
            }

            return(JobResult.Success);
        }
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <EventUserDescription> context)
        {
            _logger.LogTrace("Processing user description: id={0}", context.QueueEntry.Id);

            try {
                await ProcessUserDescriptionAsync(context.QueueEntry.Value).AnyContext();

                _logger.LogInformation("Processed user description: id={Id}", context.QueueEntry.Id);
            } catch (DocumentNotFoundException ex) {
                _logger.LogError(ex, "An event with this reference id {ReferenceId} has not been processed yet or was deleted. Queue Id: {Id}", ex.Id, context.QueueEntry.Id);
                return(JobResult.FromException(ex));
            } catch (Exception ex) {
                _logger.LogError(ex, "An error occurred while processing the EventUserDescription {Id}: {Message}", context.QueueEntry.Id, ex.Message);
                return(JobResult.FromException(ex));
            }

            return(JobResult.Success);
        }
        protected override Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <SampleQueueWorkItem> context)
        {
            _metrics.Counter("dequeued");

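            // Simulate occasional failures and abandons (RandomData.GetBool(10) is assumed to return true roughly 10% of the time) so that all three counters below get exercised.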
            if (RandomData.GetBool(10))
            {
                _metrics.Counter("errors");
                throw new Exception("Boom!");
            }

            if (RandomData.GetBool(10))
            {
                _metrics.Counter("abandoned");
                return(Task.FromResult(JobResult.FailedWithMessage("Abandoned")));
            }

            _metrics.Counter("completed");
            return(Task.FromResult(JobResult.Success));
        }
Example #12
        private async Task RetryEvents(QueueEntryContext <EventPost> context, List <PersistentEvent> eventsToRetry, EventPostInfo ep, IQueueEntry <EventPost> queueEntry)
        {
            await _metricsClient.GaugeAsync(MetricNames.EventsRetryCount, eventsToRetry.Count).AnyContext();

            foreach (var ev in eventsToRetry)
            {
                try {
                    string contentEncoding = null;
                    byte[] data            = ev.GetBytes(_jsonSerializerSettings);
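                    // Compress larger payloads before re-enqueueing so the retried entry stays small.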
                    if (data.Length > 1000)
                    {
                        data = await data.CompressAsync().AnyContext();

                        contentEncoding = "gzip";
                    }

                    // Put this single event back into the queue so we can retry it separately.
                    await _queue.Value.EnqueueAsync(new EventPostInfo {
                        ApiVersion      = ep.ApiVersion,
                        CharSet         = ep.CharSet,
                        ContentEncoding = contentEncoding,
                        Data            = data,
                        IpAddress       = ep.IpAddress,
                        MediaType       = ep.MediaType,
                        ProjectId       = ep.ProjectId,
                        UserAgent       = ep.UserAgent
                    }, _storage, false, context.CancellationToken).AnyContext();
                } catch (Exception ex) {
                    _logger.Error()
                    .Exception(ex)
                    .Critical()
                    .Message("Error while requeuing event post \"{0}\": {1}", queueEntry.Value.FilePath, ex.Message)
                    .Property("Event", new { ev.Date, ev.StackId, ev.Type, ev.Source, ev.Message, ev.Value, ev.Geo, ev.ReferenceId, ev.Tags })
                    .Project(ep.ProjectId)
                    .Write();

                    await _metricsClient.CounterAsync(MetricNames.EventsRetryErrors).AnyContext();
                }
            }
        }
Example #13
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <SampleQueueWorkItem> context)
        {
            await _metrics.CounterAsync("dequeued").AnyContext();

            if (RandomData.GetBool(10))
            {
                await _metrics.CounterAsync("errors").AnyContext();

                throw new ApplicationException("Boom!");
            }

            if (RandomData.GetBool(10))
            {
                await _metrics.CounterAsync("abandoned").AnyContext();

                return(JobResult.FailedWithMessage("Abandoned"));
            }

            await _metrics.CounterAsync("completed").AnyContext();

            return(JobResult.Success);
        }
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <WebHookNotification> context)
        {
            var  body      = context.QueueEntry.Value;
            bool shouldLog = body.ProjectId != _appOptions.Value.InternalProjectId;

            using (_logger.BeginScope(new ExceptionlessState().Organization(body.OrganizationId).Project(body.ProjectId))) {
                if (shouldLog)
                {
                    _logger.LogTrace("Process web hook call: id={Id} project={1} url={Url}", context.QueueEntry.Id, body.ProjectId, body.Url);
                }

                if (!await IsEnabledAsync(body).AnyContext())
                {
                    _logger.LogInformation("Web hook cancelled: Web hook is disabled");
                    return(JobResult.Cancelled);
                }

                var  cache             = new ScopedCacheClient(_cacheClient, GetCacheKeyScope(body));
                long consecutiveErrors = await cache.GetAsync <long>(ConsecutiveErrorsCacheKey, 0).AnyContext();

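                // Back off after repeated failures: once more than 10 consecutive errors have occurred, only retry after a 15-minute cool-down.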
                if (consecutiveErrors > 10)
                {
                    var lastAttempt = await cache.GetAsync(LastAttemptCacheKey, SystemClock.UtcNow).AnyContext();

                    var nextAttemptAllowedAt = lastAttempt.AddMinutes(15);
                    if (nextAttemptAllowedAt >= SystemClock.UtcNow)
                    {
                        _logger.LogInformation("Web hook cancelled due to {FailureCount} consecutive failed attempts. Will be allowed to try again at {NextAttempt}.", consecutiveErrors, nextAttemptAllowedAt);
                        return(JobResult.Cancelled);
                    }
                }

                bool successful = true;
                HttpResponseMessage response = null;
                try {
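                    // Give the outbound POST a 5-second budget, linked to the job's own cancellation token.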
                    using (var timeoutCancellationTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(5))) {
                        using (var postCancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(context.CancellationToken, timeoutCancellationTokenSource.Token)) {
                            response = await _client.PostAsJsonAsync(body.Url, body.Data.ToJson(Formatting.Indented, _jsonSerializerSettings), postCancellationTokenSource.Token).AnyContext();

                            if (!response.IsSuccessStatusCode)
                            {
                                successful = false;
                            }
                            else if (consecutiveErrors > 0)
                            {
                                await cache.RemoveAllAsync(_cacheKeys).AnyContext();
                            }
                        }
                    }
                } catch (OperationCanceledException ex) {
                    successful = false;
                    if (shouldLog)
                    {
                        _logger.LogError(ex, "Timeout calling web hook: status={Status} org={organization} project={project} url={Url}", response?.StatusCode, body.OrganizationId, body.ProjectId, body.Url);
                    }
                    return(JobResult.Cancelled);
                } catch (Exception ex) {
                    successful = false;
                    if (shouldLog)
                    {
                        _logger.LogError(ex, "Error calling web hook: status={Status} org={organization} project={project} url={Url}", response?.StatusCode, body.OrganizationId, body.ProjectId, body.Url);
                    }
                    return(JobResult.FromException(ex));
                } finally {
                    if (successful)
                    {
                        _logger.LogInformation("Web hook POST complete: status={Status} org={organization} project={project} url={Url}", response?.StatusCode, body.OrganizationId, body.ProjectId, body.Url);
                    }
                    else if (response != null && (response.StatusCode == HttpStatusCode.Unauthorized || response.StatusCode == HttpStatusCode.Forbidden || response.StatusCode == HttpStatusCode.Gone))
                    {
                        _logger.LogWarning("Disabling Web hook instance {WebHookId} due to status code: status={Status} org={organization} project={project} url={Url}", body.Type == WebHookType.Slack ? "Slack" : body.WebHookId, response.StatusCode, body.OrganizationId, body.ProjectId, body.Url);
                        await DisableIntegrationAsync(body).AnyContext();

                        await cache.RemoveAllAsync(_cacheKeys).AnyContext();
                    }
                    else
                    {
                        var now = SystemClock.UtcNow;
                        await cache.SetAsync(LastAttemptCacheKey, now, TimeSpan.FromDays(3)).AnyContext();

                        consecutiveErrors = await cache.IncrementAsync(ConsecutiveErrorsCacheKey, TimeSpan.FromDays(3)).AnyContext();

                        DateTime firstAttempt;
                        if (consecutiveErrors == 1)
                        {
                            await cache.SetAsync(FirstAttemptCacheKey, now, TimeSpan.FromDays(3)).AnyContext();

                            firstAttempt = now;
                        }
                        else
                        {
                            firstAttempt = await cache.GetAsync(FirstAttemptCacheKey, now).AnyContext();
                        }

                        if (consecutiveErrors >= 10)
                        {
                            // don't retry any more
                            context.QueueEntry.MarkCompleted();

                            // disable if more than 10 consecutive errors over the course of multiple days
                            if (firstAttempt.IsBefore(now.SubtractDays(2)))
                            {
                                _logger.LogWarning("Disabling Web hook instance {WebHookId} due to too many consecutive failures.", body.Type == WebHookType.Slack ? "Slack" : body.WebHookId);
                                await DisableIntegrationAsync(body).AnyContext();

                                await cache.RemoveAllAsync(_cacheKeys).AnyContext();
                            }
                        }
                    }
                }
            }

            return(JobResult.Success);
        }
        protected override Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <QueueItem> context)
        {
            return(Task.FromResult(JobResult.Success));
        }
Example #16
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <EventPost> context)
        {
            var      queueEntry = context.QueueEntry;
            FileSpec fileInfo   = null;
            await _metricsClient.TimeAsync(async() => fileInfo = await _storage.GetFileInfoAsync(queueEntry.Value.FilePath).AnyContext(), MetricNames.PostsFileInfoTime).AnyContext();

            if (fileInfo == null)
            {
                await _metricsClient.TimeAsync(() => queueEntry.AbandonAsync(), MetricNames.PostsAbandonTime).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to retrieve post data info '{queueEntry.Value.FilePath}'."));
            }

            await _metricsClient.GaugeAsync(MetricNames.PostsMessageSize, fileInfo.Size).AnyContext();

            if (fileInfo.Size > GetMaximumEventPostFileSize())
            {
                await _metricsClient.TimeAsync(() => queueEntry.CompleteAsync(), MetricNames.PostsCompleteTime).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to process post data '{queueEntry.Value.FilePath}' ({fileInfo.Size} bytes): Maximum event post size limit ({Settings.Current.MaximumEventPostSize} bytes) reached."));
            }

            EventPostInfo ep = null;
            await _metricsClient.TimeAsync(async() => ep = await _storage.GetEventPostAndSetActiveAsync(queueEntry.Value.FilePath, _logger, context.CancellationToken).AnyContext(), MetricNames.PostsMarkFileActiveTime).AnyContext();

            if (ep == null)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to retrieve post data '{queueEntry.Value.FilePath}'."));
            }

            await _metricsClient.GaugeAsync(MetricNames.PostsCompressedSize, ep.Data.Length).AnyContext();

            bool isInternalProject = ep.ProjectId == Settings.Current.InternalProjectId;

            _logger.Info()
            .Message("Processing post: id={0} path={1} project={2} ip={3} v={4} agent={5}", queueEntry.Id, queueEntry.Value.FilePath, ep.ProjectId, ep.IpAddress, ep.ApiVersion, ep.UserAgent)
            .Property("Id", queueEntry.Id)
            .Property("ApiVersion", ep.ApiVersion)
            .Property("IpAddress", ep.IpAddress)
            .Property("Client", ep.UserAgent)
            .Tag("processing", "compressed", ep.ContentEncoding)
            .Value(ep.Data.Length)
            .Project(ep.ProjectId)
            .WriteIf(!isInternalProject);

            var project = await _projectRepository.GetByIdAsync(ep.ProjectId, o => o.Cache()).AnyContext();

            if (project == null)
            {
                _logger.Error().Message("Unable to process EventPost \"{0}\": Unable to load project: {1}", queueEntry.Value.FilePath, ep.ProjectId).Property("Id", queueEntry.Id).Project(ep.ProjectId).WriteIf(!isInternalProject);
                await CompleteEntryAsync(queueEntry, ep, SystemClock.UtcNow).AnyContext();

                return(JobResult.Success);
            }

            long maxEventPostSize = Settings.Current.MaximumEventPostSize;

            byte[] uncompressedData = ep.Data;
            if (!String.IsNullOrEmpty(ep.ContentEncoding))
            {
                _logger.Debug().Message("Decompressing EventPost: {0} ({1} bytes)", queueEntry.Id, ep.Data.Length).Property("Id", queueEntry.Id).Tag("decompressing", ep.ContentEncoding).Project(ep.ProjectId).WriteIf(!isInternalProject);
                maxEventPostSize = GetMaximumUncompressedEventPostSize();
                try {
                    await _metricsClient.TimeAsync(async() => {
                        uncompressedData = await uncompressedData.DecompressAsync(ep.ContentEncoding).AnyContext();
                    }, MetricNames.PostsDecompressionTime).AnyContext();
                } catch (Exception ex) {
                    await _metricsClient.CounterAsync(MetricNames.PostsDecompressionErrors).AnyContext();
                    await CompleteEntryAsync(queueEntry, ep, SystemClock.UtcNow).AnyContext();

                    return(JobResult.FailedWithMessage($"Unable to decompress EventPost data '{queueEntry.Value.FilePath}' ({ep.Data.Length} bytes compressed): {ex.Message}"));
                }
            }

            await _metricsClient.GaugeAsync(MetricNames.PostsUncompressedSize, fileInfo.Size).AnyContext();

            if (uncompressedData.Length > maxEventPostSize)
            {
                await CompleteEntryAsync(queueEntry, ep, SystemClock.UtcNow).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to process decompressed EventPost data '{queueEntry.Value.FilePath}' ({ep.Data.Length} bytes compressed, {uncompressedData.Length} bytes): Maximum uncompressed event post size limit ({maxEventPostSize} bytes) reached."));
            }

            _logger.Debug().Message("Processing uncompressed EventPost: {0}  ({1} bytes)", queueEntry.Id, uncompressedData.Length).Property("Id", queueEntry.Id).Tag("uncompressed").Value(uncompressedData.Length).Project(ep.ProjectId).WriteIf(!isInternalProject);
            var createdUtc = SystemClock.UtcNow;
            var events     = await ParseEventPostAsync(ep, createdUtc, uncompressedData, queueEntry.Id, isInternalProject).AnyContext();

            if (events == null || events.Count == 0)
            {
                await CompleteEntryAsync(queueEntry, ep, createdUtc).AnyContext();

                return(JobResult.Success);
            }

            if (context.CancellationToken.IsCancellationRequested)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();

                return(JobResult.Cancelled);
            }

            bool isSingleEvent = events.Count == 1;

            if (!isSingleEvent)
            {
                await _metricsClient.TimeAsync(async() => {
                    // Don't process all the events if it will put the account over its limits.
                    int eventsToProcess = await _organizationRepository.GetRemainingEventLimitAsync(project.OrganizationId).AnyContext();

                    // Add 1 because we already counted 1 against their limit when we received the event post.
                    if (eventsToProcess < Int32.MaxValue)
                    {
                        eventsToProcess += 1;
                    }

                    // Discard any events over their limit.
                    events = events.Take(eventsToProcess).ToList();

                    // Increment the count if greater than 1, since we already incremented it by 1 in the OverageHandler.
                    if (events.Count > 1)
                    {
                        await _organizationRepository.IncrementUsageAsync(project.OrganizationId, false, events.Count - 1, applyHourlyLimit: false).AnyContext();
                    }
                }, MetricNames.PostsUpdateEventLimitTime).AnyContext();
            }

            int errorCount    = 0;
            var eventsToRetry = new List <PersistentEvent>();

            try {
                var contexts = await _eventPipeline.RunAsync(events, ep).AnyContext();

                _logger.Debug().Message(() => $"Ran {contexts.Count} events through the pipeline: id={queueEntry.Id} success={contexts.Count(r => r.IsProcessed)} error={contexts.Count(r => r.HasError)}").Property("Id", queueEntry.Id).Value(contexts.Count).Project(ep.ProjectId).WriteIf(!isInternalProject);
                foreach (var ctx in contexts)
                {
                    if (ctx.IsCancelled)
                    {
                        continue;
                    }

                    if (!ctx.HasError)
                    {
                        continue;
                    }

                    _logger.Error().Exception(ctx.Exception).Message("Error processing EventPost \"{0}\": {1}", queueEntry.Value.FilePath, ctx.ErrorMessage).Property("Id", queueEntry.Id).Project(ep.ProjectId).WriteIf(!isInternalProject);
                    if (ctx.Exception is ValidationException)
                    {
                        continue;
                    }

                    errorCount++;
                    if (!isSingleEvent)
                    {
                        // Put this single event back into the queue so we can retry it separately.
                        eventsToRetry.Add(ctx.Event);
                    }
                }
            } catch (Exception ex) {
                _logger.Error().Exception(ex).Message("Error processing EventPost \"{0}\": {1}", queueEntry.Value.FilePath, ex.Message).Property("Id", queueEntry.Id).Project(ep.ProjectId).WriteIf(!isInternalProject);
                if (ex is ArgumentException || ex is DocumentNotFoundException)
                {
                    await CompleteEntryAsync(queueEntry, ep, createdUtc).AnyContext();

                    return(JobResult.Success);
                }

                errorCount++;
                if (!isSingleEvent)
                {
                    eventsToRetry.AddRange(events);
                }
            }

            if (eventsToRetry.Count > 0)
            {
                await _metricsClient.TimeAsync(() => RetryEvents(context, eventsToRetry, ep, queueEntry), MetricNames.PostsRetryTime).AnyContext();
            }

            if (isSingleEvent && errorCount > 0)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();
            }
            else
            {
                await CompleteEntryAsync(queueEntry, ep, createdUtc).AnyContext();
            }

            return(JobResult.Success);
        }
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <EventPost> context)
        {
            var    entry            = context.QueueEntry;
            var    ep               = entry.Value;
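            // Start the payload read and the project/organization lookups in parallel; each task is awaited below only when its result is needed.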
            string payloadPath      = Path.ChangeExtension(entry.Value.FilePath, ".payload");
            var    payloadTask      = _metrics.TimeAsync(() => _eventPostService.GetEventPostPayloadAsync(payloadPath, context.CancellationToken), MetricNames.PostsMarkFileActiveTime);
            var    projectTask      = _projectRepository.GetByIdAsync(ep.ProjectId, o => o.Cache());
            var    organizationTask = _organizationRepository.GetByIdAsync(ep.OrganizationId, o => o.Cache());

            var payload = await payloadTask.AnyContext();

            if (payload == null)
            {
                await Task.WhenAll(AbandonEntryAsync(entry), projectTask, organizationTask).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to retrieve payload '{payloadPath}'."));
            }

            _metrics.Gauge(MetricNames.PostsMessageSize, payload.LongLength);
            if (payload.LongLength > _maximumEventPostFileSize)
            {
                await Task.WhenAll(_metrics.TimeAsync(() => entry.CompleteAsync(), MetricNames.PostsCompleteTime), projectTask, organizationTask).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to process payload '{payloadPath}' ({payload.LongLength} bytes): Maximum event post size limit ({Settings.Current.MaximumEventPostSize} bytes) reached."));
            }

            using (_logger.BeginScope(new ExceptionlessState().Organization(ep.OrganizationId).Project(ep.ProjectId))) {
                _metrics.Gauge(MetricNames.PostsCompressedSize, payload.Length);

                bool isDebugLogLevelEnabled = _logger.IsEnabled(LogLevel.Debug);
                bool isInternalProject      = ep.ProjectId == Settings.Current.InternalProjectId;
                if (!isInternalProject && _logger.IsEnabled(LogLevel.Information))
                {
                    using (_logger.BeginScope(new ExceptionlessState().Tag("processing").Tag("compressed").Tag(ep.ContentEncoding).Value(payload.Length)))
                        _logger.LogInformation("Processing post: id={QueueEntryId} path={FilePath} project={project} ip={IpAddress} v={ApiVersion} agent={UserAgent}", entry.Id, payloadPath, ep.ProjectId, ep.IpAddress, ep.ApiVersion, ep.UserAgent);
                }

                var project = await projectTask.AnyContext();

                if (project == null)
                {
                    if (!isInternalProject)
                    {
                        _logger.LogError("Unable to process EventPost {FilePath}: Unable to load project: {Project}", payloadPath, ep.ProjectId);
                    }
                    await Task.WhenAll(CompleteEntryAsync(entry, ep, SystemClock.UtcNow), organizationTask).AnyContext();

                    return(JobResult.Success);
                }

                long maxEventPostSize = Settings.Current.MaximumEventPostSize;
                var  uncompressedData = payload;
                if (!String.IsNullOrEmpty(ep.ContentEncoding))
                {
                    if (!isInternalProject && isDebugLogLevelEnabled)
                    {
                        using (_logger.BeginScope(new ExceptionlessState().Tag("decompressing").Tag(ep.ContentEncoding)))
                            _logger.LogDebug("Decompressing EventPost: {QueueEntryId} ({CompressedBytes} bytes)", entry.Id, payload.Length);
                    }

                    maxEventPostSize = _maximumUncompressedEventPostSize;
                    try {
                        _metrics.Time(() => {
                            uncompressedData = uncompressedData.Decompress(ep.ContentEncoding);
                        }, MetricNames.PostsDecompressionTime);
                    } catch (Exception ex) {
                        _metrics.Counter(MetricNames.PostsDecompressionErrors);
                        await Task.WhenAll(CompleteEntryAsync(entry, ep, SystemClock.UtcNow), organizationTask).AnyContext();

                        return(JobResult.FailedWithMessage($"Unable to decompress EventPost data '{payloadPath}' ({payload.Length} bytes compressed): {ex.Message}"));
                    }
                }

                _metrics.Gauge(MetricNames.PostsUncompressedSize, payload.LongLength);
                if (uncompressedData.Length > maxEventPostSize)
                {
                    await Task.WhenAll(CompleteEntryAsync(entry, ep, SystemClock.UtcNow), organizationTask).AnyContext();

                    return(JobResult.FailedWithMessage($"Unable to process decompressed EventPost data '{payloadPath}' ({payload.Length} bytes compressed, {uncompressedData.Length} bytes): Maximum uncompressed event post size limit ({maxEventPostSize} bytes) reached."));
                }

                if (!isInternalProject && isDebugLogLevelEnabled)
                {
                    using (_logger.BeginScope(new ExceptionlessState().Tag("uncompressed").Value(uncompressedData.Length)))
                        _logger.LogDebug("Processing uncompressed EventPost: {QueueEntryId}  ({UncompressedBytes} bytes)", entry.Id, uncompressedData.Length);
                }

                var createdUtc = SystemClock.UtcNow;
                var events     = ParseEventPost(ep, payload, createdUtc, uncompressedData, entry.Id, isInternalProject);
                if (events == null || events.Count == 0)
                {
                    await Task.WhenAll(CompleteEntryAsync(entry, ep, createdUtc), organizationTask).AnyContext();

                    return(JobResult.Success);
                }

                if (context.CancellationToken.IsCancellationRequested)
                {
                    await Task.WhenAll(AbandonEntryAsync(entry), organizationTask).AnyContext();

                    return(JobResult.Cancelled);
                }

                var organization = await organizationTask.AnyContext();

                if (organization == null)
                {
                    if (!isInternalProject)
                    {
                        _logger.LogError("Unable to process EventPost {FilePath}: Unable to load organization: {OrganizationId}", payloadPath, project.OrganizationId);
                    }

                    await CompleteEntryAsync(entry, ep, SystemClock.UtcNow).AnyContext();

                    return(JobResult.Success);
                }

                bool isSingleEvent = events.Count == 1;
                if (!isSingleEvent)
                {
                    await _metrics.TimeAsync(async() => {
                        // Don't process all the events if it will put the account over its limits.
                        int eventsToProcess = await _usageService.GetRemainingEventLimitAsync(organization).AnyContext();

                        // Add 1 because we already counted 1 against their limit when we received the event post.
                        if (eventsToProcess < Int32.MaxValue)
                        {
                            eventsToProcess += 1;
                        }

                        // Discard any events over their limit.
                        events = events.Take(eventsToProcess).ToList();

                        // Increment the count if greater than 1, since we already incremented it by 1 in the OverageHandler.
                        if (events.Count > 1)
                        {
                            await _usageService.IncrementUsageAsync(organization, project, false, events.Count - 1, applyHourlyLimit: false).AnyContext();
                        }
                    }, MetricNames.PostsUpdateEventLimitTime).AnyContext();
                }

                int errorCount    = 0;
                var eventsToRetry = new List <PersistentEvent>();
                try {
                    var contexts = await _eventPipeline.RunAsync(events, organization, project, ep).AnyContext();

                    if (!isInternalProject && isDebugLogLevelEnabled)
                    {
                        using (_logger.BeginScope(new ExceptionlessState().Value(contexts.Count)))
                            _logger.LogDebug("Ran {@value} events through the pipeline: id={QueueEntryId} success={SuccessCount} error={ErrorCount}", contexts.Count, entry.Id, contexts.Count(r => r.IsProcessed), contexts.Count(r => r.HasError));
                    }

                    foreach (var ctx in contexts)
                    {
                        if (ctx.IsCancelled)
                        {
                            continue;
                        }

                        if (!ctx.HasError)
                        {
                            continue;
                        }

                        if (!isInternalProject)
                        {
                            _logger.LogError(ctx.Exception, "Error processing EventPost {QueueEntryId} {FilePath}: {Message}", entry.Id, payloadPath, ctx.ErrorMessage);
                        }
                        if (ctx.Exception is ValidationException)
                        {
                            continue;
                        }

                        errorCount++;
                        if (!isSingleEvent)
                        {
                            // Put this single event back into the queue so we can retry it separately.
                            eventsToRetry.Add(ctx.Event);
                        }
                    }
                } catch (Exception ex) {
                    if (!isInternalProject)
                    {
                        _logger.LogError(ex, "Error processing EventPost {QueueEntryId} {FilePath}: {Message}", entry.Id, payloadPath, ex.Message);
                    }
                    if (ex is ArgumentException || ex is DocumentNotFoundException)
                    {
                        await CompleteEntryAsync(entry, ep, createdUtc).AnyContext();

                        return(JobResult.Success);
                    }

                    errorCount++;
                    if (!isSingleEvent)
                    {
                        eventsToRetry.AddRange(events);
                    }
                }

                if (eventsToRetry.Count > 0)
                {
                    await _metrics.TimeAsync(() => RetryEventsAsync(eventsToRetry, ep, entry, project, isInternalProject), MetricNames.PostsRetryTime).AnyContext();
                }

                if (isSingleEvent && errorCount > 0)
                {
                    await AbandonEntryAsync(entry).AnyContext();
                }
                else
                {
                    await CompleteEntryAsync(entry, ep, createdUtc).AnyContext();
                }

                return(JobResult.Success);
            }
        }
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <EventPost> context)
        {
            var queueEntry    = context.QueueEntry;
            var eventPostInfo = await _storage.GetEventPostAndSetActiveAsync(queueEntry.Value.FilePath, _logger, context.CancellationToken).AnyContext();

            if (eventPostInfo == null)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to retrieve post data '{queueEntry.Value.FilePath}'."));
            }

            bool isInternalProject = eventPostInfo.ProjectId == Settings.Current.InternalProjectId;

            _logger.Info().Message("Processing post: id={0} path={1} project={2} ip={3} v={4} agent={5}", queueEntry.Id, queueEntry.Value.FilePath, eventPostInfo.ProjectId, eventPostInfo.IpAddress, eventPostInfo.ApiVersion, eventPostInfo.UserAgent).WriteIf(!isInternalProject);

            var project = await _projectRepository.GetByIdAsync(eventPostInfo.ProjectId, true).AnyContext();

            if (project == null)
            {
                _logger.Error().Project(eventPostInfo.ProjectId).Message("Unable to process EventPost \"{0}\": Unable to load project: {1}", queueEntry.Value.FilePath, eventPostInfo.ProjectId).WriteIf(!isInternalProject);
                await CompleteEntryAsync(queueEntry, eventPostInfo, DateTime.UtcNow).AnyContext();

                return(JobResult.Success);
            }

            var createdUtc = DateTime.UtcNow;
            var events     = await ParseEventPostAsync(eventPostInfo, createdUtc, queueEntry.Id, isInternalProject).AnyContext();

            if (events == null || !events.Any())
            {
                await CompleteEntryAsync(queueEntry, eventPostInfo, createdUtc).AnyContext();

                return(JobResult.Success);
            }

            if (context.CancellationToken.IsCancellationRequested)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();

                return(JobResult.Cancelled);
            }

            bool isSingleEvent = events.Count == 1;

            if (!isSingleEvent)
            {
                // Don't process all the events if it will put the account over its limits.
                int eventsToProcess = await _organizationRepository.GetRemainingEventLimitAsync(project.OrganizationId).AnyContext();

                // Add 1 because we already counted 1 against their limit when we received the event post.
                if (eventsToProcess < Int32.MaxValue)
                {
                    eventsToProcess += 1;
                }

                // Discard any events over their limit.
                events = events.Take(eventsToProcess).ToList();

                // Increment by count - 1 since we already incremented it by 1 in the OverageHandler.
                await _organizationRepository.IncrementUsageAsync(project.OrganizationId, false, events.Count - 1, applyHourlyLimit : false).AnyContext();
            }

            int errorCount    = 0;
            var eventsToRetry = new List <PersistentEvent>();

            try {
                var results = await _eventPipeline.RunAsync(events, eventPostInfo).AnyContext();

                _logger.Info().Message(() => $"Ran {results.Count} events through the pipeline: id={queueEntry.Id} project={eventPostInfo.ProjectId} success={results.Count(r => r.IsProcessed)} error={results.Count(r => r.HasError)}").WriteIf(!isInternalProject);
                foreach (var eventContext in results)
                {
                    if (eventContext.IsCancelled)
                    {
                        continue;
                    }

                    if (!eventContext.HasError)
                    {
                        continue;
                    }

                    _logger.Error().Exception(eventContext.Exception).Project(eventPostInfo.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, eventContext.ErrorMessage).Write();

                    if (eventContext.Exception is ValidationException)
                    {
                        continue;
                    }

                    errorCount++;
                    if (!isSingleEvent)
                    {
                        // Put this single event back into the queue so we can retry it separately.
                        eventsToRetry.Add(eventContext.Event);
                    }
                }
            } catch (Exception ex) {
                _logger.Error().Exception(ex).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, ex.Message).Project(eventPostInfo.ProjectId).Write();
                if (ex is ArgumentException || ex is DocumentNotFoundException)
                {
                    await CompleteEntryAsync(queueEntry, eventPostInfo, createdUtc).AnyContext();

                    return(JobResult.Success);
                }

                errorCount++;
                if (!isSingleEvent)
                {
                    eventsToRetry.AddRange(events);
                }
            }

            foreach (var requeueEvent in eventsToRetry)
            {
                // Put this single event back into the queue so we can retry it separately.
                await _queue.EnqueueAsync(new EventPostInfo {
                    ApiVersion = eventPostInfo.ApiVersion,
                    Data       = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(requeueEvent)),
                    IpAddress  = eventPostInfo.IpAddress,
                    MediaType  = eventPostInfo.MediaType,
                    CharSet    = eventPostInfo.CharSet,
                    ProjectId  = eventPostInfo.ProjectId,
                    UserAgent  = eventPostInfo.UserAgent
                }, _storage, false, context.CancellationToken).AnyContext();
            }

            if (isSingleEvent && errorCount > 0)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();
            }
            else
            {
                await CompleteEntryAsync(queueEntry, eventPostInfo, createdUtc).AnyContext();
            }

            return(JobResult.Success);
        }
Example #19
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <EventNotificationWorkItem> context)
        {
            var eventModel = await _eventRepository.GetByIdAsync(context.QueueEntry.Value.EventId).AnyContext();

            if (eventModel == null)
            {
                return(JobResult.FailedWithMessage($"Could not load event: {context.QueueEntry.Value.EventId}"));
            }

            var  eventNotification = new EventNotification(context.QueueEntry.Value, eventModel);
            bool shouldLog         = eventNotification.Event.ProjectId != Settings.Current.InternalProjectId;
            int  emailsSent        = 0;

            _logger.Trace().Message(() => $"Process notification: project={eventNotification.Event.ProjectId} event={eventNotification.Event.Id} stack={eventNotification.Event.StackId}").WriteIf(shouldLog);

            var project = await _projectRepository.GetByIdAsync(eventNotification.Event.ProjectId, true).AnyContext();

            if (project == null)
            {
                return(JobResult.FailedWithMessage($"Could not load project: {eventNotification.Event.ProjectId}."));
            }
            _logger.Trace().Message(() => $"Loaded project: name={project.Name}").WriteIf(shouldLog);

            var organization = await _organizationRepository.GetByIdAsync(project.OrganizationId, true).AnyContext();

            if (organization == null)
            {
                return(JobResult.FailedWithMessage($"Could not load organization: {project.OrganizationId}"));
            }

            _logger.Trace().Message(() => $"Loaded organization: {organization.Name}").WriteIf(shouldLog);

            var stack = await _stackRepository.GetByIdAsync(eventNotification.Event.StackId).AnyContext();

            if (stack == null)
            {
                return(JobResult.FailedWithMessage($"Could not load stack: {eventNotification.Event.StackId}"));
            }

            if (!organization.HasPremiumFeatures)
            {
                _logger.Info().Message("Skipping \"{0}\" because organization \"{1}\" does not have premium features.", eventNotification.Event.Id, eventNotification.Event.OrganizationId).WriteIf(shouldLog);
                return(JobResult.Success);
            }

            if (stack.DisableNotifications || stack.IsHidden)
            {
                _logger.Info().Message("Skipping \"{0}\" because stack \"{1}\" notifications are disabled or stack is hidden.", eventNotification.Event.Id, eventNotification.Event.StackId).WriteIf(shouldLog);
                return(JobResult.Success);
            }

            if (context.CancellationToken.IsCancellationRequested)
            {
                return(JobResult.Cancelled);
            }

            _logger.Trace().Message(() => $"Loaded stack: title={stack.Title}").WriteIf(shouldLog);
            int totalOccurrences = stack.TotalOccurrences;

            // after the first 2 occurrences, don't send a notification for the same stack more than once every 30 minutes
            var lastTimeSentUtc = await _cacheClient.GetAsync <DateTime>(String.Concat("notify:stack-throttle:", eventNotification.Event.StackId), DateTime.MinValue).AnyContext();

            if (totalOccurrences > 2 &&
                !eventNotification.IsRegression &&
                lastTimeSentUtc != DateTime.MinValue &&
                lastTimeSentUtc > DateTime.UtcNow.AddMinutes(-30))
            {
                _logger.Info().Message("Skipping message because of stack throttling: last sent={0} occurrences={1}", lastTimeSentUtc, totalOccurrences).WriteIf(shouldLog);
                return(JobResult.Success);
            }

            // don't send more than 10 notifications for a given project every 30 minutes
            var    projectTimeWindow = TimeSpan.FromMinutes(30);
            string cacheKey          = String.Concat("notify:project-throttle:", eventNotification.Event.ProjectId, "-", DateTime.UtcNow.Floor(projectTimeWindow).Ticks);
            double notificationCount = await _cacheClient.IncrementAsync(cacheKey, 1, projectTimeWindow).AnyContext();

            if (notificationCount > 10 && !eventNotification.IsRegression)
            {
                _logger.Info().Project(eventNotification.Event.ProjectId).Message("Skipping message because of project throttling: count={0}", notificationCount).WriteIf(shouldLog);
                return(JobResult.Success);
            }

            if (context.CancellationToken.IsCancellationRequested)
            {
                return(JobResult.Cancelled);
            }

            foreach (var kv in project.NotificationSettings)
            {
                var settings = kv.Value;
                _logger.Trace().Message(() => $"Processing notification: user={kv.Key}").WriteIf(shouldLog);

                var user = await _userRepository.GetByIdAsync(kv.Key).AnyContext();

                if (String.IsNullOrEmpty(user?.EmailAddress))
                {
                    _logger.Error("Could not load user {0} or blank email address {1}.", kv.Key, user?.EmailAddress ?? "");
                    continue;
                }

                if (!user.IsEmailAddressVerified)
                {
                    _logger.Info().Message("User {0} with email address {1} has not been verified.", user.Id, user.EmailAddress).WriteIf(shouldLog);
                    continue;
                }

                if (!user.EmailNotificationsEnabled)
                {
                    _logger.Info().Message("User {0} with email address {1} has email notifications disabled.", user.Id, user.EmailAddress).WriteIf(shouldLog);
                    continue;
                }

                if (!user.OrganizationIds.Contains(project.OrganizationId))
                {
                    _logger.Error().Message("Unauthorized user: project={0} user={1} organization={2} event={3}", project.Id, kv.Key, project.OrganizationId, eventNotification.Event.Id).Write();
                    continue;
                }

                _logger.Trace().Message(() => $"Loaded user: email={user.EmailAddress}").WriteIf(shouldLog);

                bool shouldReportNewError      = settings.ReportNewErrors && eventNotification.IsNew && eventNotification.Event.IsError();
                bool shouldReportCriticalError = settings.ReportCriticalErrors && eventNotification.IsCritical && eventNotification.Event.IsError();
                bool shouldReportRegression    = settings.ReportEventRegressions && eventNotification.IsRegression;
                bool shouldReportNewEvent      = settings.ReportNewEvents && eventNotification.IsNew;
                bool shouldReportCriticalEvent = settings.ReportCriticalEvents && eventNotification.IsCritical;
                bool shouldReport = shouldReportNewError || shouldReportCriticalError || shouldReportRegression || shouldReportNewEvent || shouldReportCriticalEvent;

                _logger.Trace().Message(() => $"Settings: newerror={settings.ReportNewErrors} criticalerror={settings.ReportCriticalErrors} regression={settings.ReportEventRegressions} new={settings.ReportNewEvents} critical={settings.ReportCriticalEvents}").WriteIf(shouldLog);
                _logger.Trace().Message(() => $"Should process: newerror={shouldReportNewError} criticalerror={shouldReportCriticalError} regression={shouldReportRegression} new={shouldReportNewEvent} critical={shouldReportCriticalEvent}").WriteIf(shouldLog);

                var request = eventNotification.Event.GetRequestInfo();
                // check for known bots if the user has elected to not report them
                if (shouldReport && !String.IsNullOrEmpty(request?.UserAgent))
                {
                    var botPatterns = project.Configuration.Settings.ContainsKey(SettingsDictionary.KnownKeys.UserAgentBotPatterns)
                        ? project.Configuration.Settings.GetStringCollection(SettingsDictionary.KnownKeys.UserAgentBotPatterns).ToList()
                        : new List <string>();

                    var info = await _parser.ParseAsync(request.UserAgent, eventNotification.Event.ProjectId).AnyContext();

                    if ((info != null && info.Device.IsSpider) || request.UserAgent.AnyWildcardMatches(botPatterns))
                    {
                        shouldReport = false;
                        _logger.Info().Message("Skipping because event is from a bot \"{0}\".", request.UserAgent).WriteIf(shouldLog);
                    }
                }

                if (!shouldReport)
                {
                    continue;
                }

                var model = new EventNotificationModel(eventNotification)
                {
                    ProjectName      = project.Name,
                    TotalOccurrences = totalOccurrences
                };

                // don't send notifications in non-production mode to email addresses that are not on the outbound email list.
                if (Settings.Current.WebsiteMode != WebsiteMode.Production &&
                    !Settings.Current.AllowedOutboundAddresses.Contains(v => user.EmailAddress.ToLowerInvariant().Contains(v)))
                {
                    _logger.Info().Message("Skipping because email is not on the outbound list and not in production mode.").WriteIf(shouldLog);
                    continue;
                }

                _logger.Trace("Sending email to {0}...", user.EmailAddress);
                await _mailer.SendEventNoticeAsync(user.EmailAddress, model).AnyContext();

                emailsSent++;
                _logger.Trace().Message(() => "Done sending email.").WriteIf(shouldLog);
            }

            // if we sent any emails, mark the last time a notification for this stack was sent.
            if (emailsSent > 0)
            {
                await _cacheClient.SetAsync(String.Concat("notify:stack-throttle:", eventNotification.Event.StackId), DateTime.UtcNow, DateTime.UtcNow.AddMinutes(15)).AnyContext();

                _logger.Info().Message("Notifications sent: event={0} stack={1} count={2}", eventNotification.Event.Id, eventNotification.Event.StackId, emailsSent).WriteIf(shouldLog);
            }

            return(JobResult.Success);
        }
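The bot check above calls an AnyWildcardMatches extension whose implementation is not shown in these examples. A minimal stand-in, assuming the configured patterns use '*' as a wildcard and that matching is case-insensitive, could look like this (hypothetical helper, not the project's actual extension):

    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Text.RegularExpressions;

    public static class WildcardMatchingExtensions
    {
        // Hypothetical stand-in for AnyWildcardMatches: turn each '*' pattern into a
        // regular expression and report whether any pattern matches the value.
        public static bool AnyWildcardMatches(this string value, IEnumerable<string> patterns)
        {
            if (String.IsNullOrEmpty(value) || patterns == null)
                return false;

            return patterns.Any(pattern => {
                string regex = "^" + Regex.Escape(pattern).Replace("\\*", ".*") + "$";
                return Regex.IsMatch(value, regex, RegexOptions.IgnoreCase);
            });
        }
    }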
Example #20
0
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <SampleQueueWorkItem> context)
        {
            await _metrics.CounterAsync("completed").AnyContext();

            return(JobResult.Success);
        }
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <EventPost> context)
        {
            var queueEntry = context.QueueEntry;
            var ep         = await _storage.GetEventPostAndSetActiveAsync(queueEntry.Value.FilePath, _logger, context.CancellationToken).AnyContext();

            if (ep == null)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to retrieve post data '{queueEntry.Value.FilePath}'."));
            }

            bool isInternalProject = ep.ProjectId == Settings.Current.InternalProjectId;

            _logger.Info()
            .Message("Processing post: id={0} path={1} project={2} ip={3} v={4} agent={5}", queueEntry.Id, queueEntry.Value.FilePath, ep.ProjectId, ep.IpAddress, ep.ApiVersion, ep.UserAgent)
            .Property("ApiVersion", ep.ApiVersion)
            .Property("IpAddress", ep.IpAddress)
            .Property("Client", ep.UserAgent)
            .Property("Project", ep.ProjectId)
            .Property("@stack", "event-posted")
            .WriteIf(!isInternalProject);

            var project = await _projectRepository.GetByIdAsync(ep.ProjectId, true).AnyContext();

            if (project == null)
            {
                _logger.Error().Project(ep.ProjectId).Message("Unable to process EventPost \"{0}\": Unable to load project: {1}", queueEntry.Value.FilePath, ep.ProjectId).WriteIf(!isInternalProject);
                await CompleteEntryAsync(queueEntry, ep, SystemClock.UtcNow).AnyContext();

                return(JobResult.Success);
            }

            long maxEventPostSize = Settings.Current.MaximumEventPostSize;

            byte[] uncompressedData = ep.Data;
            if (!String.IsNullOrEmpty(ep.ContentEncoding))
            {
                try {
                    // Increase the absolute max since the content was compressed and might contain a batch of events.
                    maxEventPostSize *= 10;
                    uncompressedData  = await uncompressedData.DecompressAsync(ep.ContentEncoding).AnyContext();
                } catch (Exception ex) {
                    await CompleteEntryAsync(queueEntry, ep, SystemClock.UtcNow).AnyContext();

                    return(JobResult.FailedWithMessage($"Unable to decompress post data '{queueEntry.Value.FilePath}' ({ep.Data.Length} bytes compressed): {ex.Message}"));
                }
            }

            if (uncompressedData.Length > maxEventPostSize)
            {
                await CompleteEntryAsync(queueEntry, ep, SystemClock.UtcNow).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to process decompressed post data '{queueEntry.Value.FilePath}' ({ep.Data.Length} bytes compressed, {uncompressedData.Length} bytes): Maximum uncompressed event post size limit ({maxEventPostSize} bytes) reached."));
            }

            var createdUtc = SystemClock.UtcNow;
            var events     = await ParseEventPostAsync(ep, createdUtc, uncompressedData, queueEntry.Id, isInternalProject).AnyContext();

            if (events == null || events.Count == 0)
            {
                await CompleteEntryAsync(queueEntry, ep, createdUtc).AnyContext();

                return(JobResult.Success);
            }

            if (context.CancellationToken.IsCancellationRequested)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();

                return(JobResult.Cancelled);
            }

            bool isSingleEvent = events.Count == 1;

            if (!isSingleEvent)
            {
                // Don't process all the events if it will put the account over its limits.
                int eventsToProcess = await _organizationRepository.GetRemainingEventLimitAsync(project.OrganizationId).AnyContext();

                // Add 1 because we already counted 1 against their limit when we received the event post.
                if (eventsToProcess < Int32.MaxValue)
                {
                    eventsToProcess += 1;
                }

                // Discard any events over their limit.
                events = events.Take(eventsToProcess).ToList();

                // Increment the count if greater than 1, since we already incremented it by 1 in the OverageHandler.
                if (events.Count > 1)
                {
                    await _organizationRepository.IncrementUsageAsync(project.OrganizationId, false, events.Count - 1, applyHourlyLimit : false).AnyContext();
                }
            }

            int errorCount    = 0;
            var eventsToRetry = new List <PersistentEvent>();

            try {
                var results = await _eventPipeline.RunAsync(events, ep).AnyContext();

                _logger.Info()
                .Message(() => $"Ran {results.Count} events through the pipeline: id={queueEntry.Id} project={ep.ProjectId} success={results.Count(r => r.IsProcessed)} error={results.Count(r => r.HasError)}")
                .Property("@value", results.Count)
                .Property("@stack", "event-processed")
                .Property("Project", ep.ProjectId)
                .WriteIf(!isInternalProject);

                foreach (var eventContext in results)
                {
                    if (eventContext.IsCancelled)
                    {
                        continue;
                    }

                    if (!eventContext.HasError)
                    {
                        continue;
                    }

                    _logger.Error().Exception(eventContext.Exception).Project(ep.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, eventContext.ErrorMessage).Write();

                    if (eventContext.Exception is ValidationException)
                    {
                        continue;
                    }

                    errorCount++;
                    if (!isSingleEvent)
                    {
                        // Put this single event back into the queue so we can retry it separately.
                        eventsToRetry.Add(eventContext.Event);
                    }
                }
            } catch (Exception ex) {
                _logger.Error().Exception(ex).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, ex.Message).Project(ep.ProjectId).Write();
                if (ex is ArgumentException || ex is DocumentNotFoundException)
                {
                    await CompleteEntryAsync(queueEntry, ep, createdUtc).AnyContext();

                    return(JobResult.Success);
                }

                errorCount++;
                if (!isSingleEvent)
                {
                    eventsToRetry.AddRange(events);
                }
            }

            foreach (var requeueEvent in eventsToRetry)
            {
                string contentEncoding = null;
                byte[] data            = requeueEvent.GetBytes(_jsonSerializerSettings);
                if (data.Length > 1000)
                {
                    data = await data.CompressAsync().AnyContext();

                    contentEncoding = "gzip";
                }

                // Put this single event back into the queue so we can retry it separately.
                await _queue.EnqueueAsync(new EventPostInfo {
                    ApiVersion      = ep.ApiVersion,
                    CharSet         = ep.CharSet,
                    ContentEncoding = contentEncoding,
                    Data            = data,
                    IpAddress       = ep.IpAddress,
                    MediaType       = ep.MediaType,
                    ProjectId       = ep.ProjectId,
                    UserAgent       = ep.UserAgent
                }, _storage, false, context.CancellationToken).AnyContext();
            }

            if (isSingleEvent && errorCount > 0)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();
            }
            else
            {
                await CompleteEntryAsync(queueEntry, ep, createdUtc).AnyContext();
            }

            return(JobResult.Success);
        }
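The decompression step above widens the size limit because ep.ContentEncoding indicates the payload was compressed, and the retry path re-compresses any payload over 1000 bytes and marks it "gzip". A minimal sketch of what the CompressAsync/DecompressAsync extensions presumably do with GZipStream (hypothetical helpers, assuming gzip is the only encoding in play):

    using System.IO;
    using System.IO.Compression;
    using System.Threading.Tasks;

    public static class GzipPayloadExtensions
    {
        // Hypothetical equivalent of data.CompressAsync(): gzip the raw bytes.
        public static async Task<byte[]> CompressAsync(this byte[] data)
        {
            using (var output = new MemoryStream())
            {
                using (var gzip = new GZipStream(output, CompressionLevel.Optimal, leaveOpen: true))
                    await gzip.WriteAsync(data, 0, data.Length);

                return output.ToArray();
            }
        }

        // Hypothetical equivalent of data.DecompressAsync("gzip"): inflate back to the original bytes.
        public static async Task<byte[]> DecompressAsync(this byte[] data)
        {
            using (var input = new MemoryStream(data))
            using (var gzip = new GZipStream(input, CompressionMode.Decompress))
            using (var output = new MemoryStream())
            {
                await gzip.CopyToAsync(output);
                return output.ToArray();
            }
        }
    }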
 protected override Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <SampleQueueWorkItem> context)
 {
     _metrics.Counter("completed");
     return(Task.FromResult(JobResult.Success));
 }
    protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <EventNotification> context)
    {
        var wi = context.QueueEntry.Value;
        var ev = await _eventRepository.GetByIdAsync(wi.EventId).AnyContext();

        if (ev == null)
        {
            return(JobResult.SuccessWithMessage($"Could not load event: {wi.EventId}"));
        }

        bool shouldLog = ev.ProjectId != _appOptions.InternalProjectId;
        int  sent      = 0;

        if (shouldLog)
        {
            _logger.LogTrace("Process notification: project={project} event={id} stack={stack}", ev.ProjectId, ev.Id, ev.StackId);
        }

        var project = await _projectRepository.GetByIdAsync(ev.ProjectId, o => o.Cache()).AnyContext();

        if (project == null)
        {
            return(JobResult.SuccessWithMessage($"Could not load project: {ev.ProjectId}."));
        }

        using (_logger.BeginScope(new ExceptionlessState().Organization(project.OrganizationId).Project(project.Id))) {
            if (shouldLog)
            {
                _logger.LogTrace("Loaded project: name={ProjectName}", project.Name);
            }

            // after the first 2 occurrences, don't send a notification for the same stack more than once every 30 minutes
            var lastTimeSentUtc = await _cache.GetAsync <DateTime>(String.Concat("notify:stack-throttle:", ev.StackId), DateTime.MinValue).AnyContext();

            if (wi.TotalOccurrences > 2 && !wi.IsRegression && lastTimeSentUtc != DateTime.MinValue && lastTimeSentUtc > SystemClock.UtcNow.AddMinutes(-30))
            {
                if (shouldLog)
                {
                    _logger.LogInformation("Skipping message because of stack throttling: last sent={LastSentUtc} occurrences={TotalOccurrences}", lastTimeSentUtc, wi.TotalOccurrences);
                }
                return(JobResult.Success);
            }

            if (context.CancellationToken.IsCancellationRequested)
            {
                return(JobResult.Cancelled);
            }

            // don't send more than 10 notifications for a given project every 30 minutes
            var    projectTimeWindow = TimeSpan.FromMinutes(30);
            string cacheKey          = String.Concat("notify:project-throttle:", ev.ProjectId, "-", SystemClock.UtcNow.Floor(projectTimeWindow).Ticks);
            double notificationCount = await _cache.IncrementAsync(cacheKey, 1, projectTimeWindow).AnyContext();

            if (notificationCount > 10 && !wi.IsRegression)
            {
                if (shouldLog)
                {
                    _logger.LogInformation("Skipping message because of project throttling: count={NotificationCount}", notificationCount);
                }
                return(JobResult.Success);
            }

            foreach (var kv in project.NotificationSettings)
            {
                var settings = kv.Value;
                if (shouldLog)
                {
                    _logger.LogTrace("Processing notification: {Key}", kv.Key);
                }

                bool isCritical                = ev.IsCritical();
                bool shouldReportNewError      = settings.ReportNewErrors && wi.IsNew && ev.IsError();
                bool shouldReportCriticalError = settings.ReportCriticalErrors && isCritical && ev.IsError();
                bool shouldReportRegression    = settings.ReportEventRegressions && wi.IsRegression;
                bool shouldReportNewEvent      = settings.ReportNewEvents && wi.IsNew;
                bool shouldReportCriticalEvent = settings.ReportCriticalEvents && isCritical;
                bool shouldReport              = shouldReportNewError || shouldReportCriticalError || shouldReportRegression || shouldReportNewEvent || shouldReportCriticalEvent;

                if (shouldLog)
                {
                    _logger.LogTrace("Settings: new error={ReportNewErrors} critical error={ReportCriticalErrors} regression={ReportEventRegressions} new={ReportNewEvents} critical={ReportCriticalEvents}", settings.ReportNewErrors, settings.ReportCriticalErrors, settings.ReportEventRegressions, settings.ReportNewEvents, settings.ReportCriticalEvents);
                    _logger.LogTrace("Should process: new error={ShouldReportNewError} critical error={ShouldReportCriticalError} regression={ShouldReportRegression} new={ShouldReportNewEvent} critical={ShouldReportCriticalEvent}", shouldReportNewError, shouldReportCriticalError, shouldReportRegression, shouldReportNewEvent, shouldReportCriticalEvent);
                }
                var request = ev.GetRequestInfo();
                // check for known bots if the user has elected to not report them
                if (shouldReport && !String.IsNullOrEmpty(request?.UserAgent))
                {
                    var botPatterns = project.Configuration.Settings.GetStringCollection(SettingsDictionary.KnownKeys.UserAgentBotPatterns).ToList();

                    var info = await _parser.ParseAsync(request.UserAgent).AnyContext();

                    if (info != null && info.Device.IsSpider || request.UserAgent.AnyWildcardMatches(botPatterns))
                    {
                        shouldReport = false;
                        if (shouldLog)
                        {
                            _logger.LogInformation("Skipping because event is from a bot {UserAgent}.", request.UserAgent);
                        }
                    }
                }

                if (!shouldReport)
                {
                    continue;
                }

                bool processed;
                switch (kv.Key)
                {
                case Project.NotificationIntegrations.Slack:
                    processed = await _slackService.SendEventNoticeAsync(ev, project, wi.IsNew, wi.IsRegression).AnyContext();

                    break;

                default:
                    processed = await SendEmailNotificationAsync(kv.Key, project, ev, wi, shouldLog).AnyContext();

                    break;
                }

                if (shouldLog)
                {
                    _logger.LogTrace("Finished processing notification: {Key}", kv.Key);
                }
                if (processed)
                {
                    sent++;
                }
            }

            // if we sent any notifications, mark the last time a notification for this stack was sent.
            if (sent > 0)
            {
                await _cache.SetAsync(String.Concat("notify:stack-throttle:", ev.StackId), SystemClock.UtcNow, SystemClock.UtcNow.AddMinutes(15)).AnyContext();

                if (shouldLog)
                {
                    _logger.LogInformation("Notifications sent: event={id} stack={stack} count={SentCount}", ev.Id, ev.StackId, sent);
                }
            }
        }
        return(JobResult.Success);
    }
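The project throttle above keys its counter on SystemClock.UtcNow.Floor(projectTimeWindow), which presumably rounds the timestamp down to the start of the current 30-minute window so that every notification in that window increments the same cache key. A small sketch of that assumption, with a hypothetical Floor helper:

    using System;

    public static class DateTimeWindowingExtensions
    {
        // Assumed behavior of DateTime.Floor(TimeSpan): round down to the start of the window.
        public static DateTime Floor(this DateTime value, TimeSpan window)
        {
            long ticks = value.Ticks - (value.Ticks % window.Ticks);
            return new DateTime(ticks, value.Kind);
        }
    }

    // For example, 10:47 UTC floored to a 30-minute window is 10:30, so the key
    // "notify:project-throttle:{projectId}-{floored.Ticks}" stays the same until 11:00
    // and IncrementAsync allows at most 10 non-regression notifications per project per window.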
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <EventPost> context)
        {
            var           queueEntry    = context.QueueEntry;
            EventPostInfo eventPostInfo = await _storage.GetEventPostAndSetActiveAsync(queueEntry.Value.FilePath, _logger, context.CancellationToken).AnyContext();

            if (eventPostInfo == null)
            {
                await queueEntry.AbandonAsync().AnyContext();

                await _storage.SetNotActiveAsync(queueEntry.Value.FilePath, _logger).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to retrieve post data '{queueEntry.Value.FilePath}'."));
            }

            bool isInternalProject = eventPostInfo.ProjectId == Settings.Current.InternalProjectId;

            _logger.Info().Message("Processing post: id={0} path={1} project={2} ip={3} v={4} agent={5}", queueEntry.Id, queueEntry.Value.FilePath, eventPostInfo.ProjectId, eventPostInfo.IpAddress, eventPostInfo.ApiVersion, eventPostInfo.UserAgent).WriteIf(!isInternalProject);

            List <PersistentEvent> events = null;

            try {
                await _metricsClient.TimeAsync(async() => {
                    events = ParseEventPost(eventPostInfo);
                    _logger.Info().Message("Parsed {0} events for post: id={1}", events.Count, queueEntry.Id).WriteIf(!isInternalProject);
                }, MetricNames.PostsParsingTime).AnyContext();

                await _metricsClient.CounterAsync(MetricNames.PostsParsed).AnyContext();

                await _metricsClient.GaugeAsync(MetricNames.PostsEventCount, events.Count).AnyContext();
            } catch (Exception ex) {
                await queueEntry.AbandonAsync().AnyContext();

                await _metricsClient.CounterAsync(MetricNames.PostsParseErrors).AnyContext();

                await _storage.SetNotActiveAsync(queueEntry.Value.FilePath, _logger).AnyContext();

                _logger.Error(ex, "An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message);
                return(JobResult.FromException(ex, $"An error occurred while processing the EventPost '{queueEntry.Id}': {ex.Message}"));
            }

            if (!events.Any() || context.CancellationToken.IsCancellationRequested)
            {
                await queueEntry.AbandonAsync().AnyContext();

                await _storage.SetNotActiveAsync(queueEntry.Value.FilePath, _logger).AnyContext();

                return(!events.Any() ? JobResult.Success : JobResult.Cancelled);
            }

            int  eventsToProcess = events.Count;
            bool isSingleEvent   = events.Count == 1;

            if (!isSingleEvent)
            {
                var project = await _projectRepository.GetByIdAsync(eventPostInfo.ProjectId, true).AnyContext();

                if (project == null)
                {
                    // NOTE: This could archive the data for a project that no longer exists.
                    _logger.Error().Project(eventPostInfo.ProjectId).Message("Unable to process EventPost \"{0}\": Unable to load project: {1}", queueEntry.Value.FilePath, eventPostInfo.ProjectId).Write();
                    await CompleteEntryAsync(queueEntry, eventPostInfo, DateTime.UtcNow).AnyContext();

                    return(JobResult.Success);
                }

                // Don't process all the events if it will put the account over its limits.
                eventsToProcess = await _organizationRepository.GetRemainingEventLimitAsync(project.OrganizationId).AnyContext();

                // Add 1 because we already counted 1 against their limit when we received the event post.
                if (eventsToProcess < Int32.MaxValue)
                {
                    eventsToProcess += 1;
                }

                // Increment by count - 1 since we already incremented it by 1 in the OverageHandler.
                await _organizationRepository.IncrementUsageAsync(project.OrganizationId, false, events.Count - 1).AnyContext();
            }

            var errorCount = 0;
            var created    = DateTime.UtcNow;

            try {
                events.ForEach(e => e.CreatedUtc = created);
                var results = await _eventPipeline.RunAsync(events.Take(eventsToProcess).ToList(), eventPostInfo).AnyContext();

                _logger.Info().Message("Ran {0} events through the pipeline: id={1} project={2} success={3} error={4}", results.Count, queueEntry.Id, eventPostInfo.ProjectId, results.Count(r => r.IsProcessed), results.Count(r => r.HasError)).WriteIf(!isInternalProject);
                foreach (var eventContext in results)
                {
                    if (eventContext.IsCancelled)
                    {
                        continue;
                    }

                    if (!eventContext.HasError)
                    {
                        continue;
                    }

                    _logger.Error().Exception(eventContext.Exception).Project(eventPostInfo.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, eventContext.ErrorMessage).Write();
                    if (eventContext.Exception is ValidationException)
                    {
                        continue;
                    }

                    errorCount++;

                    if (!isSingleEvent)
                    {
                        // Put this single event back into the queue so we can retry it separately.
                        await _queue.EnqueueAsync(new EventPostInfo {
                            ApiVersion = eventPostInfo.ApiVersion,
                            Data       = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(eventContext.Event)),
                            IpAddress  = eventPostInfo.IpAddress,
                            MediaType  = eventPostInfo.MediaType,
                            CharSet    = eventPostInfo.CharSet,
                            ProjectId  = eventPostInfo.ProjectId,
                            UserAgent  = eventPostInfo.UserAgent
                        }, _storage, false, context.CancellationToken).AnyContext();
                    }
                }
            } catch (Exception ex) {
                _logger.Error().Exception(ex).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, ex.Message).Project(eventPostInfo.ProjectId).Write();
                if (ex is ArgumentException || ex is DocumentNotFoundException)
                {
                    await queueEntry.CompleteAsync().AnyContext();
                }
                else
                {
                    errorCount++;
                }
            }

            if (isSingleEvent && errorCount > 0)
            {
                await queueEntry.AbandonAsync().AnyContext();

                await _storage.SetNotActiveAsync(queueEntry.Value.FilePath, _logger).AnyContext();
            }
            else
            {
                await CompleteEntryAsync(queueEntry, eventPostInfo, created).AnyContext();
            }

            return(JobResult.Success);
        }
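Both EventPost examples cap a multi-event batch at the organization's remaining event limit plus one (the post itself was already counted against the limit when it was received) and then increment usage only for the extra events. A worked illustration of that arithmetic with hypothetical numbers:

    using System.Linq;

    // Suppose GetRemainingEventLimitAsync reported 5 events left and the post parsed into 10 events.
    int remaining       = 5;
    int eventsToProcess = remaining < int.MaxValue ? remaining + 1 : remaining; // 6: one was already counted
    var batch           = Enumerable.Range(1, 10).ToList();                     // stands in for the parsed events
    var accepted        = batch.Take(eventsToProcess).ToList();                 // 6 events continue through the pipeline
    int usageIncrement  = accepted.Count > 1 ? accepted.Count - 1 : 0;          // IncrementUsageAsync is called with 5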