Example 1
        protected override async Task<JobResult> ProcessQueueEntryAsync(JobQueueEntryContext<MailMessage> context)
        {
            Logger.Trace().Message("Processing message '{0}'.", context.QueueEntry.Id).Write();

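            // Send the queued mail message; any failure is logged and surfaced as a failed JobResult.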
            try {
                await _mailSender.SendAsync(context.QueueEntry.Value).AnyContext();

                Logger.Info().Message("Sent message: to={0} subject=\"{1}\"", context.QueueEntry.Value.To, context.QueueEntry.Value.Subject).Write();
            } catch (Exception ex) {
                Logger.Error().Exception(ex).Message("Error sending message: id={0} error={1}", context.QueueEntry.Id, ex.Message).Write();
                return(JobResult.FromException(ex));
            }

            return(JobResult.Success);
        }
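For orientation, here is a minimal sketch of the handler shape shared by the examples on this page: check for cancellation, process the queue entry's value, return JobResult.Success on success, and surface any failure with JobResult.FromException. The MyPayload type, the _worker field, and its ProcessAsync call are hypothetical placeholders, not part of the original code.

        // Minimal sketch; MyPayload, _worker, and ProcessAsync are placeholders.
        protected override async Task<JobResult> ProcessQueueEntryAsync(QueueEntryContext<MyPayload> context)
        {
            if (context.CancellationToken.IsCancellationRequested)
                return JobResult.Cancelled;

            try {
                // Do the unit of work for this queue entry.
                await _worker.ProcessAsync(context.QueueEntry.Value);
            } catch (Exception ex) {
                // Returning the exception marks this run as failed instead of throwing.
                return JobResult.FromException(ex);
            }

            return JobResult.Success;
        }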
Example 2
        protected override Task<JobResult> RunInternalAsync(CancellationToken token)
        {
            if (!Settings.Current.EnableDailySummary)
            {
                return(Task.FromResult(JobResult.SuccessWithMessage("Summary notifications are disabled.")));
            }

            if (_mailer == null)
            {
                return(Task.FromResult(JobResult.SuccessWithMessage("Summary notifications are disabled due to null mailer.")));
            }

            const int BATCH_SIZE = 25;

            var projects = _projectRepository.GetByNextSummaryNotificationOffset(9, BATCH_SIZE);

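            // Process projects in batches of BATCH_SIZE until none remain or cancellation is requested.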
            while (projects.Count > 0 && !token.IsCancellationRequested)
            {
                var documentsUpdated = _projectRepository.IncrementNextSummaryEndOfDayTicks(projects.Select(p => p.Id).ToList());
                Log.Info().Message("Got {0} projects to process. ", projects.Count).Write();
                Debug.Assert(projects.Count == documentsUpdated);

                foreach (var project in projects)
                {
                    var utcStartTime = new DateTime(project.NextSummaryEndOfDayTicks - TimeSpan.TicksPerDay);
                    if (utcStartTime < DateTime.UtcNow.Date.SubtractDays(2))
                    {
                        Log.Info().Message("Skipping daily summary older than two days for project \"{0}\" with a start time of \"{1}\".", project.Id, utcStartTime).Write();
                        continue;
                    }

                    var notification = new SummaryNotification {
                        Id           = project.Id,
                        UtcStartTime = utcStartTime,
                        UtcEndTime   = new DateTime(project.NextSummaryEndOfDayTicks - TimeSpan.TicksPerSecond)
                    };

                    ProcessSummaryNotification(notification);
                }

                projects = _projectRepository.GetByNextSummaryNotificationOffset(9, BATCH_SIZE);
            }

            return(Task.FromResult(new JobResult {
                Message = "Successfully sent summary notifications."
            }));
        }
Example 3
        protected async override Task<JobResult> RunInternalAsync(CancellationToken token)
        {
            Log.Trace().Message("Web hook job starting").Write();

            QueueEntry<WebHookNotification> queueEntry = null;

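            // A TimeoutException from Dequeue just means the queue was empty; any other dequeue failure fails the job.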
            try {
                queueEntry = _queue.Dequeue();
            } catch (Exception ex) {
                if (!(ex is TimeoutException))
                {
                    Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next WebHookNotification: {0}", ex.Message).Write();
                    return(JobResult.FromException(ex));
                }
            }
            if (queueEntry == null)
            {
                return(JobResult.Success);
            }

            Log.Trace().Message("Processing WebHookNotification '{0}'.", queueEntry.Id).Write();

            WebHookNotification body = queueEntry.Value;

            Log.Trace().Project(body.ProjectId).Message("Process web hook call: project={0} url={1}", body.ProjectId, body.Url).Write();

            var client = new HttpClient();

            try {
                var result = client.PostAsJson(body.Url, body.Data.ToJson(Formatting.Indented));

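                // An HTTP 410 Gone response means the receiver no longer accepts this web hook, so remove it.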
                if (result.StatusCode == HttpStatusCode.Gone)
                {
                    _webHookRepository.RemoveByUrl(body.Url);
                    Log.Warn().Project(body.ProjectId).Message("Deleting web hook: org={0} project={1} url={2}", body.OrganizationId, body.ProjectId, body.Url).Write();
                }

                queueEntry.Complete();

                Log.Info().Project(body.ProjectId).Message("Web hook POST complete: status={0} org={1} project={2} url={3}", result.StatusCode, body.OrganizationId, body.ProjectId, body.Url).Write();
            } catch (Exception ex) {
                queueEntry.Abandon();
                return(JobResult.FromException(ex));
            }

            return(JobResult.Success);
        }
        protected async override Task<JobResult> RunInternalAsync(CancellationToken token)
        {
            QueueEntry<EventUserDescription> queueEntry = null;

            try {
                queueEntry = _queue.Dequeue();
            } catch (Exception ex) {
                if (!(ex is TimeoutException))
                {
                    Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next EventUserDescription: {0}", ex.Message).Write();
                    return(JobResult.FromException(ex));
                }
            }
            if (queueEntry == null)
            {
                return(JobResult.Success);
            }

            await _metricsClient.CounterAsync(MetricNames.EventsUserDescriptionDequeued);

            Log.Trace().Message("Processing user description: id={0}", queueEntry.Id).Write();

            try {
                ProcessUserDescription(queueEntry.Value);
                Log.Info().Message("Processed user description: id={0}", queueEntry.Id).Write();
                await _metricsClient.CounterAsync(MetricNames.EventsUserDescriptionProcessed);
            } catch (DocumentNotFoundException ex) {
                // TODO: Change to async once vnext is released.
                _metricsClient.Counter(MetricNames.EventsUserDescriptionErrors);
                queueEntry.Abandon();
                Log.Error().Exception(ex).Message("An event with this reference id \"{0}\" has not been processed yet or was deleted. Queue Id: {1}", ex.Id, queueEntry.Id).Write();
                return(JobResult.FromException(ex));
            } catch (Exception ex) {
                // TODO: Change to async once vnext is released.
                _metricsClient.Counter(MetricNames.EventsUserDescriptionErrors);
                queueEntry.Abandon();

                Log.Error().Exception(ex).Message("An error occurred while processing the EventUserDescription '{0}': {1}", queueEntry.Id, ex.Message).Write();
                return(JobResult.FromException(ex));
            }

            queueEntry.Complete();

            return(JobResult.Success);
        }
        protected override async Task<JobResult> ProcessQueueEntryAsync(JobQueueEntryContext<EventUserDescription> context)
        {
            Logger.Trace().Message("Processing user description: id={0}", context.QueueEntry.Id).Write();

            try {
                await ProcessUserDescriptionAsync(context.QueueEntry.Value).AnyContext();

                Logger.Info().Message("Processed user description: id={0}", context.QueueEntry.Id).Write();
            } catch (DocumentNotFoundException ex) {
                Logger.Error().Exception(ex).Message("An event with this reference id \"{0}\" has not been processed yet or was deleted. Queue Id: {1}", ex.Id, context.QueueEntry.Id).Write();
                return(JobResult.FromException(ex));
            } catch (Exception ex) {
                Logger.Error().Exception(ex).Message("An error occurred while processing the EventUserDescription '{0}': {1}", context.QueueEntry.Id, ex.Message).Write();
                return(JobResult.FromException(ex));
            }

            return(JobResult.Success);
        }
        protected override async Task<JobResult> ProcessQueueEntryAsync(QueueEntryContext<EventUserDescription> context)
        {
            _logger.LogTrace("Processing user description: id={0}", context.QueueEntry.Id);

            try {
                await ProcessUserDescriptionAsync(context.QueueEntry.Value).AnyContext();

                _logger.LogInformation("Processed user description: id={Id}", context.QueueEntry.Id);
            } catch (DocumentNotFoundException ex) {
                _logger.LogError(ex, "An event with this reference id {ReferenceId} has not been processed yet or was deleted. Queue Id: {Id}", ex.Id, context.QueueEntry.Id);
                return(JobResult.FromException(ex));
            } catch (Exception ex) {
                _logger.LogError(ex, "An error occurred while processing the EventUserDescription {Id}: {Message}", context.QueueEntry.Id, ex.Message);
                return(JobResult.FromException(ex));
            }

            return(JobResult.Success);
        }
        protected async override Task<JobResult> RunInternalAsync()
        {
            Log.Info().Message("Process email message job starting").Write();
            int totalEmailsProcessed = 0;
            int totalEmailsToProcess = Context.GetWorkItemLimit();

            while (!CancelPending && (totalEmailsToProcess == -1 || totalEmailsProcessed < totalEmailsToProcess))
            {
                QueueEntry<MailMessage> queueEntry = null;
                try {
                    queueEntry = await _queue.DequeueAsync();
                } catch (Exception ex) {
                    if (!(ex is TimeoutException))
                    {
                        Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next MailMessageNotification: {0}", ex.Message).Write();
                        return(JobResult.FromException(ex));
                    }
                }
                if (queueEntry == null)
                {
                    continue;
                }

                _statsClient.Counter(StatNames.EmailsDequeued);

                Log.Info().Message("Processing MailMessageNotification '{0}'.", queueEntry.Id).Write();

                try {
                    await _mailSender.SendAsync(queueEntry.Value);

                    totalEmailsProcessed++;
                    _statsClient.Counter(StatNames.EmailsSent);
                } catch (Exception ex) {
                    _statsClient.Counter(StatNames.EmailsSendErrors);
                    await queueEntry.AbandonAsync();

                    Log.Error().Exception(ex).Message("Error sending message '{0}': {1}", queueEntry.Id, ex.Message).Write();
                    continue; // Don't complete an entry that was just abandoned.
                }

                await queueEntry.CompleteAsync();
            }

            return(JobResult.Success);
        }
        protected async override Task<JobResult> RunInternalAsync(CancellationToken token)
        {
            Log.Info().Message("Process user description job starting").Write();

            QueueEntry<EventUserDescription> queueEntry = null;

            try {
                queueEntry = _queue.Dequeue();
            } catch (Exception ex) {
                if (!(ex is TimeoutException))
                {
                    Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next EventUserDescription: {0}", ex.Message).Write();
                    return(JobResult.FromException(ex));
                }
            }
            if (queueEntry == null)
            {
                return(JobResult.Success);
            }

            _statsClient.Counter(StatNames.EventsUserDescriptionDequeued);
            Log.Info().Message("Processing EventUserDescription '{0}'.", queueEntry.Id).Write();

            try {
                ProcessUserDescription(queueEntry.Value);
                _statsClient.Counter(StatNames.EventsUserDescriptionProcessed);
            } catch (DocumentNotFoundException ex) {
                _statsClient.Counter(StatNames.EventsUserDescriptionErrors);
                queueEntry.Abandon();
                Log.Error().Exception(ex).Message("An event with this reference id \"{0}\" has not been processed yet or was deleted. Queue Id: {1}", ex.Id, queueEntry.Id).Write();
                return(JobResult.FromException(ex));
            } catch (Exception ex) {
                _statsClient.Counter(StatNames.EventsUserDescriptionErrors);
                queueEntry.Abandon();

                // TODO: Add the EventUserDescription to the logged exception.
                Log.Error().Exception(ex).Message("An error occurred while processing the EventUserDescription '{0}': {1}", queueEntry.Id, ex.Message).Write();
                return(JobResult.FromException(ex));
            }

            queueEntry.Complete();

            return(JobResult.Success);
        }
Example 9
        public Task<JobResult> RunAsync(CancellationToken? token = null)
        {
            if (!token.HasValue)
            {
                token = CancellationToken.None;
            }

            if (LockProvider == null)
            {
                return(TryRunAsync(token.Value));
            }

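            // Run under a lock (when a lock provider is configured) so only one instance of the job executes at a time; give up if the lock cannot be acquired within a minute.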
            try {
                using (LockProvider.AcquireLock(GetType().FullName, acquireTimeout: TimeSpan.FromMinutes(1)))
                    return(TryRunAsync(token.Value));
            } catch (TimeoutException) {
                return(Task.FromResult(JobResult.FailedWithMessage("Timeout attempting to acquire lock.")));
            }
        }
Example 10
        protected override async Task<JobResult> RunInternalAsync(JobContext context)
        {
            _lastRun = SystemClock.UtcNow;

            string licenseKey = _options.Value.MaxMindGeoIpKey;

            if (String.IsNullOrEmpty(licenseKey))
            {
                _logger.LogInformation("Configure {SettingKey} to download GeoIP database.", nameof(AppOptions.MaxMindGeoIpKey));
                return(JobResult.Success);
            }

            try {
                var fi = await _storage.GetFileInfoAsync(GEO_IP_DATABASE_PATH).AnyContext();

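                // Skip the download if the stored database was already refreshed today.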
                if (fi != null && fi.Modified.IsAfter(SystemClock.UtcNow.StartOfDay()))
                {
                    _logger.LogInformation("The GeoIP database is already up-to-date.");
                    return(JobResult.Success);
                }

                _logger.LogInformation("Downloading GeoIP database.");
                var    client = new HttpClient();
                string url    = $"https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-City&license_key={licenseKey}&suffix=tar.gz";
                var    file   = await client.GetAsync(url, context.CancellationToken).AnyContext();

                if (!file.IsSuccessStatusCode)
                {
                    return(JobResult.FailedWithMessage("Unable to download GeoIP database."));
                }

                _logger.LogInformation("Extracting GeoIP database");
                using (var decompressionStream = new GZipStream(await file.Content.ReadAsStreamAsync().AnyContext(), CompressionMode.Decompress))
                    await _storage.SaveFileAsync(GEO_IP_DATABASE_PATH, decompressionStream, context.CancellationToken).AnyContext();
            } catch (Exception ex) {
                _logger.LogError(ex, "An error occurred while downloading the GeoIP database.");
                return(JobResult.FromException(ex));
            }

            _logger.LogInformation("Finished downloading GeoIP database.");
            return(JobResult.Success);
        }
Example 11
        protected async override Task<JobResult> RunInternalAsync(CancellationToken token)
        {
            QueueEntry<MailMessage> queueEntry = null;

            try {
                queueEntry = _queue.Dequeue();
            } catch (Exception ex) {
                if (!(ex is TimeoutException))
                {
                    Log.Error().Exception(ex).Message("Error trying to dequeue message: {0}", ex.Message).Write();
                    return(JobResult.FromException(ex));
                }
            }

            if (queueEntry == null)
            {
                return(JobResult.Success);
            }

            await _metricsClient.CounterAsync(MetricNames.EmailsDequeued);

            Log.Trace().Message("Processing message '{0}'.", queueEntry.Id).Write();

            try {
                await _mailSender.SendAsync(queueEntry.Value);

                await _metricsClient.CounterAsync(MetricNames.EmailsSent);

                Log.Info().Message("Sent message: to={0} subject=\"{1}\"", queueEntry.Value.To, queueEntry.Value.Subject).Write();
            } catch (Exception ex) {
                // TODO: Change to async once vnext is released.
                _metricsClient.Counter(MetricNames.EmailsSendErrors);
                Log.Error().Exception(ex).Message("Error sending message: id={0} error={1}", queueEntry.Id, ex.Message).Write();

                queueEntry.Abandon();
                return(JobResult.FromException(ex)); // Don't complete an entry that was just abandoned.
            }

            queueEntry.Complete();

            return(JobResult.Success);
        }
Example 12
        protected override async Task<JobResult> RunInternalAsync(CancellationToken token)
        {
            var path = PathHelper.ExpandPath(Settings.Current.GeoIPDatabasePath);

            if (String.IsNullOrEmpty(path))
            {
                Log.Error().Message("No GeoIPDatabasePath was specified.").Write();
                return(JobResult.FailedWithMessage("No GeoIPDatabasePath was specified."));
            }

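            // Remove any existing database file, then download and decompress a fresh copy.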
            try {
                if (File.Exists(path))
                {
                    Log.Info().Message("Deleting existing GeoIP database \"{0}\".", path).Write();
                    File.Delete(path);
                }

                Log.Info().Message("Downloading GeoIP database.").Write();
                var client = new HttpClient();
                var file   = await client.GetAsync("http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz", token);

                if (!file.IsSuccessStatusCode)
                {
                    return(JobResult.FailedWithMessage("Unable to download GeoIP database."));
                }

                Log.Info().Message("Extracting GeoIP database to \"{0}\".", path).Write();
                using (FileStream decompressedFileStream = new FileStream(path, FileMode.CreateNew)) {
                    using (GZipStream decompressionStream = new GZipStream(await file.Content.ReadAsStreamAsync(), CompressionMode.Decompress)) {
                        decompressionStream.CopyTo(decompressedFileStream);
                    }
                }
            } catch (Exception ex) {
                Log.Error().Exception(ex).Message("An error occurred while downloading the GeoIP database \"{0}\".", path).Write();
                return(JobResult.FromException(ex));
            }

            Log.Info().Message("Finished downloading GeoIP database.").Write();
            return(JobResult.Success);
        }
Example 13
        protected async override Task<JobResult> RunInternalAsync(CancellationToken token)
        {
            Log.Info().Message("Process email message job starting").Write();

            QueueEntry<MailMessage> queueEntry = null;

            try {
                queueEntry = _queue.Dequeue();
            } catch (Exception ex) {
                if (!(ex is TimeoutException))
                {
                    Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next MailMessageNotification: {0}", ex.Message).Write();
                    return(JobResult.FromException(ex));
                }
            }
            if (queueEntry == null)
            {
                return(JobResult.Success);
            }

            _statsClient.Counter(StatNames.EmailsDequeued);

            Log.Info().Message("Processing MailMessageNotification '{0}'.", queueEntry.Id).Write();

            try {
                await _mailSender.SendAsync(queueEntry.Value);

                _statsClient.Counter(StatNames.EmailsSent);
            } catch (Exception ex) {
                _statsClient.Counter(StatNames.EmailsSendErrors);
                queueEntry.Abandon();

                Log.Error().Exception(ex).Message("Error sending message '{0}': {1}", queueEntry.Id, ex.Message).Write();
                return(JobResult.FromException(ex)); // Don't complete an entry that was just abandoned.
            }

            queueEntry.Complete();

            return(JobResult.Success);
        }
Example 14
        protected override async Task<JobResult> ProcessQueueEntryAsync(QueueEntryContext<EventNotification> context)
        {
            var wi = context.QueueEntry.Value;
            var ev = await _eventRepository.GetByIdAsync(wi.EventId).AnyContext();

            if (ev == null)
            {
                return(JobResult.SuccessWithMessage($"Could not load event: {wi.EventId}"));
            }

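            // Suppress verbose logging for events that belong to the internal project.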
            bool shouldLog = ev.ProjectId != _appOptions.InternalProjectId;
            int  sent      = 0;

            if (shouldLog)
            {
                _logger.LogTrace("Process notification: project={project} event={id} stack={stack}", ev.ProjectId, ev.Id, ev.StackId);
            }

            var project = await _projectRepository.GetByIdAsync(ev.ProjectId, o => o.Cache()).AnyContext();

            if (project == null)
            {
                return(JobResult.SuccessWithMessage($"Could not load project: {ev.ProjectId}."));
            }

            using (_logger.BeginScope(new ExceptionlessState().Organization(project.OrganizationId).Project(project.Id))) {
                if (shouldLog)
                {
                    _logger.LogTrace("Loaded project: name={ProjectName}", project.Name);
                }

                // after the first 2 occurrences, don't send a notification for the same stack more than once every 30 minutes
                var lastTimeSentUtc = await _cache.GetAsync<DateTime>(String.Concat("notify:stack-throttle:", ev.StackId), DateTime.MinValue).AnyContext();

                if (wi.TotalOccurrences > 2 && !wi.IsRegression && lastTimeSentUtc != DateTime.MinValue && lastTimeSentUtc > SystemClock.UtcNow.AddMinutes(-30))
                {
                    if (shouldLog)
                    {
                        _logger.LogInformation("Skipping message because of stack throttling: last sent={LastSentUtc} occurrences={TotalOccurrences}", lastTimeSentUtc, wi.TotalOccurrences);
                    }
                    return(JobResult.Success);
                }

                if (context.CancellationToken.IsCancellationRequested)
                {
                    return(JobResult.Cancelled);
                }

                // don't send more than 10 notifications for a given project every 30 minutes
                var    projectTimeWindow = TimeSpan.FromMinutes(30);
                string cacheKey          = String.Concat("notify:project-throttle:", ev.ProjectId, "-", SystemClock.UtcNow.Floor(projectTimeWindow).Ticks);
                double notificationCount = await _cache.IncrementAsync(cacheKey, 1, projectTimeWindow).AnyContext();

                if (notificationCount > 10 && !wi.IsRegression)
                {
                    if (shouldLog)
                    {
                        _logger.LogInformation("Skipping message because of project throttling: count={NotificationCount}", notificationCount);
                    }
                    return(JobResult.Success);
                }

                foreach (var kv in project.NotificationSettings)
                {
                    var settings = kv.Value;
                    if (shouldLog)
                    {
                        _logger.LogTrace("Processing notification: {Key}", kv.Key);
                    }

                    bool isCritical                = ev.IsCritical();
                    bool shouldReportNewError      = settings.ReportNewErrors && wi.IsNew && ev.IsError();
                    bool shouldReportCriticalError = settings.ReportCriticalErrors && isCritical && ev.IsError();
                    bool shouldReportRegression    = settings.ReportEventRegressions && wi.IsRegression;
                    bool shouldReportNewEvent      = settings.ReportNewEvents && wi.IsNew;
                    bool shouldReportCriticalEvent = settings.ReportCriticalEvents && isCritical;
                    bool shouldReport              = shouldReportNewError || shouldReportCriticalError || shouldReportRegression || shouldReportNewEvent || shouldReportCriticalEvent;

                    if (shouldLog)
                    {
                        _logger.LogTrace("Settings: new error={ReportNewErrors} critical error={ReportCriticalErrors} regression={ReportEventRegressions} new={ReportNewEvents} critical={ReportCriticalEvents}", settings.ReportNewErrors, settings.ReportCriticalErrors, settings.ReportEventRegressions, settings.ReportNewEvents, settings.ReportCriticalEvents);
                        _logger.LogTrace("Should process: new error={ShouldReportNewError} critical error={ShouldReportCriticalError} regression={ShouldReportRegression} new={ShouldReportNewEvent} critical={ShouldReportCriticalEvent}", shouldReportNewError, shouldReportCriticalError, shouldReportRegression, shouldReportNewEvent, shouldReportCriticalEvent);
                    }
                    var request = ev.GetRequestInfo();
                    // check for known bots if the user has elected to not report them
                    if (shouldReport && !String.IsNullOrEmpty(request?.UserAgent))
                    {
                        var botPatterns = project.Configuration.Settings.GetStringCollection(SettingsDictionary.KnownKeys.UserAgentBotPatterns).ToList();

                        var info = await _parser.ParseAsync(request.UserAgent).AnyContext();

                        if ((info != null && info.Device.IsSpider) || request.UserAgent.AnyWildcardMatches(botPatterns))
                        {
                            shouldReport = false;
                            if (shouldLog)
                            {
                                _logger.LogInformation("Skipping because event is from a bot {UserAgent}.", request.UserAgent);
                            }
                        }
                    }

                    if (!shouldReport)
                    {
                        continue;
                    }

                    bool processed;
                    switch (kv.Key)
                    {
                    case Project.NotificationIntegrations.Slack:
                        processed = await _slackService.SendEventNoticeAsync(ev, project, wi.IsNew, wi.IsRegression).AnyContext();

                        break;

                    default:
                        processed = await SendEmailNotificationAsync(kv.Key, project, ev, wi, shouldLog).AnyContext();

                        break;
                    }

                    if (shouldLog)
                    {
                        _logger.LogTrace("Finished processing notification: {Key}", kv.Key);
                    }
                    if (processed)
                    {
                        sent++;
                    }
                }

                // if we sent any notifications, mark the last time a notification for this stack was sent.
                if (sent > 0)
                {
                    await _cache.SetAsync(String.Concat("notify:stack-throttle:", ev.StackId), SystemClock.UtcNow, SystemClock.UtcNow.AddMinutes(15)).AnyContext();

                    if (shouldLog)
                    {
                        _logger.LogInformation("Notifications sent: event={id} stack={stack} count={SentCount}", ev.Id, ev.StackId, sent);
                    }
                }
            }
            return(JobResult.Success);
        }
Example 15
        protected override async Task<JobResult> ProcessQueueEntryAsync(QueueEntryContext<EventPost> context)
        {
            var      queueEntry = context.QueueEntry;
            FileSpec fileInfo   = null;
            await _metricsClient.TimeAsync(async() => fileInfo = await _storage.GetFileInfoAsync(queueEntry.Value.FilePath).AnyContext(), MetricNames.PostsFileInfoTime).AnyContext();

            if (fileInfo == null)
            {
                await _metricsClient.TimeAsync(() => queueEntry.AbandonAsync(), MetricNames.PostsAbandonTime).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to retrieve post data info '{queueEntry.Value.FilePath}'."));
            }

            await _metricsClient.GaugeAsync(MetricNames.PostsMessageSize, fileInfo.Size).AnyContext();

            if (fileInfo.Size > GetMaximumEventPostFileSize())
            {
                await _metricsClient.TimeAsync(() => queueEntry.CompleteAsync(), MetricNames.PostsCompleteTime).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to process post data '{queueEntry.Value.FilePath}' ({fileInfo.Size} bytes): Maximum event post size limit ({Settings.Current.MaximumEventPostSize} bytes) reached."));
            }

            EventPostInfo ep = null;
            await _metricsClient.TimeAsync(async() => ep = await _storage.GetEventPostAndSetActiveAsync(queueEntry.Value.FilePath, _logger, context.CancellationToken).AnyContext(), MetricNames.PostsMarkFileActiveTime).AnyContext();

            if (ep == null)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to retrieve post data '{queueEntry.Value.FilePath}'."));
            }

            await _metricsClient.GaugeAsync(MetricNames.PostsCompressedSize, ep.Data.Length).AnyContext();

            bool isInternalProject = ep.ProjectId == Settings.Current.InternalProjectId;

            _logger.Info()
            .Message("Processing post: id={0} path={1} project={2} ip={3} v={4} agent={5}", queueEntry.Id, queueEntry.Value.FilePath, ep.ProjectId, ep.IpAddress, ep.ApiVersion, ep.UserAgent)
            .Property("Id", queueEntry.Id)
            .Property("ApiVersion", ep.ApiVersion)
            .Property("IpAddress", ep.IpAddress)
            .Property("Client", ep.UserAgent)
            .Tag("processing", "compressed", ep.ContentEncoding)
            .Value(ep.Data.Length)
            .Project(ep.ProjectId)
            .WriteIf(!isInternalProject);

            var project = await _projectRepository.GetByIdAsync(ep.ProjectId, o => o.Cache()).AnyContext();

            if (project == null)
            {
                _logger.Error().Message("Unable to process EventPost \"{0}\": Unable to load project: {1}", queueEntry.Value.FilePath, ep.ProjectId).Property("Id", queueEntry.Id).Project(ep.ProjectId).WriteIf(!isInternalProject);
                await CompleteEntryAsync(queueEntry, ep, SystemClock.UtcNow).AnyContext();

                return(JobResult.Success);
            }

            long maxEventPostSize = Settings.Current.MaximumEventPostSize;

            byte[] uncompressedData = ep.Data;
            if (!String.IsNullOrEmpty(ep.ContentEncoding))
            {
                _logger.Debug().Message("Decompressing EventPost: {0} ({1} bytes)", queueEntry.Id, ep.Data.Length).Property("Id", queueEntry.Id).Tag("decompressing", ep.ContentEncoding).Project(ep.ProjectId).WriteIf(!isInternalProject);
                maxEventPostSize = GetMaximumUncompressedEventPostSize();
                try {
                    await _metricsClient.TimeAsync(async() => {
                        uncompressedData = await uncompressedData.DecompressAsync(ep.ContentEncoding).AnyContext();
                    }, MetricNames.PostsDecompressionTime).AnyContext();
                } catch (Exception ex) {
                    await _metricsClient.CounterAsync(MetricNames.PostsDecompressionErrors).AnyContext();
                    await CompleteEntryAsync(queueEntry, ep, SystemClock.UtcNow).AnyContext();

                    return(JobResult.FailedWithMessage($"Unable to decompress EventPost data '{queueEntry.Value.FilePath}' ({ep.Data.Length} bytes compressed): {ex.Message}"));
                }
            }

            await _metricsClient.GaugeAsync(MetricNames.PostsUncompressedSize, fileInfo.Size).AnyContext();

            if (uncompressedData.Length > maxEventPostSize)
            {
                await CompleteEntryAsync(queueEntry, ep, SystemClock.UtcNow).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to process decompressed EventPost data '{queueEntry.Value.FilePath}' ({ep.Data.Length} bytes compressed, {uncompressedData.Length} bytes): Maximum uncompressed event post size limit ({maxEventPostSize} bytes) reached."));
            }

            _logger.Debug().Message("Processing uncompressed EventPost: {0}  ({1} bytes)", queueEntry.Id, uncompressedData.Length).Property("Id", queueEntry.Id).Tag("uncompressed").Value(uncompressedData.Length).Project(ep.ProjectId).WriteIf(!isInternalProject);
            var createdUtc = SystemClock.UtcNow;
            var events     = await ParseEventPostAsync(ep, createdUtc, uncompressedData, queueEntry.Id, isInternalProject).AnyContext();

            if (events == null || events.Count == 0)
            {
                await CompleteEntryAsync(queueEntry, ep, createdUtc).AnyContext();

                return(JobResult.Success);
            }

            if (context.CancellationToken.IsCancellationRequested)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();

                return(JobResult.Cancelled);
            }

            bool isSingleEvent = events.Count == 1;

            if (!isSingleEvent)
            {
                await _metricsClient.TimeAsync(async() => {
                    // Don't process all the events if it will put the account over its limits.
                    int eventsToProcess = await _organizationRepository.GetRemainingEventLimitAsync(project.OrganizationId).AnyContext();

                    // Add 1 because we already counted 1 against their limit when we received the event post.
                    if (eventsToProcess < Int32.MaxValue)
                    {
                        eventsToProcess += 1;
                    }

                    // Discard any events over their limit.
                    events = events.Take(eventsToProcess).ToList();

                    // Increment the count if greater than 1, since we already incremented it by 1 in the OverageHandler.
                    if (events.Count > 1)
                    {
                        await _organizationRepository.IncrementUsageAsync(project.OrganizationId, false, events.Count - 1, applyHourlyLimit: false).AnyContext();
                    }
                }, MetricNames.PostsUpdateEventLimitTime).AnyContext();
            }

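            // Run the batch through the event pipeline; events that fail (other than validation failures) are collected so they can be retried individually.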
            int errorCount    = 0;
            var eventsToRetry = new List<PersistentEvent>();

            try {
                var contexts = await _eventPipeline.RunAsync(events, ep).AnyContext();

                _logger.Debug().Message(() => $"Ran {contexts.Count} events through the pipeline: id={queueEntry.Id} success={contexts.Count(r => r.IsProcessed)} error={contexts.Count(r => r.HasError)}").Property("Id", queueEntry.Id).Value(contexts.Count).Project(ep.ProjectId).WriteIf(!isInternalProject);
                foreach (var ctx in contexts)
                {
                    if (ctx.IsCancelled)
                    {
                        continue;
                    }

                    if (!ctx.HasError)
                    {
                        continue;
                    }

                    _logger.Error().Exception(ctx.Exception).Message("Error processing EventPost \"{0}\": {1}", queueEntry.Value.FilePath, ctx.ErrorMessage).Property("Id", queueEntry.Id).Project(ep.ProjectId).WriteIf(!isInternalProject);
                    if (ctx.Exception is ValidationException)
                    {
                        continue;
                    }

                    errorCount++;
                    if (!isSingleEvent)
                    {
                        // Put this single event back into the queue so we can retry it separately.
                        eventsToRetry.Add(ctx.Event);
                    }
                }
            } catch (Exception ex) {
                _logger.Error().Exception(ex).Message("Error processing EventPost \"{0}\": {1}", queueEntry.Value.FilePath, ex.Message).Property("Id", queueEntry.Id).Project(ep.ProjectId).WriteIf(!isInternalProject);
                if (ex is ArgumentException || ex is DocumentNotFoundException)
                {
                    await CompleteEntryAsync(queueEntry, ep, createdUtc).AnyContext();

                    return(JobResult.Success);
                }

                errorCount++;
                if (!isSingleEvent)
                {
                    eventsToRetry.AddRange(events);
                }
            }

            if (eventsToRetry.Count > 0)
            {
                await _metricsClient.TimeAsync(() => RetryEvents(context, eventsToRetry, ep, queueEntry), MetricNames.PostsRetryTime).AnyContext();
            }

            if (isSingleEvent && errorCount > 0)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();
            }
            else
            {
                await CompleteEntryAsync(queueEntry, ep, createdUtc).AnyContext();
            }

            return(JobResult.Success);
        }
Example 16
        protected override async Task<JobResult> ProcessQueueEntryAsync(QueueEntryContext<EventPost> context)
        {
            var    entry            = context.QueueEntry;
            var    ep               = entry.Value;
            string payloadPath      = Path.ChangeExtension(entry.Value.FilePath, ".payload");
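            // Start the payload, project, and organization lookups concurrently; each is awaited when it is needed.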
            var    payloadTask      = _metrics.TimeAsync(() => _eventPostService.GetEventPostPayloadAsync(payloadPath, context.CancellationToken), MetricNames.PostsMarkFileActiveTime);
            var    projectTask      = _projectRepository.GetByIdAsync(ep.ProjectId, o => o.Cache());
            var    organizationTask = _organizationRepository.GetByIdAsync(ep.OrganizationId, o => o.Cache());

            var payload = await payloadTask.AnyContext();

            if (payload == null)
            {
                await Task.WhenAll(AbandonEntryAsync(entry), projectTask, organizationTask).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to retrieve payload '{payloadPath}'."));
            }

            _metrics.Gauge(MetricNames.PostsMessageSize, payload.LongLength);
            if (payload.LongLength > _maximumEventPostFileSize)
            {
                await Task.WhenAll(_metrics.TimeAsync(() => entry.CompleteAsync(), MetricNames.PostsCompleteTime), projectTask, organizationTask).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to process payload '{payloadPath}' ({payload.LongLength} bytes): Maximum event post size limit ({Settings.Current.MaximumEventPostSize} bytes) reached."));
            }

            using (_logger.BeginScope(new ExceptionlessState().Organization(ep.OrganizationId).Project(ep.ProjectId))) {
                _metrics.Gauge(MetricNames.PostsCompressedSize, payload.Length);

                bool isDebugLogLevelEnabled = _logger.IsEnabled(LogLevel.Debug);
                bool isInternalProject      = ep.ProjectId == Settings.Current.InternalProjectId;
                if (!isInternalProject && _logger.IsEnabled(LogLevel.Information))
                {
                    using (_logger.BeginScope(new ExceptionlessState().Tag("processing").Tag("compressed").Tag(ep.ContentEncoding).Value(payload.Length)))
                        _logger.LogInformation("Processing post: id={QueueEntryId} path={FilePath} project={project} ip={IpAddress} v={ApiVersion} agent={UserAgent}", entry.Id, payloadPath, ep.ProjectId, ep.IpAddress, ep.ApiVersion, ep.UserAgent);
                }

                var project = await projectTask.AnyContext();

                if (project == null)
                {
                    if (!isInternalProject)
                    {
                        _logger.LogError("Unable to process EventPost {FilePath}: Unable to load project: {Project}", payloadPath, ep.ProjectId);
                    }
                    await Task.WhenAll(CompleteEntryAsync(entry, ep, SystemClock.UtcNow), organizationTask).AnyContext();

                    return(JobResult.Success);
                }

                long maxEventPostSize = Settings.Current.MaximumEventPostSize;
                var  uncompressedData = payload;
                if (!String.IsNullOrEmpty(ep.ContentEncoding))
                {
                    if (!isInternalProject && isDebugLogLevelEnabled)
                    {
                        using (_logger.BeginScope(new ExceptionlessState().Tag("decompressing").Tag(ep.ContentEncoding)))
                            _logger.LogDebug("Decompressing EventPost: {QueueEntryId} ({CompressedBytes} bytes)", entry.Id, payload.Length);
                    }

                    maxEventPostSize = _maximumUncompressedEventPostSize;
                    try {
                        _metrics.Time(() => {
                            uncompressedData = uncompressedData.Decompress(ep.ContentEncoding);
                        }, MetricNames.PostsDecompressionTime);
                    } catch (Exception ex) {
                        _metrics.Counter(MetricNames.PostsDecompressionErrors);
                        await Task.WhenAll(CompleteEntryAsync(entry, ep, SystemClock.UtcNow), organizationTask).AnyContext();

                        return(JobResult.FailedWithMessage($"Unable to decompress EventPost data '{payloadPath}' ({payload.Length} bytes compressed): {ex.Message}"));
                    }
                }

                _metrics.Gauge(MetricNames.PostsUncompressedSize, payload.LongLength);
                if (uncompressedData.Length > maxEventPostSize)
                {
                    await Task.WhenAll(CompleteEntryAsync(entry, ep, SystemClock.UtcNow), organizationTask).AnyContext();

                    return(JobResult.FailedWithMessage($"Unable to process decompressed EventPost data '{payloadPath}' ({payload.Length} bytes compressed, {uncompressedData.Length} bytes): Maximum uncompressed event post size limit ({maxEventPostSize} bytes) reached."));
                }

                if (!isInternalProject && isDebugLogLevelEnabled)
                {
                    using (_logger.BeginScope(new ExceptionlessState().Tag("uncompressed").Value(uncompressedData.Length)))
                        _logger.LogDebug("Processing uncompressed EventPost: {QueueEntryId}  ({UncompressedBytes} bytes)", entry.Id, uncompressedData.Length);
                }

                var createdUtc = SystemClock.UtcNow;
                var events     = ParseEventPost(ep, payload, createdUtc, uncompressedData, entry.Id, isInternalProject);
                if (events == null || events.Count == 0)
                {
                    await Task.WhenAll(CompleteEntryAsync(entry, ep, createdUtc), organizationTask).AnyContext();

                    return(JobResult.Success);
                }

                if (context.CancellationToken.IsCancellationRequested)
                {
                    await Task.WhenAll(AbandonEntryAsync(entry), organizationTask).AnyContext();

                    return(JobResult.Cancelled);
                }

                var organization = await organizationTask.AnyContext();

                if (organization == null)
                {
                    if (!isInternalProject)
                    {
                        _logger.LogError("Unable to process EventPost {FilePath}: Unable to load organization: {OrganizationId}", payloadPath, project.OrganizationId);
                    }

                    await CompleteEntryAsync(entry, ep, SystemClock.UtcNow).AnyContext();

                    return(JobResult.Success);
                }

                bool isSingleEvent = events.Count == 1;
                if (!isSingleEvent)
                {
                    await _metrics.TimeAsync(async() => {
                        // Don't process all the events if it will put the account over its limits.
                        int eventsToProcess = await _usageService.GetRemainingEventLimitAsync(organization).AnyContext();

                        // Add 1 because we already counted 1 against their limit when we received the event post.
                        if (eventsToProcess < Int32.MaxValue)
                        {
                            eventsToProcess += 1;
                        }

                        // Discard any events over their limit.
                        events = events.Take(eventsToProcess).ToList();

                        // Increment the count if greater than 1, since we already incremented it by 1 in the OverageHandler.
                        if (events.Count > 1)
                        {
                            await _usageService.IncrementUsageAsync(organization, project, false, events.Count - 1, applyHourlyLimit: false).AnyContext();
                        }
                    }, MetricNames.PostsUpdateEventLimitTime).AnyContext();
                }

                int errorCount    = 0;
                var eventsToRetry = new List<PersistentEvent>();
                try {
                    var contexts = await _eventPipeline.RunAsync(events, organization, project, ep).AnyContext();

                    if (!isInternalProject && isDebugLogLevelEnabled)
                    {
                        using (_logger.BeginScope(new ExceptionlessState().Value(contexts.Count)))
                            _logger.LogDebug("Ran {@value} events through the pipeline: id={QueueEntryId} success={SuccessCount} error={ErrorCount}", contexts.Count, entry.Id, contexts.Count(r => r.IsProcessed), contexts.Count(r => r.HasError));
                    }

                    foreach (var ctx in contexts)
                    {
                        if (ctx.IsCancelled)
                        {
                            continue;
                        }

                        if (!ctx.HasError)
                        {
                            continue;
                        }

                        if (!isInternalProject)
                        {
                            _logger.LogError(ctx.Exception, "Error processing EventPost {QueueEntryId} {FilePath}: {Message}", entry.Id, payloadPath, ctx.ErrorMessage);
                        }
                        if (ctx.Exception is ValidationException)
                        {
                            continue;
                        }

                        errorCount++;
                        if (!isSingleEvent)
                        {
                            // Put this single event back into the queue so we can retry it separately.
                            eventsToRetry.Add(ctx.Event);
                        }
                    }
                } catch (Exception ex) {
                    if (!isInternalProject)
                    {
                        _logger.LogError(ex, "Error processing EventPost {QueueEntryId} {FilePath}: {Message}", entry.Id, payloadPath, ex.Message);
                    }
                    if (ex is ArgumentException || ex is DocumentNotFoundException)
                    {
                        await CompleteEntryAsync(entry, ep, createdUtc).AnyContext();

                        return(JobResult.Success);
                    }

                    errorCount++;
                    if (!isSingleEvent)
                    {
                        eventsToRetry.AddRange(events);
                    }
                }

                if (eventsToRetry.Count > 0)
                {
                    await _metrics.TimeAsync(() => RetryEventsAsync(eventsToRetry, ep, entry, project, isInternalProject), MetricNames.PostsRetryTime).AnyContext();
                }

                if (isSingleEvent && errorCount > 0)
                {
                    await AbandonEntryAsync(entry).AnyContext();
                }
                else
                {
                    await CompleteEntryAsync(entry, ep, createdUtc).AnyContext();
                }

                return(JobResult.Success);
            }
        }
        protected async override Task<JobResult> RunInternalAsync(CancellationToken token)
        {
            Log.Trace().Message("Process event notification job starting").Write();

            QueueEntry<EventNotification> queueEntry = null;

            try {
                queueEntry = _queue.Dequeue();
            } catch (Exception ex) {
                if (!(ex is TimeoutException))
                {
                    return(JobResult.FromException(ex, "An error occurred while trying to dequeue the next EventNotification: {0}", ex.Message));
                }
            }
            if (queueEntry == null)
            {
                return(JobResult.Success);
            }

            var eventNotification = queueEntry.Value;
            int emailsSent        = 0;

            Log.Trace().Message("Process notification: project={0} event={1} stack={2}", eventNotification.Event.ProjectId, eventNotification.Event.Id, eventNotification.Event.StackId).Write();

            var project = _projectRepository.GetById(eventNotification.Event.ProjectId, true);

            if (project == null)
            {
                queueEntry.Abandon();
                return(JobResult.FailedWithMessage("Could not load project {0}.", eventNotification.Event.ProjectId));
            }
            Log.Trace().Message("Loaded project: name={0}", project.Name).Write();

            var organization = _organizationRepository.GetById(project.OrganizationId, true);

            if (organization == null)
            {
                queueEntry.Abandon();
                return(JobResult.FailedWithMessage("Could not load organization {0}.", project.OrganizationId));
            }
            Log.Trace().Message("Loaded organization: name={0}", organization.Name).Write();

            var stack = _stackRepository.GetById(eventNotification.Event.StackId);

            if (stack == null)
            {
                queueEntry.Abandon();
                return(JobResult.FailedWithMessage("Could not load stack {0}.", eventNotification.Event.StackId));
            }

            if (!organization.HasPremiumFeatures)
            {
                queueEntry.Complete();
                Log.Trace().Message("Skipping because organization does not have premium features.").Write();
                return(JobResult.Success);
            }

            if (stack.DisableNotifications || stack.IsHidden)
            {
                queueEntry.Complete();
                Log.Trace().Message("Skipping because stack notifications are disabled or stack is hidden.").Write();
                return(JobResult.Success);
            }

            Log.Trace().Message("Loaded stack: title={0}", stack.Title).Write();
            int totalOccurrences = stack.TotalOccurrences;

            // after the first 2 occurrences, don't send a notification for the same stack more than once every 30 minutes
            var lastTimeSent = _cacheClient.Get<DateTime>(String.Concat("notify:stack-throttle:", eventNotification.Event.StackId));

            if (totalOccurrences > 2 &&
                !eventNotification.IsRegression &&
                lastTimeSent != DateTime.MinValue &&
                lastTimeSent > DateTime.Now.AddMinutes(-30))
            {
                queueEntry.Complete();
                Log.Info().Message("Skipping message because of stack throttling: last sent={0} occurrences={1}", lastTimeSent, totalOccurrences).Write();
                return(JobResult.Success);
            }

            // don't send more than 10 notifications for a given project every 30 minutes
            var    projectTimeWindow = TimeSpan.FromMinutes(30);
            string cacheKey          = String.Concat("notify:project-throttle:", eventNotification.Event.ProjectId, "-", DateTime.UtcNow.Floor(projectTimeWindow).Ticks);
            long   notificationCount = _cacheClient.Increment(cacheKey, 1, projectTimeWindow);

            if (notificationCount > 10 && !eventNotification.IsRegression)
            {
                queueEntry.Complete();
                Log.Info().Project(eventNotification.Event.ProjectId).Message("Skipping message because of project throttling: count={0}", notificationCount).Write();
                return(JobResult.Success);
            }

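            // Evaluate each user's notification settings to decide whether to email them about this occurrence.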
            foreach (var kv in project.NotificationSettings)
            {
                var settings = kv.Value;
                Log.Trace().Message("Processing notification: user={0}", kv.Key).Write();

                var user = _userRepository.GetById(kv.Key);
                if (user == null || String.IsNullOrEmpty(user.EmailAddress))
                {
                    Log.Error().Message("Could not load user {0} or blank email address {1}.", kv.Key, user != null ? user.EmailAddress : "").Write();
                    continue;
                }

                if (!user.IsEmailAddressVerified)
                {
                    Log.Info().Message("User {0} with email address {1} has not been verified.", kv.Key, user != null ? user.EmailAddress : "").Write();
                    continue;
                }

                if (!user.EmailNotificationsEnabled)
                {
                    Log.Trace().Message("User {0} with email address {1} has email notifications disabled.", kv.Key, user != null ? user.EmailAddress : "").Write();
                    continue;
                }

                if (!user.OrganizationIds.Contains(project.OrganizationId))
                {
                    // TODO: Should this notification setting be deleted?
                    Log.Error().Message("Unauthorized user: project={0} user={1} organization={2} event={3}", project.Id, kv.Key,
                                        project.OrganizationId, eventNotification.Event.Id).Write();
                    continue;
                }

                Log.Trace().Message("Loaded user: email={0}", user.EmailAddress).Write();

                bool shouldReportOccurrence    = settings.Mode != NotificationMode.None;
                bool shouldReportCriticalError = settings.ReportCriticalErrors && eventNotification.IsCritical;
                bool shouldReportRegression    = settings.ReportRegressions && eventNotification.IsRegression;

                Log.Trace().Message("Settings: mode={0} critical={1} regression={2} 404={3} bots={4}",
                                    settings.Mode, settings.ReportCriticalErrors,
                                    settings.ReportRegressions, settings.Report404Errors,
                                    settings.ReportKnownBotErrors).Write();
                Log.Trace().Message("Should process: occurrence={0} critical={1} regression={2}",
                                    shouldReportOccurrence, shouldReportCriticalError,
                                    shouldReportRegression).Write();

                if (settings.Mode == NotificationMode.New && !eventNotification.IsNew)
                {
                    shouldReportOccurrence = false;
                    Log.Trace().Message("Skipping because message is not new.").Write();
                }

                // check for 404s if the user has elected to not report them
                if (shouldReportOccurrence && settings.Report404Errors == false && eventNotification.Event.IsNotFound())
                {
                    shouldReportOccurrence = false;
                    Log.Trace().Message("Skipping because message is 404.").Write();
                }

                var requestInfo = eventNotification.Event.GetRequestInfo();
                // check for known bots if the user has elected to not report them
                if (shouldReportOccurrence && settings.ReportKnownBotErrors == false &&
                    requestInfo != null && !String.IsNullOrEmpty(requestInfo.UserAgent))
                {
                    ClientInfo info = null;
                    try {
                        info = Parser.GetDefault().Parse(requestInfo.UserAgent);
                    } catch (Exception ex) {
                        Log.Warn().Project(eventNotification.Event.ProjectId).Message("Unable to parse user agent {0}. Exception: {1}",
                                                                                      requestInfo.UserAgent, ex.Message).Write();
                    }

                    if (info != null && info.Device.IsSpider)
                    {
                        shouldReportOccurrence = false;
                        Log.Trace().Message("Skipping because message is bot.").Write();
                    }
                }

                // stack being set to send all will override all other settings
                if (!shouldReportOccurrence && !shouldReportCriticalError && !shouldReportRegression)
                {
                    continue;
                }

                var model = new EventNotificationModel(eventNotification)
                {
                    ProjectName      = project.Name,
                    TotalOccurrences = totalOccurrences
                };

                // don't send notifications in non-production mode to email addresses that are not on the outbound email list.
                if (Settings.Current.WebsiteMode != WebsiteMode.Production &&
                    !Settings.Current.AllowedOutboundAddresses.Contains(v => user.EmailAddress.ToLowerInvariant().Contains(v)))
                {
                    Log.Trace().Message("Skipping because email is not on the outbound list and not in production mode.").Write();
                    continue;
                }

                Log.Trace().Message("Sending email to {0}...", user.EmailAddress).Write();
                _mailer.SendNotice(user.EmailAddress, model);
                emailsSent++;
                Log.Trace().Message("Done sending email.").Write();
            }

            // if we sent any emails, mark the last time a notification for this stack was sent.
            if (emailsSent > 0)
            {
                _cacheClient.Set(String.Concat("notify:stack-throttle:", eventNotification.Event.StackId), DateTime.Now, DateTime.Now.AddMinutes(15));
                Log.Info().Message("Notifications sent: event={0} stack={1} count={2}", eventNotification.Event.Id, eventNotification.Event.StackId, emailsSent).Write();
            }

            queueEntry.Complete();

            return(JobResult.Success);
        }
Ejemplo n.º 18
0
        protected override async Task <JobResult> ProcessQueueEntryAsync(JobQueueEntryContext <EventPost> context)
        {
            var           queueEntry    = context.QueueEntry;
            EventPostInfo eventPostInfo = await _storage.GetEventPostAndSetActiveAsync(queueEntry.Value.FilePath, context.CancellationToken).AnyContext();

            if (eventPostInfo == null)
            {
                await queueEntry.AbandonAsync().AnyContext();

                await _storage.SetNotActiveAsync(queueEntry.Value.FilePath).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to retrieve post data '{queueEntry.Value.FilePath}'."));
            }

            bool isInternalProject = eventPostInfo.ProjectId == Settings.Current.InternalProjectId;

            Logger.Info().Message("Processing post: id={0} path={1} project={2} ip={3} v={4} agent={5}", queueEntry.Id, queueEntry.Value.FilePath, eventPostInfo.ProjectId, eventPostInfo.IpAddress, eventPostInfo.ApiVersion, eventPostInfo.UserAgent).WriteIf(!isInternalProject);

            List <PersistentEvent> events = null;

            try {
                _metricsClient.Time(() => {
                    events = ParseEventPost(eventPostInfo);
                    Logger.Info().Message("Parsed {0} events for post: id={1}", events.Count, queueEntry.Id).WriteIf(!isInternalProject);
                }, MetricNames.PostsParsingTime);
                await _metricsClient.CounterAsync(MetricNames.PostsParsed).AnyContext();

                await _metricsClient.GaugeAsync(MetricNames.PostsEventCount, events.Count).AnyContext();
            } catch (Exception ex) {
                await queueEntry.AbandonAsync().AnyContext();

                await _metricsClient.CounterAsync(MetricNames.PostsParseErrors).AnyContext();

                await _storage.SetNotActiveAsync(queueEntry.Value.FilePath).AnyContext();

                Logger.Error().Exception(ex).Message("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message).Write();
                return(JobResult.FromException(ex, $"An error occurred while processing the EventPost '{queueEntry.Id}': {ex.Message}"));
            }

            if (!events.Any() || context.CancellationToken.IsCancellationRequested)
            {
                await queueEntry.AbandonAsync().AnyContext();

                await _storage.SetNotActiveAsync(queueEntry.Value.FilePath).AnyContext();

                return(!events.Any() ? JobResult.Success : JobResult.Cancelled);
            }

            int  eventsToProcess = events.Count;
            bool isSingleEvent   = events.Count == 1;

            if (!isSingleEvent)
            {
                var project = await _projectRepository.GetByIdAsync(eventPostInfo.ProjectId, true).AnyContext();

                if (project == null)
                {
                    // NOTE: This could archive the data for a project that no longer exists.
                    Logger.Error().Project(eventPostInfo.ProjectId).Message($"Unable to process EventPost \"{queueEntry.Value.FilePath}\": Unable to load project: {eventPostInfo.ProjectId}").Write();
                    await CompleteEntryAsync(queueEntry, eventPostInfo, DateTime.UtcNow).AnyContext();

                    return(JobResult.Success);
                }

                // Don't process all the events if it will put the account over its limits.
                eventsToProcess = await _organizationRepository.GetRemainingEventLimitAsync(project.OrganizationId).AnyContext();

                // Add 1 because we already counted 1 against their limit when we received the event post.
                if (eventsToProcess < Int32.MaxValue)
                {
                    eventsToProcess += 1;
                }

                // Increment by count - 1 since we already incremented it by 1 in the OverageHandler.
                await _organizationRepository.IncrementUsageAsync(project.OrganizationId, false, events.Count - 1).AnyContext();
            }

            var errorCount = 0;
            var created    = DateTime.UtcNow;

            try {
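                // Stamp every event in this post with the same creation time, then run only the allowed number of events through the pipeline.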
                events.ForEach(e => e.CreatedUtc = created);
                var results = await _eventPipeline.RunAsync(events.Take(eventsToProcess).ToList()).AnyContext();

                Logger.Info().Message("Ran {0} events through the pipeline: id={1} project={2} success={3} error={4}", results.Count, queueEntry.Id, eventPostInfo.ProjectId, results.Count(r => r.IsProcessed), results.Count(r => r.HasError)).WriteIf(!isInternalProject);
                foreach (var eventContext in results)
                {
                    if (eventContext.IsCancelled)
                    {
                        continue;
                    }

                    if (!eventContext.HasError)
                    {
                        continue;
                    }

                    Logger.Error().Exception(eventContext.Exception).Project(eventPostInfo.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, eventContext.ErrorMessage).Write();
                    if (eventContext.Exception is ValidationException)
                    {
                        continue;
                    }

                    errorCount++;

                    if (!isSingleEvent)
                    {
                        // Put this single event back into the queue so we can retry it separately.
                        await _queue.EnqueueAsync(new EventPostInfo {
                            ApiVersion = eventPostInfo.ApiVersion,
                            Data       = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(eventContext.Event)),
                            IpAddress  = eventPostInfo.IpAddress,
                            MediaType  = eventPostInfo.MediaType,
                            CharSet    = eventPostInfo.CharSet,
                            ProjectId  = eventPostInfo.ProjectId,
                            UserAgent  = eventPostInfo.UserAgent
                        }, _storage, false, context.CancellationToken).AnyContext();
                    }
                }
            } catch (Exception ex) {
                Logger.Error().Exception(ex).Project(eventPostInfo.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, ex.Message).Write();
                if (ex is ArgumentException || ex is DocumentNotFoundException)
                {
                    await queueEntry.CompleteAsync().AnyContext();
                }
                else
                {
                    errorCount++;
                }
            }

            if (isSingleEvent && errorCount > 0)
            {
                await queueEntry.AbandonAsync().AnyContext();

                await _storage.SetNotActiveAsync(queueEntry.Value.FilePath).AnyContext();
            }
            else
            {
                await CompleteEntryAsync(queueEntry, eventPostInfo, created).AnyContext();
            }

            return(JobResult.Success);
        }
Ejemplo n.º 19
0
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <EventNotificationWorkItem> context)
        {
            var wi = context.QueueEntry.Value;
            var ev = await _eventRepository.GetByIdAsync(wi.EventId).AnyContext();

            if (ev == null)
            {
                return(JobResult.FailedWithMessage($"Could not load event: {wi.EventId}"));
            }

            bool shouldLog  = ev.ProjectId != Settings.Current.InternalProjectId;
            int  emailsSent = 0;

            _logger.Trace().Message(() => $"Process notification: project={ev.ProjectId} event={ev.Id} stack={ev.StackId}").WriteIf(shouldLog);

            var project = await _projectRepository.GetByIdAsync(ev.ProjectId, o => o.Cache()).AnyContext();

            if (project == null)
            {
                return(JobResult.FailedWithMessage($"Could not load project: {ev.ProjectId}."));
            }
            _logger.Trace().Message(() => $"Loaded project: name={project.Name}").WriteIf(shouldLog);

            if (context.CancellationToken.IsCancellationRequested)
            {
                return(JobResult.Cancelled);
            }

            // after the first 2 occurrences, don't send a notification for the same stack more than once every 30 minutes
            var lastTimeSentUtc = await _cache.GetAsync <DateTime>(String.Concat("notify:stack-throttle:", ev.StackId), DateTime.MinValue).AnyContext();

            if (wi.TotalOccurrences > 2 &&
                !wi.IsRegression &&
                lastTimeSentUtc != DateTime.MinValue &&
                lastTimeSentUtc > SystemClock.UtcNow.AddMinutes(-30))
            {
                _logger.Info().Message("Skipping message because of stack throttling: last sent={0} occurrences={1}", lastTimeSentUtc, wi.TotalOccurrences).WriteIf(shouldLog);
                return(JobResult.Success);
            }

            // don't send more than 10 notifications for a given project every 30 minutes
            var    projectTimeWindow = TimeSpan.FromMinutes(30);
            string cacheKey          = String.Concat("notify:project-throttle:", ev.ProjectId, "-", SystemClock.UtcNow.Floor(projectTimeWindow).Ticks);
            double notificationCount = await _cache.IncrementAsync(cacheKey, 1, projectTimeWindow).AnyContext();

            if (notificationCount > 10 && !wi.IsRegression)
            {
                _logger.Info().Project(ev.ProjectId).Message("Skipping message because of project throttling: count={0}", notificationCount).WriteIf(shouldLog);
                return(JobResult.Success);
            }

            foreach (var kv in project.NotificationSettings)
            {
                var settings = kv.Value;
                _logger.Trace().Message(() => $"Processing notification: user={kv.Key}").WriteIf(shouldLog);

                var user = await _userRepository.GetByIdAsync(kv.Key).AnyContext();

                if (String.IsNullOrEmpty(user?.EmailAddress))
                {
                    _logger.Error("Could not load user {0} or blank email address {1}.", kv.Key, user?.EmailAddress ?? "");
                    continue;
                }

                if (!user.IsEmailAddressVerified)
                {
                    _logger.Info().Message("User {0} with email address {1} has not been verified.", user.Id, user.EmailAddress).WriteIf(shouldLog);
                    continue;
                }

                if (!user.EmailNotificationsEnabled)
                {
                    _logger.Info().Message("User {0} with email address {1} has email notifications disabled.", user.Id, user.EmailAddress).WriteIf(shouldLog);
                    continue;
                }

                if (!user.OrganizationIds.Contains(project.OrganizationId))
                {
                    _logger.Error().Message("Unauthorized user: project={0} user={1} organization={2} event={3}", project.Id, kv.Key, project.OrganizationId, ev.Id).Write();
                    continue;
                }

                _logger.Trace().Message(() => $"Loaded user: email={user.EmailAddress}").WriteIf(shouldLog);

                bool isCritical                = ev.IsCritical();
                bool shouldReportNewError      = settings.ReportNewErrors && wi.IsNew && ev.IsError();
                bool shouldReportCriticalError = settings.ReportCriticalErrors && isCritical && ev.IsError();
                bool shouldReportRegression    = settings.ReportEventRegressions && wi.IsRegression;
                bool shouldReportNewEvent      = settings.ReportNewEvents && wi.IsNew;
                bool shouldReportCriticalEvent = settings.ReportCriticalEvents && isCritical;
                bool shouldReport              = shouldReportNewError || shouldReportCriticalError || shouldReportRegression || shouldReportNewEvent || shouldReportCriticalEvent;

                _logger.Trace().Message(() => $"Settings: newerror={settings.ReportNewErrors} criticalerror={settings.ReportCriticalErrors} regression={settings.ReportEventRegressions} new={settings.ReportNewEvents} critical={settings.ReportCriticalEvents}").WriteIf(shouldLog);
                _logger.Trace().Message(() => $"Should process: newerror={shouldReportNewError} criticalerror={shouldReportCriticalError} regression={shouldReportRegression} new={shouldReportNewEvent} critical={shouldReportCriticalEvent}").WriteIf(shouldLog);

                var request = ev.GetRequestInfo();
                // check for known bots if the user has elected to not report them
                if (shouldReport && !String.IsNullOrEmpty(request?.UserAgent))
                {
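                    // Combine the project's configured bot user-agent patterns with parsed device info so notifications triggered by crawlers can be skipped.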
                    var botPatterns = project.Configuration.Settings.ContainsKey(SettingsDictionary.KnownKeys.UserAgentBotPatterns)
                        ? project.Configuration.Settings.GetStringCollection(SettingsDictionary.KnownKeys.UserAgentBotPatterns).ToList()
                        : new List <string>();

                    var info = await _parser.ParseAsync(request.UserAgent, ev.ProjectId).AnyContext();

                    if (info != null && info.Device.IsSpider || request.UserAgent.AnyWildcardMatches(botPatterns))
                    {
                        shouldReport = false;
                        _logger.Info().Message("Skipping because event is from a bot \"{0}\".", request.UserAgent).WriteIf(shouldLog);
                    }
                }

                if (!shouldReport)
                {
                    continue;
                }

                // don't send notifications in non-production mode to email addresses that are not on the outbound email list.
                if (Settings.Current.WebsiteMode != WebsiteMode.Production &&
                    !Settings.Current.AllowedOutboundAddresses.Contains(v => user.EmailAddress.ToLowerInvariant().Contains(v)))
                {
                    _logger.Info().Message("Skipping because email is not on the outbound list and not in production mode.").WriteIf(shouldLog);
                    continue;
                }

                _logger.Trace("Sending email to {0}...", user.EmailAddress);
                await _mailer.SendEventNoticeAsync(user, ev, project, wi.IsNew, wi.IsRegression, wi.TotalOccurrences).AnyContext();

                emailsSent++;
                _logger.Trace().Message(() => "Done sending email.").WriteIf(shouldLog);
            }

            // if we sent any emails, mark the last time a notification for this stack was sent.
            if (emailsSent > 0)
            {
                await _cache.SetAsync(String.Concat("notify:stack-throttle:", ev.StackId), SystemClock.UtcNow, SystemClock.UtcNow.AddMinutes(15)).AnyContext();

                _logger.Info().Message("Notifications sent: event={0} stack={1} count={2}", ev.Id, ev.StackId, emailsSent).WriteIf(shouldLog);
            }

            return(JobResult.Success);
        }
Ejemplo n.º 20
0
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <EventPost> context)
        {
            var queueEntry    = context.QueueEntry;
            var eventPostInfo = await _storage.GetEventPostAndSetActiveAsync(queueEntry.Value.FilePath, _logger, context.CancellationToken).AnyContext();

            if (eventPostInfo == null)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to retrieve post data '{queueEntry.Value.FilePath}'."));
            }

            bool isInternalProject = eventPostInfo.ProjectId == Settings.Current.InternalProjectId;

            _logger.Info().Message("Processing post: id={0} path={1} project={2} ip={3} v={4} agent={5}", queueEntry.Id, queueEntry.Value.FilePath, eventPostInfo.ProjectId, eventPostInfo.IpAddress, eventPostInfo.ApiVersion, eventPostInfo.UserAgent).WriteIf(!isInternalProject);

            var project = await _projectRepository.GetByIdAsync(eventPostInfo.ProjectId, true).AnyContext();

            if (project == null)
            {
                _logger.Error().Project(eventPostInfo.ProjectId).Message("Unable to process EventPost \"{0}\": Unable to load project: {1}", queueEntry.Value.FilePath, eventPostInfo.ProjectId).WriteIf(!isInternalProject);
                await CompleteEntryAsync(queueEntry, eventPostInfo, DateTime.UtcNow).AnyContext();

                return(JobResult.Success);
            }

            var createdUtc = DateTime.UtcNow;
            var events     = await ParseEventPostAsync(eventPostInfo, createdUtc, queueEntry.Id, isInternalProject).AnyContext();

            if (events == null || !events.Any())
            {
                await CompleteEntryAsync(queueEntry, eventPostInfo, createdUtc).AnyContext();

                return(JobResult.Success);
            }

            if (context.CancellationToken.IsCancellationRequested)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();

                return(JobResult.Cancelled);
            }

            bool isSingleEvent = events.Count == 1;

            if (!isSingleEvent)
            {
                // Don't process all the events if it will put the account over its limits.
                int eventsToProcess = await _organizationRepository.GetRemainingEventLimitAsync(project.OrganizationId).AnyContext();

                // Add 1 because we already counted 1 against their limit when we received the event post.
                if (eventsToProcess < Int32.MaxValue)
                {
                    eventsToProcess += 1;
                }

                // Discard any events over their limit.
                events = events.Take(eventsToProcess).ToList();

                // Increment by count - 1 since we already incremented it by 1 in the OverageHandler.
                await _organizationRepository.IncrementUsageAsync(project.OrganizationId, false, events.Count - 1, applyHourlyLimit : false).AnyContext();
            }

            int errorCount    = 0;
            var eventsToRetry = new List <PersistentEvent>();

            try {
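                // Run the events through the pipeline and collect any non-validation failures so they can be re-queued individually.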
                var results = await _eventPipeline.RunAsync(events, eventPostInfo).AnyContext();

                _logger.Info().Message(() => $"Ran {results.Count} events through the pipeline: id={queueEntry.Id} project={eventPostInfo.ProjectId} success={results.Count(r => r.IsProcessed)} error={results.Count(r => r.HasError)}").WriteIf(!isInternalProject);
                foreach (var eventContext in results)
                {
                    if (eventContext.IsCancelled)
                    {
                        continue;
                    }

                    if (!eventContext.HasError)
                    {
                        continue;
                    }

                    _logger.Error().Exception(eventContext.Exception).Project(eventPostInfo.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, eventContext.ErrorMessage).Write();

                    if (eventContext.Exception is ValidationException)
                    {
                        continue;
                    }

                    errorCount++;
                    if (!isSingleEvent)
                    {
                        // Put this single event back into the queue so we can retry it separately.
                        eventsToRetry.Add(eventContext.Event);
                    }
                }
            } catch (Exception ex) {
                _logger.Error().Exception(ex).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, ex.Message).Project(eventPostInfo.ProjectId).Write();
                if (ex is ArgumentException || ex is DocumentNotFoundException)
                {
                    await CompleteEntryAsync(queueEntry, eventPostInfo, createdUtc).AnyContext();

                    return(JobResult.Success);
                }

                errorCount++;
                if (!isSingleEvent)
                {
                    eventsToRetry.AddRange(events);
                }
            }

            foreach (var requeueEvent in eventsToRetry)
            {
                // Put this single event back into the queue so we can retry it separately.
                await _queue.EnqueueAsync(new EventPostInfo {
                    ApiVersion = eventPostInfo.ApiVersion,
                    Data       = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(requeueEvent)),
                    IpAddress  = eventPostInfo.IpAddress,
                    MediaType  = eventPostInfo.MediaType,
                    CharSet    = eventPostInfo.CharSet,
                    ProjectId  = eventPostInfo.ProjectId,
                    UserAgent  = eventPostInfo.UserAgent
                }, _storage, false, context.CancellationToken).AnyContext();
            }

            if (isSingleEvent && errorCount > 0)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();
            }
            else
            {
                await CompleteEntryAsync(queueEntry, eventPostInfo, createdUtc).AnyContext();
            }

            return(JobResult.Success);
        }
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <WebHookNotification> context)
        {
            var  body      = context.QueueEntry.Value;
            bool shouldLog = body.ProjectId != _appOptions.Value.InternalProjectId;

            using (_logger.BeginScope(new ExceptionlessState().Organization(body.OrganizationId).Project(body.ProjectId))) {
                if (shouldLog)
                {
                    _logger.LogTrace("Process web hook call: id={Id} project={1} url={Url}", context.QueueEntry.Id, body.ProjectId, body.Url);
                }

                if (!await IsEnabledAsync(body).AnyContext())
                {
                    _logger.LogInformation("Web hook cancelled: Web hook is disabled");
                    return(JobResult.Cancelled);
                }

                var  cache             = new ScopedCacheClient(_cacheClient, GetCacheKeyScope(body));
                long consecutiveErrors = await cache.GetAsync <long>(ConsecutiveErrorsCacheKey, 0).AnyContext();

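                // After more than ten consecutive failures, wait 15 minutes from the last attempt before calling the web hook again.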
                if (consecutiveErrors > 10)
                {
                    var lastAttempt = await cache.GetAsync(LastAttemptCacheKey, SystemClock.UtcNow).AnyContext();

                    var nextAttemptAllowedAt = lastAttempt.AddMinutes(15);
                    if (nextAttemptAllowedAt >= SystemClock.UtcNow)
                    {
                        _logger.LogInformation("Web hook cancelled due to {FailureCount} consecutive failed attempts. Will be allowed to try again at {NextAttempt}.", consecutiveErrors, nextAttemptAllowedAt);
                        return(JobResult.Cancelled);
                    }
                }

                bool successful = true;
                HttpResponseMessage response = null;
                try {
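                    // Give the outgoing POST a five-second timeout linked to the job's cancellation token so shutdown also aborts the call.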
                    using (var timeoutCancellationTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(5))) {
                        using (var postCancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(context.CancellationToken, timeoutCancellationTokenSource.Token)) {
                            response = await _client.PostAsJsonAsync(body.Url, body.Data.ToJson(Formatting.Indented, _jsonSerializerSettings), postCancellationTokenSource.Token).AnyContext();

                            if (!response.IsSuccessStatusCode)
                            {
                                successful = false;
                            }
                            else if (consecutiveErrors > 0)
                            {
                                await cache.RemoveAllAsync(_cacheKeys).AnyContext();
                            }
                        }
                    }
                } catch (OperationCanceledException ex) {
                    successful = false;
                    if (shouldLog)
                    {
                        _logger.LogError(ex, "Timeout calling web hook: status={Status} org={organization} project={project} url={Url}", response?.StatusCode, body.OrganizationId, body.ProjectId, body.Url);
                    }
                    return(JobResult.Cancelled);
                } catch (Exception ex) {
                    successful = false;
                    if (shouldLog)
                    {
                        _logger.LogError(ex, "Error calling web hook: status={Status} org={organization} project={project} url={Url}", response?.StatusCode, body.OrganizationId, body.ProjectId, body.Url);
                    }
                    return(JobResult.FromException(ex));
                } finally {
                    if (successful)
                    {
                        _logger.LogInformation("Web hook POST complete: status={Status} org={organization} project={project} url={Url}", response?.StatusCode, body.OrganizationId, body.ProjectId, body.Url);
                    }
                    else if (response != null && (response.StatusCode == HttpStatusCode.Unauthorized || response.StatusCode == HttpStatusCode.Forbidden || response.StatusCode == HttpStatusCode.Gone))
                    {
                        _logger.LogWarning("Disabling Web hook instance {WebHookId} due to status code: status={Status} org={organization} project={project} url={Url}", body.Type == WebHookType.Slack ? "Slack" : body.WebHookId, response.StatusCode, body.OrganizationId, body.ProjectId, body.Url);
                        await DisableIntegrationAsync(body).AnyContext();

                        await cache.RemoveAllAsync(_cacheKeys).AnyContext();
                    }
                    else
                    {
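                        // Record this failed attempt and track consecutive failures in the cache (entries expire after three days).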
                        var now = SystemClock.UtcNow;
                        await cache.SetAsync(LastAttemptCacheKey, now, TimeSpan.FromDays(3)).AnyContext();

                        consecutiveErrors = await cache.IncrementAsync(ConsecutiveErrorsCacheKey, TimeSpan.FromDays(3)).AnyContext();

                        DateTime firstAttempt;
                        if (consecutiveErrors == 1)
                        {
                            await cache.SetAsync(FirstAttemptCacheKey, now, TimeSpan.FromDays(3)).AnyContext();

                            firstAttempt = now;
                        }
                        else
                        {
                            firstAttempt = await cache.GetAsync(FirstAttemptCacheKey, now).AnyContext();
                        }

                        if (consecutiveErrors >= 10)
                        {
                            // don't retry any more
                            context.QueueEntry.MarkCompleted();

                            // disable if more than 10 consecutive errors over the course of multiple days
                            if (firstAttempt.IsBefore(now.SubtractDays(2)))
                            {
                                _logger.LogWarning("Disabling Web hook instance {WebHookId} due to too many consecutive failures.", body.Type == WebHookType.Slack ? "Slack" : body.WebHookId);
                                await DisableIntegrationAsync(body).AnyContext();

                                await cache.RemoveAllAsync(_cacheKeys).AnyContext();
                            }
                        }
                    }
                }
            }

            return(JobResult.Success);
        }
Ejemplo n.º 22
0
        protected async override Task <JobResult> RunInternalAsync(CancellationToken token)
        {
            QueueEntry <EventPost> queueEntry = null;

            try {
                queueEntry = _queue.Dequeue(TimeSpan.FromSeconds(1));
            } catch (Exception ex) {
                if (!(ex is TimeoutException))
                {
                    Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next EventPost: {0}", ex.Message).Write();
                    return(JobResult.FromException(ex));
                }
            }

            if (queueEntry == null)
            {
                return(JobResult.Success);
            }

            if (token.IsCancellationRequested)
            {
                queueEntry.Abandon();
                return(JobResult.Cancelled);
            }

            EventPostInfo eventPostInfo = _storage.GetEventPostAndSetActive(queueEntry.Value.FilePath);

            if (eventPostInfo == null)
            {
                queueEntry.Abandon();
                _storage.SetNotActive(queueEntry.Value.FilePath);
                return(JobResult.FailedWithMessage(String.Format("Unable to retrieve post data '{0}'.", queueEntry.Value.FilePath)));
            }

            bool isInternalProject = eventPostInfo.ProjectId == Settings.Current.InternalProjectId;

            _statsClient.Counter(MetricNames.PostsDequeued);
            Log.Info().Message("Processing post: id={0} path={1} project={2} ip={3} v={4} agent={5}", queueEntry.Id, queueEntry.Value.FilePath, eventPostInfo.ProjectId, eventPostInfo.IpAddress, eventPostInfo.ApiVersion, eventPostInfo.UserAgent).WriteIf(!isInternalProject);

            List <PersistentEvent> events = null;

            try {
                _statsClient.Time(() => {
                    events = ParseEventPost(eventPostInfo);
                    Log.Info().Message("Parsed {0} events for post: id={1}", events.Count, queueEntry.Id).WriteIf(!isInternalProject);
                }, MetricNames.PostsParsingTime);
                _statsClient.Counter(MetricNames.PostsParsed);
                _statsClient.Gauge(MetricNames.PostsEventCount, events.Count);
            } catch (Exception ex) {
                _statsClient.Counter(MetricNames.PostsParseErrors);
                queueEntry.Abandon();
                _storage.SetNotActive(queueEntry.Value.FilePath);

                Log.Error().Exception(ex).Message("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message).Write();
                return(JobResult.FromException(ex, String.Format("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message)));
            }

            if (token.IsCancellationRequested)
            {
                queueEntry.Abandon();
                return(JobResult.Cancelled);
            }

            if (events == null)
            {
                queueEntry.Abandon();
                _storage.SetNotActive(queueEntry.Value.FilePath);
                return(JobResult.Success);
            }

            int  eventsToProcess = events.Count;
            bool isSingleEvent   = events.Count == 1;

            if (!isSingleEvent)
            {
                var project = _projectRepository.GetById(eventPostInfo.ProjectId, true);
                // Don't process all the events if it will put the account over its limits.
                eventsToProcess = _organizationRepository.GetRemainingEventLimit(project.OrganizationId);

                // Add 1 because we already counted 1 against their limit when we received the event post.
                if (eventsToProcess < Int32.MaxValue)
                {
                    eventsToProcess += 1;
                }

                // Increment by count - 1 since we already incremented it by 1 in the OverageHandler.
                _organizationRepository.IncrementUsage(project.OrganizationId, false, events.Count - 1);
            }

            var errorCount = 0;
            var created    = DateTime.UtcNow;

            try {
                events.ForEach(e => e.CreatedUtc = created);
                var results = _eventPipeline.Run(events.Take(eventsToProcess).ToList());
                Log.Info().Message("Ran {0} events through the pipeline: id={1} project={2} success={3} error={4}", results.Count, queueEntry.Id, eventPostInfo.ProjectId, results.Count(r => r.IsProcessed), results.Count(r => r.HasError)).WriteIf(!isInternalProject);
                foreach (var eventContext in results)
                {
                    if (eventContext.IsCancelled)
                    {
                        continue;
                    }

                    if (!eventContext.HasError)
                    {
                        continue;
                    }

                    Log.Error().Exception(eventContext.Exception).Project(eventPostInfo.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, eventContext.ErrorMessage).Write();
                    if (eventContext.Exception is ValidationException)
                    {
                        continue;
                    }

                    errorCount++;

                    if (!isSingleEvent)
                    {
                        // Put this single event back into the queue so we can retry it separately.
                        _queue.Enqueue(new EventPostInfo {
                            ApiVersion = eventPostInfo.ApiVersion,
                            CharSet    = eventPostInfo.CharSet,
                            Data       = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(eventContext.Event)),
                            IpAddress  = eventPostInfo.IpAddress,
                            MediaType  = eventPostInfo.MediaType,
                            ProjectId  = eventPostInfo.ProjectId,
                            UserAgent  = eventPostInfo.UserAgent
                        }, _storage, false);
                    }
                }
            } catch (ArgumentException ex) {
                Log.Error().Exception(ex).Project(eventPostInfo.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, ex.Message).Write();
                queueEntry.Complete();
            } catch (Exception ex) {
                Log.Error().Exception(ex).Project(eventPostInfo.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, ex.Message).Write();
                errorCount++;
            }

            if (isSingleEvent && errorCount > 0)
            {
                queueEntry.Abandon();
                _storage.SetNotActive(queueEntry.Value.FilePath);
            }
            else
            {
                queueEntry.Complete();
                if (queueEntry.Value.ShouldArchive)
                {
                    _storage.CompleteEventPost(queueEntry.Value.FilePath, eventPostInfo.ProjectId, created, queueEntry.Value.ShouldArchive);
                }
                else
                {
                    _storage.DeleteFile(queueEntry.Value.FilePath);
                    _storage.SetNotActive(queueEntry.Value.FilePath);
                }
            }

            return(JobResult.Success);
        }
        protected override async Task <JobResult> ProcessQueueEntryAsync(JobQueueEntryContext <EventNotificationWorkItem> context)
        {
            var eventModel = await _eventRepository.GetByIdAsync(context.QueueEntry.Value.EventId).AnyContext();

            if (eventModel == null)
            {
                return(JobResult.FailedWithMessage($"Could not load event: {context.QueueEntry.Value.EventId}"));
            }

            var  eventNotification = new EventNotification(context.QueueEntry.Value, eventModel);
            bool shouldLog         = eventNotification.Event.ProjectId != Settings.Current.InternalProjectId;
            int  emailsSent        = 0;

            Logger.Trace().Message("Process notification: project={0} event={1} stack={2}", eventNotification.Event.ProjectId, eventNotification.Event.Id, eventNotification.Event.StackId).WriteIf(shouldLog);

            var project = await _projectRepository.GetByIdAsync(eventNotification.Event.ProjectId, true).AnyContext();

            if (project == null)
            {
                return(JobResult.FailedWithMessage($"Could not load project: {eventNotification.Event.ProjectId}."));
            }
            Logger.Trace().Message($"Loaded project: name={project.Name}").WriteIf(shouldLog);

            var organization = await _organizationRepository.GetByIdAsync(project.OrganizationId, true).AnyContext();

            if (organization == null)
            {
                return(JobResult.FailedWithMessage($"Could not load organization: {project.OrganizationId}"));
            }

            Logger.Trace().Message($"Loaded organization: {organization.Name}").WriteIf(shouldLog);

            var stack = await _stackRepository.GetByIdAsync(eventNotification.Event.StackId).AnyContext();

            if (stack == null)
            {
                return(JobResult.FailedWithMessage($"Could not load stack: {eventNotification.Event.StackId}"));
            }

            if (!organization.HasPremiumFeatures)
            {
                Logger.Info().Message("Skipping \"{0}\" because organization \"{1}\" does not have premium features.", eventNotification.Event.Id, eventNotification.Event.OrganizationId).WriteIf(shouldLog);
                return(JobResult.Success);
            }

            if (stack.DisableNotifications || stack.IsHidden)
            {
                Logger.Info().Message("Skipping \"{0}\" because stack \"{1}\" notifications are disabled or stack is hidden.", eventNotification.Event.Id, eventNotification.Event.StackId).WriteIf(shouldLog);
                return(JobResult.Success);
            }

            if (context.CancellationToken.IsCancellationRequested)
            {
                return(JobResult.Cancelled);
            }

            Logger.Trace().Message("Loaded stack: title={0}", stack.Title).WriteIf(shouldLog);
            int totalOccurrences = stack.TotalOccurrences;

            // after the first 2 occurrences, don't send a notification for the same stack more than once every 30 minutes
            var lastTimeSentUtc = await _cacheClient.GetAsync <DateTime>(String.Concat("notify:stack-throttle:", eventNotification.Event.StackId), DateTime.MinValue).AnyContext();

            if (totalOccurrences > 2 &&
                !eventNotification.IsRegression &&
                lastTimeSentUtc != DateTime.MinValue &&
                lastTimeSentUtc > DateTime.UtcNow.AddMinutes(-30))
            {
                Logger.Info().Message("Skipping message because of stack throttling: last sent={0} occurrences={1}", lastTimeSentUtc, totalOccurrences).WriteIf(shouldLog);
                return(JobResult.Success);
            }

            // don't send more than 10 notifications for a given project every 30 minutes
            var    projectTimeWindow = TimeSpan.FromMinutes(30);
            string cacheKey          = String.Concat("notify:project-throttle:", eventNotification.Event.ProjectId, "-", DateTime.UtcNow.Floor(projectTimeWindow).Ticks);
            long   notificationCount = await _cacheClient.IncrementAsync(cacheKey, 1, projectTimeWindow).AnyContext();

            if (notificationCount > 10 && !eventNotification.IsRegression)
            {
                Logger.Info().Project(eventNotification.Event.ProjectId).Message("Skipping message because of project throttling: count={0}", notificationCount).WriteIf(shouldLog);
                return(JobResult.Success);
            }

            if (context.CancellationToken.IsCancellationRequested)
            {
                return(JobResult.Cancelled);
            }

            foreach (var kv in project.NotificationSettings)
            {
                var settings = kv.Value;
                Logger.Trace().Message("Processing notification: user={0}", kv.Key).WriteIf(shouldLog);

                var user = await _userRepository.GetByIdAsync(kv.Key).AnyContext();

                if (String.IsNullOrEmpty(user?.EmailAddress))
                {
                    Logger.Error().Message("Could not load user {0} or blank email address {1}.", kv.Key, user != null ? user.EmailAddress : "").Write();
                    continue;
                }

                if (!user.IsEmailAddressVerified)
                {
                    Logger.Info().Message("User {0} with email address {1} has not been verified.", kv.Key, user != null ? user.EmailAddress : "").WriteIf(shouldLog);
                    continue;
                }

                if (!user.EmailNotificationsEnabled)
                {
                    Logger.Info().Message("User {0} with email address {1} has email notifications disabled.", kv.Key, user != null ? user.EmailAddress : "").WriteIf(shouldLog);
                    continue;
                }

                if (!user.OrganizationIds.Contains(project.OrganizationId))
                {
                    Logger.Error().Message("Unauthorized user: project={0} user={1} organization={2} event={3}", project.Id, kv.Key, project.OrganizationId, eventNotification.Event.Id).Write();
                    continue;
                }

                Logger.Trace().Message("Loaded user: email={0}", user.EmailAddress).WriteIf(shouldLog);

                bool shouldReportNewError      = settings.ReportNewErrors && eventNotification.IsNew && eventNotification.Event.IsError();
                bool shouldReportCriticalError = settings.ReportCriticalErrors && eventNotification.IsCritical && eventNotification.Event.IsError();
                bool shouldReportRegression    = settings.ReportEventRegressions && eventNotification.IsRegression;
                bool shouldReportNewEvent      = settings.ReportNewEvents && eventNotification.IsNew;
                bool shouldReportCriticalEvent = settings.ReportCriticalEvents && eventNotification.IsCritical;
                bool shouldReport = shouldReportNewError || shouldReportCriticalError || shouldReportRegression || shouldReportNewEvent || shouldReportCriticalEvent;

                Logger.Trace().Message("Settings: newerror={0} criticalerror={1} regression={2} new={3} critical={4}",
                                       settings.ReportNewErrors, settings.ReportCriticalErrors,
                                       settings.ReportEventRegressions, settings.ReportNewEvents, settings.ReportCriticalEvents).WriteIf(shouldLog);
                Logger.Trace().Message("Should process: newerror={0} criticalerror={1} regression={2} new={3} critical={4}",
                                       shouldReportNewError, shouldReportCriticalError,
                                       shouldReportRegression, shouldReportNewEvent, shouldReportCriticalEvent).WriteIf(shouldLog);

                var request = eventNotification.Event.GetRequestInfo();
                // check for known bots if the user has elected to not report them
                if (shouldReport && !String.IsNullOrEmpty(request?.UserAgent))
                {
                    var botPatterns = project.Configuration.Settings.ContainsKey(SettingsDictionary.KnownKeys.UserAgentBotPatterns)
                        ? project.Configuration.Settings.GetStringCollection(SettingsDictionary.KnownKeys.UserAgentBotPatterns).ToList()
                        : new List <string>();

                    var info = await _parser.ParseAsync(request.UserAgent, eventNotification.Event.ProjectId).AnyContext();

                    if (info != null && info.Device.IsSpider || request.UserAgent.AnyWildcardMatches(botPatterns))
                    {
                        shouldReport = false;
                        Logger.Info().Message("Skipping because event is from a bot \"{0}\".", request.UserAgent).WriteIf(shouldLog);
                    }
                }

                if (!shouldReport)
                {
                    continue;
                }

                var model = new EventNotificationModel(eventNotification)
                {
                    ProjectName      = project.Name,
                    TotalOccurrences = totalOccurrences
                };

                // don't send notifications in non-production mode to email addresses that are not on the outbound email list.
                if (Settings.Current.WebsiteMode != WebsiteMode.Production &&
                    !Settings.Current.AllowedOutboundAddresses.Contains(v => user.EmailAddress.ToLowerInvariant().Contains(v)))
                {
                    Logger.Info().Message("Skipping because email is not on the outbound list and not in production mode.").WriteIf(shouldLog);
                    continue;
                }

                Logger.Trace().Message("Sending email to {0}...", user.EmailAddress).Write();
                await _mailer.SendNoticeAsync(user.EmailAddress, model).AnyContext();

                emailsSent++;
                Logger.Trace().Message("Done sending email.").WriteIf(shouldLog);
            }

            // if we sent any emails, mark the last time a notification for this stack was sent.
            if (emailsSent > 0)
            {
                await _cacheClient.SetAsync(String.Concat("notify:stack-throttle:", eventNotification.Event.StackId), DateTime.UtcNow, DateTime.UtcNow.AddMinutes(15)).AnyContext();

                Logger.Info().Message("Notifications sent: event={0} stack={1} count={2}", eventNotification.Event.Id, eventNotification.Event.StackId, emailsSent).WriteIf(shouldLog);
            }

            return(JobResult.Success);
        }
Ejemplo n.º 24
0
        protected async override Task <JobResult> RunInternalAsync(CancellationToken token)
        {
            Log.Info().Message("Process events job starting").Write();

            QueueEntry <EventPostFileInfo> queueEntry = null;

            try {
                queueEntry = _queue.Dequeue(TimeSpan.FromSeconds(1));
            } catch (Exception ex) {
                if (!(ex is TimeoutException))
                {
                    Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next EventPost: {0}", ex.Message).Write();
                    return(JobResult.FromException(ex));
                }
            }
            if (queueEntry == null)
            {
                return(JobResult.Success);
            }

            EventPost eventPost = _storage.GetEventPostAndSetActive(queueEntry.Value.FilePath);

            if (eventPost == null)
            {
                queueEntry.Abandon();
                _storage.SetNotActive(queueEntry.Value.FilePath);
                return(JobResult.FailedWithMessage(String.Format("Unable to retrieve post data '{0}'.", queueEntry.Value.FilePath)));
            }

            _statsClient.Counter(StatNames.PostsDequeued);
            Log.Info().Message("Processing EventPost '{0}'.", queueEntry.Id).Write();

            List <PersistentEvent> events = null;

            try {
                _statsClient.Time(() => {
                    events = ParseEventPost(eventPost);
                }, StatNames.PostsParsingTime);
                _statsClient.Counter(StatNames.PostsParsed);
                _statsClient.Gauge(StatNames.PostsBatchSize, events.Count);
            } catch (Exception ex) {
                _statsClient.Counter(StatNames.PostsParseErrors);
                queueEntry.Abandon();
                _storage.SetNotActive(queueEntry.Value.FilePath);

                // TODO: Add the EventPost to the logged exception.
                Log.Error().Exception(ex).Message("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message).Write();
                return(JobResult.FromException(ex, String.Format("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message)));
            }

            if (events == null)
            {
                queueEntry.Abandon();
                _storage.SetNotActive(queueEntry.Value.FilePath);
                return(JobResult.Success);
            }

            int  eventsToProcess = events.Count;
            bool isSingleEvent   = events.Count == 1;

            if (!isSingleEvent)
            {
                var project = _projectRepository.GetById(eventPost.ProjectId, true);
                // Don't process all the events if it will put the account over its limits.
                eventsToProcess = _organizationRepository.GetRemainingEventLimit(project.OrganizationId);

                // Add 1 because we already counted 1 against their limit when we received the event post.
                if (eventsToProcess < Int32.MaxValue)
                {
                    eventsToProcess += 1;
                }

                // Increment by count - 1 since we already incremented it by 1 in the OverageHandler.
                _organizationRepository.IncrementUsage(project.OrganizationId, events.Count - 1);
            }
            int      errorCount = 0;
            DateTime created    = DateTime.UtcNow;

            foreach (PersistentEvent ev in events.Take(eventsToProcess))
            {
                try {
                    ev.CreatedUtc = created;
                    _eventPipeline.Run(ev);
                } catch (ValidationException ex) {
                    Log.Error().Exception(ex).Project(eventPost.ProjectId).Message("Event validation error occurred: {0}", ex.Message).Write();
                } catch (Exception ex) {
                    Log.Error().Exception(ex).Project(eventPost.ProjectId).Message("Error while processing event: {0}", ex.Message).Write();

                    if (!isSingleEvent)
                    {
                        // Put this single event back into the queue so we can retry it separately.
                        _queue.Enqueue(new EventPost {
                            ApiVersion      = eventPost.ApiVersion,
                            CharSet         = eventPost.CharSet,
                            ContentEncoding = "application/json",
                            Data            = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(ev)),
                            IpAddress       = eventPost.IpAddress,
                            MediaType       = eventPost.MediaType,
                            ProjectId       = eventPost.ProjectId,
                            UserAgent       = eventPost.UserAgent
                        }, _storage, false);
                    }

                    errorCount++;
                }
            }

            if (isSingleEvent && errorCount > 0)
            {
                queueEntry.Abandon();
                _storage.SetNotActive(queueEntry.Value.FilePath);
            }
            else
            {
                queueEntry.Complete();
                if (queueEntry.Value.ShouldArchive)
                {
                    _storage.CompleteEventPost(queueEntry.Value.FilePath, eventPost.ProjectId, created, queueEntry.Value.ShouldArchive);
                }
                else
                {
                    _storage.DeleteFile(queueEntry.Value.FilePath);
                    _storage.SetNotActive(queueEntry.Value.FilePath);
                }
            }

            return(JobResult.Success);
        }
        protected async override Task <JobResult> RunInternalAsync()
        {
            Log.Info().Message("Process events job starting").Write();
            int totalEventsProcessed = 0;
            int totalEventsToProcess = Context.GetWorkItemLimit();

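            // A work item limit of -1 means no limit, so keep dequeuing until cancellation is requested.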
            while (!CancelPending && (totalEventsToProcess == -1 || totalEventsProcessed < totalEventsToProcess))
            {
                QueueEntry <EventPost> queueEntry = null;
                try {
                    queueEntry = await _queue.DequeueAsync();
                } catch (Exception ex) {
                    if (!(ex is TimeoutException))
                    {
                        Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next EventPost: {0}", ex.Message).Write();
                        return(JobResult.FromException(ex));
                    }
                }
                if (queueEntry == null)
                {
                    continue;
                }

                _statsClient.Counter(StatNames.PostsDequeued);
                Log.Info().Message("Processing EventPost '{0}'.", queueEntry.Id).Write();

                List <PersistentEvent> events = null;
                try {
                    _statsClient.Time(() => {
                        events = ParseEventPost(queueEntry.Value);
                    }, StatNames.PostsParsingTime);
                    _statsClient.Counter(StatNames.PostsParsed);
                    _statsClient.Gauge(StatNames.PostsBatchSize, events.Count);
                } catch (Exception ex) {
                    _statsClient.Counter(StatNames.PostsParseErrors);
                    queueEntry.AbandonAsync().Wait();

                    // TODO: Add the EventPost to the logged exception.
                    Log.Error().Exception(ex).Message("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message).Write();
                    continue;
                }

                if (events == null)
                {
                    queueEntry.AbandonAsync().Wait();
                    continue;
                }

                int  eventsToProcess = events.Count;
                bool isSingleEvent   = events.Count == 1;
                if (!isSingleEvent)
                {
                    var project = _projectRepository.GetById(queueEntry.Value.ProjectId, true);
                    // Don't process all the events if it will put the account over its limits.
                    eventsToProcess = _organizationRepository.GetRemainingEventLimit(project.OrganizationId);

                    // Add 1 because we already counted 1 against their limit when we received the event post.
                    if (eventsToProcess < Int32.MaxValue)
                    {
                        eventsToProcess += 1;
                    }

                    // Increment by count - 1 since we already incremented it by 1 in the OverageHandler.
                    _organizationRepository.IncrementUsage(project.OrganizationId, events.Count - 1);
                }
                int errorCount = 0;
                foreach (PersistentEvent ev in events.Take(eventsToProcess))
                {
                    try {
                        _eventPipeline.Run(ev);
                        totalEventsProcessed++;
                        if (totalEventsToProcess > 0 && totalEventsProcessed >= totalEventsToProcess)
                        {
                            break;
                        }
                    } catch (ValidationException ex) {
                        Log.Error().Exception(ex).Project(queueEntry.Value.ProjectId).Message("Event validation error occurred: {0}", ex.Message).Write();
                    } catch (Exception ex) {
                        Log.Error().Exception(ex).Project(queueEntry.Value.ProjectId).Message("Error while processing event: {0}", ex.Message).Write();

                        if (!isSingleEvent)
                        {
                            // Put this single event back into the queue so we can retry it separately.
                            _queue.EnqueueAsync(new EventPost {
                                Data            = Encoding.UTF8.GetBytes(ev.ToJson()).Compress(),
                                ContentEncoding = "gzip",
                                ProjectId       = ev.ProjectId,
                                CharSet         = "utf-8",
                                MediaType       = "application/json",
                            }).Wait();
                        }

                        errorCount++;
                    }
                }

                if (isSingleEvent && errorCount > 0)
                {
                    queueEntry.AbandonAsync().Wait();
                }
                else
                {
                    queueEntry.CompleteAsync().Wait();
                }
            }

            return(JobResult.Success);
        }
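
When a single event from a batch fails, the job above serializes it back to JSON, gzip-compresses it via the Compress() extension, and enqueues it with ContentEncoding = "gzip" so it can be retried on its own. Below is a minimal, self-contained sketch of equivalent gzip helpers; the class and method names are illustrative and are not the actual Exceptionless extension methods.

using System;
using System.IO;
using System.IO.Compression;
using System.Text;

public static class GzipSketch {
    // Compresses a byte array with gzip, equivalent in spirit to the Compress()
    // extension used when requeueing a failed event.
    public static byte[] Compress(byte[] data) {
        using (var output = new MemoryStream()) {
            using (var gzip = new GZipStream(output, CompressionMode.Compress))
                gzip.Write(data, 0, data.Length);
            return output.ToArray();
        }
    }

    // Reverses Compress(); mirrors what decompressing a post with
    // ContentEncoding = "gzip" is assumed to do on the dequeue side.
    public static byte[] Decompress(byte[] data) {
        using (var input = new MemoryStream(data))
        using (var gzip = new GZipStream(input, CompressionMode.Decompress))
        using (var output = new MemoryStream()) {
            gzip.CopyTo(output);
            return output.ToArray();
        }
    }

    public static void Main() {
        byte[] json = Encoding.UTF8.GetBytes("{\"message\":\"test event\"}");
        byte[] compressed = Compress(json);
        byte[] roundTripped = Decompress(compressed);
        Console.WriteLine(Encoding.UTF8.GetString(roundTripped)); // {"message":"test event"}
    }
}

Compressing the payload before requeueing keeps large batched posts small in the queue and in storage, at the cost of the decompression step performed when the post is dequeued again.
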
Ejemplo n.º 26
0
        protected override async Task <JobResult> RunInternalAsync(JobContext context)
        {
            _lastRun = SystemClock.UtcNow;

            if (!_emailOptions.EnableDailySummary || _mailer == null)
            {
                return(JobResult.SuccessWithMessage("Summary notifications are disabled."));
            }

            var results = await _projectRepository.GetByNextSummaryNotificationOffsetAsync(9).AnyContext();

            while (results.Documents.Count > 0 && !context.CancellationToken.IsCancellationRequested)
            {
                _logger.LogTrace("Got {Count} projects to process. ", results.Documents.Count);

                var projectsToBulkUpdate      = new List <Project>(results.Documents.Count);
                var processSummariesNewerThan = SystemClock.UtcNow.Date.SubtractDays(2);
                foreach (var project in results.Documents)
                {
                    using (_logger.BeginScope(new ExceptionlessState().Organization(project.OrganizationId).Project(project.Id))) {
                        var utcStartTime = new DateTime(project.NextSummaryEndOfDayTicks - TimeSpan.TicksPerDay);
                        if (utcStartTime < processSummariesNewerThan)
                        {
                            _logger.LogInformation("Skipping daily summary older than two days for project: {Name}", project.Name);
                            projectsToBulkUpdate.Add(project);
                            continue;
                        }

                        var notification = new SummaryNotification {
                            Id           = project.Id,
                            UtcStartTime = utcStartTime,
                            UtcEndTime   = new DateTime(project.NextSummaryEndOfDayTicks - TimeSpan.TicksPerSecond)
                        };

                        bool summarySent = await SendSummaryNotificationAsync(project, notification).AnyContext();

                        if (summarySent)
                        {
                            await _projectRepository.IncrementNextSummaryEndOfDayTicksAsync(new[] { project }).AnyContext();

                            // Sleep so we are not hammering the backend as we just generated a report.
                            await SystemClock.SleepAsync(TimeSpan.FromSeconds(2.5)).AnyContext();
                        }
                        else
                        {
                            projectsToBulkUpdate.Add(project);
                        }
                    }
                }

                if (projectsToBulkUpdate.Count > 0)
                {
                    await _projectRepository.IncrementNextSummaryEndOfDayTicksAsync(projectsToBulkUpdate).AnyContext();

                    // Sleep so we are not hammering the backend.
                    await SystemClock.SleepAsync(TimeSpan.FromSeconds(1)).AnyContext();
                }

                if (context.CancellationToken.IsCancellationRequested || !await results.NextPageAsync().AnyContext())
                {
                    break;
                }

                if (results.Documents.Count > 0)
                {
                    await context.RenewLockAsync().AnyContext();

                    _lastRun = SystemClock.UtcNow;
                }
            }

            return(JobResult.SuccessWithMessage("Successfully sent summary notifications."));
        }
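
The snippet above derives the reporting window from NextSummaryEndOfDayTicks: the start is one day of ticks earlier and the end is one second earlier, so each summary covers a full day ending one second before the project's end of day. A worked sketch with an illustrative tick value:

using System;

class SummaryWindowSketch {
    static void Main() {
        // Assume the project's reporting day ends at 2024-01-02 00:00:00 UTC (hypothetical value).
        long nextSummaryEndOfDayTicks = new DateTime(2024, 1, 2, 0, 0, 0, DateTimeKind.Utc).Ticks;

        // Same arithmetic as the job: start one day earlier, end one second earlier.
        var utcStartTime = new DateTime(nextSummaryEndOfDayTicks - TimeSpan.TicksPerDay);    // 2024-01-01 00:00:00
        var utcEndTime   = new DateTime(nextSummaryEndOfDayTicks - TimeSpan.TicksPerSecond); // 2024-01-01 23:59:59

        Console.WriteLine($"{utcStartTime:yyyy-MM-dd HH:mm:ss} -> {utcEndTime:yyyy-MM-dd HH:mm:ss}");
        // 2024-01-01 00:00:00 -> 2024-01-01 23:59:59
    }
}
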
Ejemplo n.º 27
0
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <EventPost> context)
        {
            var queueEntry = context.QueueEntry;
            var ep         = await _storage.GetEventPostAndSetActiveAsync(queueEntry.Value.FilePath, _logger, context.CancellationToken).AnyContext();

            if (ep == null)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to retrieve post data '{queueEntry.Value.FilePath}'."));
            }

            bool isInternalProject = ep.ProjectId == Settings.Current.InternalProjectId;

            _logger.Info()
            .Message("Processing post: id={0} path={1} project={2} ip={3} v={4} agent={5}", queueEntry.Id, queueEntry.Value.FilePath, ep.ProjectId, ep.IpAddress, ep.ApiVersion, ep.UserAgent)
            .Property("ApiVersion", ep.ApiVersion)
            .Property("IpAddress", ep.IpAddress)
            .Property("Client", ep.UserAgent)
            .Property("Project", ep.ProjectId)
            .Property("@stack", "event-posted")
            .WriteIf(!isInternalProject);

            var project = await _projectRepository.GetByIdAsync(ep.ProjectId, true).AnyContext();

            if (project == null)
            {
                _logger.Error().Project(ep.ProjectId).Message("Unable to process EventPost \"{0}\": Unable to load project: {1}", queueEntry.Value.FilePath, ep.ProjectId).WriteIf(!isInternalProject);
                await CompleteEntryAsync(queueEntry, ep, SystemClock.UtcNow).AnyContext();

                return(JobResult.Success);
            }

            long maxEventPostSize = Settings.Current.MaximumEventPostSize;

            byte[] uncompressedData = ep.Data;
            if (!String.IsNullOrEmpty(ep.ContentEncoding))
            {
                try {
                    // Increase the absolute max since the content was compressed and might be a batch of events.
                    maxEventPostSize *= 10;
                    uncompressedData  = await uncompressedData.DecompressAsync(ep.ContentEncoding).AnyContext();
                } catch (Exception ex) {
                    await CompleteEntryAsync(queueEntry, ep, SystemClock.UtcNow).AnyContext();

                    return(JobResult.FailedWithMessage($"Unable to decompress post data '{queueEntry.Value.FilePath}' ({ep.Data.Length} bytes compressed): {ex.Message}"));
                }
            }

            if (uncompressedData.Length > maxEventPostSize)
            {
                await CompleteEntryAsync(queueEntry, ep, SystemClock.UtcNow).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to process decompressed post data '{queueEntry.Value.FilePath}' ({ep.Data.Length} bytes compressed, {uncompressedData.Length} bytes): Maximum uncompressed event post size limit ({maxEventPostSize} bytes) reached."));
            }

            var createdUtc = SystemClock.UtcNow;
            var events     = await ParseEventPostAsync(ep, createdUtc, uncompressedData, queueEntry.Id, isInternalProject).AnyContext();

            if (events == null || events.Count == 0)
            {
                await CompleteEntryAsync(queueEntry, ep, createdUtc).AnyContext();

                return(JobResult.Success);
            }

            if (context.CancellationToken.IsCancellationRequested)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();

                return(JobResult.Cancelled);
            }

            bool isSingleEvent = events.Count == 1;

            if (!isSingleEvent)
            {
                // Don't process all the events if it will put the account over its limits.
                int eventsToProcess = await _organizationRepository.GetRemainingEventLimitAsync(project.OrganizationId).AnyContext();

                // Add 1 because we already counted 1 against their limit when we received the event post.
                if (eventsToProcess < Int32.MaxValue)
                {
                    eventsToProcess += 1;
                }

                // Discard any events over their limit.
                events = events.Take(eventsToProcess).ToList();

                // Increment the count if greater than 1, since we already incremented it by 1 in the OverageHandler.
                if (events.Count > 1)
                {
                    await _organizationRepository.IncrementUsageAsync(project.OrganizationId, false, events.Count - 1, applyHourlyLimit : false).AnyContext();
                }
            }

            int errorCount    = 0;
            var eventsToRetry = new List <PersistentEvent>();

            try {
                var results = await _eventPipeline.RunAsync(events, ep).AnyContext();

                _logger.Info()
                .Message(() => $"Ran {results.Count} events through the pipeline: id={queueEntry.Id} project={ep.ProjectId} success={results.Count(r => r.IsProcessed)} error={results.Count(r => r.HasError)}")
                .Property("@value", results.Count)
                .Property("@stack", "event-processed")
                .Property("Project", ep.ProjectId)
                .WriteIf(!isInternalProject);

                foreach (var eventContext in results)
                {
                    if (eventContext.IsCancelled)
                    {
                        continue;
                    }

                    if (!eventContext.HasError)
                    {
                        continue;
                    }

                    _logger.Error().Exception(eventContext.Exception).Project(ep.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, eventContext.ErrorMessage).Write();

                    if (eventContext.Exception is ValidationException)
                    {
                        continue;
                    }

                    errorCount++;
                    if (!isSingleEvent)
                    {
                        // Queue this event for an individual retry; it is re-enqueued below.
                        eventsToRetry.Add(eventContext.Event);
                    }
                }
            } catch (Exception ex) {
                _logger.Error().Exception(ex).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, ex.Message).Project(ep.ProjectId).Write();
                if (ex is ArgumentException || ex is DocumentNotFoundException)
                {
                    await CompleteEntryAsync(queueEntry, ep, createdUtc).AnyContext();

                    return(JobResult.Success);
                }

                errorCount++;
                if (!isSingleEvent)
                {
                    eventsToRetry.AddRange(events);
                }
            }

            foreach (var requeueEvent in eventsToRetry)
            {
                string contentEncoding = null;
                byte[] data            = requeueEvent.GetBytes(_jsonSerializerSettings);
                if (data.Length > 1000)
                {
                    data = await data.CompressAsync().AnyContext();

                    contentEncoding = "gzip";
                }

                // Put this single event back into the queue so we can retry it separately.
                await _queue.EnqueueAsync(new EventPostInfo {
                    ApiVersion      = ep.ApiVersion,
                    CharSet         = ep.CharSet,
                    ContentEncoding = contentEncoding,
                    Data            = data,
                    IpAddress       = ep.IpAddress,
                    MediaType       = ep.MediaType,
                    ProjectId       = ep.ProjectId,
                    UserAgent       = ep.UserAgent
                }, _storage, false, context.CancellationToken).AnyContext();
            }

            if (isSingleEvent && errorCount > 0)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();
            }
            else
            {
                await CompleteEntryAsync(queueEntry, ep, createdUtc).AnyContext();
            }

            return(JobResult.Success);
        }
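
The batch-limit bookkeeping above adds one to the remaining event limit (the first event of the post was already counted against the limit when it was received) and, when more than one event survives the Take(), increments usage by count - 1 because the OverageHandler already counted one. A small arithmetic sketch with hypothetical numbers:

using System;

class OverageSketch {
    static void Main() {
        int batchSize = 10; // events parsed from the post (hypothetical)
        int remaining = 4;  // remaining event limit for the organization (hypothetical)

        // Add 1 because the first event was already counted when the post was received.
        int eventsToProcess = remaining;
        if (eventsToProcess < Int32.MaxValue)
            eventsToProcess += 1;

        // events.Take(eventsToProcess) keeps only what the plan allows.
        int accepted = Math.Min(batchSize, eventsToProcess);

        // The OverageHandler already counted one event, so usage grows by count - 1.
        int usageIncrement = accepted > 1 ? accepted - 1 : 0;

        Console.WriteLine($"process {accepted} of {batchSize} events, increment usage by {usageIncrement}");
        // process 5 of 10 events, increment usage by 4
    }
}
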