Example 1
0
        /// <summary>
        /// Intercepts event-submission requests and rejects those that are too large,
        /// over the account's usage limit, or arriving while submission is disabled.
        /// All other requests flow through to the inner handler unchanged.
        /// </summary>
        protected override async Task <HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
        {
            // Anything that is not an event post passes straight through.
            if (!IsEventPost(request))
                return await base.SendAsync(request, cancellationToken);

            if (Settings.Current.EventSubmissionDisabled)
                return CreateResponse(request, HttpStatusCode.ServiceUnavailable, "Service Unavailable");

            bool exceedsSizeLimit = false;
            var contentHeaders = request.Method == HttpMethod.Post ? request.Content?.Headers : null;
            if (contentHeaders != null)
            {
                long contentLength = contentHeaders.ContentLength.GetValueOrDefault();
                await _metricsClient.GaugeAsync(MetricNames.PostsSize, contentLength);

                if (contentLength > Settings.Current.MaximumEventPostSize)
                {
                    _logger.Warn().Message("Event submission discarded for being too large: {0} bytes", contentLength).Value(contentLength).Tag(contentHeaders.ContentEncoding?.ToArray()).Project(request.GetDefaultProjectId()).Write();
                    await _metricsClient.CounterAsync(MetricNames.PostsDiscarded);

                    exceedsSizeLimit = true;
                }
            }

            // Usage is incremented even for discarded posts so they still count against the account.
            bool overLimit = await _usageService.IncrementUsageAsync(request.GetDefaultOrganizationId(), request.GetDefaultProjectId(), exceedsSizeLimit);

            // block large submissions, client should break them up or remove some of the data.
            if (exceedsSizeLimit)
                return CreateResponse(request, HttpStatusCode.RequestEntityTooLarge, "Event submission discarded for being too large.");

            if (overLimit)
            {
                await _metricsClient.CounterAsync(MetricNames.PostsBlocked);
                return CreateResponse(request, HttpStatusCode.PaymentRequired, "Event limit exceeded.");
            }

            return await base.SendAsync(request, cancellationToken);
        }
Example 2
0
        /// <summary>
        /// Middleware entry point: gates event-submission requests on content length,
        /// maximum post size and account usage limits before passing control onward.
        /// </summary>
        public async Task Invoke(HttpContext context)
        {
            // Non-event requests flow straight through the pipeline.
            if (!IsEventPost(context))
            {
                await _next(context);
                return;
            }

            if (_appOptions.Value.EventSubmissionDisabled)
            {
                context.Response.StatusCode = StatusCodes.Status503ServiceUnavailable;
                return;
            }

            // Kick off both repository lookups up front so they run concurrently
            // while the request headers are being inspected.
            string organizationId = context.Request.GetDefaultOrganizationId();
            string projectId = context.Request.GetDefaultProjectId();
            var organizationTask = _organizationRepository.GetByIdAsync(organizationId, o => o.Cache());
            var projectTask = _projectRepository.GetByIdAsync(projectId, o => o.Cache());

            bool tooBig = false;
            bool isPost = String.Equals(context.Request.Method, "POST", StringComparison.OrdinalIgnoreCase);
            if (isPost && context.Request.Headers != null)
            {
                long? declaredLength = context.Request.Headers.ContentLength;

                // A declared non-positive length is rejected outright.
                if (declaredLength.HasValue && declaredLength.Value <= 0)
                {
                    //_metricsClient.Counter(MetricNames.PostsBlocked);
                    context.Response.StatusCode = StatusCodes.Status411LengthRequired;
                    // Observe the in-flight lookups before returning so no task is left unawaited.
                    await Task.WhenAll(organizationTask, projectTask);
                    return;
                }

                long size = declaredLength.GetValueOrDefault();
                if (size > 0)
                    _metricsClient.Gauge(MetricNames.PostsSize, size);

                if (size > _appOptions.Value.MaximumEventPostSize)
                {
                    if (_logger.IsEnabled(LogLevel.Warning))
                    {
                        using (_logger.BeginScope(new ExceptionlessState().Value(size).Tag(context.Request.Headers.TryGetAndReturn(Headers.ContentEncoding))))
                            _logger.LogWarning("Event submission discarded for being too large: {@value} bytes.", size);
                    }

                    _metricsClient.Counter(MetricNames.PostsDiscarded);
                    tooBig = true;
                }
            }

            var organization = await organizationTask;
            var project = await projectTask;
            // Usage is incremented even for oversized posts so they count against the account.
            bool overLimit = await _usageService.IncrementUsageAsync(organization, project, tooBig);

            // block large submissions, client should break them up or remove some of the data.
            if (tooBig)
            {
                context.Response.StatusCode = StatusCodes.Status413RequestEntityTooLarge;
                return;
            }

            if (overLimit)
            {
                _metricsClient.Counter(MetricNames.PostsBlocked);
                context.Response.StatusCode = StatusCodes.Status402PaymentRequired;
                return;
            }

            // Stash the resolved entities on the request for downstream components.
            context.Request.SetOrganization(organization);
            context.Request.SetProject(project);
            await _next(context);
        }
Example 3
0
    /// <summary>
    /// Verifies usage counting: staying under the hourly limit produces no overage,
    /// crossing it raises a <see cref="PlanOverage"/> message, and a single large
    /// increment records the correct blocked count.
    /// </summary>
    public async Task CanIncrementUsageAsync()
    {
        var messageBus = GetService <IMessageBus>();

        // Two overage notifications are expected over the course of the test.
        var countdown = new AsyncCountdownEvent(2);
        await messageBus.SubscribeAsync <PlanOverage>(po => {
            _logger.LogInformation("Plan Overage for {organization} (Hourly: {IsHourly})", po.OrganizationId, po.IsHourly);
            countdown.Signal();
        });

        var organization = await _organizationRepository.AddAsync(new Organization { Name = "Test", MaxEventsPerMonth = 750, PlanId = _plans.SmallPlan.Id }, o => o.ImmediateConsistency());
        var project = await _projectRepository.AddAsync(new Project { Name = "Test", OrganizationId = organization.Id, NextSummaryEndOfDayTicks = SystemClock.UtcNow.Ticks }, o => o.ImmediateConsistency());

        Assert.InRange(organization.GetHourlyEventLimit(_plans), 1, 750);

        // Local helper: asserts hourly/monthly totals and blocked counts for both
        // the organization-level and project-level usage records.
        async Task AssertUsageAsync(Organization org, Project proj, int expectedTotal, int expectedBlocked)
        {
            var orgUsage = await _usageService.GetUsageAsync(org);
            var projUsage = await _usageService.GetUsageAsync(org, proj);

            Assert.Equal(expectedTotal, orgUsage.HourlyTotal);
            Assert.Equal(expectedTotal, projUsage.HourlyTotal);
            Assert.Equal(expectedTotal, orgUsage.MonthlyTotal);
            Assert.Equal(expectedTotal, projUsage.MonthlyTotal);
            Assert.Equal(expectedBlocked, orgUsage.HourlyBlocked);
            Assert.Equal(expectedBlocked, projUsage.HourlyBlocked);
            Assert.Equal(expectedBlocked, orgUsage.MonthlyBlocked);
            Assert.Equal(expectedBlocked, projUsage.MonthlyBlocked);
        }

        // Phase 1: stay one event under the hourly limit — no overage expected.
        int totalToIncrement = organization.GetHourlyEventLimit(_plans) - 1;
        Assert.False(await _usageService.IncrementUsageAsync(organization, project, totalToIncrement));
        organization = await _organizationRepository.GetByIdAsync(organization.Id);

        await countdown.WaitAsync(TimeSpan.FromMilliseconds(150));
        Assert.Equal(2, countdown.CurrentCount);
        await AssertUsageAsync(organization, project, totalToIncrement, 0);

        // Phase 2: two more events push the account over the limit — first overage fires.
        Assert.True(await _usageService.IncrementUsageAsync(organization, project, 2));
        await countdown.WaitAsync(TimeSpan.FromMilliseconds(150));
        Assert.Equal(1, countdown.CurrentCount);
        await AssertUsageAsync(organization, project, totalToIncrement + 2, 1);

        // Phase 3: fresh organization/project; one increment 20 events over the limit
        // should record 20 blocked events and fire the second overage.
        organization = await _organizationRepository.AddAsync(new Organization { Name = "Test", MaxEventsPerMonth = 750, PlanId = _plans.SmallPlan.Id }, o => o.ImmediateConsistency());
        project = await _projectRepository.AddAsync(new Project { Name = "Test", OrganizationId = organization.Id, NextSummaryEndOfDayTicks = SystemClock.UtcNow.Ticks }, o => o.ImmediateConsistency());

        await _cache.RemoveAllAsync();

        totalToIncrement = organization.GetHourlyEventLimit(_plans) + 20;
        Assert.True(await _usageService.IncrementUsageAsync(organization, project, totalToIncrement));

        await countdown.WaitAsync(TimeSpan.FromMilliseconds(150));
        Assert.Equal(0, countdown.CurrentCount);
        await AssertUsageAsync(organization, project, totalToIncrement, 20);
    }
        /// <summary>
        /// Processes one queued <see cref="EventPost"/>: loads the payload, enforces compressed and
        /// uncompressed size limits, decompresses and parses the events, trims the batch to the
        /// account's remaining event limit, and runs the events through the pipeline. Failed events
        /// from a multi-event post are re-queued individually for retry.
        /// </summary>
        /// <param name="context">Queue entry context carrying the <see cref="EventPost"/> and the cancellation token.</param>
        /// <returns>A <see cref="JobResult"/> indicating success, failure, or cancellation.</returns>
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <EventPost> context)
        {
            var    entry            = context.QueueEntry;
            var    ep               = entry.Value;
            string payloadPath      = Path.ChangeExtension(entry.Value.FilePath, ".payload");
            // Start the payload read and the project/organization lookups concurrently.
            var    payloadTask      = _metrics.TimeAsync(() => _eventPostService.GetEventPostPayloadAsync(payloadPath, context.CancellationToken), MetricNames.PostsMarkFileActiveTime);
            var    projectTask      = _projectRepository.GetByIdAsync(ep.ProjectId, o => o.Cache());
            var    organizationTask = _organizationRepository.GetByIdAsync(ep.OrganizationId, o => o.Cache());

            var payload = await payloadTask.AnyContext();

            if (payload == null)
            {
                // Also await the in-flight lookups so their results/exceptions are observed.
                await Task.WhenAll(AbandonEntryAsync(entry), projectTask, organizationTask).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to retrieve payload '{payloadPath}'."));
            }

            _metrics.Gauge(MetricNames.PostsMessageSize, payload.LongLength);
            if (payload.LongLength > _maximumEventPostFileSize)
            {
                // Oversized posts are completed (not abandoned) so they are never retried.
                // NOTE(review): the message quotes Settings.Current.MaximumEventPostSize while the
                // comparison uses _maximumEventPostFileSize — confirm these are the same value.
                await Task.WhenAll(_metrics.TimeAsync(() => entry.CompleteAsync(), MetricNames.PostsCompleteTime), projectTask, organizationTask).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to process payload '{payloadPath}' ({payload.LongLength} bytes): Maximum event post size limit ({Settings.Current.MaximumEventPostSize} bytes) reached."));
            }

            using (_logger.BeginScope(new ExceptionlessState().Organization(ep.OrganizationId).Project(ep.ProjectId))) {
                _metrics.Gauge(MetricNames.PostsCompressedSize, payload.Length);

                bool isDebugLogLevelEnabled = _logger.IsEnabled(LogLevel.Debug);
                bool isInternalProject      = ep.ProjectId == Settings.Current.InternalProjectId;
                if (!isInternalProject && _logger.IsEnabled(LogLevel.Information))
                {
                    using (_logger.BeginScope(new ExceptionlessState().Tag("processing").Tag("compressed").Tag(ep.ContentEncoding).Value(payload.Length)))
                        _logger.LogInformation("Processing post: id={QueueEntryId} path={FilePath} project={project} ip={IpAddress} v={ApiVersion} agent={UserAgent}", entry.Id, payloadPath, ep.ProjectId, ep.IpAddress, ep.ApiVersion, ep.UserAgent);
                }

                var project = await projectTask.AnyContext();

                if (project == null)
                {
                    if (!isInternalProject)
                    {
                        _logger.LogError("Unable to process EventPost {FilePath}: Unable to load project: {Project}", payloadPath, ep.ProjectId);
                    }
                    await Task.WhenAll(CompleteEntryAsync(entry, ep, SystemClock.UtcNow), organizationTask).AnyContext();

                    return(JobResult.Success);
                }

                long maxEventPostSize = Settings.Current.MaximumEventPostSize;
                var  uncompressedData = payload;
                if (!String.IsNullOrEmpty(ep.ContentEncoding))
                {
                    if (!isInternalProject && isDebugLogLevelEnabled)
                    {
                        using (_logger.BeginScope(new ExceptionlessState().Tag("decompressing").Tag(ep.ContentEncoding)))
                            _logger.LogDebug("Decompressing EventPost: {QueueEntryId} ({CompressedBytes} bytes)", entry.Id, payload.Length);
                    }

                    // Compressed posts get a larger limit once expanded.
                    maxEventPostSize = _maximumUncompressedEventPostSize;
                    try {
                        _metrics.Time(() => {
                            uncompressedData = uncompressedData.Decompress(ep.ContentEncoding);
                        }, MetricNames.PostsDecompressionTime);
                    } catch (Exception ex) {
                        _metrics.Counter(MetricNames.PostsDecompressionErrors);
                        await Task.WhenAll(CompleteEntryAsync(entry, ep, SystemClock.UtcNow), organizationTask).AnyContext();

                        return(JobResult.FailedWithMessage($"Unable to decompress EventPost data '{payloadPath}' ({payload.Length} bytes compressed): {ex.Message}"));
                    }
                }

                // BUGFIX: gauge the uncompressed size, not the (possibly compressed) payload size.
                _metrics.Gauge(MetricNames.PostsUncompressedSize, uncompressedData.LongLength);
                if (uncompressedData.Length > maxEventPostSize)
                {
                    await Task.WhenAll(CompleteEntryAsync(entry, ep, SystemClock.UtcNow), organizationTask).AnyContext();

                    return(JobResult.FailedWithMessage($"Unable to process decompressed EventPost data '{payloadPath}' ({payload.Length} bytes compressed, {uncompressedData.Length} bytes): Maximum uncompressed event post size limit ({maxEventPostSize} bytes) reached."));
                }

                if (!isInternalProject && isDebugLogLevelEnabled)
                {
                    using (_logger.BeginScope(new ExceptionlessState().Tag("uncompressed").Value(uncompressedData.Length)))
                        _logger.LogDebug("Processing uncompressed EventPost: {QueueEntryId}  ({UncompressedBytes} bytes)", entry.Id, uncompressedData.Length);
                }

                var createdUtc = SystemClock.UtcNow;
                var events     = ParseEventPost(ep, payload, createdUtc, uncompressedData, entry.Id, isInternalProject);
                if (events == null || events.Count == 0)
                {
                    // Nothing parseable — treat as handled.
                    await Task.WhenAll(CompleteEntryAsync(entry, ep, createdUtc), organizationTask).AnyContext();

                    return(JobResult.Success);
                }

                if (context.CancellationToken.IsCancellationRequested)
                {
                    await Task.WhenAll(AbandonEntryAsync(entry), organizationTask).AnyContext();

                    return(JobResult.Cancelled);
                }

                var organization = await organizationTask.AnyContext();

                if (organization == null)
                {
                    if (!isInternalProject)
                    {
                        _logger.LogError("Unable to process EventPost {FilePath}: Unable to load organization: {OrganizationId}", payloadPath, project.OrganizationId);
                    }

                    await CompleteEntryAsync(entry, ep, SystemClock.UtcNow).AnyContext();

                    return(JobResult.Success);
                }

                bool isSingleEvent = events.Count == 1;
                if (!isSingleEvent)
                {
                    await _metrics.TimeAsync(async() => {
                        // Don't process all the events if it will put the account over its limits.
                        int eventsToProcess = await _usageService.GetRemainingEventLimitAsync(organization).AnyContext();

                        // Add 1 because we already counted 1 against their limit when we received the event post.
                        if (eventsToProcess < Int32.MaxValue)
                        {
                            eventsToProcess += 1;
                        }

                        // Discard any events over their limit.
                        events = events.Take(eventsToProcess).ToList();

                        // Increment the count if greater than 1, since we already incremented it by 1 in the OverageHandler.
                        if (events.Count > 1)
                        {
                            await _usageService.IncrementUsageAsync(organization, project, false, events.Count - 1, applyHourlyLimit: false).AnyContext();
                        }
                    }, MetricNames.PostsUpdateEventLimitTime).AnyContext();
                }

                int errorCount    = 0;
                var eventsToRetry = new List <PersistentEvent>();
                try {
                    var contexts = await _eventPipeline.RunAsync(events, organization, project, ep).AnyContext();

                    if (!isInternalProject && isDebugLogLevelEnabled)
                    {
                        using (_logger.BeginScope(new ExceptionlessState().Value(contexts.Count)))
                            _logger.LogDebug("Ran {@value} events through the pipeline: id={QueueEntryId} success={SuccessCount} error={ErrorCount}", contexts.Count, entry.Id, contexts.Count(r => r.IsProcessed), contexts.Count(r => r.HasError));
                    }

                    foreach (var ctx in contexts)
                    {
                        if (ctx.IsCancelled)
                        {
                            continue;
                        }

                        if (!ctx.HasError)
                        {
                            continue;
                        }

                        if (!isInternalProject)
                        {
                            _logger.LogError(ctx.Exception, "Error processing EventPost {QueueEntryId} {FilePath}: {Message}", entry.Id, payloadPath, ctx.ErrorMessage);
                        }
                        // Validation failures are permanent; retrying them would never succeed.
                        if (ctx.Exception is ValidationException)
                        {
                            continue;
                        }

                        errorCount++;
                        if (!isSingleEvent)
                        {
                            // Put this single event back into the queue so we can retry it separately.
                            eventsToRetry.Add(ctx.Event);
                        }
                    }
                } catch (Exception ex) {
                    if (!isInternalProject)
                    {
                        _logger.LogError(ex, "Error processing EventPost {QueueEntryId} {FilePath}: {Message}", entry.Id, payloadPath, ex.Message);
                    }
                    // These exception types indicate a permanently-bad post; complete it so it is not retried.
                    if (ex is ArgumentException || ex is DocumentNotFoundException)
                    {
                        await CompleteEntryAsync(entry, ep, createdUtc).AnyContext();

                        return(JobResult.Success);
                    }

                    errorCount++;
                    if (!isSingleEvent)
                    {
                        eventsToRetry.AddRange(events);
                    }
                }

                if (eventsToRetry.Count > 0)
                {
                    await _metrics.TimeAsync(() => RetryEventsAsync(eventsToRetry, ep, entry, project, isInternalProject), MetricNames.PostsRetryTime).AnyContext();
                }

                // A failed single event is abandoned so the queue retries the whole entry;
                // multi-event entries are completed because failures were re-queued individually.
                if (isSingleEvent && errorCount > 0)
                {
                    await AbandonEntryAsync(entry).AnyContext();
                }
                else
                {
                    await CompleteEntryAsync(entry, ep, createdUtc).AnyContext();
                }

                return(JobResult.Success);
            }
        }
        /// <summary>
        /// Verifies cache-backed usage counting: staying under the hourly limit produces no
        /// overage, crossing it raises a <see cref="PlanOverage"/> message, and a single large
        /// increment records the correct blocked count.
        /// </summary>
        public async Task CanIncrementUsageAsync()
        {
            var messageBus = GetService <IMessageBus>();

            // Two overage notifications are expected over the course of the test.
            var countdown = new AsyncCountdownEvent(2);
            await messageBus.SubscribeAsync <PlanOverage>(po => {
                _logger.Info($"Plan Overage for {po.OrganizationId} (Hourly: {po.IsHourly})");
                countdown.Signal();
            });

            // Local helper: asserts the hourly/monthly totals and blocked counts that the
            // usage service writes to the cache, at both organization and project scope.
            async Task AssertCachedUsageAsync(string orgId, string projId, long expectedTotal, long expectedBlocked)
            {
                Assert.Equal(expectedTotal, await _cache.GetAsync <long>(GetHourlyTotalCacheKey(orgId), 0));
                Assert.Equal(expectedTotal, await _cache.GetAsync <long>(GetHourlyTotalCacheKey(orgId, projId), 0));
                Assert.Equal(expectedTotal, await _cache.GetAsync <long>(GetMonthlyTotalCacheKey(orgId), 0));
                Assert.Equal(expectedTotal, await _cache.GetAsync <long>(GetMonthlyTotalCacheKey(orgId, projId), 0));
                Assert.Equal(expectedBlocked, await _cache.GetAsync <long>(GetHourlyBlockedCacheKey(orgId), 0));
                Assert.Equal(expectedBlocked, await _cache.GetAsync <long>(GetHourlyBlockedCacheKey(orgId, projId), 0));
                Assert.Equal(expectedBlocked, await _cache.GetAsync <long>(GetMonthlyBlockedCacheKey(orgId), 0));
                Assert.Equal(expectedBlocked, await _cache.GetAsync <long>(GetMonthlyBlockedCacheKey(orgId, projId), 0));
            }

            var organization = await _organizationRepository.AddAsync(new Organization { Name = "Test", MaxEventsPerMonth = 750, PlanId = BillingManager.SmallPlan.Id });
            var project = await _projectRepository.AddAsync(new Project { Name = "Test", OrganizationId = organization.Id, NextSummaryEndOfDayTicks = SystemClock.UtcNow.Ticks }, opt => opt.Cache());

            await _configuration.Client.RefreshAsync(Indices.All);

            Assert.InRange(organization.GetHourlyEventLimit(), 1, 750);

            // Phase 1: stay one event under the hourly limit — no overage expected.
            int totalToIncrement = organization.GetHourlyEventLimit() - 1;
            Assert.False(await _usageService.IncrementUsageAsync(organization.Id, project.Id, false, totalToIncrement));
            await _configuration.Client.RefreshAsync(Indices.All);
            organization = await _organizationRepository.GetByIdAsync(organization.Id);

            await countdown.WaitAsync(TimeSpan.FromMilliseconds(150));
            Assert.Equal(2, countdown.CurrentCount);
            await AssertCachedUsageAsync(organization.Id, project.Id, totalToIncrement, 0);

            // Phase 2: two more events push the account over the limit — first overage fires.
            Assert.True(await _usageService.IncrementUsageAsync(organization.Id, project.Id, false, 2));
            await _configuration.Client.RefreshAsync(Indices.All);
            organization = await _organizationRepository.GetByIdAsync(organization.Id);

            await countdown.WaitAsync(TimeSpan.FromMilliseconds(150));
            Assert.Equal(1, countdown.CurrentCount);
            await AssertCachedUsageAsync(organization.Id, project.Id, totalToIncrement + 2, 1);

            // Phase 3: fresh organization/project; one increment 20 events over the limit
            // should record 20 blocked events and fire the second overage.
            organization = await _organizationRepository.AddAsync(new Organization { Name = "Test", MaxEventsPerMonth = 750, PlanId = BillingManager.SmallPlan.Id });
            project = await _projectRepository.AddAsync(new Project { Name = "Test", OrganizationId = organization.Id, NextSummaryEndOfDayTicks = SystemClock.UtcNow.Ticks }, opt => opt.Cache());

            await _configuration.Client.RefreshAsync(Indices.All);
            await _cache.RemoveAllAsync();

            totalToIncrement = organization.GetHourlyEventLimit() + 20;
            Assert.True(await _usageService.IncrementUsageAsync(organization.Id, project.Id, false, totalToIncrement));

            await countdown.WaitAsync(TimeSpan.FromMilliseconds(150));
            Assert.Equal(0, countdown.CurrentCount);
            await AssertCachedUsageAsync(organization.Id, project.Id, totalToIncrement, 20);
        }
Example 6
0
        /// <summary>
        /// Processes one queued <see cref="EventPost"/>: retrieves the stored post data, enforces
        /// compressed and uncompressed size limits, decompresses and parses the events, trims the
        /// batch to the account's remaining event limit, and runs the events through the pipeline.
        /// Failed events from a multi-event post are re-queued individually for retry.
        /// </summary>
        /// <param name="context">Queue entry context carrying the <see cref="EventPost"/> and the cancellation token.</param>
        /// <returns>A <see cref="JobResult"/> indicating success, failure, or cancellation.</returns>
        protected override async Task <JobResult> ProcessQueueEntryAsync(QueueEntryContext <EventPost> context)
        {
            var      queueEntry = context.QueueEntry;
            FileSpec fileInfo   = null;
            await _metricsClient.TimeAsync(async() => fileInfo = await _storage.GetFileInfoAsync(queueEntry.Value.FilePath).AnyContext(), MetricNames.PostsFileInfoTime).AnyContext();

            if (fileInfo == null)
            {
                await _metricsClient.TimeAsync(() => queueEntry.AbandonAsync(), MetricNames.PostsAbandonTime).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to retrieve post data info '{queueEntry.Value.FilePath}'."));
            }

            await _metricsClient.GaugeAsync(MetricNames.PostsMessageSize, fileInfo.Size).AnyContext();

            if (fileInfo.Size > GetMaximumEventPostFileSize())
            {
                // Oversized posts are completed (not abandoned) so they are never retried.
                // NOTE(review): the message quotes Settings.Current.MaximumEventPostSize while the
                // comparison uses GetMaximumEventPostFileSize() — confirm these agree.
                await _metricsClient.TimeAsync(() => queueEntry.CompleteAsync(), MetricNames.PostsCompleteTime).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to process post data '{queueEntry.Value.FilePath}' ({fileInfo.Size} bytes): Maximum event post size limit ({Settings.Current.MaximumEventPostSize} bytes) reached."));
            }

            EventPostInfo ep = null;
            await _metricsClient.TimeAsync(async() => ep = await _storage.GetEventPostAsync(queueEntry.Value.FilePath, _logger, context.CancellationToken).AnyContext(), MetricNames.PostsMarkFileActiveTime).AnyContext();

            if (ep == null)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to retrieve post data '{queueEntry.Value.FilePath}'."));
            }

            await _metricsClient.GaugeAsync(MetricNames.PostsCompressedSize, ep.Data.Length).AnyContext();

            // Internal-project posts are processed but excluded from most logging.
            bool isInternalProject = ep.ProjectId == Settings.Current.InternalProjectId;

            _logger.Info()
            .Message("Processing post: id={0} path={1} project={2} ip={3} v={4} agent={5}", queueEntry.Id, queueEntry.Value.FilePath, ep.ProjectId, ep.IpAddress, ep.ApiVersion, ep.UserAgent)
            .Property("Id", queueEntry.Id)
            .Property("ApiVersion", ep.ApiVersion)
            .Property("IpAddress", ep.IpAddress)
            .Property("Client", ep.UserAgent)
            .Tag("processing", "compressed", ep.ContentEncoding)
            .Value(ep.Data.Length)
            .Project(ep.ProjectId)
            .WriteIf(!isInternalProject);

            var project = await _projectRepository.GetByIdAsync(ep.ProjectId, o => o.Cache()).AnyContext();

            if (project == null)
            {
                // Unknown project: complete the entry so the bad post is not retried.
                _logger.Error().Message("Unable to process EventPost \"{0}\": Unable to load project: {1}", queueEntry.Value.FilePath, ep.ProjectId).Property("Id", queueEntry.Id).Project(ep.ProjectId).WriteIf(!isInternalProject);
                await CompleteEntryAsync(queueEntry, ep, SystemClock.UtcNow).AnyContext();

                return(JobResult.Success);
            }

            long maxEventPostSize = Settings.Current.MaximumEventPostSize;

            byte[] uncompressedData = ep.Data;
            if (!String.IsNullOrEmpty(ep.ContentEncoding))
            {
                _logger.Debug().Message("Decompressing EventPost: {0} ({1} bytes)", queueEntry.Id, ep.Data.Length).Property("Id", queueEntry.Id).Tag("decompressing", ep.ContentEncoding).Project(ep.ProjectId).WriteIf(!isInternalProject);
                // Compressed posts get a larger limit once expanded.
                maxEventPostSize = GetMaximumUncompressedEventPostSize();
                try {
                    await _metricsClient.TimeAsync(async() => {
                        uncompressedData = await uncompressedData.DecompressAsync(ep.ContentEncoding).AnyContext();
                    }, MetricNames.PostsDecompressionTime).AnyContext();
                } catch (Exception ex) {
                    await _metricsClient.CounterAsync(MetricNames.PostsDecompressionErrors).AnyContext();
                    await CompleteEntryAsync(queueEntry, ep, SystemClock.UtcNow).AnyContext();

                    return(JobResult.FailedWithMessage($"Unable to decompress EventPost data '{queueEntry.Value.FilePath}' ({ep.Data.Length} bytes compressed): {ex.Message}"));
                }
            }

            // BUGFIX: gauge the uncompressed size, not the stored (possibly compressed) file size.
            await _metricsClient.GaugeAsync(MetricNames.PostsUncompressedSize, uncompressedData.Length).AnyContext();

            if (uncompressedData.Length > maxEventPostSize)
            {
                await CompleteEntryAsync(queueEntry, ep, SystemClock.UtcNow).AnyContext();

                return(JobResult.FailedWithMessage($"Unable to process decompressed EventPost data '{queueEntry.Value.FilePath}' ({ep.Data.Length} bytes compressed, {uncompressedData.Length} bytes): Maximum uncompressed event post size limit ({maxEventPostSize} bytes) reached."));
            }

            _logger.Debug().Message("Processing uncompressed EventPost: {0}  ({1} bytes)", queueEntry.Id, uncompressedData.Length).Property("Id", queueEntry.Id).Tag("uncompressed").Value(uncompressedData.Length).Project(ep.ProjectId).WriteIf(!isInternalProject);
            var createdUtc = SystemClock.UtcNow;
            var events     = await ParseEventPostAsync(ep, createdUtc, uncompressedData, queueEntry.Id, isInternalProject).AnyContext();

            if (events == null || events.Count == 0)
            {
                // Nothing parseable — treat as handled.
                await CompleteEntryAsync(queueEntry, ep, createdUtc).AnyContext();

                return(JobResult.Success);
            }

            if (context.CancellationToken.IsCancellationRequested)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();

                return(JobResult.Cancelled);
            }

            bool isSingleEvent = events.Count == 1;

            if (!isSingleEvent)
            {
                await _metricsClient.TimeAsync(async() => {
                    // Don't process all the events if it will put the account over its limits.
                    int eventsToProcess = await _usageService.GetRemainingEventLimitAsync(project.OrganizationId).AnyContext();

                    // Add 1 because we already counted 1 against their limit when we received the event post.
                    if (eventsToProcess < Int32.MaxValue)
                    {
                        eventsToProcess += 1;
                    }

                    // Discard any events over their limit.
                    events = events.Take(eventsToProcess).ToList();

                    // Increment the count if greater than 1, since we already incremented it by 1 in the OverageHandler.
                    if (events.Count > 1)
                    {
                        await _usageService.IncrementUsageAsync(project.OrganizationId, project.Id, false, events.Count - 1, applyHourlyLimit: false).AnyContext();
                    }
                }, MetricNames.PostsUpdateEventLimitTime).AnyContext();
            }

            int errorCount    = 0;
            var eventsToRetry = new List <PersistentEvent>();

            try {
                var contexts = await _eventPipeline.RunAsync(events, ep).AnyContext();

                _logger.Debug().Message(() => $"Ran {contexts.Count} events through the pipeline: id={queueEntry.Id} success={contexts.Count(r => r.IsProcessed)} error={contexts.Count(r => r.HasError)}").Property("Id", queueEntry.Id).Value(contexts.Count).Project(ep.ProjectId).WriteIf(!isInternalProject);
                foreach (var ctx in contexts)
                {
                    if (ctx.IsCancelled)
                    {
                        continue;
                    }

                    if (!ctx.HasError)
                    {
                        continue;
                    }

                    _logger.Error().Exception(ctx.Exception).Message("Error processing EventPost \"{0}\": {1}", queueEntry.Value.FilePath, ctx.ErrorMessage).Property("Id", queueEntry.Id).Project(ep.ProjectId).WriteIf(!isInternalProject);
                    // Validation failures are permanent; retrying them would never succeed.
                    if (ctx.Exception is ValidationException)
                    {
                        continue;
                    }

                    errorCount++;
                    if (!isSingleEvent)
                    {
                        // Put this single event back into the queue so we can retry it separately.
                        eventsToRetry.Add(ctx.Event);
                    }
                }
            } catch (Exception ex) {
                _logger.Error().Exception(ex).Message("Error processing EventPost \"{0}\": {1}", queueEntry.Value.FilePath, ex.Message).Property("Id", queueEntry.Id).Project(ep.ProjectId).WriteIf(!isInternalProject);
                // These exception types indicate a permanently-bad post; complete it so it is not retried.
                if (ex is ArgumentException || ex is DocumentNotFoundException)
                {
                    await CompleteEntryAsync(queueEntry, ep, createdUtc).AnyContext();

                    return(JobResult.Success);
                }

                errorCount++;
                if (!isSingleEvent)
                {
                    eventsToRetry.AddRange(events);
                }
            }

            if (eventsToRetry.Count > 0)
            {
                await _metricsClient.TimeAsync(() => RetryEvents(eventsToRetry, ep, queueEntry), MetricNames.PostsRetryTime).AnyContext();
            }

            // A failed single event is abandoned so the queue retries the whole entry;
            // multi-event entries are completed because failures were re-queued individually.
            if (isSingleEvent && errorCount > 0)
            {
                await AbandonEntryAsync(queueEntry).AnyContext();
            }
            else
            {
                await CompleteEntryAsync(queueEntry, ep, createdUtc).AnyContext();
            }

            return(JobResult.Success);
        }