Example #1
        public virtual async Task CanConcurrentlyManageFiles()
        {
            await ResetAsync();

            IFileStorage storage = GetStorage();

            if (storage == null)
            {
                return;
            }

            using (storage) {
                const string queueFolder = "q";
                var          queueItems  = new BlockingCollection<int>();

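                // Requesting info for a file that does not exist should return null rather than throw.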
                var info = await storage.GetFileInfoAsync("nope");

                Assert.Null(info);

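                // Save 10 event posts in parallel into the queue folder.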
                await Run.InParallel(10, async i => {
                    var ev = new PostInfo {
                        ApiVersion      = 2,
                        CharSet         = "utf8",
                        ContentEncoding = "application/json",
                        Data            = Encoding.UTF8.GetBytes("{}"),
                        IpAddress       = "127.0.0.1",
                        MediaType       = "gzip",
                        ProjectId       = i.ToString(),
                        UserAgent       = "test"
                    };

                    await storage.SaveObjectAsync(Path.Combine(queueFolder, i + ".json"), ev);
                    queueItems.Add(i);
                });

                Assert.Equal(10, (await storage.GetFileListAsync()).Count());

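                // Simulate concurrent workers claiming random posts and then completing or releasing them.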
                await Run.InParallel(10, async i => {
                    string path   = Path.Combine(queueFolder, queueItems.Random() + ".json");
                    var eventPost = await storage.GetEventPostAndSetActiveAsync(Path.Combine(queueFolder, RandomData.GetInt(0, 25) + ".json"));
                    if (eventPost == null)
                    {
                        return;
                    }

                    if (RandomData.GetBool())
                    {
                        await storage.CompleteEventPost(path, eventPost.ProjectId, DateTime.UtcNow, true);
                    }
                    else
                    {
                        await storage.SetNotActiveAsync(path);
                    }
                });
            }
        }
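
Run.InParallel in the example above is a test helper rather than a BCL API. A minimal sketch of what such a helper could look like, with the name and signature assumed only from the call sites in this example, is:

        using System;
        using System.Linq;
        using System.Threading.Tasks;

        // Hypothetical stand-in for the Run.InParallel test helper used above:
        // it starts the worker once per iteration and awaits all iterations together.
        public static class Run
        {
            public static Task InParallel(int iterations, Func<int, Task> work)
            {
                return Task.WhenAll(Enumerable.Range(1, iterations).Select(i => Task.Run(() => work(i))));
            }
        }

Task.Run is used so that each iteration is scheduled on the thread pool even when the worker body starts synchronously.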
Example #2
        public void CanConcurrentlyManageFiles()
        {
            Reset();

            IFileStorage storage = GetStorage();

            if (storage == null)
            {
                return;
            }

            const string queueFolder   = "q";
            const string archiveFolder = "archive";
            var          queueItems    = new BlockingCollection<int>();

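            // Save 25 event posts in parallel, recording each queued id.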
            Parallel.For(0, 25, i => {
                var ev = new EventPostInfo {
                    ApiVersion      = 2,
                    CharSet         = "utf8",
                    ContentEncoding = "application/json",
                    Data            = Encoding.UTF8.GetBytes("{}"),
                    IpAddress       = "127.0.0.1",
                    MediaType       = "gzip",
                    ProjectId       = i.ToString(),
                    UserAgent       = "test"
                };
                storage.SaveObject(Path.Combine(queueFolder, i + ".json"), ev);
                queueItems.Add(i);
            });
            Assert.Equal(25, storage.GetFileList().Count());

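            // Simulate 50 workers concurrently claiming random posts and then completing or releasing them.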
            Parallel.For(0, 50, i => {
                string path   = Path.Combine(queueFolder, queueItems.Random() + ".json");
                var eventPost = storage.GetEventPostAndSetActive(Path.Combine(queueFolder, RandomData.GetInt(0, 25) + ".json"));
                if (eventPost == null)
                {
                    return;
                }

                if (RandomData.GetBool())
                {
                    storage.CompleteEventPost(path, eventPost.ProjectId, DateTime.UtcNow, true);
                }
                else
                {
                    storage.SetNotActive(path);
                }
            });
        }
Example #3
        protected override async Task<JobResult> RunInternalAsync(CancellationToken token)
        {
            Log.Info().Message("Process events job starting").Write();

            QueueEntry<EventPostFileInfo> queueEntry = null;

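            // Try to dequeue the next post; a timeout simply means there is nothing to process right now.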
            try {
                queueEntry = _queue.Dequeue(TimeSpan.FromSeconds(1));
            } catch (Exception ex) {
                if (!(ex is TimeoutException))
                {
                    Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next EventPost: {0}", ex.Message).Write();
                    return JobResult.FromException(ex);
                }
            }
            if (queueEntry == null)
            {
                return JobResult.Success;
            }

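            // Load the post data from storage and mark the file as actively being processed.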
            EventPost eventPost = _storage.GetEventPostAndSetActive(queueEntry.Value.FilePath);

            if (eventPost == null)
            {
                queueEntry.Abandon();
                _storage.SetNotActive(queueEntry.Value.FilePath);
                return JobResult.FailedWithMessage(String.Format("Unable to retrieve post data '{0}'.", queueEntry.Value.FilePath));
            }

            _statsClient.Counter(StatNames.PostsDequeued);
            Log.Info().Message("Processing EventPost '{0}'.", queueEntry.Id).Write();

            List<PersistentEvent> events = null;

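            // Parse the raw payload into events, recording parse time, parse count, and batch size.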
            try {
                _statsClient.Time(() => {
                    events = ParseEventPost(eventPost);
                }, StatNames.PostsParsingTime);
                _statsClient.Counter(StatNames.PostsParsed);
                _statsClient.Gauge(StatNames.PostsBatchSize, events.Count);
            } catch (Exception ex) {
                _statsClient.Counter(StatNames.PostsParseErrors);
                queueEntry.Abandon();
                _storage.SetNotActive(queueEntry.Value.FilePath);

                // TODO: Add the EventPost to the logged exception.
                Log.Error().Exception(ex).Message("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message).Write();
                return JobResult.FromException(ex, String.Format("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message));
            }

            if (events == null)
            {
                queueEntry.Abandon();
                _storage.SetNotActive(queueEntry.Value.FilePath);
                return JobResult.Success;
            }

            int  eventsToProcess = events.Count;
            bool isSingleEvent   = events.Count == 1;

            if (!isSingleEvent)
            {
                var project = _projectRepository.GetById(eventPost.ProjectId, true);
                // Don't process all the events if it will put the account over its limits.
                eventsToProcess = _organizationRepository.GetRemainingEventLimit(project.OrganizationId);

                // Add 1 because we already counted 1 against their limit when we received the event post.
                if (eventsToProcess < Int32.MaxValue)
                {
                    eventsToProcess += 1;
                }

                // Increment by count - 1 since we already incremented it by 1 in the OverageHandler.
                _organizationRepository.IncrementUsage(project.OrganizationId, events.Count - 1);
            }
            int      errorCount = 0;
            DateTime created    = DateTime.UtcNow;

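            // Run each event through the pipeline; when an event from a batch fails, requeue it individually so it can be retried.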
            foreach (PersistentEvent ev in events.Take(eventsToProcess))
            {
                try {
                    ev.CreatedUtc = created;
                    _eventPipeline.Run(ev);
                } catch (ValidationException ex) {
                    Log.Error().Exception(ex).Project(eventPost.ProjectId).Message("Event validation error occurred: {0}", ex.Message).Write();
                } catch (Exception ex) {
                    Log.Error().Exception(ex).Project(eventPost.ProjectId).Message("Error while processing event: {0}", ex.Message).Write();

                    if (!isSingleEvent)
                    {
                        // Put this single event back into the queue so we can retry it separately.
                        _queue.Enqueue(new EventPost {
                            ApiVersion      = eventPost.ApiVersion,
                            CharSet         = eventPost.CharSet,
                            ContentEncoding = "application/json",
                            Data            = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(ev)),
                            IpAddress       = eventPost.IpAddress,
                            MediaType       = eventPost.MediaType,
                            ProjectId       = eventPost.ProjectId,
                            UserAgent       = eventPost.UserAgent
                        }, _storage, false);
                    }

                    errorCount++;
                }
            }

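            // A failed single-event post is abandoned and its claim released; otherwise the entry is completed and the file archived or deleted.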
            if (isSingleEvent && errorCount > 0)
            {
                queueEntry.Abandon();
                _storage.SetNotActive(queueEntry.Value.FilePath);
            }
            else
            {
                queueEntry.Complete();
                if (queueEntry.Value.ShouldArchive)
                {
                    _storage.CompleteEventPost(queueEntry.Value.FilePath, eventPost.ProjectId, created, queueEntry.Value.ShouldArchive);
                }
                else
                {
                    _storage.DeleteFile(queueEntry.Value.FilePath);
                    _storage.SetNotActive(queueEntry.Value.FilePath);
                }
            }

            return JobResult.Success;
        }
Example #4
        protected override async Task<JobResult> RunInternalAsync(CancellationToken token)
        {
            QueueEntry<EventPost> queueEntry = null;

            try {
                queueEntry = _queue.Dequeue(TimeSpan.FromSeconds(1));
            } catch (Exception ex) {
                if (!(ex is TimeoutException))
                {
                    Log.Error().Exception(ex).Message("An error occurred while trying to dequeue the next EventPost: {0}", ex.Message).Write();
                    return JobResult.FromException(ex);
                }
            }

            if (queueEntry == null)
            {
                return JobResult.Success;
            }

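            // Check for cancellation before claiming the post file.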
            if (token.IsCancellationRequested)
            {
                queueEntry.Abandon();
                return JobResult.Cancelled;
            }

            EventPostInfo eventPostInfo = _storage.GetEventPostAndSetActive(queueEntry.Value.FilePath);

            if (eventPostInfo == null)
            {
                queueEntry.Abandon();
                _storage.SetNotActive(queueEntry.Value.FilePath);
                return JobResult.FailedWithMessage(String.Format("Unable to retrieve post data '{0}'.", queueEntry.Value.FilePath));
            }

            bool isInternalProject = eventPostInfo.ProjectId == Settings.Current.InternalProjectId;

            _statsClient.Counter(MetricNames.PostsDequeued);
            Log.Info().Message("Processing post: id={0} path={1} project={2} ip={3} v={4} agent={5}", queueEntry.Id, queueEntry.Value.FilePath, eventPostInfo.ProjectId, eventPostInfo.IpAddress, eventPostInfo.ApiVersion, eventPostInfo.UserAgent).WriteIf(!isInternalProject);

            List<PersistentEvent> events = null;

            try {
                _statsClient.Time(() => {
                    events = ParseEventPost(eventPostInfo);
                    Log.Info().Message("Parsed {0} events for post: id={1}", events.Count, queueEntry.Id).WriteIf(!isInternalProject);
                }, MetricNames.PostsParsingTime);
                _statsClient.Counter(MetricNames.PostsParsed);
                _statsClient.Gauge(MetricNames.PostsEventCount, events.Count);
            } catch (Exception ex) {
                _statsClient.Counter(MetricNames.PostsParseErrors);
                queueEntry.Abandon();
                _storage.SetNotActive(queueEntry.Value.FilePath);

                Log.Error().Exception(ex).Message("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message).Write();
                return JobResult.FromException(ex, String.Format("An error occurred while processing the EventPost '{0}': {1}", queueEntry.Id, ex.Message));
            }

            if (token.IsCancellationRequested)
            {
                queueEntry.Abandon();
                return JobResult.Cancelled;
            }

            if (events == null)
            {
                queueEntry.Abandon();
                _storage.SetNotActive(queueEntry.Value.FilePath);
                return JobResult.Success;
            }

            int  eventsToProcess = events.Count;
            bool isSingleEvent   = events.Count == 1;

            if (!isSingleEvent)
            {
                var project = _projectRepository.GetById(eventPostInfo.ProjectId, true);
                // Don't process all the events if it will put the account over its limits.
                eventsToProcess = _organizationRepository.GetRemainingEventLimit(project.OrganizationId);

                // Add 1 because we already counted 1 against their limit when we received the event post.
                if (eventsToProcess < Int32.MaxValue)
                {
                    eventsToProcess += 1;
                }

                // Increment by count - 1 since we already incremented it by 1 in the OverageHandler.
                _organizationRepository.IncrementUsage(project.OrganizationId, false, events.Count - 1);
            }

            if (events == null)
            {
                queueEntry.Abandon();
                _storage.SetNotActive(queueEntry.Value.FilePath);
                return JobResult.Success;
            }

            var errorCount = 0;
            var created    = DateTime.UtcNow;

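            // Unlike Example 3, the whole batch goes through the pipeline in a single call; failed events are requeued individually below.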
            try {
                events.ForEach(e => e.CreatedUtc = created);
                var results = _eventPipeline.Run(events.Take(eventsToProcess).ToList());
                Log.Info().Message("Ran {0} events through the pipeline: id={1} project={2} success={3} error={4}", results.Count, queueEntry.Id, eventPostInfo.ProjectId, results.Count(r => r.IsProcessed), results.Count(r => r.HasError)).WriteIf(!isInternalProject);
                foreach (var eventContext in results)
                {
                    if (eventContext.IsCancelled)
                    {
                        continue;
                    }

                    if (!eventContext.HasError)
                    {
                        continue;
                    }

                    Log.Error().Exception(eventContext.Exception).Project(eventPostInfo.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, eventContext.ErrorMessage).Write();
                    if (eventContext.Exception is ValidationException)
                    {
                        continue;
                    }

                    errorCount++;

                    if (!isSingleEvent)
                    {
                        // Put this single event back into the queue so we can retry it separately.
                        _queue.Enqueue(new EventPostInfo {
                            ApiVersion = eventPostInfo.ApiVersion,
                            CharSet    = eventPostInfo.CharSet,
                            Data       = Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(eventContext.Event)),
                            IpAddress  = eventPostInfo.IpAddress,
                            MediaType  = eventPostInfo.MediaType,
                            ProjectId  = eventPostInfo.ProjectId,
                            UserAgent  = eventPostInfo.UserAgent
                        }, _storage, false);
                    }
                }
            } catch (ArgumentException ex) {
                Log.Error().Exception(ex).Project(eventPostInfo.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, ex.Message).Write();
                queueEntry.Complete();
            } catch (Exception ex) {
                Log.Error().Exception(ex).Project(eventPostInfo.ProjectId).Message("Error while processing event post \"{0}\": {1}", queueEntry.Value.FilePath, ex.Message).Write();
                errorCount++;
            }

            if (isSingleEvent && errorCount > 0)
            {
                queueEntry.Abandon();
                _storage.SetNotActive(queueEntry.Value.FilePath);
            }
            else
            {
                queueEntry.Complete();
                if (queueEntry.Value.ShouldArchive)
                {
                    _storage.CompleteEventPost(queueEntry.Value.FilePath, eventPostInfo.ProjectId, created, queueEntry.Value.ShouldArchive);
                }
                else
                {
                    _storage.DeleteFile(queueEntry.Value.FilePath);
                    _storage.SetNotActive(queueEntry.Value.FilePath);
                }
            }

            return JobResult.Success;
        }
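
Examples 3 and 4 both wrap event processing in the same claim/process/release flow around the post file. The sketch below condenses that flow into a single helper; the interface shape, the stub EventPostInfo type, and the TryProcess helper are assumptions inferred from the call sites above, not the library's actual API:

        using System;

        // Assumed surface of the storage calls used above, inferred from the call sites; the real API may differ.
        public interface IEventPostStorage
        {
            EventPostInfo GetEventPostAndSetActive(string path);   // claim the post file
            void SetNotActive(string path);                        // release the claim
            void CompleteEventPost(string path, string projectId, DateTime created, bool shouldArchive);
            void DeleteFile(string path);
        }

        // Minimal stub of the post info type, just enough for this sketch.
        public class EventPostInfo
        {
            public string ProjectId { get; set; }
        }

        public static class EventPostProcessing
        {
            // Hypothetical helper showing the claim/process/release flow shared by the jobs above.
            public static bool TryProcess(IEventPostStorage storage, string path, bool shouldArchive, Func<EventPostInfo, bool> process)
            {
                var post = storage.GetEventPostAndSetActive(path);   // claim
                if (post == null)
                {
                    storage.SetNotActive(path);                      // nothing to process; release the claim
                    return false;
                }

                DateTime created = DateTime.UtcNow;
                if (!process(post))
                {
                    storage.SetNotActive(path);                      // processing failed; release so it can be retried
                    return false;
                }

                if (shouldArchive)
                {
                    storage.CompleteEventPost(path, post.ProjectId, created, true);   // archive the completed post
                }
                else
                {
                    storage.DeleteFile(path);                        // done; remove the file and release the claim
                    storage.SetNotActive(path);
                }

                return true;
            }
        }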