/// <summary>
/// Runs <see cref="RunInternalAsync"/> and converts any thrown exception into a failed <see cref="JobResult"/>.
/// </summary>
private async Task<JobResult> TryRunAsync(JobRunContext context) {
    try {
        var result = await RunInternalAsync(context).AnyContext();
        return result;
    } catch (Exception ex) {
        // Never let a job exception escape; surface it as a result instead.
        return JobResult.FromException(ex);
    }
}
// Closes sessions whose last activity falls before the inactivity cutoff,
// paging through open sessions in batches of LIMIT.
protected override async Task<JobResult> RunInternalAsync(JobRunContext context) { const int LIMIT = 50; var results = await _eventRepository.GetOpenSessionsAsync(GetStartOfInactivePeriod(), new PagingOptions().WithPage(1).WithLimit(LIMIT)).AnyContext(); while (results.Documents.Count > 0 && !context.CancellationToken.IsCancellationRequested) { var inactivePeriod = GetStartOfInactivePeriod(); var sessionsToUpdate = new List<PersistentEvent>(LIMIT); foreach (var sessionStart in results.Documents) { /* Last activity = session start time plus its Value (presumably the session duration in seconds — TODO confirm). */ var lastActivityUtc = sessionStart.Date.UtcDateTime.AddSeconds((double)sessionStart.Value.GetValueOrDefault()); /* Still active within the window — leave it open. */ if (lastActivityUtc > inactivePeriod) continue; /* NOTE(review): second arg presumably marks the session closed — confirm against UpdateSessionStart. */ sessionStart.UpdateSessionStart(lastActivityUtc, true); sessionsToUpdate.Add(sessionStart); Debug.Assert(sessionStart.Value != null && sessionStart.Value >= 0, "Session start value cannot be a negative number."); } if (sessionsToUpdate.Count > 0) await _eventRepository.SaveAsync(sessionsToUpdate).AnyContext(); // Sleep so we are not hammering the backend. await Task.Delay(TimeSpan.FromSeconds(2.5)).AnyContext(); await results.NextPageAsync().AnyContext(); /* Keep the job lock alive while more pages remain. */ if (results.Documents.Count > 0) await context.JobLock.RenewAsync().AnyContext(); } return JobResult.Success; }
/// <summary>
/// Test job: simulates a bit of work, then asserts that the "WithLockingJob" lock is held while running.
/// </summary>
protected override async Task<JobResult> RunInternalAsync(JobRunContext context) {
    RunCount++;

    await Task.Delay(150, context.CancellationToken).AnyContext();

    bool isLocked = await _locker.IsLockedAsync("WithLockingJob").AnyContext();
    Assert.True(isLocked);

    return JobResult.Success;
}
/// <summary>
/// Test job: bumps the per-instance and global run counters and logs both, then reports success.
/// </summary>
protected override Task<JobResult> RunInternalAsync(JobRunContext context) {
    RunCount++;
    // Global counter is shared across instances, so bump it atomically.
    Interlocked.Increment(ref GlobalRunCount);

    Logger.Trace()
        .Message("HelloWorld Running: instance={0} runs={1} global={2}", _id, RunCount, GlobalRunCount)
        .Write();

    return Task.FromResult(JobResult.Success);
}
/// <summary>
/// Test job: records the run, prints a greeting, and simulates a short unit of work.
/// </summary>
protected override async Task<JobResult> RunInternalAsync(JobRunContext context) {
    RunCount++;

    Console.WriteLine("Hello World!");

    // Honor cancellation during the simulated work.
    await Task.Delay(100, context.CancellationToken).AnyContext();

    return JobResult.Success;
}
/// <summary>
/// Enforces event-count retention limits for every organization that has
/// retention days enabled, paging through them 100 at a time.
/// </summary>
protected override async Task<JobResult> RunInternalAsync(JobRunContext context) {
    var organizationResults = await _organizationRepository.GetByRetentionDaysEnabledAsync(new PagingOptions().WithPage(1).WithLimit(100)).AnyContext();

    while (organizationResults.Documents.Count > 0 && !context.CancellationToken.IsCancellationRequested) {
        foreach (var organization in organizationResults.Documents) {
            await EnforceEventCountLimitsAsync(organization).AnyContext();

            // Sleep so we are not hammering the backend.
            await Task.Delay(TimeSpan.FromSeconds(5)).AnyContext();
        }

        await organizationResults.NextPageAsync().AnyContext();

        // More pages to go — keep the job lock from expiring mid-run.
        if (organizationResults.Documents.Count > 0)
            await context.JobLock.RenewAsync().AnyContext();
    }

    return JobResult.Success;
}
/// <summary>
/// Sends daily summary notifications for projects whose summary window has ended,
/// processing them in batches of BATCH_SIZE. Projects are claimed for the next day
/// (IncrementNextSummaryEndOfDayTicksAsync) before processing so a crashed run
/// does not resend the same batch.
/// </summary>
protected override async Task<JobResult> RunInternalAsync(JobRunContext context) {
    if (!Settings.Current.EnableDailySummary)
        return JobResult.SuccessWithMessage("Summary notifications are disabled.");

    if (_mailer == null)
        return JobResult.SuccessWithMessage("Summary notifications are disabled due to null mailer.");

    const int BATCH_SIZE = 25;
    var projects = (await _projectRepository.GetByNextSummaryNotificationOffsetAsync(9, BATCH_SIZE).AnyContext()).Documents;
    while (projects.Count > 0 && !context.CancellationToken.IsCancellationRequested) {
        // Claim the batch up front by advancing each project's next-summary marker.
        var documentsUpdated = await _projectRepository.IncrementNextSummaryEndOfDayTicksAsync(projects.Select(p => p.Id).ToList()).AnyContext();
        Logger.Info().Message("Got {0} projects to process. ", projects.Count).Write();
        Debug.Assert(projects.Count == documentsUpdated);

        foreach (var project in projects) {
            // The summary window is the day leading up to the project's end-of-day marker.
            var utcStartTime = new DateTime(project.NextSummaryEndOfDayTicks - TimeSpan.TicksPerDay);
            if (utcStartTime < DateTime.UtcNow.Date.SubtractDays(2)) {
                Logger.Info().Message("Skipping daily summary older than two days for project \"{0}\" with a start time of \"{1}\".", project.Id, utcStartTime).Write();
                continue;
            }

            var notification = new SummaryNotification {
                Id = project.Id,
                UtcStartTime = utcStartTime,
                // End one second before the marker so the window doesn't overlap the next day.
                UtcEndTime = new DateTime(project.NextSummaryEndOfDayTicks - TimeSpan.TicksPerSecond)
            };

            await ProcessSummaryNotificationAsync(notification).AnyContext();

            // Sleep so we're not hammering the database.
            // FIX: added AnyContext() for consistency with every other awaited call in this file.
            await Task.Delay(TimeSpan.FromSeconds(1)).AnyContext();
        }

        projects = (await _projectRepository.GetByNextSummaryNotificationOffsetAsync(9, BATCH_SIZE).AnyContext()).Documents;

        // Keep the job lock alive while more batches remain.
        if (projects.Count > 0)
            await context.JobLock.RenewAsync().AnyContext();
    }

    return JobResult.SuccessWithMessage("Successfully sent summary notifications.");
}
/// <summary>
/// Downloads the MaxMind GeoLite2 city database, replacing any existing copy in storage,
/// and decompresses it on the fly while saving.
/// </summary>
protected override async Task<JobResult> RunInternalAsync(JobRunContext context) {
    try {
        if (await _storage.ExistsAsync(MindMaxGeoIPService.GEO_IP_DATABASE_PATH).AnyContext()) {
            Logger.Info().Message("Deleting existing GeoIP database.").Write();
            await _storage.DeleteFileAsync(MindMaxGeoIPService.GEO_IP_DATABASE_PATH, context.CancellationToken).AnyContext();
        }

        Logger.Info().Message("Downloading GeoIP database.").Write();
        // FIX: dispose the client and the response so sockets and buffered content are released.
        using (var client = new HttpClient())
        using (var file = await client.GetAsync("http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz", context.CancellationToken).AnyContext()) {
            if (!file.IsSuccessStatusCode)
                return JobResult.FailedWithMessage("Unable to download GeoIP database.");

            Logger.Info().Message("Extracting GeoIP database").Write();
            // Stream the gzip payload straight into storage without materializing it in memory.
            using (GZipStream decompressionStream = new GZipStream(await file.Content.ReadAsStreamAsync().AnyContext(), CompressionMode.Decompress))
                await _storage.SaveFileAsync(MindMaxGeoIPService.GEO_IP_DATABASE_PATH, decompressionStream, context.CancellationToken).AnyContext();
        }
    } catch (Exception ex) {
        Logger.Error().Exception(ex).Message("An error occurred while downloading the GeoIP database.").Write();
        return JobResult.FromException(ex);
    }

    Logger.Info().Message("Finished downloading GeoIP database.").Write();
    return JobResult.Success;
}
/// <summary>
/// Executes the job's core work. Implementations should honor
/// <c>context.CancellationToken</c> and report their outcome via <see cref="JobResult"/>.
/// </summary>
// FIX: removed the stray space in "Task <JobResult>" for consistency with the other declaration in this file.
protected abstract Task<JobResult> RunInternalAsync(JobRunContext context);
/// <summary>
/// Executes the job's core work. Implementations should honor
/// <c>context.CancellationToken</c> and report their outcome via <see cref="JobResult"/>.
/// </summary>
protected abstract Task<JobResult> RunInternalAsync(JobRunContext context);
/// <summary>
/// Minimal test job: records each invocation and immediately reports success.
/// </summary>
protected override Task<JobResult> RunInternalAsync(JobRunContext context) {
    RunCount += 1;
    return Task.FromResult(JobResult.Success);
}
/// <summary>
/// Dequeues a single entry, acquires a per-entry lock, processes it via
/// <c>ProcessQueueEntryAsync</c>, and — when <c>AutoComplete</c> is enabled —
/// completes or abandons the entry based on the result. Exceptions during
/// processing abandon the entry and are rethrown to the caller.
/// </summary>
protected override async Task<JobResult> RunInternalAsync(JobRunContext context) {
    // Cap the dequeue wait at 30 seconds so a quiet queue doesn't block this run forever.
    // FIX: dispose the linked source so its registrations on the parent tokens are released (leak).
    using (var linkedCancellationToken = CancellationTokenSource.CreateLinkedTokenSource(context.CancellationToken, TimeSpan.FromSeconds(30).ToCancellationToken())) {
        IQueueEntry<T> queueEntry;
        try {
            queueEntry = await _queue.DequeueAsync(linkedCancellationToken.Token).AnyContext();
        } catch (Exception ex) {
            return JobResult.FromException(ex, $"Error trying to dequeue message: {ex.Message}");
        }

        if (queueEntry == null)
            return JobResult.Success;

        if (context.CancellationToken.IsCancellationRequested) {
            _logger.Info("Job was cancelled. Abandoning queue item: {queueEntryId}", queueEntry.Id);
            await queueEntry.AbandonAsync().AnyContext();
            return JobResult.Cancelled;
        }

        using (var lockValue = await GetQueueEntryLockAsync(queueEntry, context.CancellationToken).AnyContext()) {
            if (lockValue == null) {
                // Someone else owns this entry; put it back.
                await queueEntry.AbandonAsync().AnyContext();
                return JobResult.SuccessWithMessage("Unable to acquire queue item lock.");
            }

            _logger.Info("Processing {0} queue entry ({1}).", _queueEntryName, queueEntry.Id);
            try {
                var result = await ProcessQueueEntryAsync(new JobQueueEntryContext<T>(queueEntry, lockValue, context.CancellationToken)).AnyContext();
                if (!AutoComplete)
                    return result;

                if (result.IsSuccess) {
                    await queueEntry.CompleteAsync().AnyContext();
                    _logger.Info("Completed {0} queue entry ({1}).", _queueEntryName, queueEntry.Id);
                } else {
                    await queueEntry.AbandonAsync().AnyContext();
                    _logger.Warn("Abandoned {0} queue entry ({1}).", _queueEntryName, queueEntry.Id);
                }

                return result;
            } catch (Exception ex) {
                await queueEntry.AbandonAsync().AnyContext();
                _logger.Error(ex, "Error processing {0} queue entry ({1}).", _queueEntryName, queueEntry.Id);
                // Rethrow (preserving the stack trace) so the job framework sees the failure.
                throw;
            }
        }
    }
}