protected override async Task<JobResult> RunInternalAsync(JobContext context) {
    const int LIMIT = 100;
    var results = await _eventRepository.GetOpenSessionsAsync(GetStartOfInactivePeriod(), new PagingOptions().WithPage(1).WithLimit(LIMIT)).AnyContext();
    while (results.Documents.Count > 0 && !context.CancellationToken.IsCancellationRequested) {
        var inactivePeriod = GetStartOfInactivePeriod();
        var sessionsToUpdate = new List<PersistentEvent>(LIMIT);

        foreach (var sessionStart in results.Documents) {
            var lastActivityUtc = sessionStart.Date.UtcDateTime.AddSeconds((double)sessionStart.Value.GetValueOrDefault());
            if (lastActivityUtc > inactivePeriod)
                continue;

            sessionStart.UpdateSessionStart(lastActivityUtc, true);
            sessionsToUpdate.Add(sessionStart);
            Debug.Assert(sessionStart.Value != null && sessionStart.Value >= 0, "Session start value cannot be a negative number.");
        }

        if (sessionsToUpdate.Count > 0)
            await _eventRepository.SaveAsync(sessionsToUpdate).AnyContext();

        // Sleep so we are not hammering the backend.
        await Task.Delay(TimeSpan.FromSeconds(2.5)).AnyContext();

        await results.NextPageAsync().AnyContext();
        if (results.Documents.Count > 0)
            await context.RenewLockAsync().AnyContext();
    }

    return JobResult.Success;
}
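// A minimal sketch of the GetStartOfInactivePeriod helper assumed above. It is inferred
// from the inline computation in the later variant of this job (DateTime.UtcNow minus
// DefaultInactivePeriod) and is not confirmed by the source.
private DateTime GetStartOfInactivePeriod() {
    return DateTime.UtcNow.Subtract(DefaultInactivePeriod);
}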
protected override async Task<JobResult> RunInternalAsync(JobContext context) {
    RunCount++;

    await SystemClock.SleepAsync(150, context.CancellationToken).AnyContext();
    Assert.True(await _locker.IsLockedAsync("WithLockingJob").AnyContext());

    return JobResult.Success;
}
protected override Task<JobResult> RunInternalAsync(JobContext context) {
    RunCount++;
    Interlocked.Increment(ref GlobalRunCount);

    _logger.Trace("HelloWorld Running: instance={0} runs={1} global={2}", _id, RunCount, GlobalRunCount);

    return Task.FromResult(JobResult.Success);
}
protected override Task<JobResult> RunInternalAsync(JobContext context) {
    do {
        Interlocked.Increment(ref _iterationCount);

        if (context.CancellationToken.IsCancellationRequested)
            break;

        if (_iterationCount % 10000 == 0)
            _logger.Info("LongRunningJob Running: instance={0} iterations={1}", _id, IterationCount);
    } while (true);

    return Task.FromResult(JobResult.Success);
}
protected override async Task<JobResult> RunInternalAsync(JobContext context) {
    const int LIMIT = 100;
    var results = await _eventRepository.GetOpenSessionsAsync(DateTime.UtcNow.SubtractMinutes(1), new PagingOptions().WithPage(1).WithLimit(LIMIT)).AnyContext();
    while (results.Documents.Count > 0 && !context.CancellationToken.IsCancellationRequested) {
        var inactivePeriodUtc = DateTime.UtcNow.Subtract(DefaultInactivePeriod);
        var sessionsToUpdate = new List<PersistentEvent>(results.Documents.Count);
        var cacheKeysToRemove = new List<string>(results.Documents.Count * 2);

        foreach (var sessionStart in results.Documents) {
            var lastActivityUtc = sessionStart.Date.UtcDateTime.AddSeconds((double)sessionStart.Value.GetValueOrDefault());

            var heartbeatResult = await GetHeartbeatAsync(sessionStart).AnyContext();
            if (heartbeatResult != null && (heartbeatResult.Close || heartbeatResult.ActivityUtc > lastActivityUtc))
                sessionStart.UpdateSessionStart(heartbeatResult.ActivityUtc, isSessionEnd: heartbeatResult.Close || heartbeatResult.ActivityUtc <= inactivePeriodUtc);
            else if (lastActivityUtc <= inactivePeriodUtc)
                sessionStart.UpdateSessionStart(lastActivityUtc, isSessionEnd: true);
            else
                continue;

            sessionsToUpdate.Add(sessionStart);
            if (heartbeatResult != null) {
                cacheKeysToRemove.Add(heartbeatResult.CacheKey);
                if (heartbeatResult.Close)
                    cacheKeysToRemove.Add(heartbeatResult.CacheKey + "-close");
            }

            Debug.Assert(sessionStart.Value != null && sessionStart.Value >= 0, "Session start value cannot be a negative number.");
        }

        if (sessionsToUpdate.Count > 0)
            await _eventRepository.SaveAsync(sessionsToUpdate).AnyContext();

        if (cacheKeysToRemove.Count > 0)
            await _cacheClient.RemoveAllAsync(cacheKeysToRemove).AnyContext();

        // Sleep so we are not hammering the backend.
        await Task.Delay(TimeSpan.FromSeconds(2.5)).AnyContext();

        await results.NextPageAsync().AnyContext();
        if (results.Documents.Count > 0)
            await context.RenewLockAsync().AnyContext();
    }

    return JobResult.Success;
}
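// Hedged sketch of the heartbeat cache convention implied above: one entry per session
// keyed by CacheKey, plus an optional "<CacheKey>-close" marker when the client ends the
// session. SetHeartbeatAsync and the exact key layout are assumptions for illustration;
// only the "-close" suffix and the RemoveAllAsync cleanup are visible in the source.
private Task SetHeartbeatAsync(string cacheKey, DateTime activityUtc, bool close) {
    var tasks = new List<Task> { _cacheClient.SetAsync(cacheKey, activityUtc) };
    if (close)
        tasks.Add(_cacheClient.SetAsync(cacheKey + "-close", true));

    return Task.WhenAll(tasks);
}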
protected override async Task<JobResult> RunInternalAsync(JobContext context) {
    var results = await _organizationRepository.GetByRetentionDaysEnabledAsync(new PagingOptions().WithPage(1).WithLimit(100)).AnyContext();
    while (results.Documents.Count > 0 && !context.CancellationToken.IsCancellationRequested) {
        foreach (var organization in results.Documents) {
            await EnforceEventCountLimitsAsync(organization).AnyContext();

            // Sleep so we are not hammering the backend.
            await Task.Delay(TimeSpan.FromSeconds(5)).AnyContext();
        }

        await results.NextPageAsync().AnyContext();
        if (results.Documents.Count > 0)
            await context.RenewLockAsync().AnyContext();
    }

    return JobResult.Success;
}
protected override async Task<JobResult> RunInternalAsync(JobContext context) {
    if (!Settings.Current.EnableDailySummary)
        return JobResult.SuccessWithMessage("Summary notifications are disabled.");

    if (_mailer == null)
        return JobResult.SuccessWithMessage("Summary notifications are disabled due to null mailer.");

    const int BATCH_SIZE = 25;
    var projects = (await _projectRepository.GetByNextSummaryNotificationOffsetAsync(9, BATCH_SIZE).AnyContext()).Documents;
    while (projects.Count > 0 && !context.CancellationToken.IsCancellationRequested) {
        var documentsUpdated = await _projectRepository.IncrementNextSummaryEndOfDayTicksAsync(projects).AnyContext();
        _logger.Info("Got {0} projects to process.", projects.Count);
        Debug.Assert(projects.Count == documentsUpdated);

        foreach (var project in projects) {
            var utcStartTime = new DateTime(project.NextSummaryEndOfDayTicks - TimeSpan.TicksPerDay);
            if (utcStartTime < DateTime.UtcNow.Date.SubtractDays(2)) {
                _logger.Info("Skipping daily summary older than two days for project \"{0}\" with a start time of \"{1}\".", project.Id, utcStartTime);
                continue;
            }

            var notification = new SummaryNotification {
                Id = project.Id,
                UtcStartTime = utcStartTime,
                UtcEndTime = new DateTime(project.NextSummaryEndOfDayTicks - TimeSpan.TicksPerSecond)
            };

            await ProcessSummaryNotificationAsync(notification).AnyContext();

            // Sleep so we're not hammering the database.
            await Task.Delay(TimeSpan.FromSeconds(1)).AnyContext();
        }

        projects = (await _projectRepository.GetByNextSummaryNotificationOffsetAsync(9, BATCH_SIZE).AnyContext()).Documents;
        if (projects.Count > 0)
            await context.RenewLockAsync().AnyContext();
    }

    return JobResult.SuccessWithMessage("Successfully sent summary notifications.");
}
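// Worked example of the summary-window tick math above: if NextSummaryEndOfDayTicks marks
// midnight UTC ending 2016-01-02, the summarized window is that whole preceding day.
// Illustration only; the dates are hypothetical.
var endOfDayTicks = new DateTime(2016, 1, 3, 0, 0, 0, DateTimeKind.Utc).Ticks;
var utcStartTime = new DateTime(endOfDayTicks - TimeSpan.TicksPerDay);    // 2016-01-02 00:00:00
var utcEndTime = new DateTime(endOfDayTicks - TimeSpan.TicksPerSecond);   // 2016-01-02 23:59:59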
protected override async Task<JobResult> RunInternalAsync(JobContext context) {
    try {
        if (await _storage.ExistsAsync(MaxMindGeoIpService.GEO_IP_DATABASE_PATH).AnyContext()) {
            _logger.Info("Deleting existing GeoIP database.");
            await _storage.DeleteFileAsync(MaxMindGeoIpService.GEO_IP_DATABASE_PATH, context.CancellationToken).AnyContext();
        }

        _logger.Info("Downloading GeoIP database.");
        using (var client = new HttpClient()) {
            var file = await client.GetAsync("http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz", context.CancellationToken).AnyContext();
            if (!file.IsSuccessStatusCode)
                return JobResult.FailedWithMessage("Unable to download GeoIP database.");

            _logger.Info("Extracting GeoIP database.");
            using (var decompressionStream = new GZipStream(await file.Content.ReadAsStreamAsync().AnyContext(), CompressionMode.Decompress))
                await _storage.SaveFileAsync(MaxMindGeoIpService.GEO_IP_DATABASE_PATH, decompressionStream, context.CancellationToken).AnyContext();
        }
    } catch (Exception ex) {
        _logger.Error(ex, "An error occurred while downloading the GeoIP database.");
        return JobResult.FromException(ex);
    }

    _logger.Info("Finished downloading GeoIP database.");
    return JobResult.Success;
}
protected override Task<JobResult> RunInternalAsync(JobContext context) {
    RunCount++;
    return Task.FromResult(JobResult.Success);
}
protected abstract Task<JobResult> RunInternalAsync(JobContext context);
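// Hedged usage sketch of how a base class might drive the abstract hook above: build a
// context from the caller's token and translate exceptions into a JobResult. JobContext's
// constructor signature here is an assumption; JobResult.FromException appears in the source.
public async Task<JobResult> RunAsync(CancellationToken cancellationToken = default(CancellationToken)) {
    var context = new JobContext(cancellationToken);
    try {
        return await RunInternalAsync(context).AnyContext();
    } catch (Exception ex) {
        return JobResult.FromException(ex);
    }
}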