private static void BackgroundJobsPostStart()
{
    var jobs = new IJob[]
    {
        new UpdateStatisticsJob(TimeSpan.FromSeconds(10), () => new EntitiesContext(), timeout: TimeSpan.FromMinutes(5)),
        new WorkItemCleanupJob(TimeSpan.FromDays(1), () => new EntitiesContext(), timeout: TimeSpan.FromDays(4)),
        new LuceneIndexingJob(TimeSpan.FromMinutes(10), timeout: TimeSpan.FromMinutes(2)),
    };
    var jobCoordinator = new WebFarmJobCoordinator(new EntityWorkItemRepository(() => new EntitiesContext()));

    _jobManager = new JobManager(jobs, jobCoordinator);
    _jobManager.Fail(e => ErrorLog.GetDefault(null).Log(new Error(e)));
    _jobManager.Start();
}
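The examples here keep the manager in a static _jobManager field; on application shutdown that manager should also be stopped. A minimal counterpart sketch, assuming the same static field and that WebBackgrounder's JobManager implements IDisposable (the method name BackgroundJobsStop is illustrative, not part of the example above):

// Illustrative shutdown hook: dispose the manager (which stops the scheduler),
// called e.g. from Application_End in Global.asax.
private static void BackgroundJobsStop()
{
    if (_jobManager != null)
    {
        _jobManager.Dispose();
        _jobManager = null;
    }
}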
private void ConfigureJobManager(IAppBuilder app)
{
    var jobs = new IJob[]
    {
        new BirthdateJob("BirthdateJob", TimeSpan.FromSeconds(1), Dal),
        new EventEndedJob("EventEndedJob", TimeSpan.FromSeconds(1), Dal)
    };

    var manager = new JobManager(jobs, new SingleServerJobCoordinator());
    manager.Start();

    app.CreatePerOwinContext(() => manager);
}
private static void BackgroundJobsPostStart()
{
    var jobs = new IJob[]
    {
        new WorkItemCleanJob(TimeSpan.FromDays(1), () => new EntitiesContext(), timeout: TimeSpan.FromDays(4)),
        new CustomerCompanySetCommonJob(TimeSpan.FromDays(1), () => new EntitiesContext(), timeout: TimeSpan.FromMinutes(2))
    };
    var coordinator = new WebFarmJobCoordinator(new EntityWorkItemRepository(() => new EntitiesContext()));

    _jobManager = new JobManager(jobs, coordinator) { RestartSchedulerOnFailure = true };
    _jobManager.Fail(ex => LogHelper.WriteLog("Background job error", ex));
    _jobManager.Start();
}
private static void BackgroundJobsPostStart()
{
    var jobs = new IJob[]
    {
        new WorkItemCleanupJob(TimeSpan.FromDays(1), () => new CodeFirstContext(), timeout: TimeSpan.FromDays(4)),
        new LuceneIndexingJob(TimeSpan.FromMinutes(10), timeout: TimeSpan.FromMinutes(2))
    };
    var coordinator = new WebFarmJobCoordinator(new EntityWorkItemRepository(() => new CodeFirstContext()));

    _jobManager = new JobManager(jobs, coordinator) { RestartSchedulerOnFailure = true };
    _jobManager.Fail(ex => Elmah.ErrorLog.GetDefault(null).Log(new Error(ex)));
    _jobManager.Start();
}
private static void BackgroundJobsPostStart(IAppConfiguration configuration)
{
    var indexer = Container.Kernel.TryGet<IIndexingService>();
    var jobs = new List<IJob>();
    if (indexer != null)
    {
        indexer.RegisterBackgroundJobs(jobs, configuration);
    }

    if (!configuration.HasWorker)
    {
        jobs.Add(
            new UpdateStatisticsJob(TimeSpan.FromMinutes(5),
                () => new EntitiesContext(configuration.SqlConnectionString, readOnly: false),
                timeout: TimeSpan.FromMinutes(5)));
    }

    if (configuration.CollectPerfLogs)
    {
        jobs.Add(CreateLogFlushJob());
    }

    if (jobs.AnySafe())
    {
        var jobCoordinator = new NuGetJobCoordinator();
        _jobManager = new JobManager(jobs, jobCoordinator)
        {
            RestartSchedulerOnFailure = true
        };
        _jobManager.Fail(e => ErrorLog.GetDefault(null).Log(new Error(e)));
        _jobManager.Start();
    }
}
private static void BackgroundJobsPostStart(IAppConfiguration configuration)
{
    var jobs = configuration.HasWorker
        ? new IJob[]
        {
            new LuceneIndexingJob(TimeSpan.FromMinutes(10),
                () => new EntitiesContext(configuration.SqlConnectionString, readOnly: true),
                timeout: TimeSpan.FromMinutes(2))
        }
        : new IJob[]
        {
            // readOnly: false workaround - let the statistics background job write to the DB even while the site is
            // in read-only mode, since losing that data is not a big concern
            new UpdateStatisticsJob(TimeSpan.FromMinutes(5),
                () => new EntitiesContext(configuration.SqlConnectionString, readOnly: false),
                timeout: TimeSpan.FromMinutes(5)),
            new LuceneIndexingJob(TimeSpan.FromMinutes(10),
                () => new EntitiesContext(configuration.SqlConnectionString, readOnly: true),
                timeout: TimeSpan.FromMinutes(2))
        };
    var jobCoordinator = new NuGetJobCoordinator();

    _jobManager = new JobManager(jobs, jobCoordinator)
    {
        RestartSchedulerOnFailure = true
    };
    _jobManager.Fail(e => ErrorLog.GetDefault(null).Log(new Error(e)));
    _jobManager.Start();
}
private static void BackgroundJobsPostStart()
{
    var jobs = new List<IJob>();

    var indexer = DependencyResolver.Current.GetService<IIndexingService>();
    if (indexer != null)
    {
        indexer.RegisterBackgroundJobs(jobs);
    }

    jobs.Add(new UpdateStatisticsJob(TimeSpan.FromMinutes(5), () => new EntitiesContext(), timeout: TimeSpan.FromMinutes(5)));
    jobs.Add(new WorkItemCleanupJob(TimeSpan.FromDays(1), () => new EntitiesContext(), timeout: TimeSpan.FromDays(4)));

    var jobCoordinator = new WebFarmJobCoordinator(new EntityWorkItemRepository(() => new EntitiesContext()));

    _jobManager = new JobManager(jobs, jobCoordinator)
    {
        RestartSchedulerOnFailure = true
    };
    _jobManager.Fail(e => ErrorLog.GetDefault(null).Log(new Error(e)));
    _jobManager.Start();
}
private static void BackgroundJobsPostStart(IAppConfiguration configuration)
{
    var indexer = DependencyResolver.Current.GetService<IIndexingService>();
    var jobs = new List<IJob>();
    if (indexer != null)
    {
        indexer.RegisterBackgroundJobs(jobs, configuration);
    }

    if (!configuration.HasWorker)
    {
        jobs.Add(
            new UpdateStatisticsJob(TimeSpan.FromMinutes(5),
                () => new EntitiesContext(configuration.SqlConnectionString, readOnly: false),
                timeout: TimeSpan.FromMinutes(5)));
    }

    if (configuration.CollectPerfLogs)
    {
        jobs.Add(CreateLogFlushJob());
    }

    if (configuration.StorageType == StorageType.AzureStorage)
    {
        var cloudDownloadCountService = DependencyResolver.Current.GetService<IDownloadCountService>() as CloudDownloadCountService;
        if (cloudDownloadCountService != null)
        {
            // Perform initial refresh + schedule new refreshes every 15 minutes
            HostingEnvironment.QueueBackgroundWorkItem(cancellationToken => cloudDownloadCountService.Refresh());
            jobs.Add(new CloudDownloadCountServiceRefreshJob(TimeSpan.FromMinutes(15), cloudDownloadCountService));
        }
    }

    if (jobs.AnySafe())
    {
        var jobCoordinator = new NuGetJobCoordinator();
        _jobManager = new JobManager(jobs, jobCoordinator)
        {
            RestartSchedulerOnFailure = true
        };
        _jobManager.Fail(e => ErrorLog.GetDefault(null).Log(new Error(e)));
        _jobManager.Start();
    }
}
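Every example above hands IJob instances to the JobManager. As a rough sketch of what such a job can look like, assuming WebBackgrounder's IJob exposes Name, Interval, Timeout and a Task-returning Execute() (the CleanupTempFilesJob below is purely illustrative and not part of any example above):

using System;
using System.IO;
using System.Threading.Tasks;
using WebBackgrounder;

// Illustrative only: a minimal IJob implementation that deletes stale temp files.
public class CleanupTempFilesJob : IJob
{
    public string Name { get { return "CleanupTempFilesJob"; } }
    public TimeSpan Interval { get { return TimeSpan.FromHours(1); } }
    public TimeSpan Timeout { get { return TimeSpan.FromMinutes(5); } }

    public Task Execute()
    {
        // Run the work on the thread pool so the scheduler thread is not blocked.
        return Task.Factory.StartNew(() =>
        {
            foreach (var file in Directory.EnumerateFiles(Path.GetTempPath(), "myapp-*.tmp"))
            {
                File.Delete(file);
            }
        });
    }
}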