public async Task BackupAsync(RefToken actor)
{
    if (currentJobToken != null)
    {
        throw new DomainException(T.Get("backups.alreadyRunning"));
    }

    if (state.Value.Jobs.Count >= MaxBackups)
    {
        throw new DomainException(T.Get("backups.maxReached", new { max = MaxBackups }));
    }

    var job = new BackupJob
    {
        Id = DomainId.NewGuid(),
        Started = clock.GetCurrentInstant(),
        Status = JobStatus.Started
    };

    currentJobToken = new CancellationTokenSource();
    currentJob = job;

    state.Value.Jobs.Insert(0, job);

    await state.WriteAsync();

    Process(job, actor, currentJobToken.Token);
}
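// A minimal cancellation counterpart to BackupAsync above: a sketch, assuming
// the same currentJobToken field and that ProcessAsync observes the token and
// performs the cleanup. The method name is an assumption, not original code.
public Task CancelAsync()
{
    currentJobToken?.Cancel();

    return Task.CompletedTask;
}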
public async Task<IActionResult> Configure(int? id = null)
{
    ConfigureJobModel model = CreateModel<ConfigureJobModel>();

    if (id.HasValue && id > 0)
    {
        BackupJob job = await backupJobRepository.Get(id.Value);

        if (job == null)
        {
            return NotFound();
        }

        // Providers are stored in execution order: backup provider first,
        // storage provider last, transform providers in between.
        var providers = job.Providers.OrderBy(p => p.Order);

        model.ID = job.ID;
        model.Name = job.Name;
        model.CronSchedule = await schedulerService.GetCronSchedule(id.Value);
        model.BackupProvider = providers.FirstOrDefault().Provider.ID;
        model.StorageProvider = providers.LastOrDefault().Provider.ID;
        model.TransformProviders = providers
            .Where(p => p.Provider.Type == ProviderType.Transform)
            .Select(tp => tp.Provider.ID)
            .ToArray();
        model.ProviderInstances = job.Providers.ToDictionary(k => k.Provider.ID, v => v.ID);
    }

    return View(model);
}
private void button1_Click(object sender, EventArgs e)
{
    // A vault GUID in registry format ("{...}") is 38 characters long.
    var vaultguid = comboBox1.Text.Substring(0, 38);
    var vault = app.LogInToVault(vaultguid);

    if (RemoveGarbageData(vault))
    {
        return;
    }

    try
    {
        var bu = new BackupJob
        {
            VaultGUID = vaultguid,
            BackupType = MFBackupType.MFBackupTypeFull,
            OverwriteExistingFiles = true,
            TargetFile = @"C:\privatecloud\0installersource\templates\fullbackup\cscec8.mfb"
        };

        app.VaultManagementOperations.BackupVault(bu);

        richTextBox1.AppendText(Environment.NewLine + "ok");
    }
    catch (Exception ex)
    {
        richTextBox1.AppendText(Environment.NewLine + "error:" + ex.Message);
    }
}
public JobTests()
{
    var logger = NullLoggerFactory.Instance.CreateLogger<BackupJob>();

    _client = new BackupServiceClient();
    _job = new BackupJob(logger, _client, _clock);
}
public JsonResult Backup(string source, string destination)
{
    BackupJob job = new BackupJob()
    {
        Progress = 0,
        ScheduleTime = DateTime.Now,
        UserId = 1,
        Destination = destination,
        Source = source,
        StartTime = DateTime.Now,
        EndTime = new DateTime(1900, 1, 1)
    };

    job.BackupJobId = dm.InsertData(job);

    // Run the copy on a background thread so the request returns
    // immediately; clients poll the Progress action for completion.
    Thread backupThread = new Thread(() => startBackup(job.BackupJobId, source, destination));
    backupThread.Start();

    var result = new { Id = job.BackupJobId, progress = 0 };

    return Json(result);
}
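// startBackup is referenced above but not shown. A hypothetical sketch of the
// worker, assuming the SyncService.SyncFolders method shown later in this
// section and a FolderEntry constructor that scans a directory; all names
// below other than SyncFolders are assumptions.
private void startBackup(int backupJobId, string source, string destination)
{
    try
    {
        var syncService = new SyncService();

        // Build the folder trees and copy everything from source to destination.
        syncService.SyncFolders(backupJobId, new FolderEntry(source), new FolderEntry(destination));
    }
    catch (Exception ex)
    {
        // A real implementation would persist the failure on the job record.
        Trace.TraceError(ex.ToString());
    }
}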
public BackupJob BackupGame(BackupSet set, bool forceAsynchronous = false)
{
    Log.Info("adding backup job for " + set.name + " (" + backupQueue.Size() + " backups in queue)");

    allBackupsCompleted = false;

    BackupJob job = new BackupJob(set);

    if (SAVE.configuration.asynchronous || forceAsynchronous)
    {
        backupQueue.Enqueue(job);
    }
    else
    {
        Log.Info("synchronous backup to backup set '" + set.name + "'");

        // wait for asynchronous backups to complete
        while (backupQueue.Size() > 0)
        {
            Thread.Sleep(100);
        }

        // do backup
        job.Backup();

        // done
        allBackupsCompleted = true;
    }

    return job;
}
private void WaitUntilBackupJobCompleted(BackupJob job)
{
    while (!job.IsCompleted() && !stopRequested)
    {
        Thread.Sleep(100);
    }
}
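// Example use of the two helpers above: queue a backup and block the calling
// thread until the worker has finished it. The backupSet value is illustrative.
BackupJob job = BackupGame(backupSet, forceAsynchronous: true);

WaitUntilBackupJobCompleted(job);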
private async Task RunHelpAsync(bool isHelp, string commandName)
{
    if (isHelp || string.IsNullOrEmpty(commandName))
    {
        var version = Assembly.GetExecutingAssembly().GetName().Version.ToString();

        await Console.Out.WriteLineAsync($"Cli version: {version.Substring(0, version.Length - 2)}");
        await Console.Out.WriteLineAsync($"Current directory: {Settings.ContentRootPath}");
        await Console.Out.WriteLineAsync($"Cli location: {Assembly.GetExecutingAssembly().Location}");
        await Console.Out.WriteLineAsync();

        await CliUtils.PrintRowLine();
        await CliUtils.PrintRow("Usage");
        await CliUtils.PrintRowLine();

        BackupJob.PrintUsage();
        RestoreJob.PrintUsage();

        await CliUtils.PrintRowLine();
        await CliUtils.PrintRow("https://www.datory.io/docs/");
        await CliUtils.PrintRowLine();

        Console.ReadLine();
    }
    else
    {
        Console.WriteLine($"'{commandName}' is not a siteserver command. See 'siteserver --help'");
    }
}
public async Task BackupAsync(RefToken actor)
{
    if (currentTaskToken != null)
    {
        throw new DomainException("Another backup process is already running.");
    }

    if (state.Value.Jobs.Count >= MaxBackups)
    {
        throw new DomainException($"You cannot have more than {MaxBackups} backups.");
    }

    var job = new BackupJob
    {
        Id = Guid.NewGuid(),
        Started = clock.GetCurrentInstant(),
        Status = JobStatus.Started
    };

    currentTaskToken = new CancellationTokenSource();
    currentJob = job;

    state.Value.Jobs.Insert(0, job);

    await state.WriteAsync();

    Process(job, actor, currentTaskToken.Token);
}
public async Task TestExecuteAsync()
{
    Assert.Equal("backup", BackupJob.CommandName);

    var context = new JobContextImpl(BackupJob.CommandName, new[] { "-d", "backup" }, null);

    await BackupJob.ExecuteAsync(context);

    Assert.Equal("backup", BackupJob.CommandName);
}
//=========================================================================
//
//  AJAX ACTIONS
//
//=========================================================================

/// <summary>
/// Starts a backup job on the contents of the blob with the given URI.
/// </summary>
public JsonResult Start(string uri)
{
    var job = new BackupJob(new Uri(uri), RoleSettings.StorageCredentials);

    lock (BackupJobs.Jobs)
    {
        BackupJobs.Jobs.Add(job.Id, job);
    }

    job.Start();

    return Json(new { success = true, jobId = job.Id });
}
public void BackupWork()
{
    Log.Info("backup thread running");

    while (!stopRequested)
    {
        BackupJob job = backupQueue.Dequeue();

        Log.Info("executing backup job " + job);
        job.Backup();

        allBackupsCompleted = backupQueue.Size() == 0;
    }

    Log.Info("backup thread terminated");
}
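// A sketch of how the worker loop above could be hosted, assuming that
// backupQueue.Dequeue() blocks while the queue is empty; the Start and Stop
// names are assumptions, not part of the original code.
private Thread backupThread;

public void Start()
{
    stopRequested = false;

    backupThread = new Thread(BackupWork) { IsBackground = true };
    backupThread.Start();
}

public void Stop()
{
    // The loop only checks stopRequested between jobs, so a real
    // implementation would also need to unblock a pending Dequeue().
    stopRequested = true;
}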
private static async Task RunHelpAsync(bool isHelp, string commandName, Dictionary<string, Func<IJobContext, Task>> pluginJobs)
{
    if (isHelp || string.IsNullOrEmpty(commandName))
    {
        var version = Assembly.GetExecutingAssembly().GetName().Version.ToString();

        await Console.Out.WriteLineAsync($"Cli version: {version.Substring(0, version.Length - 2)}");
        await Console.Out.WriteLineAsync($"Current directory: {CliUtils.PhysicalApplicationPath}");
        await Console.Out.WriteLineAsync($"Cli location: {Assembly.GetExecutingAssembly().Location}");
        await Console.Out.WriteLineAsync();

        await CliUtils.PrintRowLine();
        await CliUtils.PrintRow("Usage");
        await CliUtils.PrintRowLine();

        var backupJob = new BackupJob();
        var installJob = new InstallJob();
        var restoreJob = new RestoreJob();
        var syncJob = new SyncJob();
        var updateJob = new UpdateJob();
        var versionJob = new VersionJob();

        backupJob.PrintUsage();
        installJob.PrintUsage();
        restoreJob.PrintUsage();
        syncJob.PrintUsage();
        updateJob.PrintUsage();
        versionJob.PrintUsage();

        if (pluginJobs != null && pluginJobs.Count > 0)
        {
            Console.WriteLine($"Plugin commands: {TranslateUtils.ObjectCollectionToString(pluginJobs.Keys)}");
            Console.WriteLine();
        }

        await CliUtils.PrintRowLine();
        await CliUtils.PrintRow(CloudUtils.Root.DocsCliUrl);
        await CliUtils.PrintRowLine();

        Console.ReadLine();
    }
    else
    {
        Console.WriteLine($"'{commandName}' is not a siteserver command. See 'siteserver --help'");
    }
}
public async Task<BackupJob> Get(int id)
{
    BackupJob job = await context
        .Jobs
        .Include(j => j.Providers)
            .ThenInclude(x => x.Provider)
        .Include(j => j.Providers)
            .ThenInclude(x => x.Values)
                .ThenInclude(x => x.Property)
        .FirstOrDefaultAsync(j => j.ID == id);

    return job;
}
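// Example call, assuming a repository field exposing the Get method above.
// Because both Include chains are eagerly loaded, job.Providers can be ordered
// and its Provider navigation read without further queries:
BackupJob job = await backupJobRepository.Get(42);

foreach (var instance in job.Providers.OrderBy(p => p.Order))
{
    Console.WriteLine($"{instance.Order}: {instance.Provider.Name}");
}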
private void chkJob_CheckedChanged(object sender, EventArgs e)
{
    if (chkJob.Checked)
    {
        foreach (var item in databases.Where(x => x.backup))
        {
            string[] interval = item.times.Split(',');

            BackupJob.StartdJobs(item.directory, item.name, interval);
        }
    }
    else
    {
        BackupJob.StopJobs();
    }
}
public bool Backup(MFilesVault vault)
{
    var path = Path.Combine(Path.GetTempPath(), vault.Guid + ".mfb");

    var ovault = MFServerUtility.GetVault(vault);

    var backup = new BackupJob
    {
        BackupType = MFBackupType.MFBackupTypeFull,
        TargetFile = path,
        VaultGUID = ovault.GetGUID(),
        OverwriteExistingFiles = true
    };

    var app = MFServerUtility.ConnectToMfApp(vault);

    app.VaultManagementOperations.BackupVault(backup);

    return true;
}
public void SyncFolders(int JobId, FolderEntry source, FolderEntry destination)
{
    List<BackupJobDetail> jobDetails = readSyncFolders(JobId, source, destination);

    BackupJob job = dm.SelectData(new BackupJob(), string.Format("WHERE BackupJobId = {0}", JobId)).FirstOrDefault();

    int files = 0;
    long totalBytes = 0;

    foreach (BackupJobDetail jobDetail in jobDetails)
    {
        files++;
        totalBytes += jobDetail.FileSize;
    }

    double prog = 0;

    foreach (BackupJobDetail jobDetail in jobDetails)
    {
        jobDetail.DateSynced = DateTime.Now;

        File.Copy(jobDetail.FileNameFrom, jobDetail.FileNameTo, true);

        dm.InsertData(jobDetail);

        prog++;

        job.Progress = (decimal)Math.Round((prog / files) * 100, 2);

        if (files == prog)
        {
            job.EndTime = DateTime.Now;
        }

        dm.UpdateData(job);

        Progress(new SyncInfo() { FileCount = 1, FileSize = jobDetail.FileSize, FileName = jobDetail.FileNameTo });
    }

    // delete files that are not present in the source
    foreach (var item in destination.Files.Keys)
    {
        if (!source.Files.ContainsKey(item))
        {
            File.Delete(Path.Combine(destination.Path, destination.Files[item].FileName));
        }
    }

    // delete all folders that do not exist in the source
    foreach (var item in destination.Folders)
    {
        if (!source.Folders.ContainsKey(item.Key))
        {
            DeleteDirectoryRecursive(item.Value.Path);
        }

        // SyncFolders(JobId, item.Value, destination.Folders[item.Key]);
    }
}
public void TestExcludeBackupJobs()
{
    // For each bit pattern i, the expected job count equals the number of
    // flags that are set (the population count of i).
    var results = new[] { 0, 1, 1, 2, 1, 2, 2, 3, 1, 2, 2, 3, 2, 3, 3, 4 };

    for (int i = 0; i < 16; i++)
    {
        bool backupSqlServer = i / 8 % 2 == 1;
        bool backupDocumentDB = i / 4 % 2 == 1;
        bool backupMongoDB = i / 2 % 2 == 1;
        bool backupAzureStorage = i % 2 == 1;

        var backupjobs = GenerateBackupJobs();

        BackupJob.ExcludeBackupJobs(backupjobs, backupSqlServer, backupDocumentDB, backupMongoDB, backupAzureStorage);

        Assert.AreEqual(results[i], backupjobs.Count,
            $"i: {i}, backupSqlServer: {backupSqlServer}, backupDocumentDB: {backupDocumentDB}, backupMongoDB: {backupMongoDB}, backupAzureStorage: {backupAzureStorage}");
    }
}
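// GenerateBackupJobs is referenced above but not shown. For the expected
// counts to hold (remaining jobs = number of set flags), it plausibly returns
// one job per source type; the enum and property names below are assumptions.
private static List<BackupJob> GenerateBackupJobs()
{
    return new List<BackupJob>
    {
        new BackupJob { SourceType = BackupSourceType.SqlServer },
        new BackupJob { SourceType = BackupSourceType.DocumentDB },
        new BackupJob { SourceType = BackupSourceType.MongoDB },
        new BackupJob { SourceType = BackupSourceType.AzureStorage }
    };
}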
private async Task RemoveAsync(BackupJob job)
{
    try
    {
#pragma warning disable MA0040 // Flow the cancellation token
        await backupArchiveStore.DeleteAsync(job.Id);
#pragma warning restore MA0040 // Flow the cancellation token
    }
    catch (Exception ex)
    {
        log.LogError(ex, "Failed to remove backup with id '{backupId}'.", job.Id);
    }

    state.Value.Jobs.Remove(job);

    await state.WriteAsync();
}
private async Task RemoveAsync(BackupJob job)
{
    try
    {
        await backupArchiveStore.DeleteAsync(job.Id);
    }
    catch (Exception ex)
    {
        log.LogError(ex, job.Id.ToString(), (logOperationId, w) => w
            .WriteProperty("action", "deleteBackup")
            .WriteProperty("status", "failed")
            .WriteProperty("operationId", logOperationId));
    }

    state.Value.Jobs.Remove(job);

    await state.WriteAsync();
}
public async Task<IActionResult> Configure(int? id = null)
{
    CreateJobModel model;

    if (id.HasValue && id > 0)
    {
        BackupJob job = await backupJobRepository.Get(id.Value);

        if (job == null)
        {
            return NotFound();
        }

        var providers = job.Providers.OrderBy(p => p.Order);

        model = CreateModel<ModifyJobModel>("Modify Schedule");
        model.ID = job.ID;
        model.Name = job.Name;
        model.CronSchedule = await schedulerService.GetCronSchedule(id.Value);
        model.BackupProvider = providers.FirstOrDefault().Provider.ID;
        model.StorageProvider = providers.LastOrDefault().Provider.ID;
        model.TransformProvider = providers
            .Where(p => p.Provider.Type == ProviderType.Transform)
            .Select(tp => tp.Provider.ID)
            .ToArray();
    }
    else
    {
        model = CreateModel<CreateJobModel>("Create Schedule");
    }

    var backupProviders = await providerRepository.Get(ProviderType.Backup);
    model.BackupProviders = new SelectList(backupProviders, "ID", "Name");

    var storageProviders = await providerRepository.Get(ProviderType.Storage);
    model.StorageProviders = new SelectList(storageProviders, "ID", "Name");

    var transformProviders = await providerRepository.Get(ProviderType.Transform);
    model.TransformProviders = new SelectList(transformProviders, "ID", "Name");

    return View(model);
}
private async Task RemoveAsync(BackupJob job)
{
    try
    {
#pragma warning disable MA0040 // Flow the cancellation token
        await backupArchiveStore.DeleteAsync(job.Id);
#pragma warning restore MA0040 // Flow the cancellation token
    }
    catch (Exception ex)
    {
        log.LogError(ex, job.Id.ToString(), (logOperationId, w) => w
            .WriteProperty("action", "deleteBackup")
            .WriteProperty("status", "failed")
            .WriteProperty("operationId", logOperationId));
    }

    state.Value.Jobs.Remove(job);

    await state.WriteAsync();
}
/// <summary>
/// Creates a backup of the blob with the given URI; returns whether successful.
/// </summary>
private static bool Backup(Uri snapshotUri)
{
    if (!RoleEnvironment.IsAvailable)
    {
        Console.WriteLine("To run a backup, the tool must be run from within an Azure deployed environment.");
        return false;
    }

    if (snapshotUri == null)
    {
        Console.WriteLine("Snapshot URI cannot be null.");
        return false;
    }

    Console.WriteLine("Starting backup on " + snapshotUri + "...");

    var job = new BackupJob(snapshotUri, RoleSettings.StorageCredentials, true);

    job.StartBlocking(); // Runs for a while...

    return true;
}
public JsonResult Progress(int Id)
{
    decimal progressPerc = 0;
    decimal completePerc = 100m;

    try
    {
        BackupJob job = dm.SelectData(new BackupJob(), string.Format("WHERE BackupJobId = {0}", Id)).FirstOrDefault();

        progressPerc = job.Progress;
    }
    catch (Exception)
    {
        // If the job cannot be loaded, report it as complete.
        progressPerc = completePerc;
    }

    bool complete = progressPerc == completePerc;

    var result = new { Complete = complete, Progress = progressPerc };

    return Json(result);
}
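// A hedged polling sketch for the Progress action above, assuming the default
// MVC route /Backup/Progress and the JSON shape it returns ({ Complete,
// Progress }); the base address, property casing, and interval are illustrative.
using var client = new HttpClient { BaseAddress = new Uri("http://localhost:5000") };

while (true)
{
    using var doc = JsonDocument.Parse(await client.GetStringAsync($"/Backup/Progress?Id={jobId}"));

    if (doc.RootElement.GetProperty("Complete").GetBoolean())
    {
        break;
    }

    await Task.Delay(TimeSpan.FromSeconds(1));
}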
private static async Task RunHelpAsync(bool isHelp, string commandName)
{
    if (isHelp || string.IsNullOrEmpty(commandName))
    {
        var version = Assembly.GetExecutingAssembly().GetName().Version.ToString();

        await Console.Out.WriteLineAsync($"Cli version: {version.Substring(0, version.Length - 2)}");
        await Console.Out.WriteLineAsync($"Current directory: {CliUtils.PhysicalApplicationPath}");
        await Console.Out.WriteLineAsync($"Cli location: {Assembly.GetExecutingAssembly().Location}");
        await Console.Out.WriteLineAsync();

        await CliUtils.PrintRowLine();
        await CliUtils.PrintRow("Usage");
        await CliUtils.PrintRowLine();

        BackupJob.PrintUsage();
        InstallJob.PrintUsage();
        RestoreJob.PrintUsage();
        UpdateJob.PrintUsage();
        VersionJob.PrintUsage();

        await CliUtils.PrintRowLine();
        await CliUtils.PrintRow("https://www.siteserver.cn/docs/cli");
        await CliUtils.PrintRowLine();

        Console.ReadLine();
    }
    else
    {
        Console.WriteLine($"'{commandName}' is not a siteserver command. See 'siteserver --help'");
    }
}
public async Task<int> AddOrUpdate(int? id, string name, IEnumerable<ProviderInstance> providerInstances)
{
    BackupJob job = null;

    if (id != null && id > 0)
    {
        job = await context.Jobs.Include(j => j.Providers).FirstOrDefaultAsync(j => j.ID == id);
        job.HasChangedModel = false;
        job.Providers.Clear();
    }
    else
    {
        job = new BackupJob();
        context.Jobs.Add(job);
    }

    job.Name = name;
    job.Providers.AddRange(providerInstances);

    await context.SaveChangesAsync();

    return job.ID;
}
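// Illustrative call, assuming the provider instances are already ordered so
// that the backup provider comes first and the storage provider last, as the
// job execution code elsewhere in this section expects; the instance
// variables are hypothetical:
int jobId = await backupJobRepository.AddOrUpdate(
    null,
    "Nightly database backup",
    new[] { databaseBackupInstance, compressionInstance, fileStorageInstance });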
public async Task<IActionResult> Details(int id)
{
    BackupJob job = await backupJobRepository.Get(id);

    if (job == null)
    {
        return NotFound();
    }

    var providers = job.Providers.OrderBy(p => p.Order);

    JobDetailModel model = CreateModel<JobDetailModel>("Scheduled Backup");
    model.ID = id;
    model.Name = job.Name;
    model.HasChangedModel = job.HasChangedModel;
    model.BackupProvider = providers.FirstOrDefault().Provider.Name;
    model.StorageProvider = providers.LastOrDefault().Provider.Name;
    model.TransformProviders = providers
        .Where(p => p.Provider.Type == ProviderType.Transform)
        .Select(tp => tp.Provider.Name);
    model.CronSchedule = await schedulerService.GetCronSchedule(id);

    return View(model);
}
public async Task Execute(IJobExecutionContext context)
{
    int jobId = int.Parse(context.JobDetail.Key.Name);
    int historyId = 0;

    IDisposableList disposableList = new IDisposableList();

    try
    {
        // load the job from the database and create a history point for this scheduled execution
        BackupJob job = await backupJobRepository.Get(jobId);
        DateTime? lastRun = await backupJobRepository.GetLastRun(jobId);

        historyId = await backupJobRepository.AddHistory(job.ID);

        // sort the providers so they are executed in the correct order
        var providers = job.Providers.OrderBy(p => p.Order);

        // load and create the backup and storage providers
        IBackupProvider backupProvider = await providerMappingService.CreateProvider<IBackupProvider>(providers.FirstOrDefault());
        IStorageProvider storageProvider = await providerMappingService.CreateProvider<IStorageProvider>(providers.LastOrDefault());

        disposableList.AddRange(new IDisposable[] { backupProvider, storageProvider });

        // load all the transform providers
        List<ITransformProvider> transformProviders = new List<ITransformProvider>();

        foreach (var tp in providers.Where(p => p.Provider.Type == ProviderType.Transform))
        {
            transformProviders.Add(await providerMappingService.CreateProvider<ITransformProvider>(tp));
        }

        disposableList.AddRange(transformProviders);

        // fetch all items from the backup providers
        var items = await backupProvider.GetItems(lastRun);

        List<List<TransformBackupItem>> transformExecuteList = new List<List<TransformBackupItem>>(transformProviders.Count());
        Dictionary<ITransformProvider, IEnumerable<MappedBackupItem>> transformers = new Dictionary<ITransformProvider, IEnumerable<MappedBackupItem>>(transformProviders.Count());

        for (int i = 0; i < transformProviders.Count(); i++)
        {
            if (i > 0)
            {
                // chain this transform onto the output of the previous one
                var mappedItems = await transformProviders[i].MapInput(transformers.Last().Value.Select(x => x.Output));

                transformers.Add(transformProviders[i], mappedItems);

                List<TransformBackupItem> subTransformExecuteList = new List<TransformBackupItem>();

                foreach (var mappedItem in mappedItems)
                {
                    int currentMappedIndex = i;

                    Func<Stream, Task> action = async (stream) =>
                    {
                        //Dictionary<BackupItem, Stream> dictionary = new Dictionary<BackupItem, Stream>();
                        //foreach (var backupItem in mappedItem.Input)
                        //{
                        //    PassThroughStream backupItemStream = disposableList.CreateAndAdd<PassThroughStream>();
                        //    dictionary.Add(backupItem, backupItemStream);
                        //}
                        //Task transformTask = transformProviders[currentMappedIndex].TransformItem(mappedItem.Output, stream, dictionary);
                        //foreach (var kvp in dictionary)
                        //{
                        //    PassThroughStream passThroughStream = kvp.Value as PassThroughStream;
                        //    var transformBackupItem = transformExecuteList[currentMappedIndex - 1].FirstOrDefault(x => x.MappedBackupItem.Output == kvp.Key);
                        //    await transformBackupItem.Execute(passThroughStream);
                        //    passThroughStream.SetComplete();
                        //}
                        //transformTask.Wait();

                        Dictionary<BackupItem, Stream> dictionary = new Dictionary<BackupItem, Stream>();

                        foreach (var backupItem in mappedItem.Input)
                        {
                            ByteBufferStream ms = disposableList.CreateAndAdd<ByteBufferStream>();

                            var transformBackupItem = transformExecuteList[currentMappedIndex - 1].FirstOrDefault(x => x.MappedBackupItem.Output == backupItem);
                            await transformBackupItem.Execute(ms);

                            dictionary.Add(backupItem, ms);
                        }

                        await transformProviders[currentMappedIndex].TransformItem(mappedItem.Output, stream, dictionary);
                    };

                    subTransformExecuteList.Add(new TransformBackupItem { MappedBackupItem = mappedItem, Execute = action });
                }

                transformExecuteList.Add(subTransformExecuteList);
            }
            else
            {
                // the first transform reads directly from the backup provider
                var mappedItems = await transformProviders[i].MapInput(items);

                transformers.Add(transformProviders[i], mappedItems);

                List<TransformBackupItem> subTransformExecuteList = new List<TransformBackupItem>();

                foreach (var mappedItem in mappedItems)
                {
                    int currentMappedIndex = i;

                    Func<Stream, Task> action = async (stream) =>
                    {
                        Dictionary<BackupItem, Stream> dictionary = new Dictionary<BackupItem, Stream>();

                        foreach (var backupItem in mappedItem.Input)
                        {
                            Stream itemStream = await disposableList.CreateAndAdd(async () => await backupProvider.OpenRead(backupItem));

                            dictionary.Add(backupItem, itemStream);
                        }

                        await transformProviders[currentMappedIndex].TransformItem(mappedItem.Output, stream, dictionary);
                    };

                    subTransformExecuteList.Add(new TransformBackupItem { MappedBackupItem = mappedItem, Execute = action });
                }

                transformExecuteList.Add(subTransformExecuteList);
            }
        }

        if (transformProviders.Count() > 0)
        {
            // store the output of the last transform in the chain
            foreach (var mappedItem in transformExecuteList.Last())
            {
                using (WaitableByteBufferStream outputStream = new WaitableByteBufferStream())
                {
                    Task<bool> storeItem = storageProvider.StoreItem(mappedItem.MappedBackupItem.Output, outputStream);

                    await mappedItem.Execute(outputStream);

                    outputStream.SetComplete();

                    storeItem.Wait();
                }
            }
        }
        else
        {
            // no transforms configured: stream items straight from the backup provider to storage
            foreach (var item in items)
            {
                using (Stream itemStream = await backupProvider.OpenRead(item))
                {
                    await storageProvider.StoreItem(item, itemStream);
                }
            }
        }

        await backupJobRepository.UpdateHistory(historyId, ExitCode.Success, "Backup completed successfully.");
    }
    catch (Exception ex)
    {
        logger.LogError(ex, $"Backup failed with ID {jobId} and History ID {historyId}.");

        if (historyId > 0)
        {
            await backupJobRepository.UpdateHistory(historyId, ExitCode.Failed, $"Backup failed with message: {ex.Message} ({ex.GetType()})");
        }
    }
    finally
    {
        foreach (IDisposable item in disposableList)
        {
            try
            {
                item?.Dispose();
            }
            catch (NotImplementedException)
            {
                // ignore this exception
            }
            catch (Exception ex)
            {
                // log every other error
                logger.LogError(ex, "Failed to dispose item.");
            }
        }

        disposableList.Clear();
        disposableList = null;
    }
}
private async Task ProcessAsync(BackupJob job, RefToken actor, CancellationToken ct)
{
    var handlers = CreateHandlers();

    var lastTimestamp = job.Started;

    try
    {
        var appId = DomainId.Create(Key);

        using (var stream = backupArchiveLocation.OpenStream(job.Id))
        {
            using (var writer = await backupArchiveLocation.OpenWriterAsync(stream))
            {
                await writer.WriteVersionAsync();

                var userMapping = new UserMapping(actor);

                var context = new BackupContext(appId, userMapping, writer);

                await eventStore.QueryAsync(async storedEvent =>
                {
                    var @event = eventDataFormatter.Parse(storedEvent);

                    if (@event.Payload is SquidexEvent squidexEvent && squidexEvent.Actor != null)
                    {
                        context.UserMapping.Backup(squidexEvent.Actor);
                    }

                    foreach (var handler in handlers)
                    {
                        await handler.BackupEventAsync(@event, context);
                    }

                    writer.WriteEvent(storedEvent);

                    job.HandledEvents = writer.WrittenEvents;
                    job.HandledAssets = writer.WrittenAttachments;

                    lastTimestamp = await WritePeriodically(lastTimestamp);
                }, GetFilter(), null, ct);

                foreach (var handler in handlers)
                {
                    ct.ThrowIfCancellationRequested();

                    await handler.BackupAsync(context);
                }

                foreach (var handler in handlers)
                {
                    ct.ThrowIfCancellationRequested();

                    await handler.CompleteBackupAsync(context);
                }

                await userMapping.StoreAsync(writer, userResolver);
            }

            stream.Position = 0;

            ct.ThrowIfCancellationRequested();

            await backupArchiveStore.UploadAsync(job.Id, stream, ct);
        }

        job.Status = JobStatus.Completed;
    }
    catch (OperationCanceledException)
    {
        await RemoveAsync(job);
    }
    catch (Exception ex)
    {
        log.LogError(ex, job.Id.ToString(), (ctx, w) => w
            .WriteProperty("action", "makeBackup")
            .WriteProperty("status", "failed")
            .WriteProperty("backupId", ctx));

        job.Status = JobStatus.Failed;
    }
    finally
    {
        job.Stopped = clock.GetCurrentInstant();

        await state.WriteAsync();

        currentJobToken?.Dispose();
        currentJobToken = null;
        currentJob = null;
    }
}
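// WritePeriodically is referenced above but not shown. A plausible sketch,
// assuming the NodaTime clock used elsewhere in this snippet: persist state
// at most once per interval so the HandledEvents/HandledAssets counters
// become visible without a write per event. The interval is an assumption.
private async Task<Instant> WritePeriodically(Instant lastTimestamp)
{
    var now = clock.GetCurrentInstant();

    if (now - lastTimestamp >= Duration.FromSeconds(10))
    {
        lastTimestamp = now;

        await state.WriteAsync();
    }

    return lastTimestamp;
}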
private void Process(BackupJob job, RefToken actor, CancellationToken ct)
{
    ProcessAsync(job, actor, ct).Forget();
}
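// Forget is an extension used above to run the backup without awaiting it.
// The original is not shown; a common fire-and-forget implementation looks
// roughly like this (a sketch, not the original code):
public static class TaskExtensions
{
    public static void Forget(this Task task)
    {
        // Observe faults so they don't surface later as unobserved exceptions.
        task.ContinueWith(
            t => { _ = t.Exception; },
            CancellationToken.None,
            TaskContinuationOptions.OnlyOnFaulted,
            TaskScheduler.Default);
    }
}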