public async Task Start(JobDefinition<DefinitionSequence> jobDefinition, FactoryStartOptions options)
{
    if (status != null)
    {
        throw new JobFacInvalidRunStatusException($"Sequence has already been started (instance {jobInstanceKey})");
    }

    jobDefinition.ThrowIfInvalid();

    steps = await definitionRepo.GetStepsForSequence(jobDefinition.Id);
    if (steps.Count == 0)
    {
        throw new JobFacInvalidDataException($"Unable to retrieve steps for sequence {jobDefinition.Id}");
    }

    this.jobDefinition = jobDefinition;

    status = new JobStatus<StatusSequence>
    {
        Key = jobInstanceKey,
        StartOptions = options,
        RunStatus = RunStatus.StartRequested,
        LastUpdated = DateTimeOffset.UtcNow,
    };

    await historyRepo.InsertStatus(status);
    await StartNextStep();
}
public void Schedule(string workKey, Action work, TimeSpan interval, string description = null)
{
    Func<Task> factory = () => Task.Factory.StartNew(work);
    var definition = new JobDefinition(workKey, factory, Schedules.Interval(interval), description);
    Schedule(definition);
}
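A usage sketch for the overload above; the scheduler instance and the work delegate are hypothetical, only the signature shown here is assumed:

// Hypothetical caller; "scheduler" is an instance of the class exposing the Schedule(...) overload above.
scheduler.Schedule(
    workKey: "cache-cleanup",
    work: () => Console.WriteLine("Purging expired cache entries"),
    interval: TimeSpan.FromMinutes(15),
    description: "Removes expired cache entries every 15 minutes");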
private void StartJob(JobDefinition jobDefinition, bool runNow = false)
{
    this.EventReporter.Trace("Creating " + jobDefinition.JobName);

    var jobType = this.TypeLoader.LoadType(jobDefinition.AssemblyName, jobDefinition.ClassName);
    var configType = jobType.BaseType.GetGenericArguments()[0];
    var wrapperType = typeof(JobWrapper<,>).MakeGenericType(jobType, configType);

    var jobDetail = new JobDetailImpl(jobDefinition.JobName, wrapperType);
    jobDetail.JobDataMap.Add("Configuration", jobDefinition.Configuration);

    if (runNow)
    {
        var trigger = new SimpleTriggerImpl(
            jobDefinition.JobName + "Trigger",
            DateBuilder.FutureDate(0, IntervalUnit.Minute),
            null,
            1,
            TimeSpan.FromMinutes(int.MaxValue));
        this.Scheduler.ScheduleJob(jobDetail, trigger);
    }
    else if (jobDefinition.Schedule is JobSimpleSchedule)
    {
        var schedule = (JobSimpleSchedule)jobDefinition.Schedule;
        var trigger = new SimpleTriggerImpl(
            jobDefinition.JobName + "Trigger",
            DateBuilder.FutureDate(schedule.DelayStartMinutes, IntervalUnit.Minute),
            null,
            SimpleTriggerImpl.RepeatIndefinitely,
            TimeSpan.FromMinutes(schedule.IntervalMinutes));
        this.Scheduler.ScheduleJob(jobDetail, trigger);
    }
    else if (jobDefinition.Schedule is JobCronSchedule)
    {
        var schedule = (JobCronSchedule)jobDefinition.Schedule;
        var trigger = new CronTriggerImpl(
            jobDefinition.JobName + "Trigger",
            jobDefinition.JobName + "Group",
            schedule.CronScheduleExpression);
        this.Scheduler.ScheduleJob(jobDetail, trigger);
    }

    this.EventReporter.Trace("Done Creating " + jobDefinition.JobName);
}
public void Schedule(JobDefinition definition)
{
    if (!_work.IsJobRegisterd(definition.JobKey))
    {
        _work.Register(definition);
    }
}
//JAVA TO C# CONVERTER TODO TASK: Most Java annotations will not have direct .NET equivalent attributes:
//ORIGINAL LINE: @Test public void createMonitorJobByIdsAndQuery()
public virtual void createMonitorJobByIdsAndQuery()
{
    // given
    HistoricDecisionInstanceQuery query = historyService.createHistoricDecisionInstanceQuery().decisionDefinitionKey(DECISION);
    Batch batch = historyService.deleteHistoricDecisionInstancesAsync(decisionInstanceIds, query, null);

    // when
    helper.executeSeedJob(batch);

    // then the seed job definition still exists but the seed job is removed
    JobDefinition seedJobDefinition = helper.getSeedJobDefinition(batch);
    assertNotNull(seedJobDefinition);

    Job seedJob = helper.getSeedJob(batch);
    assertNull(seedJob);

    // and a monitor job definition and job exists
    JobDefinition monitorJobDefinition = helper.getMonitorJobDefinition(batch);
    assertNotNull(monitorJobDefinition);

    Job monitorJob = helper.getMonitorJob(batch);
    assertNotNull(monitorJob);
}
//JAVA TO C# CONVERTER TODO TASK: Most Java annotations will not have direct .NET equivalent attributes:
//ORIGINAL LINE: @Test public void createDeletionJobsByIdsAndQuery()
public virtual void createDeletionJobsByIdsAndQuery()
{
    // given
    rule.ProcessEngineConfiguration.BatchJobsPerSeed = 5;

    HistoricDecisionInstanceQuery query = historyService.createHistoricDecisionInstanceQuery().decisionDefinitionKey(DECISION);
    Batch batch = historyService.deleteHistoricDecisionInstancesAsync(decisionInstanceIds, query, null);

    JobDefinition seedJobDefinition = helper.getSeedJobDefinition(batch);
    JobDefinition deletionJobDefinition = helper.getExecutionJobDefinition(batch);

    // when
    helper.executeSeedJob(batch);

    // then
    IList<Job> deletionJobs = helper.getJobsForDefinition(deletionJobDefinition);
    assertEquals(5, deletionJobs.Count);

    foreach (Job deletionJob in deletionJobs)
    {
        assertEquals(deletionJobDefinition.Id, deletionJob.JobDefinitionId);
        assertEquals(currentTime, deletionJob.Duedate);
        assertNull(deletionJob.ProcessDefinitionId);
        assertNull(deletionJob.ProcessDefinitionKey);
        assertNull(deletionJob.ProcessInstanceId);
        assertNull(deletionJob.ExecutionId);
    }

    // and the seed job still exists
    Job seedJob = helper.getJobForDefinition(seedJobDefinition);
    assertNotNull(seedJob);
}
public virtual void Initialize()
{
    var currentJobs = All();
    logger.Debug("Initializing jobs. Available: {0} Existing:{1}", _jobs.Count(), currentJobs.Count);

    foreach (var currentJob in currentJobs)
    {
        if (!_jobs.Any(c => c.GetType().ToString() == currentJob.TypeName))
        {
            logger.Debug("Removing job from database '{0}'", currentJob.Name);
            _database.Delete(currentJob);
        }
    }

    foreach (var job in _jobs)
    {
        var jobDefinition = currentJobs.SingleOrDefault(c => c.TypeName == job.GetType().ToString());

        if (jobDefinition == null)
        {
            jobDefinition = new JobDefinition();
            jobDefinition.TypeName = job.GetType().ToString();
            jobDefinition.LastExecution = DateTime.Now;
        }

        jobDefinition.Enable = job.DefaultInterval.TotalSeconds > 0;
        jobDefinition.Name = job.Name;
        jobDefinition.Interval = Convert.ToInt32(job.DefaultInterval.TotalMinutes);

        SaveDefinition(jobDefinition);
    }
}
public virtual void testSetJobDefinitionPriorityWithCascade()
{
    // given a process instance with a job with default priority and a corresponding job definition
    ProcessInstance instance = runtimeService.createProcessInstanceByKey("asyncTaskProcess").startBeforeActivity("task").execute();
    Job job = managementService.createJobQuery().singleResult();
    JobDefinition jobDefinition = managementService.createJobDefinitionQuery().jobDefinitionId(job.JobDefinitionId).singleResult();

    // when I set the job definition's priority
    managementService.setOverridingJobPriorityForJobDefinition(jobDefinition.Id, 52, true);

    // then the job definition's priority value has changed
    JobDefinition updatedDefinition = managementService.createJobDefinitionQuery().jobDefinitionId(jobDefinition.Id).singleResult();
    assertEquals(52, (long)updatedDefinition.OverridingJobPriority);

    // the existing job's priority has changed as well
    Job updatedExistingJob = managementService.createJobQuery().singleResult();
    assertEquals(52, updatedExistingJob.Priority);

    // and a new job of that definition receives the updated priority
    runtimeService.createProcessInstanceModification(instance.Id).startBeforeActivity("task").execute();

    Job newJob = getJobThatIsNot(updatedExistingJob);
    assertEquals(52, newJob.Priority);
}
/// <summary>
/// Create new ScheduledJobInvocationInfo object with update information and
/// update the job definition object.
/// </summary>
private void UpdateJobInvocationInfo()
{
    Dictionary<string, object> parameters = UpdateParameters();
    string name = _definition.Name;
    string command;

    if (ScriptBlock != null)
    {
        command = ScriptBlock.ToString();
    }
    else if (FilePath != null)
    {
        command = FilePath;
    }
    else
    {
        command = _definition.InvocationInfo.Command;
    }

    JobDefinition jobDefinition = new JobDefinition(typeof(ScheduledJobSourceAdapter), command, name);
    jobDefinition.ModuleName = ModuleName;
    JobInvocationInfo jobInvocationInfo = new ScheduledJobInvocationInfo(jobDefinition, parameters);

    _definition.UpdateJobInvocationInfo(jobInvocationInfo, false);
}
public virtual void testSuspensionByJobDefinitionId_shouldSuspendJob()
{
    // given
    // a running process instance with a failed job
    IDictionary<string, object> @params = new Dictionary<string, object>();
    @params["fail"] = true;
    runtimeService.startProcessInstanceByKey("suspensionProcess", @params);

    // the job definition
    JobDefinition jobDefinition = managementService.createJobDefinitionQuery().singleResult();

    // the failed job
    JobQuery jobQuery = managementService.createJobQuery();
    Job job = jobQuery.singleResult();
    assertFalse(job.Suspended);

    // when
    // the job will be suspended
    managementService.suspendJobByJobDefinitionId(jobDefinition.Id);

    // then
    // the job should be suspended
    assertEquals(0, jobQuery.active().count());
    assertEquals(1, jobQuery.suspended().count());

    Job suspendedJob = jobQuery.suspended().singleResult();
    assertEquals(job.Id, suspendedJob.Id);
    assertEquals(jobDefinition.Id, suspendedJob.JobDefinitionId);
    assertTrue(suspendedJob.Suspended);
}
/// <summary>
/// Provides a chance to load the current job definition.
/// It is called when the document processor tab is initialized.
/// </summary>
/// <param name="jd">current job definition</param>
public override void OnLoad(JobDefinition jd)
{
    System.Windows.Forms.Control myDocProcessorControl = null;

    // If the MyDocProcessorControls was already constructed, use it.
    // ExtendedProperties is an in-memory hash map for application development.
    // It persists as long as the job definition exists, so it is a good place to
    // store the controls for the document processors.
    // jd.CheckOutFiles = true;
    if (jd.ExtendedProperties.Contains(DocumentProcessorGuid))
    {
        myDocProcessorControl = (System.Windows.Forms.Control)jd.ExtendedProperties[DocumentProcessorGuid];
    }

    // First time: construct a MyDocProcessorControls and store it in the ExtendedProperties,
    // using the DocumentProcessorGuid as a unique key.
    if (null == myDocProcessorControl)
    {
        myDocProcessorControl = new UserInterface(jd, DocumentProcessorGuid);
        jd.ExtendedProperties.Add(DocumentProcessorGuid, myDocProcessorControl);
    }
}
public override void Execute(
    JobDefinition job,
    Action<int, int> notifyProgress,
    Action<string> updateStatus,
    IRepository repositoryService,
    IJobManager jobManager)
{
    var allFiles = job.Files.ToDictionary(file => file, _ => false);
    var totalItems = job.Tasks.Count;
    var currentItem = 0;

    foreach (var task in job.Tasks)
    {
        updateStatus(string.Format("Processing files used by task \"{0}\"...", task.Name));

        allFiles[task.EntryPoint.Assembly] = true;

        foreach (var file in task.EntryPoint.References)
            allFiles[file] = true;

        foreach (var file in task.InputFiles)
            allFiles[file] = true;

        foreach (var file in task.OutputFiles)
            allFiles[file] = true;

        currentItem++;
        notifyProgress(currentItem, totalItems);
    }

    updateStatus("Building file list...");

    job.Files = allFiles.Where(pair => pair.Value)
                        .Select(pair => pair.Key)
                        .ToList();
}
public static JobDefinition GetJobDefinition(this IJobDefinitionsOperations operations,
    HybridDataManagementClient client,
    string dataSourceName,
    string dataSinkName,
    string resourceGroupName,
    string dataManagerName,
    string runLocation,
    UserConfirmation userConfirmation,
    string deviceName,
    string containerName,
    string[] volumeNames,
    BackupChoice backupChoice,
    string fileNameFilter = null,
    string[] rootDirectories = null,
    AzureStorageType azureStorageType = AzureStorageType.Blob,
    bool isDirectoryMode = false)
{
    var jobDefinition = new JobDefinition();

    jobDefinition.DataSinkId = client.DataStores.Get(
        dataStoreName: dataSinkName,
        resourceGroupName: resourceGroupName,
        dataManagerName: dataManagerName).Id;

    jobDefinition.DataSourceId = client.DataStores.Get(
        dataStoreName: dataSourceName,
        resourceGroupName: resourceGroupName,
        dataManagerName: dataManagerName).Id;

    RunLocation parsedRunLocation = RunLocation.None;
    if (Enum.TryParse(runLocation, true, out parsedRunLocation))
    {
        jobDefinition.RunLocation = parsedRunLocation;
    }

    jobDefinition.State = State.Enabled;
    jobDefinition.UserConfirmation = userConfirmation;
    jobDefinition.DataServiceInput = GetDataServiceInput(
        deviceName, containerName, volumeNames, backupChoice,
        fileNameFilter, rootDirectories, azureStorageType, isDirectoryMode);

    return jobDefinition;
}
public void CanDeploy_Job_UnderWebApplication()
{
    WithExpectedUnsupportedCSOMnO365RunnerExceptions(() =>
    {
        // OOTB job with 2 parameters for the constructor
        var webAppJobDefinition = new JobDefinition
        {
            Name = Rnd.String(),
            Title = Rnd.String(),
            ScheduleString = "yearly at jan 1 09:00:00",
            JobType = "Microsoft.SharePoint.Administration.SPDeadSiteDeleteJobDefinition, Microsoft.SharePoint",

            ConstructorParams = new Collection<JobDefinitionCtorParams>()
            {
                JobDefinitionCtorParams.JobName,
                JobDefinitionCtorParams.WebApplication
            }
        };

        var model = SPMeta2Model
            .NewWebApplicationModel(webApp =>
            {
                webApp.AddJob(webAppJobDefinition);
            });

        TestModel(model);
    });
}
public virtual void testSetJobRetriesByDefinitionUnlocksInconsistentJobs()
{
    // given a job definition
    //JAVA TO C# CONVERTER WARNING: The original Java variable was marked 'final':
    //ORIGINAL LINE: final org.camunda.bpm.engine.management.JobDefinition jobDefinition = managementService.createJobDefinitionQuery().singleResult();
    JobDefinition jobDefinition = managementService.createJobDefinitionQuery().singleResult();

    // and an inconsistent job that is never again picked up by a job executor
    CommandExecutor commandExecutor = processEngineConfiguration.CommandExecutorTxRequired;
    commandExecutor.execute(new CommandAnonymousInnerClass2(this, jobDefinition));

    // when the job retries are reset
    managementService.setJobRetriesByJobDefinitionId(jobDefinition.Id, 3);

    // then the job can be picked up again
    JobEntity job = (JobEntity)managementService.createJobQuery().singleResult();
    assertNotNull(job);
    assertNull(job.LockOwner);
    assertNull(job.LockExpirationTime);
    assertEquals(3, job.Retries);

    deleteJobAndIncidents(job);
}
public virtual void testActivationByJobDefinitionIdUsingBuilder()
{
    // given
    // a running process instance with a failed job
    runtimeService.startProcessInstanceByKey("suspensionProcess", Variables.createVariables().putValue("fail", true));

    // suspended job definitions and corresponding jobs
    managementService.suspendJobDefinitionByProcessDefinitionKey("suspensionProcess", true);

    // the failed job
    JobQuery jobQuery = managementService.createJobQuery();
    assertEquals(1, jobQuery.suspended().count());

    JobDefinition jobDefinition = managementService.createJobDefinitionQuery().singleResult();

    // when
    // the job will be activated
    managementService.updateJobSuspensionState().byJobDefinitionId(jobDefinition.Id).activate();

    // then
    // the job should be active
    assertEquals(1, jobQuery.active().count());
    assertEquals(0, jobQuery.suspended().count());
}
public void ConvertFullDefinitionToJob()
{
    var jobDefinition = new JobDefinition()
    {
        Providers = new List<string>() { "TestMessage" },
        When = "TestMessage.IntValue == 3",
        Id = "TestJobDefinition",
        Do = new List<CommandDefinition>()
        {
            new CommandDefinition()
            {
                Command = "DoSomething",
                Execution = new ExecutionStrategyDefinition() { Mode = ExecutionMode.Default },
                Parameters = new Dictionary<string, object>() { { "Param1", "blabla" } },
                Type = "TestCommand"
            }
        }
    };

    //var job = jobDefinition.ToJob(new Dictionary<string, Type>() { { "TestMessage", typeof(TestMessage) } });

    //Assert.IsNotNull(job.Condition);
    //Assert.AreEqual(1, job.Commands.Count());
    //Assert.AreEqual("TestJobDefinition", job.Id);
    //Assert.AreEqual("blabla", job.Commands.FirstOrDefault().Parameters["Param1"]);
}
internal bool RemoveCachedActivity(Guid instanceId)
{
    JobDefinition jobDefinition = new JobDefinition(null, null, null);
    jobDefinition.InstanceId = instanceId;
    return this.RemoveCachedActivity(jobDefinition);
}
private static CreateJobValidationModel CreateNewCreateJobValidationModel()
{
    JobCreateModel jobCreateModel = new JobCreateModel
    {
        JobDefinitionId = "job-def-1",
        Trigger = new Trigger { EntityId = "spec-1" },
        SpecificationId = "spec-1",
        MessageBody = "body",
        Properties = new Dictionary<string, string>
        {
            { "prop-1", "property 1" },
            { "prop-2", "property 2" }
        },
        InvokerUserId = "authorId",
        InvokerUserDisplayName = "authorname"
    };

    JobDefinition jobDefinition = new JobDefinition
    {
        Id = "job-def-1",
        RequireEntityId = true,
        RequireSpecificationId = true,
        RequireMessageBody = true,
        RequireMessageProperties = new[] { "prop-1", "prop-2" }
    };

    return new CreateJobValidationModel
    {
        JobCreateModel = jobCreateModel,
        JobDefinition = jobDefinition
    };
}
public async Task SaveDefinitionExitsEarlyIfValidationFails()
{
    JobDefinition jobDefinition = NewJobDefinition();

    string invalidProperty = NewRandomString();
    string validationFailureMessage = NewRandomString();

    GivenTheJobsRepositoryReturnsTheStatusCode(jobDefinition, HttpStatusCode.OK);
    GivenTheJobDefinitionValidationResult(jobDefinition,
        NewValidationResult(_ => _.WithValidationFailures(
            NewValidationFailure(vf => vf.WithPropertyName(invalidProperty)
                .WithErrorMessage(validationFailureMessage)))));

    BadRequestObjectResult actionResult = (await WhenTheJobDefinitionIsSaved(jobDefinition)) as BadRequestObjectResult;
    SerializableError serializableError = actionResult?.Value as SerializableError;

    serializableError
        .Should()
        .NotBeNull();

    serializableError[invalidProperty]
        .Should()
        .BeEquivalentTo(new[] { validationFailureMessage });

    ThenNoJobDefinitionsWereSaved();
    AndTheCacheWasNotInvalidated();
}
public async Task<IActionResult> SaveDefinition(JobDefinition definition)
{
    try
    {
        Guard.ArgumentNotNull(definition, nameof(definition));

        ValidationResult validationResult = await _validator.ValidateAsync(definition);

        if (!validationResult.IsValid)
        {
            return validationResult.AsBadRequest();
        }

        HttpStatusCode result = await _jobDefinitionsRepositoryPolicy.ExecuteAsync(() =>
            _jobDefinitionsRepository.SaveJobDefinition(definition));

        if (!result.IsSuccess())
        {
            int statusCode = (int)result;

            _logger.Error($"Failed to save json file: {definition.Id} to cosmos db with status {statusCode}");

            return new StatusCodeResult(statusCode);
        }
    }
    catch (Exception exception)
    {
        _logger.Error(exception, $"Exception occurred writing job definition {definition?.Id} to cosmos db");

        throw;
    }

    await _cachePolicy.ExecuteAsync(() => _cacheProvider.RemoveAsync<List<JobDefinition>>(CacheKeys.JobDefinitions));

    return new NoContentResult();
}
private void StartJob(JobDefinition job)
{
    this.EventReporter.Trace("Creating job: " + job.JobName);

    if (job.Schedule is JobCronSchedule)
    {
        // Cron schedules are not handled by this overload.
    }
    else if (job.Schedule is JobSimpleSchedule)
    {
        var type = this.TypeLoader.LoadType(job.AssemblyName, job.ClassName);

        // JobWrapper<,> takes the job type and its configuration type; the configuration type
        // is resolved from the job's base type, as in the cron/run-now variant of StartJob.
        var configType = type.BaseType.GetGenericArguments()[0];
        var genericJobWrapperType = typeof(JobWrapper<,>);
        var combinedJobType = genericJobWrapperType.MakeGenericType(type, configType);

        var jobDetail = new JobDetailImpl(job.JobName, combinedJobType);
        var simpleSchedule = (JobSimpleSchedule)job.Schedule;

        ITrigger trigger;
        if (simpleSchedule.DelayStartMinutes != 0)
        {
            trigger = new SimpleTriggerImpl(
                job.JobName + "Trigger",
                DateBuilder.FutureDate(simpleSchedule.DelayStartMinutes, IntervalUnit.Minute),
                null,
                SimpleTriggerImpl.RepeatIndefinitely,
                TimeSpan.FromMinutes(simpleSchedule.IntervalMinutes));
        }
        else
        {
            trigger = new SimpleTriggerImpl(
                job.JobName + "Trigger",
                null,
                SimpleTriggerImpl.RepeatIndefinitely,
                TimeSpan.FromMinutes(simpleSchedule.IntervalMinutes));
        }

        this.Scheduler.ScheduleJob(jobDetail, trigger);
    }

    this.EventReporter.Trace("Done Creating " + job.JobName);
}
public virtual void testDoNotCreateNewIncident()
{
    startProcessInstance(PROCESS_DEFINITION_KEY);

    ProcessInstance pi = runtimeService.createProcessInstanceQuery().singleResult();

    HistoricIncidentQuery query = historyService.createHistoricIncidentQuery().processInstanceId(pi.Id);
    HistoricIncident incident = query.singleResult();
    assertNotNull(incident);

    JobDefinition jobDefinition = managementService.createJobDefinitionQuery().singleResult();

    // set retries to 1 by job definition id
    managementService.setJobRetriesByJobDefinitionId(jobDefinition.Id, 1);

    // the incident still exists
    HistoricIncident tmp = query.singleResult();
    assertEquals(incident.Id, tmp.Id);
    assertNull(tmp.EndTime);
    assertTrue(tmp.Open);

    // execute the available job (should fail again)
    executeAvailableJobs();

    // the incident still exists and there
    // should not be a new incident
    assertEquals(1, query.count());

    tmp = query.singleResult();
    assertEquals(incident.Id, tmp.Id);
    assertNull(tmp.EndTime);
    assertTrue(tmp.Open);
}
public virtual void testSetJobDefinitionPriorityOverridesBpmnPriority()
{
    // given a process instance with a job with default priority and a corresponding job definition
    ProcessInstance instance = runtimeService.createProcessInstanceByKey("jobPrioProcess").startBeforeActivity("task2").execute();
    Job job = managementService.createJobQuery().singleResult();
    JobDefinition jobDefinition = managementService.createJobDefinitionQuery().jobDefinitionId(job.JobDefinitionId).singleResult();

    // when I set the job definition's priority
    managementService.setOverridingJobPriorityForJobDefinition(jobDefinition.Id, 62);

    // then the job definition's priority value has changed
    JobDefinition updatedDefinition = managementService.createJobDefinitionQuery().jobDefinitionId(jobDefinition.Id).singleResult();
    assertEquals(62, (long)updatedDefinition.OverridingJobPriority);

    // the existing job's priority is still the value as given in the BPMN XML
    Job updatedExistingJob = managementService.createJobQuery().singleResult();
    assertEquals(5, updatedExistingJob.Priority);

    // and a new job of that definition receives the updated priority,
    // meaning that the updated priority overrides the priority specified in the BPMN XML
    runtimeService.createProcessInstanceModification(instance.Id).startBeforeActivity("task2").execute();

    Job newJob = getJobThatIsNot(updatedExistingJob);
    assertEquals(62, newJob.Priority);
}
public void DiscoverParametersTest()
{
    var jd = new JobDefinition();
    jd.WorkflowTypeName = typeof(Jhu.Graywulf.Jobs.Query.SqlQueryJob).AssemblyQualifiedName;

    jd.DiscoverWorkflowParameters();
}
public JobItem Create(JobDefinition definition)
{
    var stateProvider = _context.State;
    var state = stateProvider.Retrieve(definition.JobKey); // State could be null!

    return new JobItem(_context, definition, state);
}
private ScheduledJobDefinition CreateFilePathDefinition()
{
    JobDefinition jobDefinition = new JobDefinition(typeof(ScheduledJobSourceAdapter), this.FilePath, this._name);
    jobDefinition.ModuleName = "PSScheduledJob";
    Dictionary<string, object> strs = this.CreateCommonParameters();

    if (this.FilePath.EndsWith(".ps1", StringComparison.OrdinalIgnoreCase))
    {
        Collection<PathInfo> resolvedPSPathFromPSPath = base.SessionState.Path.GetResolvedPSPathFromPSPath(this.FilePath);
        if (resolvedPSPathFromPSPath.Count == 1)
        {
            strs.Add("FilePath", resolvedPSPathFromPSPath[0].Path);
            JobInvocationInfo scheduledJobInvocationInfo = new ScheduledJobInvocationInfo(jobDefinition, strs);
            ScheduledJobDefinition scheduledJobDefinition = new ScheduledJobDefinition(scheduledJobInvocationInfo, this.Trigger, this.ScheduledJobOption, this._credential);
            return scheduledJobDefinition;
        }
        else
        {
            string str = StringUtil.Format(ScheduledJobErrorStrings.InvalidFilePath, new object[0]);
            Exception runtimeException = new RuntimeException(str);
            ErrorRecord errorRecord = new ErrorRecord(runtimeException, "InvalidFilePathParameterForRegisterScheduledJobDefinition", ErrorCategory.InvalidArgument, this);
            base.WriteError(errorRecord);
            return null;
        }
    }
    else
    {
        string str1 = StringUtil.Format(ScheduledJobErrorStrings.InvalidFilePathFile, new object[0]);
        Exception exception = new RuntimeException(str1);
        ErrorRecord errorRecord1 = new ErrorRecord(exception, "InvalidFilePathParameterForRegisterScheduledJobDefinition", ErrorCategory.InvalidArgument, this);
        base.WriteError(errorRecord1);
        return null;
    }
}
public virtual void testRedeployOverridesSetJobDefinitionPriority()
{
    // given a process instance with a job with default priority and a corresponding job definition
    runtimeService.createProcessInstanceByKey("jobPrioProcess").startBeforeActivity("task2").execute();
    Job job = managementService.createJobQuery().singleResult();
    JobDefinition jobDefinition = managementService.createJobDefinitionQuery().jobDefinitionId(job.JobDefinitionId).singleResult();

    // when I set the job definition's priority
    managementService.setOverridingJobPriorityForJobDefinition(jobDefinition.Id, 72, true);

    // then the job definition's priority value has changed
    JobDefinition updatedDefinition = managementService.createJobDefinitionQuery().jobDefinitionId(jobDefinition.Id).singleResult();
    assertEquals(72, (long)updatedDefinition.OverridingJobPriority);

    // the existing job's priority has changed as well
    Job updatedExistingJob = managementService.createJobQuery().singleResult();
    assertEquals(72, updatedExistingJob.Priority);

    // if the process definition is redeployed
    string secondDeploymentId = repositoryService.createDeployment().addClasspathResource("org/camunda/bpm/engine/test/api/mgmt/jobPrioProcess.bpmn20.xml").deploy().Id;

    // then a new job will have the priority from the BPMN xml
    ProcessInstance secondInstance = runtimeService.createProcessInstanceByKey("jobPrioProcess").startBeforeActivity("task2").execute();
    Job newJob = managementService.createJobQuery().processInstanceId(secondInstance.Id).singleResult();
    assertEquals(5, newJob.Priority);

    repositoryService.deleteDeployment(secondDeploymentId, true);
}
internal Activity GetActivity(JobDefinition definition, string xaml, string[] dependentWorkflows)
{
    bool flag = false;
    IEnumerable<string> emptyEnumerable;
    JobDefinition jobDefinition = definition;
    string empty = string.Empty;
    string[] strArrays = dependentWorkflows;

    if (strArrays != null)
    {
        emptyEnumerable = (IEnumerable<string>)strArrays;
    }
    else
    {
        emptyEnumerable = WorkflowJobDefinition.EmptyEnumerable;
    }

    WorkflowJobDefinition workflowJobDefinition = new WorkflowJobDefinition(jobDefinition, empty, emptyEnumerable, string.Empty, xaml);

    Activity activityFromCache = this.GetActivityFromCache(workflowJobDefinition, out flag);
    Activity activity = activityFromCache;
    if (activityFromCache == null)
    {
        activity = this.CompileActivityAndSaveInCache(workflowJobDefinition, null, null, out flag);
    }

    return activity;
}
} // end PopulateTree

/// <summary>
/// Unchecks the tree node corresponding to a JobDefinition.
/// </summary>
/// <param name="jobDefinition">The <see cref="JobDefinition"/> to look for</param>
private void UnCheck(JobDefinition jobDefinition)
{
    // The applications
    foreach (TreeNode applicationNode in this.jobsTreeView.Nodes[0].Nodes)
    {
        // The groups
        foreach (TreeNode groupNode in applicationNode.Nodes)
        {
            // The jobs
            foreach (TreeNode jobNode in groupNode.Nodes)
            {
                // If this job is the selected one
                if (((JobDefinition)jobNode.Tag) == jobDefinition)
                {
                    // Uncheck it
                    jobNode.Checked = false;
                } // end if
            } // end foreach
        } // end foreach
    } // end foreach
} // end UnCheck
/// <summary>
/// Provides a chance to persist the current job definition.
/// It is called when a job is saved through the Save As dialog.
/// </summary>
/// <param name="jd"></param>
public override void OnSave(JobDefinition jd)
{
    UserInterface myDocProcessorControls;

    // Getting your window control.
    myDocProcessorControls = (UserInterface)jd.ExtendedProperties[DocumentProcessorGuid];
    if (null != myDocProcessorControls)
    {
        // Collect the config data from the Windows control panel and store it
        // in a MyDocProcConfigData object.
        ConfigData myDocProcConfigData = new ConfigData();

        myDocProcConfigData.PWUser = myDocProcessorControls.PWUser;
        myDocProcConfigData.PWPassword = myDocProcessorControls.PWPassword;
        myDocProcConfigData.MDLAppName = myDocProcessorControls.MDLAppName;
        myDocProcConfigData.AppKeyin = myDocProcessorControls.AppKeyin;
        myDocProcConfigData.PWLoginCMD = myDocProcessorControls.PWLoginCMD;
        myDocProcConfigData.MSKeyin4 = myDocProcessorControls.MSKeyin4;
        myDocProcConfigData.MSKeyin5 = myDocProcessorControls.MSKeyin5;

        // Permanently store the configuration data collected in the
        // MyDocProcConfigData in the job definition.
        jd.SetCustomData(DocumentProcessorGuid, myDocProcConfigData.ToXmlElement());

        // jd.CheckOutFiles = true;
    }
}
public override void Execute(
    JobDefinition job,
    Action<int, int> notifyProgress,
    Action<string> updateStatus,
    IRepository repositoryService,
    IJobManager jobManager)
{
    updateStatus("Linking files...");
}
public override void Execute(
    JobDefinition job,
    Action<int, int> notifyProgress,
    Action<string> updateStatus,
    IRepository repositoryService,
    IJobManager jobManager)
{
    updateStatus("Creating job...");
    job.Id = jobManager.CreateJob(job.Name).Id;
}
public override void Execute(
    JobDefinition job,
    Action<int, int> notifyProgress,
    Action<string> updateStatus,
    IRepository repositoryService,
    IJobManager jobManager)
{
    updateStatus("Validating job's graph...");

    var index = 0;
    var count = job.Tasks.Count;

    if (!job.Tasks.All(task =>
        {
            notifyProgress(++index, count);
            return RecoursiveCheckGraph(task, ImmutableList<Guid>.Empty);
        }))
        throw new Exception("Job is invalid: graph contains cycles.");
}
public override void Execute(
    JobDefinition job,
    Action<int, int> notifyProgress,
    Action<string> updateStatus,
    IRepository repositoryService,
    IJobManager jobManager)
{
    var index = 0;
    var count = job.Files.Count;

    foreach (var file in job.Files)
    {
        index++;
        notifyProgress(index, count);

        if (file is CreateFileRequest)
            updateStatus(string.Format("Creating file: {0}...", file.Name));
        else
            updateStatus(string.Format("Uploading file: {0}...", file.Name));

        file.SaveFile(repositoryService);
    }
}
public override void Execute(
    JobDefinition job,
    Action<int, int> notifyProgress,
    Action<string> updateStatus,
    IRepository repositoryService,
    IJobManager jobManager)
{
    var tasks = (from t in job.Tasks
                 select new { Task = t, Dependencies = t.Dependencies.Count }).ToList();

    while (tasks.Count > 0)
    {
        // Pick a task whose dependencies have all been created already.
        var task = (from t in tasks
                    where t.Dependencies == 0
                    select t.Task).First();

        updateStatus(string.Format("Creating task \"{0}\"...", task.Name));

        // Drop the created task and decrement the dependency count of every task that depended on it.
        tasks = (from t in tasks
                 where t.Task != task
                 let dependencyList = t.Task.Dependencies
                 let dependencyCount = t.Dependencies
                 select new
                 {
                     Task = t.Task,
                     Dependencies = dependencyList.Contains(task) ? dependencyCount - 1 : dependencyCount
                 }).ToList();

        task.Id = jobManager.CreateTask(task.ToContract());

        notifyProgress(tasks.Count, job.Tasks.Count);
    }
}
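The stage above repeatedly selects a task with no remaining dependencies and decrements the counts of tasks that depended on it, which is a Kahn-style topological ordering. A minimal standalone sketch of the same idea on plain strings (all task names are hypothetical; assumes System.Collections.Generic and System.Linq):

// Each task maps to the set of tasks it still depends on.
var dependencies = new Dictionary<string, HashSet<string>>
{
    ["compile"] = new HashSet<string>(),
    ["test"]    = new HashSet<string> { "compile" },
    ["package"] = new HashSet<string> { "compile", "test" }
};

var order = new List<string>();
while (dependencies.Count > 0)
{
    // Pick any task whose dependencies have all been handled already (throws if the graph has a cycle).
    var ready = dependencies.First(p => p.Value.Count == 0).Key;
    order.Add(ready);

    dependencies.Remove(ready);
    foreach (var remaining in dependencies.Values)
        remaining.Remove(ready);   // the equivalent of decrementing the dependency count
}
// order is now "compile", "test", "package".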
public override void Execute(
    JobDefinition job,
    Action<int, int> notifyProgress,
    Action<string> updateStatus,
    IRepository repositoryService,
    IJobManager jobManager)
{
    updateStatus("Executing job...");

    JobExecutingView dialog = null;
    Owner.Synch(() => dialog = new JobExecutingView(string.Format("Executing job {0}...", job)));

    jobManager.OpenJob(job.Id);
    jobManager.StartJob();

    var isWorking = true;

    dialog.OnCancelled += (s, e) =>
    {
        lock (this)
        {
            if (!isWorking)
                return;
            isWorking = false;
        }

        jobManager.CancelJob();
    };

    Owner.Synch(() => dialog.Show());

    var state = default(JobState);
    while (isWorking)
    {
        state = jobManager.QueryJobState(job.Id);
        if (state == JobState.Completed || state == JobState.Failed)
        {
            lock (this)
            {
                isWorking = false;
            }

            break;
        }

        Thread.Sleep(100);
    }

    Owner.Synch(() => dialog.Close());

    if (state == JobState.Completed)
    {
        return;
        // MessageBox.Show(string.Format("Job {0} completed!", job));
    }

    if (state == JobState.Failed)
    {
        var errorReport = jobManager.GetErrorReport(job.Id);
        Owner.Synch(() => ErrorReportView.ShowReport(errorReport));

        throw new StageFailedException("Job failed!");
        //MessageBox.Show(string.Format("Job {0} failed!", job));
    }
}
/// <summary>
/// Converts a job definition result to a PowerShell job definition.
/// </summary>
/// <param name="jobDefinition">Job definition.</param>
/// <returns>PowerShell job definition.</returns>
internal static PSSchedulerJobDefinition ConvertJobDefinitionToPS(JobDefinition jobDefinition)
{
    if (jobDefinition == null)
    {
        throw new ArgumentNullException(paramName: "jobDefinition");
    }

    var psSchedulerJobDefinition = new PSSchedulerJobDefinition()
    {
        ResourceGroupName = jobDefinition.Id.Split('/')[4],
        JobCollectionName = jobDefinition.Name.Split('/')[0],
        JobName = jobDefinition.Name.Split('/')[1],
        Status = jobDefinition.Properties.State.ToString(),
        StartTime = jobDefinition.Properties.StartTime,
        Recurrence = Converter.ConvertRecurrenceToString(jobDefinition.Properties.Recurrence),
        EndSchedule = Converter.GetEndSchedule(jobDefinition.Properties.Recurrence),
    };

    if (jobDefinition.Properties.Status != null)
    {
        psSchedulerJobDefinition.Lastrun = jobDefinition.Properties.Status.LastExecutionTime;
        psSchedulerJobDefinition.Nextrun = jobDefinition.Properties.Status.NextExecutionTime;
        psSchedulerJobDefinition.Failures = jobDefinition.Properties.Status.FailureCount;
        psSchedulerJobDefinition.Faults = jobDefinition.Properties.Status.FaultedCount;
        psSchedulerJobDefinition.Executions = jobDefinition.Properties.Status.ExecutionCount;
    }

    psSchedulerJobDefinition.JobAction = Converter.GetSchedulerJobActionDetails(jobDefinition.Properties.Action);
    psSchedulerJobDefinition.JobErrorAction = Converter.GetSchedulerJobErrorActionDetails(jobDefinition.Properties.Action.ErrorAction);

    return psSchedulerJobDefinition;
}
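The converter above parses names positionally, which relies on the shape of the identifiers it receives. As a sketch of how the segments line up (the literal values are made up; the indices mirror the Split('/') calls above):

// Hypothetical ARM resource ID: after Split('/'), index 4 is the resource group name.
var id = "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myRg/providers/Microsoft.Scheduler/jobCollections/myCollection/jobs/myJob";
var resourceGroupName = id.Split('/')[4];     // "myRg"

// The job definition's Name is "{collection}/{job}".
var name = "myCollection/myJob";
var jobCollectionName = name.Split('/')[0];   // "myCollection"
var jobName = name.Split('/')[1];             // "myJob"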
internal Job(JobDefinition jobDefinition)
{
    _jobDefinition = jobDefinition;
}
private static void ProcessJob(JobDefinition jobDefinition)
{
    IBatchElement job = new Job(jobDefinition);
    job.Execute();
}
/// <summary>
/// Adds or updates the definition for a job.
/// </summary>
/// <param name="definitions">The job definition to be added/updated</param>
public virtual void SaveDefinition(JobDefinition definitions)
{
    if (definitions.Id == 0)
    {
        logger.Trace("Adding job definitions for {0}", definitions.Name);
        _database.Insert(definitions);
    }
    else
    {
        logger.Trace("Updating job definitions for {0}", definitions.Name);
        _database.Update(definitions);
    }
}
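A usage sketch for SaveDefinition, assuming the JobDefinition properties used by Initialize() earlier in this listing (Id, Name, TypeName, Interval, Enable, LastExecution); an Id of 0 takes the insert branch, any other value the update branch:

// Hypothetical caller; "provider" is the class exposing SaveDefinition, and RssSyncJob is a placeholder job type.
var definition = new JobDefinition
{
    Id = 0,                                    // 0 => Insert, non-zero => Update
    Name = "RSS Sync",
    TypeName = typeof(RssSyncJob).ToString(),
    Interval = 15,                             // minutes
    Enable = true,
    LastExecution = DateTime.Now
};

provider.SaveDefinition(definition);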