/// <summary>
/// Deletes the stored output of the identified job by delegating to the
/// worker's own <c>DeleteOutputAsync</c>. Silently no-ops when the worker
/// configuration is incomplete, the job cannot be found, or the job has no
/// input payload.
/// </summary>
/// <param name="jobId">Raw job id; combined with the configured service and area.</param>
/// <param name="cancel">Token flowed to the repository and worker calls.</param>
public async Task DeleteOutputAsync(string jobId, CancellationToken cancel)
{
    // Both configuration pieces are required to build a full job identifier.
    if (_workerConfiguration.Service is null || _workerConfiguration.Area is null)
    {
        return;
    }

    var identifier = new JobIdentifier(jobId, _workerConfiguration.Service, _workerConfiguration.Area);

    var job = await _jobRepository.GetAsync(identifier, cancel);
    if (job is null)
    {
        return;
    }

    // The worker is resolved from a scope tied to this particular job.
    using var scope = _scopeFactory.CreateScope(job);
    var worker = scope.GetWorker<TWorker>();

    // Without an input there is nothing the worker could act on.
    if (job.Input is null)
    {
        return;
    }

    var input = _inputSerializer.Deserialize(job.Input);

    // Output may legitimately be absent (e.g. the job never produced one).
    TOutput? output = job.Output != null
        ? _outputSerializer.Deserialize(job.Output)
        : default;

    await worker.DeleteOutputAsync(input, output, cancel);
}
/// <summary>
/// Subscribes to the workshop event stream and updates the assigned-work
/// display with "Assigned to {worker}" whenever the given job is assigned.
/// </summary>
/// <param name="jobId">Job whose assignment changes should be displayed.</param>
/// <param name="workshopEvents">Stream of workshop events to observe.</param>
/// <param name="displayAssignedWork">Display that receives the assignment text.</param>
public UpdateAssignedWorkDisplayWhenJobAssigned(JobIdentifier jobId, IObservable<WorkshopEvent> workshopEvents, IDisplayAssignedWork displayAssignedWork)
{
    workshopEvents
        .OfType<WorkshopEvent, WorkshopEvent.JobAssigned>()
        .Where(jobAssigned => jobAssigned.JobId == jobId)
        // Interpolation formats WorkerId itself; the explicit ToString()
        // call was redundant (and would NRE on a null WorkerId).
        .Select(jobAssigned => $"Assigned to {jobAssigned.WorkerId}")
        .Subscribe(assignmentText => displayAssignedWork.AssignedWork = assignmentText);
}
public async Task <TOutput?> GetJobOutputAsync(string id, CancellationToken cancel) { JobIdentifier identifier = CreateJobIdentifier(id); var job = await _repository.GetAsync(identifier, cancel); if (job is null || !job.Header.State.IsSuccess()) { return(default);
/// <summary>
/// Publishes work progress every frame for <paramref name="delaySeconds"/>
/// seconds, then completes the job when the timed sequence finishes.
/// </summary>
/// <param name="jobId">Job to complete once the delay has elapsed.</param>
/// <param name="startTime">Value of Time.time at which the work started.</param>
/// <param name="delaySeconds">Duration of the simulated work, in seconds.</param>
private void CompleteWorkOnJobAfterDelay(JobIdentifier jobId, float startTime, float delaySeconds) =>
    Observable.EveryUpdate()
        .Take(TimeSpan.FromSeconds(delaySeconds))
        .Select(_ => (Time.time - startTime) / delaySeconds)
        // Clamp to [0, 1]. The previous "% 1f" after Clamp01 wrapped a
        // finished job's progress from 1 back to 0, so the display could
        // never show 100%; for every other clamped value it was a no-op.
        .Select(progress => Mathf.Clamp01(progress))
        .Subscribe(
            _displayProgress.ShowProgress,
            () => CompleteWorkOnJob(jobId)
        );
/// <summary>
/// QuartzId must be composed as "&lt;Group&gt;.&lt;JobName&gt;".
/// </summary>
public void QuartzId_IsConstructedCorrectly()
{
    // Arrange
    var identifier = new JobIdentifier();
    identifier.Group = "SOD";
    identifier.JobName = "VIPJob";

    // Assert
    Assert.AreEqual("SOD.VIPJob", identifier.QuartzId);
}
/// <summary>
/// Looks up a job by its identifier in the in-memory store.
/// </summary>
/// <param name="id">Identifier whose <c>Id</c> keys the store.</param>
/// <param name="cancel">Unused; lookup is synchronous and in-memory.</param>
/// <returns>The job, or null when no job with that id exists.</returns>
public Task<Job?> GetAsync(JobIdentifier id, CancellationToken cancel)
{
    // TryGetValue replaces the ContainsKey + indexer double lookup, which
    // was also a check-then-act race: the store is mutated concurrently
    // (see DeleteAsync's TryRemove), so the key could vanish between the
    // two calls. On a miss, job is left at its default (null).
    _jobs.TryGetValue(id.Id, out Job? job);
    return Task.FromResult(job);
}
/// <summary>
/// Round-trips a JobIdentifier through JSON and verifies value equality.
/// </summary>
public void CanSerializeAndDeserializeCorrectly()
{
    // Arrange
    var jobId1 = new JobIdentifier();
    jobId1.Group = "SOD";
    jobId1.JobName = "VIPJob";

    // Act
    var serializedJobId = JsonConvert.SerializeObject(jobId1);
    var deserializedJobId = JsonConvert.DeserializeObject<JobIdentifier>(serializedJobId);

    // Assert — failure message typo fixed: "JobIdentifers" -> "JobIdentifiers".
    Assert.AreEqual(jobId1, deserializedJobId, "JobIdentifiers are not equivalent");
}
/// <summary>
/// Watches the workshop event stream and resets the assigned-work display to
/// "Unassigned" whenever the given job is first added or loses its worker.
/// </summary>
/// <param name="jobId">Job whose assignment state should be tracked.</param>
/// <param name="workshopEvents">Stream of workshop events to observe.</param>
/// <param name="displayAssignedWork">Display that receives the text.</param>
public UpdateAssignedWorkDisplayWhenJobAddedOrUnassigned(JobIdentifier jobId, IObservable<WorkshopEvent> workshopEvents, IDisplayAssignedWork displayAssignedWork)
{
    // A freshly added job starts unassigned, so both event kinds map to the
    // same display state.
    var idsFromAdds = workshopEvents
        .OfType<WorkshopEvent, WorkshopEvent.JobAdded>()
        .Select(e => e.Job.Id);

    var idsFromUnassigns = workshopEvents
        .OfType<WorkshopEvent, WorkshopEvent.JobUnassigned>()
        .Select(e => e.JobId);

    idsFromAdds
        .Merge(idsFromUnassigns)
        .Where(eventJobId => eventJobId == jobId)
        .Subscribe(_ => displayAssignedWork.AssignedWork = "Unassigned");
}
/// <summary>
/// Keeps the completion display for a job in sync: seeds it from the status
/// carried by the JobAdded event and refreshes it on every status update,
/// converting status to a percentage via GetPercentageComplete.
/// </summary>
/// <param name="jobId">Job whose completion should be displayed.</param>
/// <param name="workshopEvents">Stream of workshop events to observe.</param>
/// <param name="displayJobCompletion">Display receiving the percentage.</param>
public UpdateJobCompletionWhenJobAddedOrStatusUpdated(JobIdentifier jobId, IObservable<WorkshopEvent> workshopEvents, IDisplayJobCompletion displayJobCompletion)
{
    // Initial status comes with the add event…
    var initialStatus = workshopEvents
        .OfType<WorkshopEvent, WorkshopEvent.JobAdded>()
        .Where(e => e.Job.Id == jobId)
        .Select(e => e.Job.Status);

    // …and later changes arrive as dedicated status-update events.
    var subsequentStatus = workshopEvents
        .OfType<WorkshopEvent, WorkshopEvent.JobStatusUpdated>()
        .Where(e => e.JobId == jobId)
        .Select(e => e.NewStatus);

    initialStatus
        .Merge(subsequentStatus)
        .Select(GetPercentageComplete)
        .Subscribe(percent => displayJobCompletion.PercentComplete = percent);
}
/// <summary>
/// Dropdown entry representing a single job; the visible text is the job
/// id's string form, and the id itself is kept as a Maybe for lookup.
/// </summary>
/// <param name="jobId">Job this option stands for.</param>
public DropdownOption(JobIdentifier jobId)
    : base(jobId.ToString())
{
    JobId = jobId.ToMaybe();
}
/// <summary>
/// Command requesting that work be started on the given job.
/// </summary>
/// <param name="jobId">Job to start working on.</param>
public StartWork(JobIdentifier jobId) => JobId = jobId;
/// <summary>
/// Rebuilds Job2 objects from persisted workflow instances. Skips any
/// instance whose metadata is incomplete. When <paramref name="returnParents"/>
/// is true, child PSWorkflowJobs are attached to (possibly newly created)
/// ContainerParentJob instances and only the parents are returned; otherwise
/// the child jobs themselves are returned.
/// </summary>
/// <param name="workflowJobs">Candidate jobs; each is expected to be a PSWorkflowJob.</param>
/// <param name="returnParents">True to group children under container parent jobs.</param>
/// <returns>The rehydrated jobs (parents or children, per the flag).</returns>
// NOTE(review): this block appears to be decompiler output (flattened
// locals such as obj/str/str1/guid1, reused out-variables). Left byte-for-
// byte intact; only comments added.
private IEnumerable <Job2> CreateJobsFromWorkflows(IEnumerable <Job2> workflowJobs, bool returnParents)
{
    // "obj" is reused as the out-target for every metadata TryGetValue below.
    object obj = null;
    string str = null;
    string str1 = null;
    Guid guid;
    bool item;
    DynamicActivity workflow;
    bool flag;
    ContainerParentJob containerParentJob;
    // Parent container jobs keyed by the parent workflow instance id.
    Dictionary <Guid, Job2> guids = new Dictionary <Guid, Job2>();
    List <Job2> job2s = new List <Job2>();
    if (workflowJobs != null)
    {
        foreach (Job2 workflowJob in workflowJobs)
        {
            PSWorkflowJob pSWorkflowJob = workflowJob as PSWorkflowJob;
            PSWorkflowInstance pSWorkflowInstance = pSWorkflowJob.PSWorkflowInstance;
            // Skip instances whose state/metadata cannot be fully resolved.
            if (!pSWorkflowInstance.JobStateRetrieved || pSWorkflowInstance.PSWorkflowContext.JobMetadata == null || pSWorkflowInstance.PSWorkflowContext.JobMetadata.Count == 0 || !WorkflowJobSourceAdapter.GetJobInfoFromMetadata(pSWorkflowInstance, out str1, out str, out guid) || !pSWorkflowInstance.PSWorkflowContext.JobMetadata.TryGetValue("ParentInstanceId", out obj))
            {
                continue;
            }
            Guid guid1 = (Guid)obj;
            // Create the parent container first (before the child) so its id
            // is allocated ahead of the children's.
            if (returnParents && !guids.ContainsKey(guid1))
            {
                if (!pSWorkflowInstance.PSWorkflowContext.JobMetadata.TryGetValue("ParentName", out obj))
                {
                    continue;
                }
                string str2 = (string)obj;
                if (!pSWorkflowInstance.PSWorkflowContext.JobMetadata.TryGetValue("ParentCommand", out obj))
                {
                    continue;
                }
                string str3 = (string)obj;
                // Reuse a previously allocated job id for this parent when available.
                JobIdentifier jobIdentifier = base.RetrieveJobIdForReuse(guid1);
                if (jobIdentifier != null)
                {
                    containerParentJob = new ContainerParentJob(str3, str2, jobIdentifier, "PSWorkflowJob");
                }
                else
                {
                    containerParentJob = new ContainerParentJob(str3, str2, guid1, "PSWorkflowJob");
                }
                ContainerParentJob containerParentJob1 = containerParentJob;
                // Refresh the parent session id in the metadata (used for filtering).
                if (pSWorkflowInstance.PSWorkflowContext.JobMetadata.ContainsKey("ParentSessionId"))
                {
                    pSWorkflowInstance.PSWorkflowContext.JobMetadata["ParentSessionId"] = containerParentJob1.Id;
                }
                guids.Add(guid1, containerParentJob1);
            }
            // Refresh the child's session id and process id in the metadata.
            if (pSWorkflowInstance.PSWorkflowContext.JobMetadata.ContainsKey("Id"))
            {
                pSWorkflowInstance.PSWorkflowContext.JobMetadata["Id"] = workflowJob.Id;
            }
            if (pSWorkflowInstance.PSWorkflowContext.JobMetadata.ContainsKey("ProcessId"))
            {
                pSWorkflowInstance.PSWorkflowContext.JobMetadata["ProcessId"] = Process.GetCurrentProcess().Id;
            }
            // Rebuild the job's start parameters from the persisted workflow
            // parameters plus the common workflow parameters.
            workflowJob.StartParameters = new List <CommandParameterCollection>();
            CommandParameterCollection commandParameterCollection = new CommandParameterCollection();
            WorkflowJobSourceAdapter.AddStartParametersFromCollection(pSWorkflowInstance.PSWorkflowContext.WorkflowParameters, commandParameterCollection);
            WorkflowJobSourceAdapter.AddStartParametersFromCollection(pSWorkflowInstance.PSWorkflowContext.PSWorkflowCommonParameters, commandParameterCollection);
            // Determine whether the workflow itself takes PSPrivateMetadata;
            // prefer the stored flag, otherwise inspect the activity definition.
            if (!pSWorkflowInstance.PSWorkflowContext.JobMetadata.ContainsKey("WorkflowTakesPrivateMetadata"))
            {
                if (pSWorkflowInstance.PSWorkflowDefinition != null)
                {
                    workflow = pSWorkflowInstance.PSWorkflowDefinition.Workflow as DynamicActivity;
                }
                else
                {
                    workflow = null;
                }
                DynamicActivity dynamicActivity = workflow;
                if (dynamicActivity == null)
                {
                    flag = false;
                }
                else
                {
                    flag = dynamicActivity.Properties.Contains("PSPrivateMetadata");
                }
                item = flag;
            }
            else
            {
                item = (bool)pSWorkflowInstance.PSWorkflowContext.JobMetadata["WorkflowTakesPrivateMetadata"];
            }
            // Private metadata not consumed by the workflow is re-attached as
            // an explicit PSPrivateMetadata start parameter.
            if (pSWorkflowInstance.PSWorkflowContext.PrivateMetadata != null && pSWorkflowInstance.PSWorkflowContext.PrivateMetadata.Count > 0 && !item)
            {
                Hashtable hashtables = new Hashtable();
                foreach (KeyValuePair <string, object> privateMetadatum in pSWorkflowInstance.PSWorkflowContext.PrivateMetadata)
                {
                    hashtables.Add(privateMetadatum.Key, privateMetadatum.Value);
                }
                commandParameterCollection.Add(new CommandParameter("PSPrivateMetadata", hashtables));
            }
            workflowJob.StartParameters.Add(commandParameterCollection);
            // Either return the child directly or hang it off its parent.
            if (!returnParents)
            {
                job2s.Add(workflowJob);
            }
            else
            {
                ((ContainerParentJob)guids[guid1]).AddChildJob(workflowJob);
            }
            if (pSWorkflowJob.WorkflowInstanceLoaded)
            {
                continue;
            }
            // Restoring sets the child's state; AddChildJob propagates it to
            // the parent automatically.
            pSWorkflowJob.RestoreFromWorkflowInstance(pSWorkflowInstance);
        }
        if (returnParents)
        {
            foreach (Job2 value in guids.Values)
            {
                // Telemetry hook for each reconstructed parent job.
                PSSQMAPI.InitiateWorkflowStateDataTracking(value);
            }
            job2s.AddRange(guids.Values);
        }
        return(job2s);
    }
    else
    {
        return(job2s);
    }
}
/// <summary>
/// Initialises the panel for the given job by writing the job's string form
/// into the name label.
/// </summary>
/// <param name="job">Job this panel represents.</param>
public void Setup(JobIdentifier job)
{
    _jobNameText.text = job.ToString();
}
/// <summary>
/// Creates a panel for a newly added job and registers it under the job's id
/// (replacing any existing panel for that id).
/// </summary>
/// <param name="jobId">Identifier of the job that was added.</param>
private void OnJobAdded(JobIdentifier jobId)
{
    var panel = _jobPanelFactory.Create(jobId);
    _jobPanels[jobId] = panel;
}
/// <summary>
/// Removes the job with the given id from the in-memory store. Removing an
/// unknown id is a harmless no-op.
/// </summary>
/// <param name="id">Identifier whose <c>Id</c> keys the store.</param>
/// <param name="cancel">Unused; removal is synchronous and in-memory.</param>
public Task DeleteAsync(JobIdentifier id, CancellationToken cancel)
{
    // The removed value is irrelevant; absence is not treated as an error.
    _jobs.TryRemove(id.Id, out _);
    return Task.CompletedTask;
}
/// <summary>
/// Starting work on a job the workshop has never seen must fail with
/// WorkshopError.UnknownJob.
/// </summary>
public void StartSomeJob_FailsWithUnknownJob(JobIdentifier someJob)
{
    // Act
    var result = Act_StartWork(someJob);

    // Assert
    result.Assert_FailsWith(WorkshopError.UnknownJob);
}
/// <summary>
/// Queues a command completing one unit of work on the given job.
/// </summary>
/// <param name="jobId">Job the completed work applies to.</param>
private void CompleteWorkOnJob(JobIdentifier jobId)
{
    var command = new WorkshopCommand.CompleteWork(jobId, QuantityOfWork.Unit);
    _workshopCommands.Enqueue(command);
}
/// <summary>
/// Command completing a given quantity of work on a job.
/// </summary>
/// <param name="jobId">Job the work applies to.</param>
/// <param name="quantity">How much work was completed.</param>
public CompleteWork(JobIdentifier jobId, QuantityOfWork quantity)
{
    Quantity = quantity;
    JobId = jobId;
}
/// <summary>
/// TimeSpan overload: forwards to the float-based overload, using the
/// current frame time as the start of the work.
/// </summary>
/// <param name="jobId">Job to complete once the delay has elapsed.</param>
/// <param name="delay">How long the simulated work should take.</param>
private void CompleteWorkOnJobAfterDelay(JobIdentifier jobId, TimeSpan delay)
{
    var delaySeconds = (float)delay.TotalSeconds;
    CompleteWorkOnJobAfterDelay(jobId, Time.time, delaySeconds);
}
/// <summary>
/// Queues a command that starts work on the given job.
/// </summary>
/// <param name="jobId">Job to start working on.</param>
private void StartWorkOnJob(JobIdentifier jobId)
{
    var command = new WorkshopCommand.StartWork(jobId);
    _workshopCommands.Enqueue(command);
}
/// <summary>
/// Starts work on the job and schedules its completion after the configured
/// delay (_delay).
/// </summary>
/// <param name="jobId">Job to work on.</param>
private void WorkOnJob(JobIdentifier jobId)
{
    // Start first so the scheduled completion measures from "now".
    StartWorkOnJob(jobId);
    CompleteWorkOnJobAfterDelay(jobId, _delay);
}
/// <summary>
/// Adds a dropdown entry for the given job by wrapping it in a
/// DropdownOption and delegating to the option-based overload.
/// </summary>
/// <param name="jobId">Job to list in the dropdown.</param>
public void AddJobOption(JobIdentifier jobId)
{
    var option = new DropdownOption(jobId);
    AddJobOption(option);
}
/// <summary>
/// Command assigning a worker to a job.
/// </summary>
/// <param name="jobId">Job being assigned.</param>
/// <param name="workerId">Worker receiving the job.</param>
public AssignJob(JobIdentifier jobId, WorkerIdentifier workerId)
{
    WorkerId = workerId;
    JobId = jobId;
}
/// <summary>
/// Test helper: drives the system under test with an AssignJob command and
/// returns the resulting error, if any.
/// </summary>
/// <param name="workerId">Worker to assign.</param>
/// <param name="jobId">Job to assign the worker to.</param>
protected Maybe<WorkshopError> Act_AssignJob(WorkerIdentifier workerId, JobIdentifier jobId)
{
    var command = new WorkshopCommand.AssignJob(jobId, workerId);
    return _sut.HandleCommand(command);
}
/// <summary>
/// Rehydrates Job2 objects from persisted workflow instances. Instances with
/// incomplete metadata are skipped. When <paramref name="returnParents"/> is
/// true, each PSWorkflowJob is attached as a child of a (reused or newly
/// created) ContainerParentJob and only the parents are returned; otherwise
/// the child jobs themselves are returned.
/// </summary>
/// <param name="workflowJobs">Candidate jobs; each must be a PSWorkflowJob.</param>
/// <param name="returnParents">True to group children under container parent jobs.</param>
/// <returns>The rehydrated jobs (parents or children, per the flag).</returns>
private IEnumerable <Job2> CreateJobsFromWorkflows(IEnumerable <Job2> workflowJobs, bool returnParents)
{
    // Jobs in this collection correspond to the ContainerParentJob objects. PSWorkflowJob objects
    // are children of these.
    var reconstructedParentJobs = new Dictionary <Guid, Job2>();
    var jobs = new List <Job2>();
    if (workflowJobs == null)
    {
        return(jobs);
    }
    // If a workflow instance has incomplete metadata, we do not create the job for it.
    foreach (var job in workflowJobs)
    {
        var wfjob = job as PSWorkflowJob;
        Debug.Assert(wfjob != null, "Job supplied must be of type PSWorkflowJob");
        PSWorkflowInstance instance = wfjob.PSWorkflowInstance;
        Dbg.Assert(instance != null, "PSWorkflowInstance should be reconstructed before attempting to rehydrate job");
        // Skip instances whose state or metadata could not be retrieved.
        if (!instance.JobStateRetrieved || instance.PSWorkflowContext.JobMetadata == null || instance.PSWorkflowContext.JobMetadata.Count == 0)
        {
            continue;
        }
        object data;
        string name, command;
        Guid instanceId;
        if (!GetJobInfoFromMetadata(instance, out command, out name, out instanceId))
        {
            continue;
        }
        if (!instance.PSWorkflowContext.JobMetadata.TryGetValue(Constants.JobMetadataParentInstanceId, out data))
        {
            continue;
        }
        var parentInstanceId = (Guid)data;
        // If the parent job is needed, find or create it now so that the ID is sequentially lower.
        if (returnParents && !reconstructedParentJobs.ContainsKey(parentInstanceId))
        {
            if (!instance.PSWorkflowContext.JobMetadata.TryGetValue(Constants.JobMetadataParentName, out data))
            {
                continue;
            }
            var parentName = (string)data;
            if (!instance.PSWorkflowContext.JobMetadata.TryGetValue(Constants.JobMetadataParentCommand, out data))
            {
                continue;
            }
            var parentCommand = (string)data;
            // Reuse a previously allocated job id for this parent when available.
            JobIdentifier parentId = RetrieveJobIdForReuse(parentInstanceId);
            ContainerParentJob parentJob = parentId != null ?
                new ContainerParentJob(parentCommand, parentName, parentId, AdapterTypeName) :
                new ContainerParentJob(parentCommand, parentName, parentInstanceId, AdapterTypeName);
            // update job metadata with new parent session Id--needed for filtering.
            // The pid in the metadata has already been updated at this point.
            Dbg.Assert(
                instance.PSWorkflowContext.JobMetadata.ContainsKey(Constants.JobMetadataParentSessionId),
                "Job Metadata for instance incomplete.");
            if (instance.PSWorkflowContext.JobMetadata.ContainsKey(Constants.JobMetadataParentSessionId))
            {
                instance.PSWorkflowContext.JobMetadata[Constants.JobMetadataParentSessionId] = parentJob.Id;
            }
            reconstructedParentJobs.Add(parentInstanceId, parentJob);
        }
        // update job metadata with new session Id--needed for filtering.
        Dbg.Assert(instance.PSWorkflowContext.JobMetadata.ContainsKey(Constants.JobMetadataSessionId), "Job Metadata for instance incomplete.");
        Dbg.Assert(instance.PSWorkflowContext.JobMetadata.ContainsKey(Constants.JobMetadataPid), "Job Metadata for instance incomplete.");
        if (instance.PSWorkflowContext.JobMetadata.ContainsKey(Constants.JobMetadataSessionId))
        {
            instance.PSWorkflowContext.JobMetadata[Constants.JobMetadataSessionId] = job.Id;
        }
        if (instance.PSWorkflowContext.JobMetadata.ContainsKey(Constants.JobMetadataPid))
        {
            instance.PSWorkflowContext.JobMetadata[Constants.JobMetadataPid] = Process.GetCurrentProcess().Id;
        }
        // Rebuild the job's start parameters from the persisted workflow
        // parameters and the common workflow parameters.
        job.StartParameters = new List <CommandParameterCollection>();
        CommandParameterCollection commandParameterCollection = new CommandParameterCollection();
        AddStartParametersFromCollection(instance.PSWorkflowContext.WorkflowParameters, commandParameterCollection);
        AddStartParametersFromCollection(instance.PSWorkflowContext.PSWorkflowCommonParameters, commandParameterCollection);
        // Determine whether the workflow itself accepts PSPrivateMetadata;
        // prefer the stored flag, otherwise inspect the activity definition.
        bool takesPSPrivateMetadata;
        if (instance.PSWorkflowContext.JobMetadata.ContainsKey(Constants.WorkflowTakesPrivateMetadata))
        {
            takesPSPrivateMetadata = (bool)instance.PSWorkflowContext.JobMetadata[Constants.WorkflowTakesPrivateMetadata];
        }
        else
        {
            DynamicActivity da = instance.PSWorkflowDefinition != null ? instance.PSWorkflowDefinition.Workflow as DynamicActivity : null;
            takesPSPrivateMetadata = da != null && da.Properties.Contains(Constants.PrivateMetadata);
        }
        // If there is Private Metadata and it is not included in the "Input" collection, add it now.
        if (instance.PSWorkflowContext.PrivateMetadata != null && instance.PSWorkflowContext.PrivateMetadata.Count > 0 && !takesPSPrivateMetadata)
        {
            Hashtable privateMetadata = new Hashtable();
            foreach (var pair in instance.PSWorkflowContext.PrivateMetadata)
            {
                privateMetadata.Add(pair.Key, pair.Value);
            }
            commandParameterCollection.Add(new CommandParameter(Constants.PrivateMetadata, privateMetadata));
        }
        job.StartParameters.Add(commandParameterCollection);
        // Either hang the child off its parent or return it directly.
        if (returnParents)
        {
            ((ContainerParentJob)reconstructedParentJobs[parentInstanceId]).AddChildJob(job);
        }
        else
        {
            jobs.Add(job);
        }
        if (!wfjob.WorkflowInstanceLoaded)
        {
            // RestoreFromWorkflowInstance sets the job state. Because we've used AddChildJob, the parent's state will be
            // updated automatically.
            wfjob.RestoreFromWorkflowInstance(instance);
        }
    }
    if (returnParents)
    {
        jobs.AddRange(reconstructedParentJobs.Values);
    }
    return(jobs);
}
/// <summary>
/// Test helper: drives the system under test with a StartWork command and
/// returns the resulting error, if any.
/// </summary>
/// <param name="jobId">Job to start working on.</param>
protected Maybe<WorkshopError> Act_StartWork(JobIdentifier jobId)
{
    var command = new WorkshopCommand.StartWork(jobId);
    return _sut.HandleCommand(command);
}
/// <summary>
/// Completing work on a job the workshop has never seen must fail with
/// WorkshopError.UnknownJob, regardless of the quantity.
/// </summary>
public void CompleteWorkOnSomeJobWithSomeQuantity_FailsWithUnknownJob(JobIdentifier someJob, QuantityOfWork someQuantity)
{
    // Act
    var result = Act_CompleteWork(someJob, someQuantity);

    // Assert
    result.Assert_FailsWith(WorkshopError.UnknownJob);
}
/// <summary>
/// Test helper: drives the system under test with a CompleteWork command and
/// returns the resulting error, if any.
/// </summary>
/// <param name="jobId">Job the work applies to.</param>
/// <param name="quantity">Quantity of work to complete.</param>
protected Maybe<WorkshopError> Act_CompleteWork(JobIdentifier jobId, QuantityOfWork quantity)
{
    var command = new WorkshopCommand.CompleteWork(jobId, quantity);
    return _sut.HandleCommand(command);
}